diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index f6b28bfeeb..9579c91422 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -50,6 +50,8 @@ jobs: project test.backend -race -count=40 --x-env=ci done + echo "Testing files with !race directive" + project test.backend -count=40 --x-env=ci - name: Test frontend run: | diff --git a/.vscode/launch.json b/.vscode/launch.json index a94a783344..786afbd9a7 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -18,7 +18,9 @@ "mode": "auto", "cwd": ".", "program": "cmd/rest-server/main.go", - "args": ["-env=.env.dev"] + "args": [ + "-env=.env.dev" + ] }, { "name": "Launch CLI", @@ -27,7 +29,9 @@ "mode": "auto", "cwd": ".", "program": "cmd/cli/main.go", - "args": ["-env=.env.dev"] + "args": [ + "-env=.env.dev" + ] } ] } diff --git a/bin/.helpers.sh b/bin/.helpers.sh index dd01ef18e2..975494f4d1 100644 --- a/bin/.helpers.sh +++ b/bin/.helpers.sh @@ -48,6 +48,7 @@ ensure_pwd_is_top_level() { # Prompt the user for confirmation. # Most likely will want to always run as `with_tty confirm ...` confirm() { + test -n "$CI" && return test -n "$NO_CONFIRMATION" && return local prompt="$1" diff --git a/bin/.project.dependencies.sh b/bin/.project.dependencies.sh index ab655ef78e..0b7b33a852 100644 --- a/bin/.project.dependencies.sh +++ b/bin/.project.dependencies.sh @@ -256,10 +256,6 @@ install.bin.mkcert() { wget https://github.com/FiloSottile/mkcert/releases/download/v"$VERSION"/mkcert-v"$VERSION"-linux-amd64 -O mkcert chmod +x mkcert mv mkcert ./bin/tools/ - cd "$CERTIFICATES_DIR" || exit - echo "Setting up local certificates" - mkcert --cert-file localhost.pem --key-file localhost-key.pem "localhost" "*.e2e.localhost" "*.local.localhost" "*.dev.localhost" "*.ci.localhost" "*.prod.localhost" "127.0.0.1" "::1" "host.docker.internal" - cd .. 
mkcert -install } 2>&4 | xlog >&3; } 4>&1 | xerr >&3; } 3>&1 } diff --git a/bin/project b/bin/project index 3b951585a3..cab6b57e5e 100755 --- a/bin/project +++ b/bin/project @@ -132,6 +132,7 @@ x.install-tools() { go install github.com/danicc097/xo@v1.0.0 & go install github.com/mikefarah/yq/v4@v4.27.2 & go install github.com/hexdigest/gowrap/cmd/gowrap@latest & + go install golang.org/x/tools/cmd/stringer@latest & go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.1 & go install github.com/planetscale/vtprotobuf/cmd/protoc-gen-go-vtproto@v0.2.0 & @@ -168,7 +169,7 @@ x.gen.pregen() { missing_opids="$(yq e "(.paths[][] | select(has(\"operationId\") | not) | path) | join(\".\")" $SPEC)" [[ -n "$missing_opids" ]] && err "Paths missing operationId: $(printf "\t\n%s" ${missing_opids[*]})" - # spec must always be valid before generating. Schemas to generate must already exist with the appropiate vendor extension + # spec must always be valid before generating. Schemas to generate must already exist with the appropriate vendor extension pregen -env=".env.$env" --validate-spec-only sync_db_enums_with_spec @@ -882,7 +883,7 @@ x.test.backend.watch() { while read -r event_time event_file 2>/dev/null || sleep $latency; do clear - { APP_ENV="$env" go test -tags skipxo -count=1 "$@" ./...; } && echo "${GREEN}✓ All tests passing${OFF}" + { APP_ENV="$env" go test -tags skipxo "$@" ./...; } && echo "${GREEN}✓ All tests passing${OFF}" done done } @@ -993,7 +994,7 @@ x.recreate-shared-services() { x.release() { { { { search_stopship "STOPSHIP" & - # go mod verify & # TODO go.work issues solved in go 1.21 (https://github.com/golang/go/issues/54372) + GOWORK=off go mod verify & # (https://github.com/golang/go/issues/54372) wait_without_error } 2>&4 | xlog >&3; } 4>&1 | xerr >&3; } 3>&1 @@ -1007,13 +1008,36 @@ x.dev-utils.api-keys() { # Setups a traefik container with predefined configuration in `install-dir`. 
# Args: install-dir x.setup.traefik() { - git clone --depth=1 https://github.com/danicc097/traefik-bootstrap.git "$1" - docker network create traefik-net || true - mkdir -p "$1"/traefik/certificates - cp $CERTIFICATES_DIR/* "$1"/traefik/certificates - cd "$1" || exit - ./compose-up - cd - >/dev/null || exit + { { { + test -z "$1" && err "installation directory is required" + + x.setup.mkcert + + git clone --depth=1 https://github.com/danicc097/traefik-bootstrap.git "$1" + docker network create traefik-net || true + mkdir -p "$1"/traefik/certificates + cp $CERTIFICATES_DIR/* "$1"/traefik/certificates + cd "$1" || exit + cp traefik/dynamic_conf.yaml.example traefik/dynamic_conf.yaml + echo "Adding $PWD/certificates/" + yq e ".tls.certificates += [{ + \"certFile\": \"$PWD/$CERTIFICATES_DIR/localhost.pem\", + \"keyFile\": \"$PWD/$CERTIFICATES_DIR/localhost-key.pem\" + }]" -i traefik/dynamic_conf.yaml + + ./compose-up + cd - >/dev/null || exit + } 2>&4 | xlog >&3; } 4>&1 | xerr >&3; } 3>&1 +} + +# Installs mkcert local development certificates. +x.setup.mkcert() { + { { { + cd "$CERTIFICATES_DIR" || exit + echo "Setting up local certificates" + mkcert --cert-file localhost.pem --key-file localhost-key.pem "localhost" "*.e2e.localhost" "*.local.localhost" "*.dev.localhost" "*.ci.localhost" "*.prod.localhost" "127.0.0.1" "::1" "host.docker.internal" 2>&1 + cd - + } 2>&4 | xlog >&3; } 4>&1 | xerr >&3; } 3>&1 } ########################## migrations ########################## @@ -1119,7 +1143,6 @@ x.db.gen.initial-data() { # Seed database. x.db.initial-data() { - # xlog eats up read -p (prompt) { { { x.db.drop x.migrate up @@ -1152,7 +1175,6 @@ x.db.dump() { # Restore the database with the latest dump or `file` for the current environment. # Args: [file] x.db.restore() { - # xlog eats up read -p (prompt) dump_file="$1" if [[ -n $dump_file ]]; then [[ ! 
-f $dump_file ]] && err "$dump_file does not exist" @@ -1186,12 +1208,14 @@ x.e2e.run() { { { { name="$PROJECT_PREFIX-e2e" cd e2e - pnpm install -r --frozen-lockfile + pnpm i # cannot install playwright lib DOCKER_BUILDKIT=1 BUILDKIT_PROGRESS=plain docker build -t "$name" . cd - >/dev/null - # need symlink resolution - docker run -i --rm \ + # need symlink resolution for data + + test -t 0 && opts="-t" + docker run -i $opts --rm \ --ipc=host \ --network host \ -v "$(pwd)/cmd/oidc-server/data/:/cmd/oidc-server/data/" \ diff --git a/cmd/rest-server/main.go b/cmd/rest-server/main.go index 53d22f0641..61d9a46b84 100644 --- a/cmd/rest-server/main.go +++ b/cmd/rest-server/main.go @@ -6,6 +6,7 @@ import ( "log" "os/exec" "runtime" + "strings" "time" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal" @@ -35,15 +36,24 @@ func openBrowser(url string) { } func main() { - var env, address, specPath, scopePolicyPath, rolePolicyPath string + var env, specPath, scopePolicyPath, rolePolicyPath string - flag.StringVar(&env, "env", ".env", "Environment Variables filename") - flag.StringVar(&address, "address", ":8090", "HTTP Server Address") + flag.StringVar(&env, "env", "", "Environment Variables filename") flag.StringVar(&specPath, "spec-path", "openapi.yaml", "OpenAPI specification filepath") flag.StringVar(&rolePolicyPath, "roles-path", "roles.json", "Roles policy JSON filepath") flag.StringVar(&scopePolicyPath, "scopes-path", "scopes.json", "Scopes policy JSON filepath") flag.Parse() + var errs []string + + if env == "" { + errs = append(errs, " - env is required but unset") + } + + if len(errs) > 0 { + log.Fatalf("error: \n" + strings.Join(errs, "\n")) + } + // go openBrowser(url) // dummy values for dashboard @@ -54,7 +64,7 @@ func main() { prometheus.MustRegister(cpuTemp) cpuTemp.Set(65.3) - errC, err := server.Run(env, address, specPath, rolePolicyPath, scopePolicyPath) + errC, err := server.Run(env, specPath, rolePolicyPath, scopePolicyPath) if err != nil { 
log.Fatalf("Couldn't run: %s", err) } diff --git a/db/migrations/0000002_init.up.sql b/db/migrations/0000002_init.up.sql index 40bd627296..e183082462 100644 --- a/db/migrations/0000002_init.up.sql +++ b/db/migrations/0000002_init.up.sql @@ -9,6 +9,8 @@ create extension if not exists pg_trgm schema extensions; create extension if not exists btree_gin schema extensions; +create extension if not exists rum schema extensions; + -- internal use. update whenever a project with its related workitems, -- etc. tables are created in migrations create table projects ( @@ -107,8 +109,6 @@ comment on column user_api_keys.user_api_key_id is '"properties":private'; -- external_id is null; -- composite on id, deleted_at, email, deleted_at, etc. will not improve speed -- create unique index on users (user_id) where deleted_at is null; -- helps if you have much more deleted rows only -create index on users (created_at); - -- create index on users (deleted_at); - not worth the extra overhead. -- for finding all deleted users exclusively create index on users (deleted_at) @@ -317,6 +317,14 @@ create table work_items ( create index on work_items (team_id); +create index on work_items using gin (title gin_trgm_ops); + +create index on work_items using gin (description gin_trgm_ops); + +create index on work_items using gin (title gin_trgm_ops , description gin_trgm_ops); + +create index on work_items using gin (title , description gin_trgm_ops); + /* when a new project is required -> manual table creation with empty new fields, just diff --git a/docker-compose.yml b/docker-compose.yml index cf4825aacc..f884718d48 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -8,14 +8,14 @@ x-logging: &default-logging # Debug config with docker-compose --env-file <...> config services: backend: - image: ${PROJECT_PREFIX:?not set}-${APP_ENV}-backend:latest + image: ${PROJECT_PREFIX:?not set}-${APP_ENV:?not set}-backend:latest container_name: backend_${PROJECT_PREFIX}_${APP_ENV} networks: - 
traefik-net command: - "--env=.env.${APP_ENV}" volumes: - - ./.env.${APP_ENV}:/.env.${APP_ENV} + - ${PWD}/.env.${APP_ENV}:/.env.${APP_ENV}:ro # required since mock server will use these - /etc/ssl/certs/ca-certificates.crt:/etc/ssl/certs/ca-certificates.crt:ro build: diff --git a/docker/docker-compose.oidc.yml b/docker/docker-compose.oidc.yml index d994f83ef2..68e1efe3b5 100644 --- a/docker/docker-compose.oidc.yml +++ b/docker/docker-compose.oidc.yml @@ -27,13 +27,13 @@ services: PORT: ${MOCK_OIDC_SERVER_PORT:?not set} labels: - traefik.enable=true - - traefik.http.routers.simple-oidc-server.rule=Host(`${OIDC_DOMAIN:?not set}`) && PathPrefix("/oidc") - - traefik.http.routers.simple-oidc-server.middlewares=strip_prefix_oidc # check docs to exclude requests from compression - - traefik.http.middlewares.strip_prefix_oidc.stripprefix.prefixes=/oidc + - traefik.http.routers.${PROJECT_PREFIX}-simple-oidc-server.rule=Host(`${OIDC_DOMAIN:?not set}`) && PathPrefix("/oidc") + - traefik.http.routers.${PROJECT_PREFIX}-simple-oidc-server.middlewares=${PROJECT_PREFIX}-simple-oidc-server-strip-prefix # check docs to exclude requests from compression + - traefik.http.middlewares.${PROJECT_PREFIX}-simple-oidc-server-strip-prefix.stripprefix.prefixes=/oidc - traefik.docker.network=traefik-net - - traefik.http.routers.simple-oidc-server.entrypoints=websecure - - traefik.http.routers.simple-oidc-server.tls=true - - traefik.http.services.simple-oidc-server.loadbalancer.server.port=${MOCK_OIDC_SERVER_PORT:?not set} + - traefik.http.routers.${PROJECT_PREFIX}-simple-oidc-server.entrypoints=websecure + - traefik.http.routers.${PROJECT_PREFIX}-simple-oidc-server.tls=true + - traefik.http.services.${PROJECT_PREFIX}-simple-oidc-server.loadbalancer.server.port=${MOCK_OIDC_SERVER_PORT:?not set} restart: unless-stopped logging: *default-logging healthcheck: diff --git a/e2e/Dockerfile b/e2e/Dockerfile index e7fc677869..09331e68a5 100644 --- a/e2e/Dockerfile +++ b/e2e/Dockerfile @@ -4,3 +4,6 @@ 
WORKDIR /e2e ENV PATH /e2e/node_modules/.bin:$PATH + + + diff --git a/e2e/__tests__/example.spec.ts b/e2e/__tests__/example.spec.ts index b79e7d0ad1..9b8a8ab4a6 100644 --- a/e2e/__tests__/example.spec.ts +++ b/e2e/__tests__/example.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from '@playwright/test' -import authServerUsers from '../auth-server-users-e2e.json' +import authServerUsers from '@users' test('users loaded', async ({ page }) => { expect(authServerUsers).toBeDefined() diff --git a/e2e/__tests__/pages/login.ts b/e2e/__tests__/pages/login.ts new file mode 100644 index 0000000000..e5f226441e --- /dev/null +++ b/e2e/__tests__/pages/login.ts @@ -0,0 +1,29 @@ +import type { Page, BrowserContext } from 'playwright' + +export class LoginPage { + private readonly page: Page + + constructor(page: Page) { + this.page = page + } + + static async create(page: Page): Promise { + const loginPage = new LoginPage(page) + await loginPage.navigateToLoginPage() + return loginPage + } + + private async navigateToLoginPage(): Promise { + await this.page.goto('') + } + + async enterCredentials(username: string, password: string): Promise { + await this.page.fill('#username', username) + await this.page.fill('#password', password) + } + + async submitLoginForm(): Promise { + await this.page.click('button[type="submit"]') + await this.page.waitForURL('**/') + } +} diff --git a/e2e/__tests__/specs/oidc-login.spec.ts b/e2e/__tests__/specs/oidc-login.spec.ts new file mode 100644 index 0000000000..46ce2f8c31 --- /dev/null +++ b/e2e/__tests__/specs/oidc-login.spec.ts @@ -0,0 +1,13 @@ +import test, { expect } from '@playwright/test' +import { LoginPage } from '__tests__/pages/login' +import authServerUsers from '@users' + +test('Login redirects to auth server and back if authenticated', async ({ page }) => { + test.skip() // TODO: + const loginPage = await LoginPage.create(page) + const user1 = authServerUsers.user1 + await loginPage.enterCredentials(user1.username, user1.password) + 
await loginPage.submitLoginForm() + + expect(page.url()).toBe('') +}) diff --git a/e2e/package.json b/e2e/package.json index f4c24bf690..a7000e5c84 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -7,6 +7,9 @@ "keywords": [], "author": "", "devDependencies": { + "typescript": "^4.8.2" + }, + "dependencies": { "@playwright/test": "^1.35.0" } } diff --git a/e2e/playwright.config.ts b/e2e/playwright.config.ts index 466c3dba2b..1247b7f86c 100644 --- a/e2e/playwright.config.ts +++ b/e2e/playwright.config.ts @@ -1,4 +1,4 @@ -import { defineConfig, devices } from "@playwright/test"; +import { defineConfig, devices } from '@playwright/test' /** * Read environment variables from file. @@ -10,7 +10,7 @@ import { defineConfig, devices } from "@playwright/test"; * See https://playwright.dev/docs/test-configuration. */ export default defineConfig({ - testDir: "./__tests__", + testDir: './__tests__', /* Run tests in files in parallel */ fullyParallel: true, /* Fail the build on CI if you accidentally left test.only in the source code. */ @@ -20,31 +20,31 @@ export default defineConfig({ /* Opt out of parallel tests on CI. */ workers: process.env.CI ? 1 : undefined, /* Reporter to use. See https://playwright.dev/docs/test-reporters */ - reporter: "html", + reporter: 'html', /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ use: { /* Base URL to use in actions like `await page.goto('/')`. */ // baseURL: 'http://127.0.0.1:3000', /* Collect trace when retrying the failed test. 
See https://playwright.dev/docs/trace-viewer */ - trace: "on-first-retry", + trace: 'on-first-retry', }, /* Configure projects for major browsers */ projects: [ { - name: "chromium", - use: { ...devices["Desktop Chrome"] }, + name: 'chromium', + use: { ...devices['Desktop Chrome'] }, }, { - name: "firefox", - use: { ...devices["Desktop Firefox"] }, + name: 'firefox', + use: { ...devices['Desktop Firefox'] }, }, { - name: "webkit", - use: { ...devices["Desktop Safari"] }, + name: 'webkit', + use: { ...devices['Desktop Safari'] }, }, /* Test against mobile viewports. */ @@ -74,4 +74,4 @@ export default defineConfig({ // url: 'http://127.0.0.1:3000', // reuseExistingServer: !process.env.CI, // }, -}); +}) diff --git a/e2e/pnpm-lock.yaml b/e2e/pnpm-lock.yaml index f989f3f79d..bfcf3ad387 100644 --- a/e2e/pnpm-lock.yaml +++ b/e2e/pnpm-lock.yaml @@ -1,10 +1,15 @@ lockfileVersion: '6.0' -devDependencies: +dependencies: '@playwright/test': specifier: ^1.35.0 version: 1.35.0 +devDependencies: + typescript: + specifier: ^4.8.2 + version: 4.8.2 + packages: /@playwright/test@1.35.0: @@ -12,26 +17,32 @@ packages: engines: {node: '>=16'} hasBin: true dependencies: - '@types/node': 20.3.0 + '@types/node': 20.3.1 playwright-core: 1.35.0 optionalDependencies: fsevents: 2.3.2 - dev: true + dev: false - /@types/node@20.3.0: - resolution: {integrity: sha512-cumHmIAf6On83X7yP+LrsEyUOf/YlociZelmpRYaGFydoaPdxdt80MAbu6vWerQT2COCp2nPvHdsbD7tHn/YlQ==} - dev: true + /@types/node@20.3.1: + resolution: {integrity: sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==} + dev: false /fsevents@2.3.2: resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} os: [darwin] requiresBuild: true - dev: true + dev: false optional: true /playwright-core@1.35.0: resolution: {integrity: 
sha512-muMXyPmIx/2DPrCHOD1H1ePT01o7OdKxKj2ebmCAYvqhUy+Y1bpal7B0rdoxros7YrXI294JT/DWw2LqyiqTPA==} engines: {node: '>=16'} hasBin: true + dev: false + + /typescript@4.8.2: + resolution: {integrity: sha512-C0I1UsrrDHo2fYI5oaCGbSejwX4ch+9Y5jTQELvovfmFkK3HHSZJB8MSJcWLmCUBzQBchCrZ9rMRV6GuNrvGtw==} + engines: {node: '>=4.2.0'} + hasBin: true dev: true diff --git a/frontend/src/components/Permissions/ProtectedRoute.tsx b/frontend/src/components/Permissions/ProtectedRoute.tsx index 2049674021..e9e56613e3 100644 --- a/frontend/src/components/Permissions/ProtectedRoute.tsx +++ b/frontend/src/components/Permissions/ProtectedRoute.tsx @@ -8,6 +8,8 @@ import { useUISlice } from 'src/slices/ui' import { useAuthenticatedUser } from 'src/hooks/auth/useAuthenticatedUser' import { useEffect } from 'react' import { isAuthorized } from 'src/services/authorization' +import config from '@config' +import { apiPath } from 'src/services/apiPaths' type ProtectedRouteProps = { children: JSX.Element @@ -42,7 +44,7 @@ export default function ProtectedRoute({ children, requiredRole = null, required } if (!isAuthenticated && user) { - window.location.replace(`${import.meta.env.VITE_AUTH_SERVER}/login`) + window.location.replace(apiPath('/auth/myprovider/login')) } return {children} diff --git a/frontend/src/gen/default/default.ts b/frontend/src/gen/default/default.ts index 5fe31f588c..3b94897e60 100644 --- a/frontend/src/gen/default/default.ts +++ b/frontend/src/gen/default/default.ts @@ -16,7 +16,7 @@ import type { UseInfiniteQueryResult, QueryKey, } from '@tanstack/react-query' -import type { HTTPValidationError } from '.././model' +import type { EventsParams, HTTPValidationError } from '.././model' export const myProviderCallback = (options?: AxiosRequestConfig): Promise> => { return axios.get(`/auth/myprovider/callback`, options) @@ -174,24 +174,28 @@ export const useMyProviderLogin = < return query } -export const events = (options?: AxiosRequestConfig): Promise> => { - return 
axios.get(`/events`, options) +export const events = (params: EventsParams, options?: AxiosRequestConfig): Promise> => { + return axios.get(`/events`, { + ...options, + params: { ...params, ...options?.params }, + }) } -export const getEventsQueryKey = () => [`/events`] as const +export const getEventsQueryKey = (params: EventsParams) => [`/events`, ...(params ? [params] : [])] as const -export const getEventsInfiniteQueryOptions = < - TData = Awaited>, - TError = AxiosError, ->(options?: { - query?: UseInfiniteQueryOptions>, TError, TData> - axios?: AxiosRequestConfig -}): UseInfiniteQueryOptions>, TError, TData> & { queryKey: QueryKey } => { +export const getEventsInfiniteQueryOptions = >, TError = AxiosError>( + params: EventsParams, + options?: { + query?: UseInfiniteQueryOptions>, TError, TData> + axios?: AxiosRequestConfig + }, +): UseInfiniteQueryOptions>, TError, TData> & { queryKey: QueryKey } => { const { query: queryOptions, axios: axiosOptions } = options ?? {} - const queryKey = queryOptions?.queryKey ?? getEventsQueryKey() + const queryKey = queryOptions?.queryKey ?? 
getEventsQueryKey(params) - const queryFn: QueryFunction>> = ({ signal }) => events({ signal, ...axiosOptions }) + const queryFn: QueryFunction>> = ({ signal }) => + events(params, { signal, ...axiosOptions }) return { queryKey, queryFn, staleTime: 3600000, ...queryOptions } } @@ -199,11 +203,14 @@ export const getEventsInfiniteQueryOptions = < export type EventsInfiniteQueryResult = NonNullable>> export type EventsInfiniteQueryError = AxiosError -export const useEventsInfinite = >, TError = AxiosError>(options?: { - query?: UseInfiniteQueryOptions>, TError, TData> - axios?: AxiosRequestConfig -}): UseInfiniteQueryResult & { queryKey: QueryKey } => { - const queryOptions = getEventsInfiniteQueryOptions(options) +export const useEventsInfinite = >, TError = AxiosError>( + params: EventsParams, + options?: { + query?: UseInfiniteQueryOptions>, TError, TData> + axios?: AxiosRequestConfig + }, +): UseInfiniteQueryResult & { queryKey: QueryKey } => { + const queryOptions = getEventsInfiniteQueryOptions(params, options) const query = useInfiniteQuery(queryOptions) as UseInfiniteQueryResult & { queryKey: QueryKey } @@ -212,18 +219,16 @@ export const useEventsInfinite = >, TE return query } -export const getEventsQueryOptions = < - TData = Awaited>, - TError = AxiosError, ->(options?: { - query?: UseQueryOptions>, TError, TData> - axios?: AxiosRequestConfig -}): UseQueryOptions>, TError, TData> & { queryKey: QueryKey } => { +export const getEventsQueryOptions = >, TError = AxiosError>( + params: EventsParams, + options?: { query?: UseQueryOptions>, TError, TData>; axios?: AxiosRequestConfig }, +): UseQueryOptions>, TError, TData> & { queryKey: QueryKey } => { const { query: queryOptions, axios: axiosOptions } = options ?? {} - const queryKey = queryOptions?.queryKey ?? getEventsQueryKey() + const queryKey = queryOptions?.queryKey ?? 
getEventsQueryKey(params) - const queryFn: QueryFunction>> = ({ signal }) => events({ signal, ...axiosOptions }) + const queryFn: QueryFunction>> = ({ signal }) => + events(params, { signal, ...axiosOptions }) return { queryKey, queryFn, staleTime: 3600000, ...queryOptions } } @@ -231,11 +236,11 @@ export const getEventsQueryOptions = < export type EventsQueryResult = NonNullable>> export type EventsQueryError = AxiosError -export const useEvents = >, TError = AxiosError>(options?: { - query?: UseQueryOptions>, TError, TData> - axios?: AxiosRequestConfig -}): UseQueryResult & { queryKey: QueryKey } => { - const queryOptions = getEventsQueryOptions(options) +export const useEvents = >, TError = AxiosError>( + params: EventsParams, + options?: { query?: UseQueryOptions>, TError, TData>; axios?: AxiosRequestConfig }, +): UseQueryResult & { queryKey: QueryKey } => { + const queryOptions = getEventsQueryOptions(params, options) const query = useQuery(queryOptions) as UseQueryResult & { queryKey: QueryKey } diff --git a/frontend/src/gen/model/eventsParams.ts b/frontend/src/gen/model/eventsParams.ts new file mode 100644 index 0000000000..54370eb180 --- /dev/null +++ b/frontend/src/gen/model/eventsParams.ts @@ -0,0 +1,12 @@ +/** + * Generated by orval v6.15.0 🍺 + * Do not edit manually. 
+ * OpenAPI openapi-go-gin-postgres-sqlc + * openapi-go-gin-postgres-sqlc + * OpenAPI spec version: 2.0.0 + */ +import type { Project } from './project' + +export type EventsParams = { + projectName: Project +} diff --git a/frontend/src/gen/model/index.ts b/frontend/src/gen/model/index.ts index cb2405441c..6bb78f2271 100644 --- a/frontend/src/gen/model/index.ts +++ b/frontend/src/gen/model/index.ts @@ -34,6 +34,7 @@ export * from './demoProjectKanbanSteps' export * from './demoTwoKanbanSteps' export * from './demoTwoWorkItemTypes' export * from './demoWorkItemTypes' +export * from './eventsParams' export * from './getProjectWorkitemsParams' export * from './hTTPValidationError' export * from './httpErrorType' diff --git a/frontend/src/services/apiPaths.ts b/frontend/src/services/apiPaths.ts new file mode 100644 index 0000000000..4f18c2d417 --- /dev/null +++ b/frontend/src/services/apiPaths.ts @@ -0,0 +1,7 @@ +import config from '@config' +import type { paths } from 'src/types/schema' + +export function apiPath(path: keyof paths) { + const port = config.API_PORT?.length > 0 ? 
':' + config.API_PORT : '' + return `https://${config.DOMAIN}${port}${config.API_PREFIX}${config.API_VERSION}${path}` +} diff --git a/frontend/src/types/schema.d.ts b/frontend/src/types/schema.d.ts index 6038ea6cee..e11e2d5382 100644 --- a/frontend/src/types/schema.d.ts +++ b/frontend/src/types/schema.d.ts @@ -512,6 +512,11 @@ export interface operations { }; }; Events: { + parameters: { + query: { + projectName: components["schemas"]["Project"]; + }; + }; responses: { /** @description events */ 200: { diff --git a/go.mod b/go.mod index 13d6971e4b..a22a1b6ec6 100644 --- a/go.mod +++ b/go.mod @@ -20,7 +20,7 @@ require ( github.com/jackc/pgconn v1.14.0 github.com/jackc/pgerrcode v0.0.0-20220416144525-469b46aa5efa github.com/jackc/pgx-zap v0.0.0-20221202020421-94b1cb2f889f - github.com/jackc/pgx/v5 v5.3.2-0.20230421024359-6defa2a607fd + github.com/jackc/pgx/v5 v5.4.0 github.com/joho/godotenv v1.5.1 github.com/kenshaw/inflector v0.2.0 github.com/kenshaw/snaker v0.2.0 diff --git a/go.sum b/go.sum index 406b651093..30596f6174 100644 --- a/go.sum +++ b/go.sum @@ -201,6 +201,8 @@ github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgS github.com/jackc/pgx/v4 v4.18.1/go.mod h1:FydWkUyadDmdNH/mHnGob881GawxeEm7TcMCzkb+qQE= github.com/jackc/pgx/v5 v5.3.2-0.20230421024359-6defa2a607fd h1:RfTDl1TAYHx3Nizj2kQ7J7u/Z5atIweWvL0HR/m8GMs= github.com/jackc/pgx/v5 v5.3.2-0.20230421024359-6defa2a607fd/go.mod h1:sU+RaYl9qnhD3Ce+mwnFii6YEPx70mCYghBzKvqq4qo= +github.com/jackc/pgx/v5 v5.4.0 h1:BSr+GCm4N6QcgIwv0DyTFHK9ugfEFF9DzSbbzxOiXU0= +github.com/jackc/pgx/v5 v5.4.0/go.mod h1:q6iHT8uDNXWiFNOlRqJzBTaSH3+2xCXkokxHZC5qWFY= github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= diff --git 
a/internal/client/openapi_client.gen.go b/internal/client/openapi_client.gen.go index a0c05c6574..d63ffdfcec 100644 --- a/internal/client/openapi_client.gen.go +++ b/internal/client/openapi_client.gen.go @@ -657,6 +657,11 @@ type Serial = int // UUID defines the model for UUID. type UUID = string +// EventsParams defines parameters for Events. +type EventsParams struct { + ProjectName Project `form:"projectName" json:"projectName"` +} + // GetProjectWorkitemsParams defines parameters for GetProjectWorkitems. type GetProjectWorkitemsParams struct { Open *bool `form:"open,omitempty" json:"open,omitempty"` @@ -767,7 +772,7 @@ type ClientInterface interface { MyProviderLogin(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) // Events request - Events(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + Events(ctx context.Context, params *EventsParams, reqEditors ...RequestEditorFn) (*http.Response, error) // OpenapiYamlGet request OpenapiYamlGet(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -874,8 +879,8 @@ func (c *Client) MyProviderLogin(ctx context.Context, reqEditors ...RequestEdito return c.Client.Do(req) } -func (c *Client) Events(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewEventsRequest(c.Server) +func (c *Client) Events(ctx context.Context, params *EventsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEventsRequest(c.Server, params) if err != nil { return nil, err } @@ -1268,7 +1273,7 @@ func NewMyProviderLoginRequest(server string) (*http.Request, error) { } // NewEventsRequest generates requests for Events -func NewEventsRequest(server string) (*http.Request, error) { +func NewEventsRequest(server string, params *EventsParams) (*http.Request, error) { var err error serverURL, err := url.Parse(server) @@ -1286,6 +1291,22 @@ func NewEventsRequest(server string) (*http.Request, error) { return 
nil, err } + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "projectName", runtime.ParamLocationQuery, params.ProjectName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err @@ -2058,7 +2079,7 @@ type ClientWithResponsesInterface interface { MyProviderLoginWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*MyProviderLoginResponse, error) // Events request - EventsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*EventsResponse, error) + EventsWithResponse(ctx context.Context, params *EventsParams, reqEditors ...RequestEditorFn) (*EventsResponse, error) // OpenapiYamlGet request OpenapiYamlGetWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*OpenapiYamlGetResponse, error) @@ -2645,8 +2666,8 @@ func (c *ClientWithResponses) MyProviderLoginWithResponse(ctx context.Context, r } // EventsWithResponse request returning *EventsResponse -func (c *ClientWithResponses) EventsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*EventsResponse, error) { - rsp, err := c.Events(ctx, reqEditors...) +func (c *ClientWithResponses) EventsWithResponse(ctx context.Context, params *EventsParams, reqEditors ...RequestEditorFn) (*EventsResponse, error) { + rsp, err := c.Events(ctx, params, reqEditors...) if err != nil { return nil, err } diff --git a/internal/errorcode_string.go b/internal/errorcode_string.go new file mode 100644 index 0000000000..e1da8f9736 --- /dev/null +++ b/internal/errorcode_string.go @@ -0,0 +1,34 @@ +// Code generated by "stringer -type=ErrorCode -trimprefix=ErrorCode"; DO NOT EDIT. 
+ +package internal + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[ErrorCodeUnknown-0] + _ = x[ErrorCodePrivate-1] + _ = x[ErrorCodeNotFound-2] + _ = x[ErrorCodeInvalidArgument-3] + _ = x[ErrorCodeAlreadyExists-4] + _ = x[ErrorCodeUnauthorized-5] + _ = x[ErrorCodeUnauthenticated-6] + _ = x[ErrorCodeRequestValidation-7] + _ = x[ErrorCodeResponseValidation-8] + _ = x[ErrorCodeInvalidRole-9] + _ = x[ErrorCodeInvalidScope-10] + _ = x[ErrorCodeInvalidUUID-11] +} + +const _ErrorCode_name = "UnknownPrivateNotFoundInvalidArgumentAlreadyExistsUnauthorizedUnauthenticatedRequestValidationResponseValidationInvalidRoleInvalidScopeInvalidUUID" + +var _ErrorCode_index = [...]uint8{0, 7, 14, 22, 37, 50, 62, 77, 94, 112, 123, 135, 146} + +func (i ErrorCode) String() string { + if i >= ErrorCode(len(_ErrorCode_index)-1) { + return "ErrorCode(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _ErrorCode_name[_ErrorCode_index[i]:_ErrorCode_index[i+1]] +} diff --git a/internal/errors.go b/internal/errors.go index bc3064d4de..9835503a98 100644 --- a/internal/errors.go +++ b/internal/errors.go @@ -4,6 +4,7 @@ package internal import ( + "errors" "fmt" ) @@ -18,6 +19,7 @@ type Error struct { // ErrorCode defines supported error codes. type ErrorCode uint +//go:generate stringer -type=ErrorCode -trimprefix=ErrorCode const ( ErrorCodeUnknown ErrorCode = iota // ErrorCodePrivate marks an error to be hidden in response. @@ -72,17 +74,12 @@ func (e *Error) Code() ErrorCode { // Cause returns the root error cause in the chain. 
func (e *Error) Cause() error { var err error - root := e + err = e for { - if err = root.Unwrap(); err == nil { - return root - } - - r, ok := err.(*Error) - if !ok { + _err := errors.Unwrap(err) + if _err == nil { return err } - - root = r + err = _err } } diff --git a/internal/models/mock_oidc_server.go b/internal/models/mock_oidc_server.go index 63ebe38d74..30531574a7 100644 --- a/internal/models/mock_oidc_server.go +++ b/internal/models/mock_oidc_server.go @@ -5,6 +5,9 @@ import "golang.org/x/text/language" // User implements oidc-server storage.User. // It is used for development and testing purposes only. // nolint: revive +// Still cannot access common interface fields: +// +// https://go101.org/generics/888-the-status-quo-of-go-custom-generics.html type AuthServerUser struct { ID_ string `json:"id"` // need exported for unmarshalling Username_ string `json:"username"` diff --git a/internal/models/openapi_types.gen.go b/internal/models/openapi_types.gen.go index 1816ce4a79..95fa430501 100644 --- a/internal/models/openapi_types.gen.go +++ b/internal/models/openapi_types.gen.go @@ -644,6 +644,11 @@ type Serial = int // UUID defines the model for UUID. type UUID = string +// EventsParams defines parameters for Events. +type EventsParams struct { + ProjectName Project `form:"projectName" json:"projectName"` +} + // GetProjectWorkitemsParams defines parameters for GetProjectWorkitems. 
type GetProjectWorkitemsParams struct { Open *bool `form:"open,omitempty" json:"open,omitempty"` diff --git a/internal/postgen/structs/main.gen.go b/internal/postgen/structs/main.gen.go index 6fd302e6f0..06f4ade8fc 100644 --- a/internal/postgen/structs/main.gen.go +++ b/internal/postgen/structs/main.gen.go @@ -127,6 +127,7 @@ var PublicStructs = map[string]any{ "DbWorkItemWorkItemTagUpdateParams": db.WorkItemWorkItemTagUpdateParams{}, "DbWorkItem__WIAU_User": db.WorkItem__WIAU_User{}, "DbWorkItem__WIAU_WorkItemAssignedUser": db.WorkItem__WIAU_WorkItemAssignedUser{}, + "DbXoError": db.XoError{}, // diff --git a/internal/repos/postgresql/errors.go b/internal/repos/postgresql/errors.go index bfe092f894..c2706c8573 100644 --- a/internal/repos/postgresql/errors.go +++ b/internal/repos/postgresql/errors.go @@ -6,31 +6,58 @@ import ( "regexp" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal" + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/repos/postgresql/gen/db" + "github.com/jackc/pgx/v5" "github.com/jackc/pgx/v5/pgconn" "github.com/jackc/pgerrcode" ) -var errorDetailRegex = regexp.MustCompile(`\((.*)\)=\((.*)\)`) +var errorUniqueViolationRegex = regexp.MustCompile(`\((.*)\)=\((.*)\)`) func parseErrorDetail(err error) error { newErr := internal.WrapErrorf(err, internal.ErrorCodeUnknown, err.Error()) - var column, value string + /** + * TODO: will have generic xo Error struct, which has Entity field. + * Then in responses.go we use errors.As for this Error struct (will stop at Error, not pgx error) + * So we would grab e.Entity and construct the new string based on wrapped errors in Error + * which we already are handling (pgErr, pgx.ErrNoRows)... + * the end goal is that error.Title in responses.go err.Cause() gives something like: `<.Entity> not found`, `... already exists` + * which can directly be shown in a callout. 
+ * + * + */ + var pgErr *pgconn.PgError if errors.As(err, &pgErr) { - switch pgErr.Code { - case pgerrcode.UniqueViolation: - matches := errorDetailRegex.FindStringSubmatch(pgErr.Detail) - if len(matches) == 0 { - break - } - column, value = matches[1], matches[2] - newErr = internal.WrapErrorf(err, internal.ErrorCodeAlreadyExists, fmt.Sprintf("%s %q already exists", column, value)) - default: - newErr = internal.WrapErrorf(err, internal.ErrorCodeUnknown, fmt.Sprintf("%s | %s", pgErr.Detail, pgErr.Message)) + newErr = convertPgErr(pgErr) + } + + var xoErr *db.XoError + if errors.As(err, &xoErr) { + if errors.Is(err, pgx.ErrNoRows) { + return internal.NewErrorf(internal.ErrorCodeNotFound, xoErr.Entity+" not found") } } return newErr } + +func convertPgErr(pgErr *pgconn.PgError) error { + var err error + var column, value string + switch pgErr.Code { + case pgerrcode.UniqueViolation: + matches := errorUniqueViolationRegex.FindStringSubmatch(pgErr.Detail) + if len(matches) == 0 { + break + } + column, value = matches[1], matches[2] + err = internal.NewErrorf(internal.ErrorCodeAlreadyExists, fmt.Sprintf("%s %q already exists", column, value)) + default: + err = internal.NewErrorf(internal.ErrorCodeUnknown, fmt.Sprintf("%s | %s", pgErr.Detail, pgErr.Message)) + } + + return err +} diff --git a/internal/repos/postgresql/gen/db/activity.xo.go b/internal/repos/postgresql/gen/db/activity.xo.go index 7a76b2e32b..cd56f601ec 100644 --- a/internal/repos/postgresql/gen/db/activity.xo.go +++ b/internal/repos/postgresql/gen/db/activity.xo.go @@ -167,11 +167,11 @@ func (a *Activity) Insert(ctx context.Context, db DB) (*Activity, error) { rows, err := db.Query(ctx, sqlstr, a.ProjectID, a.Name, a.Description, a.IsProductive) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Insert/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } newa, err := pgx.CollectOneRow(rows, 
pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Activity", Err: err})) } *a = newa @@ -191,11 +191,11 @@ func (a *Activity) Update(ctx context.Context, db DB) (*Activity, error) { rows, err := db.Query(ctx, sqlstr, a.ProjectID, a.Name, a.Description, a.IsProductive, a.ActivityID) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Update/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } newa, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Activity", Err: err})) } *a = newa @@ -203,7 +203,7 @@ func (a *Activity) Update(ctx context.Context, db DB) (*Activity, error) { } // Upsert upserts a Activity in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (a *Activity) Upsert(ctx context.Context, db DB, params *ActivityCreateParams) (*Activity, error) { var err error @@ -217,11 +217,11 @@ func (a *Activity) Upsert(ctx context.Context, db DB, params *ActivityCreatePara var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Activity", Err: err}) } a, err = a.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Activity", Err: err}) } } } @@ -315,11 +315,11 @@ func ActivityPaginatedByActivityIDAsc(ctx context.Context, db DB, activityID int rows, err := db.Query(ctx, sqlstr, append([]any{activityID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -398,11 +398,11 @@ func ActivityPaginatedByProjectIDAsc(ctx context.Context, db DB, projectID int, rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -481,11 +481,11 @@ func ActivityPaginatedByActivityIDDesc(ctx context.Context, db DB, activityID in rows, err := db.Query(ctx, sqlstr, append([]any{activityID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -564,11 +564,11 @@ func ActivityPaginatedByProjectIDDesc(ctx context.Context, db DB, projectID int, rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -649,11 +649,11 @@ func ActivityByNameProjectID(ctx context.Context, db DB, name string, projectID // logf(sqlstr, name, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{name, projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("activities/ActivityByNameProjectID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("activities/ActivityByNameProjectID/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } a, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("activities/ActivityByNameProjectID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("activities/ActivityByNameProjectID/pgx.CollectOneRow: %w", &XoError{Entity: "Activity", Err: err})) } return &a, nil @@ -735,14 +735,14 @@ func ActivitiesByName(ctx context.Context, db DB, name string, opts ...ActivityS // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/Query: %w", &XoError{Entity: "Activity", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -823,14 +823,14 @@ func ActivitiesByProjectID(ctx context.Context, db DB, projectID int, opts ...Ac // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/Query: %w", &XoError{Entity: "Activity", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Activity/ActivityByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Activity", Err: err})) } return res, nil } @@ -911,11 +911,11 @@ func ActivityByActivityID(ctx context.Context, db DB, activityID int, opts ...Ac // logf(sqlstr, activityID) rows, err := db.Query(ctx, sqlstr, append([]any{activityID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("activities/ActivityByActivityID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("activities/ActivityByActivityID/db.Query: %w", &XoError{Entity: "Activity", Err: err})) } a, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Activity]) if err != nil { - return nil, logerror(fmt.Errorf("activities/ActivityByActivityID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("activities/ActivityByActivityID/pgx.CollectOneRow: %w", &XoError{Entity: "Activity", Err: err})) } return &a, nil diff --git a/internal/repos/postgresql/gen/db/cache/extra.xo.go b/internal/repos/postgresql/gen/db/cache/extra.xo.go index 06b8910724..1707c8b13c 100644 --- a/internal/repos/postgresql/gen/db/cache/extra.xo.go +++ b/internal/repos/postgresql/gen/db/cache/extra.xo.go @@ -2,6 +2,25 @@ package cache // Code generated by xo. DO NOT EDIT. +import ( + "fmt" +) + func newPointer[T any](v T) *T { return &v } + +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. +func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. 
+func (err *XoError) Unwrap() error { + return err.Err +} diff --git a/internal/repos/postgresql/gen/db/demotwoworkitem.xo.go b/internal/repos/postgresql/gen/db/demotwoworkitem.xo.go index 31fe89587d..9e8cc7cc8a 100644 --- a/internal/repos/postgresql/gen/db/demotwoworkitem.xo.go +++ b/internal/repos/postgresql/gen/db/demotwoworkitem.xo.go @@ -149,11 +149,11 @@ func (dtwi *DemoTwoWorkItem) Insert(ctx context.Context, db DB) (*DemoTwoWorkIte logf(sqlstr, dtwi.WorkItemID, dtwi.CustomDateForProject2) rows, err := db.Query(ctx, sqlstr, dtwi.WorkItemID, dtwi.CustomDateForProject2) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Insert/db.Query: %w", &XoError{Entity: "Demo two work item", Err: err})) } newdtwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoTwoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Demo two work item", Err: err})) } *dtwi = newdtwi @@ -172,11 +172,11 @@ func (dtwi *DemoTwoWorkItem) Update(ctx context.Context, db DB) (*DemoTwoWorkIte rows, err := db.Query(ctx, sqlstr, dtwi.CustomDateForProject2, dtwi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Update/db.Query: %w", &XoError{Entity: "Demo two work item", Err: err})) } newdtwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoTwoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Demo two work item", Err: err})) } *dtwi = newdtwi @@ -184,7 +184,7 @@ func (dtwi *DemoTwoWorkItem) Update(ctx context.Context, db DB) (*DemoTwoWorkIte } // Upsert 
upserts a DemoTwoWorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (dtwi *DemoTwoWorkItem) Upsert(ctx context.Context, db DB, params *DemoTwoWorkItemCreateParams) (*DemoTwoWorkItem, error) { var err error @@ -196,11 +196,11 @@ func (dtwi *DemoTwoWorkItem) Upsert(ctx context.Context, db DB, params *DemoTwoW var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Demo two work item", Err: err}) } dtwi, err = dtwi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Demo two work item", Err: err}) } } } @@ -285,11 +285,11 @@ func DemoTwoWorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workIte rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Demo two work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoTwoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Demo two work item", Err: err})) } return res, nil } @@ -359,11 +359,11 @@ func DemoTwoWorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workIt rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Demo two work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoTwoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoTwoWorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Demo two work item", Err: err})) } return res, nil } @@ -435,11 +435,11 @@ func DemoTwoWorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, o // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("demo_two_work_items/DemoTwoWorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("demo_two_work_items/DemoTwoWorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Demo two work item", Err: err})) } dtwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoTwoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("demo_two_work_items/DemoTwoWorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("demo_two_work_items/DemoTwoWorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Demo two work item", Err: err})) } return &dtwi, nil diff --git a/internal/repos/postgresql/gen/db/demoworkitem.xo.go b/internal/repos/postgresql/gen/db/demoworkitem.xo.go index 2dbdb75cfe..d8e1f1d570 100644 --- a/internal/repos/postgresql/gen/db/demoworkitem.xo.go +++ b/internal/repos/postgresql/gen/db/demoworkitem.xo.go @@ -170,11 +170,11 @@ func (dwi *DemoWorkItem) Insert(ctx context.Context, db DB) (*DemoWorkItem, erro logf(sqlstr, dwi.WorkItemID, dwi.Ref, dwi.Line, dwi.LastMessageAt, dwi.Reopened) rows, err := db.Query(ctx, sqlstr, dwi.WorkItemID, dwi.Ref, dwi.Line, dwi.LastMessageAt, 
dwi.Reopened) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -193,11 +193,11 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro rows, err := db.Query(ctx, sqlstr, dwi.Ref, dwi.Line, dwi.LastMessageAt, dwi.Reopened, dwi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -205,7 +205,7 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro } // Upsert upserts a DemoWorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItemCreateParams) (*DemoWorkItem, error) { var err error @@ -220,11 +220,11 @@ func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItem var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Demo work item", Err: err}) } dwi, err = dwi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Demo work item", Err: err}) } } } @@ -312,11 +312,11 @@ func DemoWorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -389,11 +389,11 @@ func DemoWorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemI rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -468,11 +468,11 @@ func DemoWorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } dwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } return &dwi, nil @@ -548,14 +548,14 @@ func DemoWorkItemsByRefLine(ctx context.Context, db DB, ref string, line string, // logf(sqlstr, ref, line) rows, err := db.Query(ctx, sqlstr, append([]any{ref, line}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/DemoWorkItemsByRefLine/Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/DemoWorkItemsByRefLine/Query: %w", &XoError{Entity: "Demo work item", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/DemoWorkItemsByRefLine/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/DemoWorkItemsByRefLine/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/extra.xo.go b/internal/repos/postgresql/gen/db/extra.xo.go index 6abcb7131f..e99d36f51b 100644 --- a/internal/repos/postgresql/gen/db/extra.xo.go +++ b/internal/repos/postgresql/gen/db/extra.xo.go @@ -2,6 +2,25 @@ package db // Code generated by xo. DO NOT EDIT. +import ( + "fmt" +) + func newPointer[T any](v T) *T { return &v } + +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. +func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. 
+func (err *XoError) Unwrap() error { + return err.Err +} diff --git a/internal/repos/postgresql/gen/db/kanbanstep.xo.go b/internal/repos/postgresql/gen/db/kanbanstep.xo.go index e31b5e5008..e991ea6225 100644 --- a/internal/repos/postgresql/gen/db/kanbanstep.xo.go +++ b/internal/repos/postgresql/gen/db/kanbanstep.xo.go @@ -162,11 +162,11 @@ func (ks *KanbanStep) Insert(ctx context.Context, db DB) (*KanbanStep, error) { rows, err := db.Query(ctx, sqlstr, ks.ProjectID, ks.StepOrder, ks.Name, ks.Description, ks.Color, ks.TimeTrackable) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Insert/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } newks, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Kanban step", Err: err})) } *ks = newks @@ -186,11 +186,11 @@ func (ks *KanbanStep) Update(ctx context.Context, db DB) (*KanbanStep, error) { rows, err := db.Query(ctx, sqlstr, ks.ProjectID, ks.StepOrder, ks.Name, ks.Description, ks.Color, ks.TimeTrackable, ks.KanbanStepID) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Update/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } newks, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Kanban step", Err: err})) } *ks = newks @@ -198,7 +198,7 @@ func (ks *KanbanStep) Update(ctx context.Context, db DB) (*KanbanStep, error) { } // Upsert upserts a KanbanStep in the database. 
-// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (ks *KanbanStep) Upsert(ctx context.Context, db DB, params *KanbanStepCreateParams) (*KanbanStep, error) { var err error @@ -214,11 +214,11 @@ func (ks *KanbanStep) Upsert(ctx context.Context, db DB, params *KanbanStepCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Kanban step", Err: err}) } ks, err = ks.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Kanban step", Err: err}) } } } @@ -308,11 +308,11 @@ func KanbanStepPaginatedByKanbanStepIDAsc(ctx context.Context, db DB, kanbanStep rows, err := db.Query(ctx, sqlstr, append([]any{kanbanStepID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -387,11 +387,11 @@ func KanbanStepPaginatedByProjectIDAsc(ctx context.Context, db DB, projectID int rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -466,11 +466,11 @@ func KanbanStepPaginatedByStepOrderAsc(ctx context.Context, db DB, stepOrder int rows, err := db.Query(ctx, sqlstr, append([]any{stepOrder}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -545,11 +545,11 @@ func KanbanStepPaginatedByKanbanStepIDDesc(ctx context.Context, db DB, kanbanSte rows, err := db.Query(ctx, sqlstr, append([]any{kanbanStepID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -624,11 +624,11 @@ func KanbanStepPaginatedByProjectIDDesc(ctx context.Context, db DB, projectID in rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -703,11 +703,11 @@ func KanbanStepPaginatedByStepOrderDesc(ctx context.Context, db DB, stepOrder in rows, err := db.Query(ctx, sqlstr, append([]any{stepOrder}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -784,11 +784,11 @@ func KanbanStepByKanbanStepID(ctx context.Context, db DB, kanbanStepID int, opts // logf(sqlstr, kanbanStepID) rows, err := db.Query(ctx, sqlstr, append([]any{kanbanStepID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByKanbanStepID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByKanbanStepID/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } ks, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByKanbanStepID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByKanbanStepID/pgx.CollectOneRow: %w", &XoError{Entity: "Kanban step", Err: err})) } return &ks, nil @@ -866,11 +866,11 @@ func KanbanStepByProjectIDNameStepOrder(ctx context.Context, db DB, projectID in // logf(sqlstr, projectID, name, stepOrder) rows, err := db.Query(ctx, sqlstr, append([]any{projectID, name, stepOrder}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDNameStepOrder/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDNameStepOrder/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } ks, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDNameStepOrder/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDNameStepOrder/pgx.CollectOneRow: %w", &XoError{Entity: "Kanban step", Err: err})) } return &ks, nil @@ -948,14 +948,14 @@ func KanbanStepsByProjectID(ctx context.Context, db DB, projectID int, opts ...K // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", &XoError{Entity: "Kanban step", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -1032,14 +1032,14 @@ func KanbanStepsByName(ctx context.Context, db DB, name string, opts ...KanbanSt // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", &XoError{Entity: "Kanban step", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -1116,14 +1116,14 @@ func KanbanStepsByStepOrder(ctx context.Context, db DB, stepOrder int, opts ...K // logf(sqlstr, stepOrder) rows, err := db.Query(ctx, sqlstr, append([]any{stepOrder}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/Query: %w", &XoError{Entity: "Kanban step", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("KanbanStep/KanbanStepByProjectIDNameStepOrder/pgx.CollectRows: %w", &XoError{Entity: "Kanban step", Err: err})) } return res, nil } @@ -1200,11 +1200,11 @@ func KanbanStepByProjectIDStepOrder(ctx context.Context, db DB, projectID int, s // logf(sqlstr, projectID, stepOrder) rows, err := db.Query(ctx, sqlstr, append([]any{projectID, stepOrder}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDStepOrder/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDStepOrder/db.Query: %w", &XoError{Entity: "Kanban step", Err: err})) } ks, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[KanbanStep]) if err != nil { - return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDStepOrder/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("kanban_steps/KanbanStepByProjectIDStepOrder/pgx.CollectOneRow: %w", &XoError{Entity: "Kanban step", Err: err})) } return &ks, nil diff --git a/internal/repos/postgresql/gen/db/movie.xo.go b/internal/repos/postgresql/gen/db/movie.xo.go index 269f3fdfb6..fe3093318c 100644 --- a/internal/repos/postgresql/gen/db/movie.xo.go +++ b/internal/repos/postgresql/gen/db/movie.xo.go @@ -126,11 +126,11 @@ func (m *Movie) Insert(ctx context.Context, db DB) (*Movie, error) { rows, err := db.Query(ctx, sqlstr, m.Title, m.Year, m.Synopsis) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Insert/db.Query: %w", &XoError{Entity: "Movie", Err: err})) } newm, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Movie]) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Movie", Err: err})) } *m = newm @@ -150,11 +150,11 @@ func (m *Movie) Update(ctx context.Context, db DB) (*Movie, error) { rows, err := db.Query(ctx, sqlstr, m.Title, m.Year, m.Synopsis, m.MovieID) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Update/db.Query: %w", &XoError{Entity: "Movie", Err: err})) } newm, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Movie]) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Update/pgx.CollectOneRow: 
%w", err)) + return nil, logerror(fmt.Errorf("Movie/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Movie", Err: err})) } *m = newm @@ -162,7 +162,7 @@ func (m *Movie) Update(ctx context.Context, db DB) (*Movie, error) { } // Upsert upserts a Movie in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (m *Movie) Upsert(ctx context.Context, db DB, params *MovieCreateParams) (*Movie, error) { var err error @@ -175,11 +175,11 @@ func (m *Movie) Upsert(ctx context.Context, db DB, params *MovieCreateParams) (* var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Movie", Err: err}) } m, err = m.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Movie", Err: err}) } } } @@ -260,11 +260,11 @@ func MoviePaginatedByMovieIDAsc(ctx context.Context, db DB, movieID int, opts .. rows, err := db.Query(ctx, sqlstr, append([]any{movieID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Paginated/Asc/db.Query: %w", &XoError{Entity: "Movie", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Movie]) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Movie", Err: err})) } return res, nil } @@ -330,11 +330,11 @@ func MoviePaginatedByMovieIDDesc(ctx context.Context, db DB, movieID int, opts . rows, err := db.Query(ctx, sqlstr, append([]any{movieID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Movie/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Paginated/Desc/db.Query: %w", &XoError{Entity: "Movie", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Movie]) if err != nil { - return nil, logerror(fmt.Errorf("Movie/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Movie/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Movie", Err: err})) } return res, nil } @@ -402,11 +402,11 @@ func MovieByMovieID(ctx context.Context, db DB, movieID int, opts ...MovieSelect // logf(sqlstr, movieID) rows, err := db.Query(ctx, sqlstr, append([]any{movieID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("movies/MovieByMovieID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("movies/MovieByMovieID/db.Query: %w", &XoError{Entity: "Movie", Err: err})) } m, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Movie]) if err != nil { - return nil, logerror(fmt.Errorf("movies/MovieByMovieID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("movies/MovieByMovieID/pgx.CollectOneRow: %w", &XoError{Entity: "Movie", Err: err})) } return &m, nil diff --git a/internal/repos/postgresql/gen/db/notification.xo.go b/internal/repos/postgresql/gen/db/notification.xo.go index 8d32f33ac5..f07c41669f 100644 --- a/internal/repos/postgresql/gen/db/notification.xo.go +++ b/internal/repos/postgresql/gen/db/notification.xo.go @@ -230,11 +230,11 @@ func (n *Notification) Insert(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.ReceiverRank, n.Title, n.Body, n.Label, n.Link, n.Sender, n.Receiver, n.NotificationType) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, 
pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -254,11 +254,11 @@ func (n *Notification) Update(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.ReceiverRank, n.Title, n.Body, n.Label, n.Link, n.Sender, n.Receiver, n.NotificationType, n.NotificationID) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -266,7 +266,7 @@ func (n *Notification) Update(ctx context.Context, db DB) (*Notification, error) } // Upsert upserts a Notification in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCreateParams) (*Notification, error) { var err error @@ -284,11 +284,11 @@ func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCr var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Notification", Err: err}) } n, err = n.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Notification", Err: err}) } } } @@ -393,11 +393,11 @@ func NotificationPaginatedByNotificationIDAsc(ctx context.Context, db DB, notifi rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -487,11 +487,11 @@ func NotificationPaginatedByNotificationIDDesc(ctx context.Context, db DB, notif rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -583,11 +583,11 @@ func NotificationByNotificationID(ctx context.Context, db DB, notificationID int // logf(sqlstr, notificationID) rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } n, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } return &n, nil @@ -680,14 +680,14 @@ func NotificationsByReceiverRankNotificationTypeCreatedAt(ctx context.Context, d // logf(sqlstr, receiverRank, notificationType, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{receiverRank, notificationType, createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsByReceiverRankNotificationTypeCreatedAt/Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsByReceiverRankNotificationTypeCreatedAt/Query: %w", &XoError{Entity: "Notification", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsByReceiverRankNotificationTypeCreatedAt/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsByReceiverRankNotificationTypeCreatedAt/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/project.xo.go b/internal/repos/postgresql/gen/db/project.xo.go index 5db918981e..28865b7abd 100644 --- a/internal/repos/postgresql/gen/db/project.xo.go +++ b/internal/repos/postgresql/gen/db/project.xo.go @@ -257,11 +257,11 @@ func (p *Project) Insert(ctx context.Context, db DB) (*Project, error) { rows, err := db.Query(ctx, sqlstr, p.Name, p.Description, p.WorkItemsTableName, p.BoardConfig) if err != nil { - return nil, logerror(fmt.Errorf("Project/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Project/Insert/db.Query: %w", &XoError{Entity: "Project", Err: err})) } newp, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("Project/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Project/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Project", Err: err})) } *p = newp @@ -281,11 +281,11 @@ func (p *Project) Update(ctx context.Context, db DB) (*Project, error) { rows, err := db.Query(ctx, sqlstr, p.Name, p.Description, p.WorkItemsTableName, p.BoardConfig, p.ProjectID) if err != nil { - return nil, logerror(fmt.Errorf("Project/Update/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("Project/Update/db.Query: %w", &XoError{Entity: "Project", Err: err})) } newp, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("Project/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Project/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Project", Err: err})) } *p = newp @@ -293,7 +293,7 @@ func (p *Project) Update(ctx context.Context, db DB) (*Project, error) { } // Upsert upserts a Project in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (p *Project) Upsert(ctx context.Context, db DB, params *ProjectCreateParams) (*Project, error) { var err error @@ -307,11 +307,11 @@ func (p *Project) Upsert(ctx context.Context, db DB, params *ProjectCreateParams var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Project", Err: err}) } p, err = p.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Project", Err: err}) } } } @@ -425,11 +425,11 @@ func ProjectPaginatedByProjectIDAsc(ctx context.Context, db DB, projectID int, o rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Project/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Project/Paginated/Asc/db.Query: %w", &XoError{Entity: "Project", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("Project/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Project/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Project", Err: err})) } return res, nil } @@ -528,11 +528,11 @@ func ProjectPaginatedByProjectIDDesc(ctx context.Context, db DB, projectID int, rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Project/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Project/Paginated/Desc/db.Query: %w", &XoError{Entity: "Project", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("Project/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Project/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Project", Err: err})) } return res, nil } @@ -633,11 +633,11 @@ func ProjectByName(ctx context.Context, db DB, name models.Project, opts ...Proj // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByName/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByName/db.Query: %w", &XoError{Entity: "Project", Err: err})) } p, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByName/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByName/pgx.CollectOneRow: %w", &XoError{Entity: "Project", Err: err})) } return &p, nil @@ -739,11 +739,11 @@ func ProjectByProjectID(ctx context.Context, db DB, projectID int, opts ...Proje // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByProjectID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByProjectID/db.Query: %w", &XoError{Entity: "Project", Err: err})) } p, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByProjectID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByProjectID/pgx.CollectOneRow: %w", &XoError{Entity: "Project", Err: err})) } return &p, nil @@ -845,11 +845,11 @@ func ProjectByWorkItemsTableName(ctx context.Context, db DB, workItemsTableName // logf(sqlstr, workItemsTableName) rows, err := db.Query(ctx, sqlstr, append([]any{workItemsTableName}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByWorkItemsTableName/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByWorkItemsTableName/db.Query: %w", &XoError{Entity: "Project", Err: err})) } p, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Project]) if err != nil { - return nil, logerror(fmt.Errorf("projects/ProjectByWorkItemsTableName/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("projects/ProjectByWorkItemsTableName/pgx.CollectOneRow: %w", &XoError{Entity: "Project", Err: err})) } return &p, nil diff --git a/internal/repos/postgresql/gen/db/schemamigration.xo.go b/internal/repos/postgresql/gen/db/schemamigration.xo.go index 2137c3458f..f0341d7fb1 100644 --- a/internal/repos/postgresql/gen/db/schemamigration.xo.go +++ b/internal/repos/postgresql/gen/db/schemamigration.xo.go @@ -118,11 +118,11 @@ func (sm *SchemaMigration) Insert(ctx context.Context, db DB) (*SchemaMigration, logf(sqlstr, sm.Version, sm.Dirty) rows, err := db.Query(ctx, sqlstr, sm.Version, sm.Dirty) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Insert/db.Query: %w", &XoError{Entity: "Schema migration", Err: err})) } newsm, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[SchemaMigration]) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Schema migration", Err: err})) } *sm = newsm @@ -141,11 +141,11 @@ func (sm *SchemaMigration) Update(ctx context.Context, db DB) (*SchemaMigration, rows, err := db.Query(ctx, sqlstr, sm.Dirty, sm.Version) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Update/db.Query: %w", &XoError{Entity: "Schema migration", Err: err})) } newsm, err := 
pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[SchemaMigration]) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Schema migration", Err: err})) } *sm = newsm @@ -153,7 +153,7 @@ func (sm *SchemaMigration) Update(ctx context.Context, db DB) (*SchemaMigration, } // Upsert upserts a SchemaMigration in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (sm *SchemaMigration) Upsert(ctx context.Context, db DB, params *SchemaMigrationCreateParams) (*SchemaMigration, error) { var err error @@ -165,11 +165,11 @@ func (sm *SchemaMigration) Upsert(ctx context.Context, db DB, params *SchemaMigr var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Schema migration", Err: err}) } sm, err = sm.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Schema migration", Err: err}) } } } @@ -248,11 +248,11 @@ func SchemaMigrationPaginatedByVersionAsc(ctx context.Context, db DB, version in rows, err := db.Query(ctx, sqlstr, append([]any{version}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Asc/db.Query: %w", &XoError{Entity: "Schema migration", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[SchemaMigration]) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Schema migration", Err: err})) } return res, nil } @@ -316,11 +316,11 @@ func SchemaMigrationPaginatedByVersionDesc(ctx context.Context, db DB, version i rows, err := db.Query(ctx, sqlstr, append([]any{version}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Desc/db.Query: %w", &XoError{Entity: "Schema migration", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[SchemaMigration]) if err != nil { - return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("SchemaMigration/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Schema migration", Err: err})) } return res, nil } @@ -386,11 +386,11 @@ func SchemaMigrationByVersion(ctx context.Context, db DB, version int64, opts .. // logf(sqlstr, version) rows, err := db.Query(ctx, sqlstr, append([]any{version}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("schema_migrations/SchemaMigrationByVersion/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("schema_migrations/SchemaMigrationByVersion/db.Query: %w", &XoError{Entity: "Schema migration", Err: err})) } sm, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[SchemaMigration]) if err != nil { - return nil, logerror(fmt.Errorf("schema_migrations/SchemaMigrationByVersion/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("schema_migrations/SchemaMigrationByVersion/pgx.CollectOneRow: %w", &XoError{Entity: "Schema migration", Err: err})) } return &sm, nil diff --git a/internal/repos/postgresql/gen/db/team.xo.go b/internal/repos/postgresql/gen/db/team.xo.go index 22b1440e4b..7e20a74467 100644 --- a/internal/repos/postgresql/gen/db/team.xo.go +++ b/internal/repos/postgresql/gen/db/team.xo.go @@ -211,11 +211,11 @@ func (t *Team) Insert(ctx context.Context, db DB) (*Team, error) { rows, err := db.Query(ctx, sqlstr, t.ProjectID, t.Name, t.Description) if err != nil { - return nil, logerror(fmt.Errorf("Team/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Insert/db.Query: %w", &XoError{Entity: "Team", Err: err})) } newt, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Team/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Team", Err: err})) } *t = newt @@ -235,11 +235,11 @@ func (t *Team) Update(ctx context.Context, db DB) (*Team, error) { rows, err := db.Query(ctx, sqlstr, t.ProjectID, t.Name, t.Description, t.TeamID) if err != nil { - return nil, logerror(fmt.Errorf("Team/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Update/db.Query: %w", &XoError{Entity: "Team", Err: err})) } newt, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, 
logerror(fmt.Errorf("Team/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Team/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Team", Err: err})) } *t = newt @@ -247,7 +247,7 @@ func (t *Team) Update(ctx context.Context, db DB) (*Team, error) { } // Upsert upserts a Team in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (t *Team) Upsert(ctx context.Context, db DB, params *TeamCreateParams) (*Team, error) { var err error @@ -260,11 +260,11 @@ func (t *Team) Upsert(ctx context.Context, db DB, params *TeamCreateParams) (*Te var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Team", Err: err}) } t, err = t.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Team", Err: err}) } } } @@ -365,11 +365,11 @@ func TeamPaginatedByTeamIDAsc(ctx context.Context, db DB, teamID int, opts ...Te rows, err := db.Query(ctx, sqlstr, append([]any{teamID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Asc/db.Query: %w", &XoError{Entity: "Team", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -455,11 +455,11 @@ func TeamPaginatedByProjectIDAsc(ctx context.Context, db DB, projectID int, opts rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Asc/db.Query: %w", &XoError{Entity: "Team", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -545,11 +545,11 @@ func TeamPaginatedByTeamIDDesc(ctx context.Context, db DB, teamID int, opts ...T rows, err := db.Query(ctx, sqlstr, append([]any{teamID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Desc/db.Query: %w", &XoError{Entity: "Team", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -635,11 +635,11 @@ func TeamPaginatedByProjectIDDesc(ctx context.Context, db DB, projectID int, opt rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Desc/db.Query: %w", &XoError{Entity: "Team", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -727,11 +727,11 @@ func TeamByNameProjectID(ctx context.Context, db DB, name string, projectID int, // logf(sqlstr, name, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{name, projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("teams/TeamByNameProjectID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("teams/TeamByNameProjectID/db.Query: %w", &XoError{Entity: "Team", Err: err})) } t, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("teams/TeamByNameProjectID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("teams/TeamByNameProjectID/pgx.CollectOneRow: %w", &XoError{Entity: "Team", Err: err})) } return &t, nil @@ -820,14 +820,14 @@ func TeamsByName(ctx context.Context, db DB, name string, opts ...TeamSelectConf // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/Query: %w", &XoError{Entity: "Team", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -915,14 +915,14 @@ func TeamsByProjectID(ctx context.Context, db DB, projectID int, opts ...TeamSel // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/Query: %w", &XoError{Entity: "Team", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Team/TeamByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Team", Err: err})) } return res, nil } @@ -1010,11 +1010,11 @@ func TeamByTeamID(ctx context.Context, db DB, teamID int, opts ...TeamSelectConf // logf(sqlstr, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{teamID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("teams/TeamByTeamID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("teams/TeamByTeamID/db.Query: %w", &XoError{Entity: "Team", Err: err})) } t, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Team]) if err != nil { - return nil, logerror(fmt.Errorf("teams/TeamByTeamID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("teams/TeamByTeamID/pgx.CollectOneRow: %w", &XoError{Entity: "Team", Err: err})) } return &t, nil diff --git a/internal/repos/postgresql/gen/db/timeentry.xo.go b/internal/repos/postgresql/gen/db/timeentry.xo.go index 5a3e9be6b3..f2e25ecf2a 100644 --- a/internal/repos/postgresql/gen/db/timeentry.xo.go +++ b/internal/repos/postgresql/gen/db/timeentry.xo.go @@ -229,11 +229,11 @@ func (te *TimeEntry) Insert(ctx context.Context, db DB) (*TimeEntry, error) { rows, err := db.Query(ctx, sqlstr, te.WorkItemID, te.ActivityID, te.TeamID, te.UserID, te.Comment, te.Start, te.DurationMinutes) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Insert/db.Query: %w", &XoError{Entity: "Time entry", Err: err})) } newte, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Time entry", Err: err})) } *te = newte @@ -253,11 +253,11 @@ func (te *TimeEntry) Update(ctx context.Context, db DB) (*TimeEntry, error) { rows, err := db.Query(ctx, sqlstr, te.WorkItemID, te.ActivityID, te.TeamID, te.UserID, te.Comment, te.Start, te.DurationMinutes, te.TimeEntryID) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Update/db.Query: %w", &XoError{Entity: "Time entry", Err: err})) } newte, err := pgx.CollectOneRow(rows, 
pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Time entry", Err: err})) } *te = newte @@ -265,7 +265,7 @@ func (te *TimeEntry) Update(ctx context.Context, db DB) (*TimeEntry, error) { } // Upsert upserts a TimeEntry in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (te *TimeEntry) Upsert(ctx context.Context, db DB, params *TimeEntryCreateParams) (*TimeEntry, error) { var err error @@ -282,11 +282,11 @@ func (te *TimeEntry) Upsert(ctx context.Context, db DB, params *TimeEntryCreateP var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Time entry", Err: err}) } te, err = te.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Time entry", Err: err}) } } } @@ -395,11 +395,11 @@ func TimeEntryPaginatedByTimeEntryIDAsc(ctx context.Context, db DB, timeEntryID rows, err := db.Query(ctx, sqlstr, append([]any{timeEntryID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Asc/db.Query: %w", &XoError{Entity: "Time entry", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Time entry", Err: err})) } return res, nil } @@ -493,11 +493,11 @@ func TimeEntryPaginatedByTimeEntryIDDesc(ctx context.Context, db DB, timeEntryID rows, err := db.Query(ctx, sqlstr, append([]any{timeEntryID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Desc/db.Query: %w", &XoError{Entity: "Time entry", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Time entry", Err: err})) } return res, nil } @@ -593,11 +593,11 @@ func TimeEntryByTimeEntryID(ctx context.Context, db DB, timeEntryID int64, opts // logf(sqlstr, timeEntryID) rows, err := db.Query(ctx, sqlstr, append([]any{timeEntryID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("time_entries/TimeEntryByTimeEntryID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("time_entries/TimeEntryByTimeEntryID/db.Query: %w", &XoError{Entity: "Time entry", Err: err})) } te, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("time_entries/TimeEntryByTimeEntryID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("time_entries/TimeEntryByTimeEntryID/pgx.CollectOneRow: %w", &XoError{Entity: "Time entry", Err: err})) } return &te, nil @@ -694,14 +694,14 @@ func TimeEntriesByUserIDTeamID(ctx context.Context, db DB, userID uuid.UUID, tea // logf(sqlstr, userID, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{userID, teamID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByUserIDTeamID/Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByUserIDTeamID/Query: %w", &XoError{Entity: "Time entry", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByUserIDTeamID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByUserIDTeamID/pgx.CollectRows: %w", &XoError{Entity: "Time entry", Err: err})) } return res, nil } @@ -797,14 +797,14 @@ func TimeEntriesByWorkItemIDTeamID(ctx context.Context, db DB, workItemID *int64 // logf(sqlstr, workItemID, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID, teamID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByWorkItemIDTeamID/Query: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByWorkItemIDTeamID/Query: %w", &XoError{Entity: "Time entry", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[TimeEntry]) if err != nil { - return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByWorkItemIDTeamID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("TimeEntry/TimeEntriesByWorkItemIDTeamID/pgx.CollectRows: %w", &XoError{Entity: "Time entry", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/user.xo.go b/internal/repos/postgresql/gen/db/user.xo.go index f226d94693..df057654f6 100644 --- a/internal/repos/postgresql/gen/db/user.xo.go +++ b/internal/repos/postgresql/gen/db/user.xo.go @@ -387,11 +387,11 @@ func (u *User) Insert(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Username, u.Email, u.FirstName, u.LastName, u.ExternalID, u.APIKeyID, u.Scopes, u.RoleRank, u.HasPersonalNotifications, u.HasGlobalNotifications, u.DeletedAt) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Insert/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -411,11 +411,11 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Username, u.Email, u.FirstName, u.LastName, u.ExternalID, u.APIKeyID, u.Scopes, u.RoleRank, u.HasPersonalNotifications, u.HasGlobalNotifications, u.DeletedAt, u.UserID) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/db.Query: %w", err)) + return 
nil, logerror(fmt.Errorf("User/Update/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -423,7 +423,7 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { } // Upsert upserts a User in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*User, error) { var err error @@ -443,11 +443,11 @@ func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*Us var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User", Err: err}) } u, err = u.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User", Err: err}) } } } @@ -488,7 +488,7 @@ func (u *User) Restore(ctx context.Context, db DB) (*User, error) { u.DeletedAt = nil newu, err := u.Update(ctx, db) if err != nil { - return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return newu, nil } @@ -613,11 +613,11 @@ func UserPaginatedByCreatedAtAsc(ctx context.Context, db DB, createdAt time.Time rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -742,145 +742,11 @@ func UserPaginatedByCreatedAtDesc(ctx context.Context, db DB, createdAt time.Tim rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", err)) - } - return res, nil -} - -// UsersByCreatedAt retrieves a row from 'public.users' as a User. -// -// Generated from index 'users_created_at_idx'. -func UsersByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts ...UserSelectConfigOption) ([]User, error) { - c := &UserSelectConfig{deletedAt: " null ", joins: UserJoins{}, filters: make(map[string][]any)} - - for _, o := range opts { - o(c) - } - - paramStart := 1 - nth := func() string { - paramStart++ - return strconv.Itoa(paramStart) - } - - var filterClauses []string - var filterParams []any - for filterTmpl, params := range c.filters { - filter := filterTmpl - for strings.Contains(filter, "$i") { - filter = strings.Replace(filter, "$i", "$"+nth(), 1) - } - filterClauses = append(filterClauses, filter) - filterParams = append(filterParams, params...) 
- } - - filters := "" - if len(filterClauses) > 0 { - filters = " AND " + strings.Join(filterClauses, " AND ") + " " - } - - var selectClauses []string - var joinClauses []string - var groupByClauses []string - - if c.joins.NotificationsReceiver { - selectClauses = append(selectClauses, userTableNotificationsReceiverSelectSQL) - joinClauses = append(joinClauses, userTableNotificationsReceiverJoinSQL) - groupByClauses = append(groupByClauses, userTableNotificationsReceiverGroupBySQL) - } - - if c.joins.NotificationsSender { - selectClauses = append(selectClauses, userTableNotificationsSenderSelectSQL) - joinClauses = append(joinClauses, userTableNotificationsSenderJoinSQL) - groupByClauses = append(groupByClauses, userTableNotificationsSenderGroupBySQL) - } - - if c.joins.TimeEntries { - selectClauses = append(selectClauses, userTableTimeEntriesSelectSQL) - joinClauses = append(joinClauses, userTableTimeEntriesJoinSQL) - groupByClauses = append(groupByClauses, userTableTimeEntriesGroupBySQL) - } - - if c.joins.UserNotifications { - selectClauses = append(selectClauses, userTableUserNotificationsSelectSQL) - joinClauses = append(joinClauses, userTableUserNotificationsJoinSQL) - groupByClauses = append(groupByClauses, userTableUserNotificationsGroupBySQL) - } - - if c.joins.TeamsMember { - selectClauses = append(selectClauses, userTableTeamsMemberSelectSQL) - joinClauses = append(joinClauses, userTableTeamsMemberJoinSQL) - groupByClauses = append(groupByClauses, userTableTeamsMemberGroupBySQL) - } - - if c.joins.UserAPIKey { - selectClauses = append(selectClauses, userTableUserAPIKeySelectSQL) - joinClauses = append(joinClauses, userTableUserAPIKeyJoinSQL) - groupByClauses = append(groupByClauses, userTableUserAPIKeyGroupBySQL) - } - - if c.joins.WorkItemsAssignedUser { - selectClauses = append(selectClauses, userTableWorkItemsAssignedUserSelectSQL) - joinClauses = append(joinClauses, userTableWorkItemsAssignedUserJoinSQL) - groupByClauses = append(groupByClauses, 
userTableWorkItemsAssignedUserGroupBySQL) - } - - if c.joins.WorkItemComments { - selectClauses = append(selectClauses, userTableWorkItemCommentsSelectSQL) - joinClauses = append(joinClauses, userTableWorkItemCommentsJoinSQL) - groupByClauses = append(groupByClauses, userTableWorkItemCommentsGroupBySQL) - } - - selects := "" - if len(selectClauses) > 0 { - selects = ", " + strings.Join(selectClauses, " ,\n ") + " " - } - joins := strings.Join(joinClauses, " \n ") + " " - groupbys := "" - if len(groupByClauses) > 0 { - groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " - } - - sqlstr := fmt.Sprintf(`SELECT - users.user_id, - users.username, - users.email, - users.first_name, - users.last_name, - users.full_name, - users.external_id, - users.api_key_id, - users.scopes, - users.role_rank, - users.has_personal_notifications, - users.has_global_notifications, - users.created_at, - users.updated_at, - users.deleted_at %s - FROM public.users %s - WHERE users.created_at = $1 - %s AND users.deleted_at is %s %s -`, selects, joins, filters, c.deletedAt, groupbys) - sqlstr += c.orderBy - sqlstr += c.limit - sqlstr = "/* UsersByCreatedAt */\n" + sqlstr - - // run - // logf(sqlstr, createdAt) - rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) - if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByCreatedAt/Query: %w", err)) - } - defer rows.Close() - // process - - res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) - if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByCreatedAt/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -1007,11 +873,11 @@ func UserByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts ...Us // logf(sqlstr, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1139,14 +1005,14 @@ func UsersByDeletedAt_WhereDeletedAtIsNotNull(ctx context.Context, db DB, delete // logf(sqlstr, deletedAt) rows, err := db.Query(ctx, sqlstr, append([]any{deletedAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByDeletedAt/Query: %w", err)) + return nil, logerror(fmt.Errorf("User/UsersByDeletedAt/Query: %w", &XoError{Entity: "User", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByDeletedAt/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/UsersByDeletedAt/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -1273,11 +1139,11 @@ func UserByEmail(ctx context.Context, db DB, email string, opts ...UserSelectCon // logf(sqlstr, email) rows, err := db.Query(ctx, sqlstr, append([]any{email}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByEmail/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByEmail/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByEmail/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByEmail/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1405,11 +1271,11 @@ func UserByExternalID(ctx context.Context, db DB, externalID string, opts ...Use // logf(sqlstr, externalID) rows, err := db.Query(ctx, sqlstr, append([]any{externalID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByExternalID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByExternalID/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByExternalID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByExternalID/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1537,11 +1403,11 @@ func UserByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...UserSele // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1669,14 +1535,14 @@ func UsersByUpdatedAt(ctx context.Context, db DB, updatedAt time.Time, opts ...U // logf(sqlstr, updatedAt) rows, err := db.Query(ctx, sqlstr, append([]any{updatedAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByUpdatedAt/Query: %w", err)) + return nil, logerror(fmt.Errorf("User/UsersByUpdatedAt/Query: %w", &XoError{Entity: "User", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/UsersByUpdatedAt/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/UsersByUpdatedAt/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -1803,11 +1669,11 @@ func UserByUsername(ctx context.Context, db DB, username string, opts ...UserSel // logf(sqlstr, username) rows, err := db.Query(ctx, sqlstr, append([]any{username}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUsername/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUsername/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUsername/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUsername/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil diff --git a/internal/repos/postgresql/gen/db/userapikey.xo.go b/internal/repos/postgresql/gen/db/userapikey.xo.go index 8d75eb416a..fbc2ebc7f5 100644 --- a/internal/repos/postgresql/gen/db/userapikey.xo.go +++ b/internal/repos/postgresql/gen/db/userapikey.xo.go @@ -162,11 +162,11 @@ func (uak *UserAPIKey) Insert(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -186,11 +186,11 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID, uak.UserAPIKeyID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, 
logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -198,7 +198,7 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { } // Upsert upserts a UserAPIKey in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCreateParams) (*UserAPIKey, error) { var err error @@ -211,11 +211,11 @@ func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCrea var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User api key", Err: err}) } uak, err = uak.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User api key", Err: err}) } } } @@ -302,11 +302,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDAsc(ctx context.Context, db DB, userAPIKey rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -378,11 +378,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDDesc(ctx context.Context, db DB, userAPIKe rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -456,11 +456,11 @@ func UserAPIKeyByAPIKey(ctx context.Context, db DB, apiKey string, opts ...UserA // logf(sqlstr, apiKey) rows, err := db.Query(ctx, sqlstr, append([]any{apiKey}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -535,11 +535,11 @@ func UserAPIKeyByUserAPIKeyID(ctx context.Context, db DB, userAPIKeyID int, opts // logf(sqlstr, userAPIKeyID) rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -614,11 +614,11 @@ func UserAPIKeyByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...Us // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil diff --git a/internal/repos/postgresql/gen/db/usernotification.xo.go b/internal/repos/postgresql/gen/db/usernotification.xo.go index ec90ceffe5..50f41e7a09 100644 --- a/internal/repos/postgresql/gen/db/usernotification.xo.go +++ b/internal/repos/postgresql/gen/db/usernotification.xo.go @@ -155,11 +155,11 @@ func (un *UserNotification) Insert(ctx context.Context, db DB) (*UserNotificatio rows, err := db.Query(ctx, sqlstr, un.NotificationID, un.Read, un.UserID) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Insert/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } newun, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User notification", Err: err})) } *un = newun @@ -179,11 +179,11 @@ func (un *UserNotification) Update(ctx context.Context, db DB) (*UserNotificatio rows, err := db.Query(ctx, sqlstr, un.NotificationID, un.Read, un.UserID, un.UserNotificationID) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Update/db.Query: %w", &XoError{Entity: "User 
notification", Err: err})) } newun, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User notification", Err: err})) } *un = newun @@ -191,7 +191,7 @@ func (un *UserNotification) Update(ctx context.Context, db DB) (*UserNotificatio } // Upsert upserts a UserNotification in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (un *UserNotification) Upsert(ctx context.Context, db DB, params *UserNotificationCreateParams) (*UserNotification, error) { var err error @@ -204,11 +204,11 @@ func (un *UserNotification) Upsert(ctx context.Context, db DB, params *UserNotif var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User notification", Err: err}) } un, err = un.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User notification", Err: err}) } } } @@ -301,11 +301,11 @@ func UserNotificationPaginatedByUserNotificationIDAsc(ctx context.Context, db DB rows, err := db.Query(ctx, sqlstr, append([]any{userNotificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } @@ -383,11 +383,11 @@ func UserNotificationPaginatedByNotificationIDAsc(ctx context.Context, db DB, no rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } @@ -465,11 +465,11 @@ func UserNotificationPaginatedByUserNotificationIDDesc(ctx context.Context, db D rows, err := db.Query(ctx, sqlstr, append([]any{userNotificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } @@ -547,11 +547,11 @@ func UserNotificationPaginatedByNotificationIDDesc(ctx context.Context, db DB, n rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } @@ -631,11 +631,11 @@ func UserNotificationByNotificationIDUserID(ctx context.Context, db DB, notifica // logf(sqlstr, notificationID, userID) rows, err := db.Query(ctx, sqlstr, append([]any{notificationID, userID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByNotificationIDUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByNotificationIDUserID/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } un, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByNotificationIDUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByNotificationIDUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User notification", Err: err})) } return &un, nil @@ -716,14 +716,14 @@ func UserNotificationsByNotificationID(ctx context.Context, db DB, notificationI // logf(sqlstr, notificationID) rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/UserNotificationByNotificationIDUserID/Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/UserNotificationByNotificationIDUserID/Query: %w", &XoError{Entity: "User notification", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/UserNotificationByNotificationIDUserID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/UserNotificationByNotificationIDUserID/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } @@ -803,11 +803,11 @@ func UserNotificationByUserNotificationID(ctx context.Context, db DB, userNotifi // logf(sqlstr, userNotificationID) rows, err := db.Query(ctx, sqlstr, append([]any{userNotificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByUserNotificationID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByUserNotificationID/db.Query: %w", &XoError{Entity: "User notification", Err: err})) } un, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByUserNotificationID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_notifications/UserNotificationByUserNotificationID/pgx.CollectOneRow: %w", &XoError{Entity: "User notification", Err: err})) } return &un, nil @@ -888,14 +888,14 @@ func UserNotificationsByUserID(ctx context.Context, db DB, userID uuid.UUID, opt // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/UserNotificationsByUserID/Query: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/UserNotificationsByUserID/Query: %w", &XoError{Entity: "User notification", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserNotification]) if err != nil { - return nil, logerror(fmt.Errorf("UserNotification/UserNotificationsByUserID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserNotification/UserNotificationsByUserID/pgx.CollectRows: %w", &XoError{Entity: "User notification", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/userteam.xo.go b/internal/repos/postgresql/gen/db/userteam.xo.go index 4e9aa8a73b..3f2bc608a5 100644 --- a/internal/repos/postgresql/gen/db/userteam.xo.go +++ b/internal/repos/postgresql/gen/db/userteam.xo.go @@ -168,11 +168,11 @@ func (ut *UserTeam) Insert(ctx context.Context, db DB) (*UserTeam, error) { logf(sqlstr, ut.TeamID, ut.Member) rows, err := db.Query(ctx, sqlstr, ut.TeamID, ut.Member) if err != 
nil { - return nil, logerror(fmt.Errorf("UserTeam/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/Insert/db.Query: %w", &XoError{Entity: "User team", Err: err})) } newut, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserTeam]) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User team", Err: err})) } *ut = newut @@ -266,14 +266,14 @@ func UserTeamsByMember(ctx context.Context, db DB, member uuid.UUID, opts ...Use // logf(sqlstr, member) rows, err := db.Query(ctx, sqlstr, append([]any{member}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMember/Query: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMember/Query: %w", &XoError{Entity: "User team", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserTeam]) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMember/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMember/pgx.CollectRows: %w", &XoError{Entity: "User team", Err: err})) } return res, nil } @@ -351,11 +351,11 @@ func UserTeamByMemberTeamID(ctx context.Context, db DB, member uuid.UUID, teamID // logf(sqlstr, member, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{member, teamID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_team/UserTeamByMemberTeamID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_team/UserTeamByMemberTeamID/db.Query: %w", &XoError{Entity: "User team", Err: err})) } ut, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserTeam]) if err != nil { - return nil, logerror(fmt.Errorf("user_team/UserTeamByMemberTeamID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_team/UserTeamByMemberTeamID/pgx.CollectOneRow: %w", &XoError{Entity: "User team", Err: err})) } return &ut, nil @@ -434,14 +434,14 @@ func UserTeamsByTeamID(ctx context.Context, db DB, teamID int, opts ...UserTeamS // logf(sqlstr, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{teamID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMemberTeamID/Query: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMemberTeamID/Query: %w", &XoError{Entity: "User team", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserTeam]) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMemberTeamID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByMemberTeamID/pgx.CollectRows: %w", &XoError{Entity: "User team", Err: err})) } return res, nil } @@ -519,14 +519,14 @@ func UserTeamsByTeamIDMember(ctx context.Context, db DB, teamID int, member uuid // logf(sqlstr, teamID, member) rows, err := db.Query(ctx, sqlstr, append([]any{teamID, member}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByTeamIDMember/Query: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByTeamIDMember/Query: %w", &XoError{Entity: "User team", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserTeam]) if err != nil { - return nil, logerror(fmt.Errorf("UserTeam/UserTeamByTeamIDMember/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserTeam/UserTeamByTeamIDMember/pgx.CollectRows: %w", &XoError{Entity: "User team", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/v/extra.xo.go b/internal/repos/postgresql/gen/db/v/extra.xo.go index 60e28f6920..5d305a068f 100644 --- a/internal/repos/postgresql/gen/db/v/extra.xo.go +++ b/internal/repos/postgresql/gen/db/v/extra.xo.go @@ -2,6 +2,25 @@ package v // Code generated by xo. DO NOT EDIT. +import ( + "fmt" +) + func newPointer[T any](v T) *T { return &v } + +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. +func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. 
+func (err *XoError) Unwrap() error { + return err.Err +} diff --git a/internal/repos/postgresql/gen/db/workitem.xo.go b/internal/repos/postgresql/gen/db/workitem.xo.go index b91b922a26..65d1d23043 100644 --- a/internal/repos/postgresql/gen/db/workitem.xo.go +++ b/internal/repos/postgresql/gen/db/workitem.xo.go @@ -371,11 +371,11 @@ func (wi *WorkItem) Insert(ctx context.Context, db DB) (*WorkItem, error) { rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description, wi.WorkItemTypeID, wi.Metadata, wi.TeamID, wi.KanbanStepID, wi.Closed, wi.TargetDate, wi.DeletedAt) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -395,11 +395,11 @@ func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description, wi.WorkItemTypeID, wi.Metadata, wi.TeamID, wi.KanbanStepID, wi.Closed, wi.TargetDate, wi.DeletedAt, wi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -407,7 +407,7 @@ func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { } // Upsert upserts a WorkItem in the database. 
-// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wi *WorkItem) Upsert(ctx context.Context, db DB, params *WorkItemCreateParams) (*WorkItem, error) { var err error @@ -425,11 +425,11 @@ func (wi *WorkItem) Upsert(ctx context.Context, db DB, params *WorkItemCreatePar var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item", Err: err}) } wi, err = wi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item", Err: err}) } } } @@ -470,7 +470,7 @@ func (wi *WorkItem) Restore(ctx context.Context, db DB) (*WorkItem, error) { wi.DeletedAt = nil newwi, err := wi.Update(ctx, db) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Restore/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Restore/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return newwi, nil } @@ -598,11 +598,11 @@ func WorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID int rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -730,11 +730,148 @@ func WorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemID in rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} + +// WorkItems retrieves rows from 'public.work_items' as WorkItem records. +// +// Generated from index '[xo] base filter query'. 
+func WorkItems(ctx context.Context, db DB, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{deletedAt: " null ", joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 0 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoTwoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoTwoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoTwoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoTwoWorkItemGroupBySQL) + } + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.TimeEntries { + selectClauses = append(selectClauses, workItemTableTimeEntriesSelectSQL) + joinClauses = append(joinClauses, workItemTableTimeEntriesJoinSQL) + groupByClauses = append(groupByClauses, workItemTableTimeEntriesGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + if c.joins.WorkItemComments { + selectClauses = append(selectClauses, 
workItemTableWorkItemCommentsSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemCommentsJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemCommentsGroupBySQL) + } + + if c.joins.WorkItemTags { + selectClauses = append(selectClauses, workItemTableWorkItemTagsSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemTagsJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemTagsGroupBySQL) + } + + if c.joins.KanbanStep { + selectClauses = append(selectClauses, workItemTableKanbanStepSelectSQL) + joinClauses = append(joinClauses, workItemTableKanbanStepJoinSQL) + groupByClauses = append(groupByClauses, workItemTableKanbanStepGroupBySQL) + } + + if c.joins.Team { + selectClauses = append(selectClauses, workItemTableTeamSelectSQL) + joinClauses = append(joinClauses, workItemTableTeamJoinSQL) + groupByClauses = append(groupByClauses, workItemTableTeamGroupBySQL) + } + + if c.joins.WorkItemType { + selectClauses = append(selectClauses, workItemTableWorkItemTypeSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemTypeJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemTypeGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description, + work_items.work_item_type_id, + work_items.metadata, + work_items.team_id, + work_items.kanban_step_id, + work_items.closed, + work_items.target_date, + work_items.created_at, + work_items.updated_at, + work_items.deleted_at %s + FROM public.work_items %s + WHERE true + %s AND work_items.deleted_at is %s %s +`, selects, joins, filters, c.deletedAt, groupbys) + sqlstr += c.orderBy + sqlstr += c.limit + 
sqlstr = "/* WorkItems */\n" + sqlstr + + // run + // logf(sqlstr, ) + rows, err := db.Query(ctx, sqlstr, append([]any{}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDescription/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDescription/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -864,14 +1001,14 @@ func WorkItemsByDeletedAt_WhereDeletedAtIsNotNull(ctx context.Context, db DB, de // logf(sqlstr, deletedAt) rows, err := db.Query(ctx, sqlstr, append([]any{deletedAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDeletedAt/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDeletedAt/Query: %w", &XoError{Entity: "Work item", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDeletedAt/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDeletedAt/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -1001,11 +1138,11 @@ func WorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts ... // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } wi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } return &wi, nil @@ -1136,14 +1273,151 @@ func WorkItemsByTeamID(ctx context.Context, db DB, teamID int, opts ...WorkItemS // logf(sqlstr, teamID) rows, err := db.Query(ctx, sqlstr, append([]any{teamID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTeamID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTeamID/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTeamID/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} + +// WorkItemsByTitle retrieves rows from 'public.work_items' as WorkItem records. +// +// Generated from index 'work_items_title_description_idx1'. 
+func WorkItemsByTitle(ctx context.Context, db DB, title string, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{deletedAt: " null ", joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 1 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoTwoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoTwoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoTwoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoTwoWorkItemGroupBySQL) + } + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.TimeEntries { + selectClauses = append(selectClauses, workItemTableTimeEntriesSelectSQL) + joinClauses = append(joinClauses, workItemTableTimeEntriesJoinSQL) + groupByClauses = append(groupByClauses, workItemTableTimeEntriesGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + if c.joins.WorkItemComments { + selectClauses = 
append(selectClauses, workItemTableWorkItemCommentsSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemCommentsJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemCommentsGroupBySQL) + } + + if c.joins.WorkItemTags { + selectClauses = append(selectClauses, workItemTableWorkItemTagsSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemTagsJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemTagsGroupBySQL) + } + + if c.joins.KanbanStep { + selectClauses = append(selectClauses, workItemTableKanbanStepSelectSQL) + joinClauses = append(joinClauses, workItemTableKanbanStepJoinSQL) + groupByClauses = append(groupByClauses, workItemTableKanbanStepGroupBySQL) + } + + if c.joins.Team { + selectClauses = append(selectClauses, workItemTableTeamSelectSQL) + joinClauses = append(joinClauses, workItemTableTeamJoinSQL) + groupByClauses = append(groupByClauses, workItemTableTeamGroupBySQL) + } + + if c.joins.WorkItemType { + selectClauses = append(selectClauses, workItemTableWorkItemTypeSelectSQL) + joinClauses = append(joinClauses, workItemTableWorkItemTypeJoinSQL) + groupByClauses = append(groupByClauses, workItemTableWorkItemTypeGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description, + work_items.work_item_type_id, + work_items.metadata, + work_items.team_id, + work_items.kanban_step_id, + work_items.closed, + work_items.target_date, + work_items.created_at, + work_items.updated_at, + work_items.deleted_at %s + FROM public.work_items %s + WHERE work_items.title = $1 + %s AND work_items.deleted_at is %s %s +`, selects, joins, filters, c.deletedAt, groupbys) + 
sqlstr += c.orderBy + sqlstr += c.limit + sqlstr = "/* WorkItemsByTitle */\n" + sqlstr + + // run + // logf(sqlstr, title) + rows, err := db.Query(ctx, sqlstr, append([]any{title}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/Query: %w", &XoError{Entity: "Work item", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTeamID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/workitemassigneduser.xo.go b/internal/repos/postgresql/gen/db/workitemassigneduser.xo.go index 5b8c492acb..4713fbab11 100644 --- a/internal/repos/postgresql/gen/db/workitemassigneduser.xo.go +++ b/internal/repos/postgresql/gen/db/workitemassigneduser.xo.go @@ -198,11 +198,11 @@ func (wiau *WorkItemAssignedUser) Insert(ctx context.Context, db DB) (*WorkItemA logf(sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) rows, err := db.Query(ctx, sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -221,11 +221,11 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA rows, err := db.Query(ctx, sqlstr, 
wiau.Role, wiau.WorkItemID, wiau.AssignedUser) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -233,7 +233,7 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA } // Upsert upserts a WorkItemAssignedUser in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *WorkItemAssignedUserCreateParams) (*WorkItemAssignedUser, error) { var err error @@ -246,11 +246,11 @@ func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *Wor var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item assigned user", Err: err}) } wiau, err = wiau.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item assigned user", Err: err}) } } } @@ -344,14 +344,14 @@ func WorkItemAssignedUsersByAssignedUserWorkItemID(ctx context.Context, db DB, a // logf(sqlstr, assignedUser, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser, workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -430,11 +430,11 @@ func WorkItemAssignedUserByWorkItemIDAssignedUser(ctx context.Context, db DB, wo // logf(sqlstr, workItemID, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID, assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } wiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return &wiau, nil @@ -514,14 +514,14 @@ func WorkItemAssignedUsersByWorkItemID(ctx context.Context, db DB, workItemID in // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -600,14 +600,14 @@ func WorkItemAssignedUsersByAssignedUser(ctx context.Context, db DB, assignedUse // logf(sqlstr, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/workitemcomment.xo.go b/internal/repos/postgresql/gen/db/workitemcomment.xo.go index d0ae8222ea..3499cd0b37 100644 --- a/internal/repos/postgresql/gen/db/workitemcomment.xo.go +++ 
b/internal/repos/postgresql/gen/db/workitemcomment.xo.go @@ -181,11 +181,11 @@ func (wic *WorkItemComment) Insert(ctx context.Context, db DB) (*WorkItemComment rows, err := db.Query(ctx, sqlstr, wic.WorkItemID, wic.UserID, wic.Message) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Insert/db.Query: %w", &XoError{Entity: "Work item comment", Err: err})) } newwic, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item comment", Err: err})) } *wic = newwic @@ -205,11 +205,11 @@ func (wic *WorkItemComment) Update(ctx context.Context, db DB) (*WorkItemComment rows, err := db.Query(ctx, sqlstr, wic.WorkItemID, wic.UserID, wic.Message, wic.WorkItemCommentID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Update/db.Query: %w", &XoError{Entity: "Work item comment", Err: err})) } newwic, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item comment", Err: err})) } *wic = newwic @@ -217,7 +217,7 @@ func (wic *WorkItemComment) Update(ctx context.Context, db DB) (*WorkItemComment } // Upsert upserts a WorkItemComment in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (wic *WorkItemComment) Upsert(ctx context.Context, db DB, params *WorkItemCommentCreateParams) (*WorkItemComment, error) { var err error @@ -230,11 +230,11 @@ func (wic *WorkItemComment) Upsert(ctx context.Context, db DB, params *WorkItemC var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item comment", Err: err}) } wic, err = wic.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item comment", Err: err}) } } } @@ -329,11 +329,11 @@ func WorkItemCommentPaginatedByWorkItemCommentIDAsc(ctx context.Context, db DB, rows, err := db.Query(ctx, sqlstr, append([]any{workItemCommentID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item comment", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item comment", Err: err})) } return res, nil } @@ -413,11 +413,11 @@ func WorkItemCommentPaginatedByWorkItemCommentIDDesc(ctx context.Context, db DB, rows, err := db.Query(ctx, sqlstr, append([]any{workItemCommentID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item comment", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item comment", Err: err})) } return res, nil } @@ -499,11 +499,11 @@ func WorkItemCommentByWorkItemCommentID(ctx context.Context, db DB, workItemComm // logf(sqlstr, workItemCommentID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemCommentID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_comments/WorkItemCommentByWorkItemCommentID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_comments/WorkItemCommentByWorkItemCommentID/db.Query: %w", &XoError{Entity: "Work item comment", Err: err})) } wic, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_comments/WorkItemCommentByWorkItemCommentID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_comments/WorkItemCommentByWorkItemCommentID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item comment", Err: err})) } return &wic, nil @@ -586,14 +586,14 @@ func WorkItemCommentsByWorkItemID(ctx context.Context, db DB, workItemID int64, // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/WorkItemCommentsByWorkItemID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/WorkItemCommentsByWorkItemID/Query: %w", &XoError{Entity: "Work item comment", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemComment]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemComment/WorkItemCommentsByWorkItemID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemComment/WorkItemCommentsByWorkItemID/pgx.CollectRows: %w", &XoError{Entity: "Work item comment", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/gen/db/workitemtag.xo.go b/internal/repos/postgresql/gen/db/workitemtag.xo.go index 8f7c7e8561..8ba7ed66dd 100644 --- a/internal/repos/postgresql/gen/db/workitemtag.xo.go +++ b/internal/repos/postgresql/gen/db/workitemtag.xo.go @@ -173,11 +173,11 @@ func (wit *WorkItemTag) Insert(ctx context.Context, db DB) (*WorkItemTag, error) rows, err := db.Query(ctx, sqlstr, wit.ProjectID, wit.Name, wit.Description, wit.Color) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Insert/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } newwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item tag", Err: err})) } *wit = newwit @@ -197,11 +197,11 @@ func (wit *WorkItemTag) Update(ctx context.Context, db DB) (*WorkItemTag, error) rows, err := db.Query(ctx, sqlstr, wit.ProjectID, wit.Name, wit.Description, wit.Color, wit.WorkItemTagID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Update/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("WorkItemTag/Update/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } newwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item tag", Err: err})) } *wit = newwit @@ -209,7 +209,7 @@ func (wit *WorkItemTag) Update(ctx context.Context, db DB) (*WorkItemTag, error) } // Upsert upserts a WorkItemTag in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wit *WorkItemTag) Upsert(ctx context.Context, db DB, params *WorkItemTagCreateParams) (*WorkItemTag, error) { var err error @@ -223,11 +223,11 @@ func (wit *WorkItemTag) Upsert(ctx context.Context, db DB, params *WorkItemTagCr var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item tag", Err: err}) } wit, err = wit.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item tag", Err: err}) } } } @@ -321,11 +321,11 @@ func WorkItemTagPaginatedByWorkItemTagIDAsc(ctx context.Context, db DB, workItem rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -404,11 +404,11 @@ func WorkItemTagPaginatedByProjectIDAsc(ctx context.Context, db DB, projectID in rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -487,11 +487,11 @@ func WorkItemTagPaginatedByWorkItemTagIDDesc(ctx context.Context, db DB, workIte rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -570,11 +570,11 @@ func WorkItemTagPaginatedByProjectIDDesc(ctx context.Context, db DB, projectID i rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -655,11 +655,11 @@ func WorkItemTagByNameProjectID(ctx context.Context, db DB, name string, project // logf(sqlstr, name, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{name, projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByNameProjectID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByNameProjectID/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } wit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByNameProjectID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByNameProjectID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item tag", Err: err})) } return &wit, nil @@ -741,14 +741,14 @@ func WorkItemTagsByName(ctx context.Context, db DB, name string, opts ...WorkIte // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/Query: %w", &XoError{Entity: "Work item tag", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -829,14 +829,14 @@ func WorkItemTagsByProjectID(ctx context.Context, db DB, projectID int, opts ... // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/Query: %w", &XoError{Entity: "Work item tag", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemTag/WorkItemTagByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Work item tag", Err: err})) } return res, nil } @@ -917,11 +917,11 @@ func WorkItemTagByWorkItemTagID(ctx context.Context, db DB, workItemTagID int, o // logf(sqlstr, workItemTagID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByWorkItemTagID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByWorkItemTagID/db.Query: %w", &XoError{Entity: "Work item tag", Err: err})) } wit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByWorkItemTagID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_tags/WorkItemTagByWorkItemTagID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item tag", Err: err})) } return &wit, nil diff --git a/internal/repos/postgresql/gen/db/workitemtype.xo.go b/internal/repos/postgresql/gen/db/workitemtype.xo.go index 24f3121c1a..5a92d235d9 100644 --- a/internal/repos/postgresql/gen/db/workitemtype.xo.go +++ b/internal/repos/postgresql/gen/db/workitemtype.xo.go @@ -148,11 +148,11 @@ func (wit *WorkItemType) Insert(ctx context.Context, db DB) (*WorkItemType, erro rows, err := db.Query(ctx, sqlstr, wit.ProjectID, wit.Name, wit.Description, wit.Color) if err != nil { - return nil, 
logerror(fmt.Errorf("WorkItemType/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Insert/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } newwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item type", Err: err})) } *wit = newwit @@ -172,11 +172,11 @@ func (wit *WorkItemType) Update(ctx context.Context, db DB) (*WorkItemType, erro rows, err := db.Query(ctx, sqlstr, wit.ProjectID, wit.Name, wit.Description, wit.Color, wit.WorkItemTypeID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Update/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } newwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item type", Err: err})) } *wit = newwit @@ -184,7 +184,7 @@ func (wit *WorkItemType) Update(ctx context.Context, db DB) (*WorkItemType, erro } // Upsert upserts a WorkItemType in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (wit *WorkItemType) Upsert(ctx context.Context, db DB, params *WorkItemTypeCreateParams) (*WorkItemType, error) { var err error @@ -198,11 +198,11 @@ func (wit *WorkItemType) Upsert(ctx context.Context, db DB, params *WorkItemType var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item type", Err: err}) } wit, err = wit.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item type", Err: err}) } } } @@ -290,11 +290,11 @@ func WorkItemTypePaginatedByWorkItemTypeIDAsc(ctx context.Context, db DB, workIt rows, err := db.Query(ctx, sqlstr, append([]any{workItemTypeID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -367,11 +367,11 @@ func WorkItemTypePaginatedByProjectIDAsc(ctx context.Context, db DB, projectID i rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -444,11 +444,11 @@ func WorkItemTypePaginatedByWorkItemTypeIDDesc(ctx context.Context, db DB, workI rows, err := db.Query(ctx, sqlstr, append([]any{workItemTypeID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -521,11 +521,11 @@ func WorkItemTypePaginatedByProjectIDDesc(ctx context.Context, db DB, projectID rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -600,11 +600,11 @@ func WorkItemTypeByNameProjectID(ctx context.Context, db DB, name string, projec // logf(sqlstr, name, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{name, projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByNameProjectID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByNameProjectID/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } wit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByNameProjectID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByNameProjectID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item type", Err: err})) } return &wit, nil @@ -680,14 +680,14 @@ func WorkItemTypesByName(ctx context.Context, db DB, name string, opts ...WorkIt // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/Query: %w", &XoError{Entity: "Work item type", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -762,14 +762,14 @@ func WorkItemTypesByProjectID(ctx context.Context, db DB, projectID int, opts .. // logf(sqlstr, projectID) rows, err := db.Query(ctx, sqlstr, append([]any{projectID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/Query: %w", &XoError{Entity: "Work item type", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemType/WorkItemTypeByNameProjectID/pgx.CollectRows: %w", &XoError{Entity: "Work item type", Err: err})) } return res, nil } @@ -844,11 +844,11 @@ func WorkItemTypeByWorkItemTypeID(ctx context.Context, db DB, workItemTypeID int // logf(sqlstr, workItemTypeID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemTypeID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByWorkItemTypeID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByWorkItemTypeID/db.Query: %w", &XoError{Entity: "Work item type", Err: err})) } wit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemType]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByWorkItemTypeID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_types/WorkItemTypeByWorkItemTypeID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item type", Err: err})) } return &wit, nil diff --git a/internal/repos/postgresql/gen/db/workitemworkitemtag.xo.go b/internal/repos/postgresql/gen/db/workitemworkitemtag.xo.go index 8998a0260f..97f402fa8b 100644 --- a/internal/repos/postgresql/gen/db/workitemworkitemtag.xo.go +++ b/internal/repos/postgresql/gen/db/workitemworkitemtag.xo.go @@ -167,11 +167,11 @@ func (wiwit *WorkItemWorkItemTag) Insert(ctx context.Context, db DB) (*WorkItemW logf(sqlstr, wiwit.WorkItemTagID, wiwit.WorkItemID) rows, err := db.Query(ctx, sqlstr, wiwit.WorkItemTagID, wiwit.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Insert/db.Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } newwiwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item work item tag", Err: err})) } *wiwit = newwiwit @@ -264,11 +264,11 @@ func WorkItemWorkItemTagPaginatedByWorkItemTagIDWorkItemIDAsc(ctx context.Contex rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID, workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return res, nil } @@ -345,11 +345,11 @@ func WorkItemWorkItemTagPaginatedByWorkItemTagIDWorkItemIDDesc(ctx context.Conte rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID, workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return res, nil } @@ -427,11 +427,11 @@ func WorkItemWorkItemTagByWorkItemIDWorkItemTagID(ctx context.Context, db DB, wo // logf(sqlstr, workItemID, workItemTagID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID, workItemTagID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("work_item_work_item_tag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_work_item_tag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/db.Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } wiwit, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_work_item_tag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_work_item_tag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return &wiwit, nil @@ -510,14 +510,14 @@ func WorkItemWorkItemTagsByWorkItemID(ctx context.Context, db DB, workItemID int // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectRows: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return res, nil } @@ -595,14 +595,14 @@ func WorkItemWorkItemTagsByWorkItemTagID(ctx context.Context, db DB, workItemTag // logf(sqlstr, workItemTagID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemIDWorkItemTagID/pgx.CollectRows: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return res, nil } @@ -680,14 +680,14 @@ func WorkItemWorkItemTagsByWorkItemTagIDWorkItemID(ctx context.Context, db DB, w // logf(sqlstr, workItemTagID, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemTagID, workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemTagIDWorkItemID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemTagIDWorkItemID/Query: %w", &XoError{Entity: "Work item work item tag", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemWorkItemTag]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemTagIDWorkItemID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemWorkItemTag/WorkItemWorkItemTagByWorkItemTagIDWorkItemID/pgx.CollectRows: %w", &XoError{Entity: "Work item work item tag", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/project.go b/internal/repos/postgresql/project.go index 9164bb1b53..7f9de703ae 100644 --- a/internal/repos/postgresql/project.go +++ b/internal/repos/postgresql/project.go @@ -24,7 +24,12 @@ func 
NewProject() *Project { var _ repos.Project = (*Project)(nil) func (u *Project) ByID(ctx context.Context, d db.DBTX, id int) (*db.Project, error) { - return db.ProjectByProjectID(ctx, d, id) + project, err := db.ProjectByProjectID(ctx, d, id) + if err != nil { + return nil, fmt.Errorf("could not get project: %w", parseErrorDetail(err)) + } + + return project, nil } func (u *Project) ByName(ctx context.Context, d db.DBTX, name models.Project) (*db.Project, error) { diff --git a/internal/repos/postgresql/testing_setup_test.go b/internal/repos/postgresql/testing_setup_test.go index 4e81ec3c1e..804f7b3cea 100644 --- a/internal/repos/postgresql/testing_setup_test.go +++ b/internal/repos/postgresql/testing_setup_test.go @@ -11,7 +11,7 @@ import ( ) const ( - errNoRows = "no rows in result set" + errNoRows = "not found" errViolatesCheckConstraint = "violates check constraint" ) diff --git a/internal/repos/postgresql/work_item_tag.go b/internal/repos/postgresql/work_item_tag.go index 3c90a29a58..4b9b8f0cc8 100644 --- a/internal/repos/postgresql/work_item_tag.go +++ b/internal/repos/postgresql/work_item_tag.go @@ -25,7 +25,7 @@ var _ repos.WorkItemTag = (*WorkItemTag)(nil) func (wit *WorkItemTag) Create(ctx context.Context, d db.DBTX, params *db.WorkItemTagCreateParams) (*db.WorkItemTag, error) { workItemTag, err := db.CreateWorkItemTag(ctx, d, params) if err != nil { - return nil, fmt.Errorf("could not create time entry: %w", parseErrorDetail(err)) + return nil, fmt.Errorf("could not create work item tag: %w", parseErrorDetail(err)) } return workItemTag, nil diff --git a/internal/repos/postgresql/work_item_type.go b/internal/repos/postgresql/work_item_type.go index b11c8d8f9f..7245e7d9a2 100644 --- a/internal/repos/postgresql/work_item_type.go +++ b/internal/repos/postgresql/work_item_type.go @@ -25,7 +25,7 @@ var _ repos.WorkItemType = (*WorkItemType)(nil) func (wit *WorkItemType) ByName(ctx context.Context, d db.DBTX, name string, projectID int) (*db.WorkItemType, error) 
{ workItemType, err := db.WorkItemTypeByNameProjectID(ctx, d, name, projectID) if err != nil { - return nil, fmt.Errorf("could not get workItemType: %w", parseErrorDetail(err)) + return nil, fmt.Errorf("could not get work item type: %w", parseErrorDetail(err)) } return workItemType, nil @@ -34,7 +34,7 @@ func (wit *WorkItemType) ByName(ctx context.Context, d db.DBTX, name string, pro func (wit *WorkItemType) ByID(ctx context.Context, d db.DBTX, id int) (*db.WorkItemType, error) { workItemType, err := db.WorkItemTypeByWorkItemTypeID(ctx, d, id) if err != nil { - return nil, fmt.Errorf("could not get workItemType: %w", parseErrorDetail(err)) + return nil, fmt.Errorf("could not get work item type: %w", parseErrorDetail(err)) } return workItemType, nil diff --git a/internal/repos/postgresql/xo-templates/extra.xo.go.tpl b/internal/repos/postgresql/xo-templates/extra.xo.go.tpl index ec072ccf52..794e9adefb 100644 --- a/internal/repos/postgresql/xo-templates/extra.xo.go.tpl +++ b/internal/repos/postgresql/xo-templates/extra.xo.go.tpl @@ -4,4 +4,21 @@ func newPointer[T any](v T) *T { return &v } +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. +func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. 
+func (err *XoError) Unwrap() error { + return err.Err +} + + {{- end }} + diff --git a/internal/repos/postgresql/xo-templates/go.go b/internal/repos/postgresql/xo-templates/go.go index 2932d7d711..7f3a94098a 100644 --- a/internal/repos/postgresql/xo-templates/go.go +++ b/internal/repos/postgresql/xo-templates/go.go @@ -28,6 +28,9 @@ import ( "mvdan.cc/gofumpt/format" ) +// TODO configurable +var excludedIndexTypes = []string{"gin_trgm_ops"} + type cardinality string const ( @@ -93,6 +96,16 @@ func IsUpper(s string) bool { return true } +func removeEmptyStrings(arr []string) []string { + result := make([]string, 0) + for _, str := range arr { + if str != "" { + result = append(result, str) + } + } + return result +} + func IsLower(s string) bool { for _, r := range s { if !unicode.IsLower(r) && unicode.IsLetter(r) { @@ -732,6 +745,19 @@ func emitSchema(ctx context.Context, schema xo.Schema, emit func(xo.Template)) e return err } + newFields, base := removeExcludedIndexTypes(index, excludedIndexTypes) + if newFields != nil { + index.Fields = newFields + } + if base { + index.SQLName = "[xo] base filter query" + } + + idxIdentifier := extractIndexIdentifier(index) + if contains(emittedIndexes, idxIdentifier) { + continue + } + // emit normal index emit(xo.Template{ Dest: strings.ToLower(table.GoName) + ext, @@ -754,6 +780,14 @@ func emitSchema(ctx context.Context, schema xo.Schema, emit func(xo.Template)) e return err } + newFields, base := removeExcludedIndexTypes(index, excludedIndexTypes) + if newFields != nil { + index.Fields = newFields + } + if base { + index.SQLName = "[xo] base filter query" + } + if index.IsUnique && len(index.Fields) > 1 { // patch each index and constraints and emit queries with a subset of index fields index.IsUnique = false @@ -806,10 +840,75 @@ func emitSchema(ctx context.Context, schema xo.Schema, emit func(xo.Template)) e return nil } +func removeExcludedIndexTypes(index Index, excludedIndexTypes []string) ([]Field, bool) { + base := 
false + excludedColumnNames := extractExcludedColumnNames(index.Definition, excludedIndexTypes) + if len(excludedColumnNames) == len(index.Fields) { + fmt.Println("skipping index where all fields are excluded index types: ", index.Definition) + return []Field{}, true + } + if len(excludedColumnNames) > 0 { + fmt.Println("patching index containing excluded index types: ", index.Definition) + return patchIndexFields(index.Fields, excludedColumnNames), false + } + + return nil, base +} + +func extractExcludedColumnNames(definition string, excludedIndexTypes []string) []string { + var excludedColumnNames []string + + re := regexp.MustCompile(`INDEX .*? USING .*?\((?P<columns>[\w\s.,]+)`) + match := re.FindStringSubmatch(definition) + subexpNames := re.SubexpNames() + + for i, name := range subexpNames { + if name == "columns" && len(match) > i { + idxColumns := strings.Split(match[i], ",") + for _, idxColumn := range idxColumns { + column, idxTypePath, _ := strings.Cut(strings.TrimSpace(idxColumn), " ") + pp := strings.Split(idxTypePath, ".") + indexTypeName := strings.TrimSpace(pp[len(pp)-1]) + if contains(excludedIndexTypes, indexTypeName) { + excludedColumnNames = append(excludedColumnNames, column) + } + } + } + } + + return excludedColumnNames +} + +func patchIndexFields(fields []Field, excludedColumnNames []string) []Field { + var patchedFields []Field + + fmt.Printf("excludedColumnNames: %v\n", excludedColumnNames) + + for _, field := range fields { + includeField := true + for _, columnName := range excludedColumnNames { + if field.SQLName == columnName { + includeField = false + break + } + } + if includeField { + patchedFields = append(patchedFields, field) + } + } + + return patchedFields +} + // extractIndexIdentifier generates a unique identifier for patched index generation. 
func extractIndexIdentifier(i Index) string { + excludedColumnNames := extractExcludedColumnNames(i.Definition, excludedIndexTypes) + var fields []string for _, field := range i.Fields { + if contains(excludedColumnNames, field.SQLName) { + continue + } fields = append(fields, field.GoName) } @@ -1431,19 +1530,25 @@ func (f *Funcs) camel(names ...string) string { return snaker.ForceLowerCamelIdentifier(strings.Join(names, "_")) } +func (f *Funcs) sentence_case(names ...string) string { + c := strings.Title(snaker.CamelToSnake(strings.Join(names, "_"))) + return inflector.Singularize(strings.ReplaceAll(c, "_", " ")) +} + // FuncMap returns the func map. func (f *Funcs) FuncMap() template.FuncMap { return template.FuncMap{ // general - "camel": f.camel, - "lowerFirst": f.lower_first, - "first": f.firstfn, - "driver": f.driverfn, - "schema": f.schemafn, - "pkg": f.pkgfn, - "tags": f.tagsfn, - "imports": f.importsfn, - "inject": f.injectfn, + "sentence_case": f.sentence_case, + "camel": f.camel, + "lowerFirst": f.lower_first, + "first": f.firstfn, + "driver": f.driverfn, + "schema": f.schemafn, + "pkg": f.pkgfn, + "tags": f.tagsfn, + "imports": f.importsfn, + "inject": f.injectfn, // context "context": f.contextfn, "context_both": f.context_both, @@ -1726,7 +1831,12 @@ func (f *Funcs) func_name_context(v any, suffix string) string { // need custom Func to handle additional index creation instead of Func field // https://github.com/danicc097/xo/blob/main/cmd/schema.go#L629 which originally sets i.Func - funcName := name + "By" + strings.Join(fields, "") + suffix + cond := "" + if len(fields) > 0 { + cond = "By" + strings.Join(fields, "") + } + + funcName := name + cond + suffix return funcName } @@ -1784,7 +1894,13 @@ func (f *Funcs) funcfn(name string, context bool, v any, columns []Field) string return fmt.Sprintf("[[ UNSUPPORTED TYPE 3: %T ]]", v) } returns = append(returns, "error") - return fmt.Sprintf("func %s(%s) (%s)", name, strings.Join(params, ", "), 
strings.Join(returns, ", ")) + + p := "" + params = removeEmptyStrings(params) + if len(params) > 0 { + p = strings.Join(params, ", ") + } + return fmt.Sprintf("func %s(%s) (%s)", name, p, strings.Join(returns, ", ")) } // initial_opts returns base conf for select queries. @@ -2161,7 +2277,11 @@ func (f *Funcs) recv(name string, context bool, t Table, v any) string { r = append(r, "*"+t.GoName) } r = append(r, "error") - return fmt.Sprintf("func (%s *%s) %s(%s) (%s)", short, t.GoName, name, strings.Join(p, ", "), strings.Join(r, ", ")) + params := "" + if len(p) > 0 { + params = strings.Join(p, ", ") + } + return fmt.Sprintf("func (%s *%s) %s(%s) (%s)", short, t.GoName, name, params, strings.Join(r, ", ")) } func fields_to_goname(fields []Field, sep string) string { @@ -2365,7 +2485,11 @@ func (f *Funcs) logf(v any, ignore ...any) string { default: return fmt.Sprintf("[[ UNSUPPORTED TYPE 12: %T ]]", v) } - return fmt.Sprintf("logf(%s)", strings.Join(p, ", ")) + x := "" + if len(p) > 0 { + x = strings.Join(p, ", ") + } + return fmt.Sprintf("logf(%s)", x) } func (f *Funcs) logf_update(v any) string { @@ -2417,16 +2541,27 @@ func (f *Funcs) namesfn(all bool, prefix string, z ...any) string { case Index: names = append(names, f.params(x.Fields, false)) - return "ctx, sqlstr, append([]any{" + strings.Join(names[2:], ", ") + "}, filterParams...)..." + nn := "" + if len(names[2:]) > 0 { + nn = strings.Join(names[2:], ", ") + } + return "ctx, sqlstr, append([]any{" + nn + "}, filterParams...)..." case CursorPagination: names = append(names, f.params(x.Fields, false)) - - return "ctx, sqlstr, append([]any{" + strings.Join(names[2:], ", ") + "}, filterParams...)..." + nn := "" + if len(names[2:]) > 0 { + nn = strings.Join(names[2:], ", ") + } + return "ctx, sqlstr, append([]any{" + nn + "}, filterParams...)..." 
default: names = append(names, fmt.Sprintf("/* UNSUPPORTED TYPE 14 (%d): %T */", i, v)) } } - return strings.Join(names, ", ") + x := "" + if len(names) > 0 { + x = strings.Join(names, ", ") + } + return x } func (f *Funcs) initialize_constraints(t Table, constraints []Constraint) bool { @@ -2673,11 +2808,16 @@ func (f *Funcs) sqlstr_paginated(v any, tables Tables, columns []Field, order st n++ } + ff := "true" + if len(filters) > 0 { + ff = strings.Join(filters, " AND ") + } + lines := []string{ "SELECT ", strings.Join(fields, ",\n\t") + " %s ", " FROM " + f.schemafn(x.SQLName) + " %s ", - " WHERE " + strings.Join(filters, " AND "), + " WHERE " + ff, " %s ", } @@ -3028,11 +3168,16 @@ func (f *Funcs) sqlstr_index(v any, tables Tables) string { filters = append(filters, after) } + ff := "true" + if len(filters) > 0 { + ff = strings.Join(filters, " AND ") + } + lines := []string{ "SELECT ", strings.Join(fields, ",\n\t") + " %s ", " FROM " + f.schemafn(x.Table.SQLName) + " %s ", - " WHERE " + strings.Join(filters, " AND "), + " WHERE " + ff, " %s ", } @@ -3359,7 +3504,12 @@ func (f *Funcs) convertTypes(fkey ForeignKey) string { } p = append(p, expr) } - return strings.Join(p, ", ") + + x := "" + if len(p) > 0 { + x = strings.Join(p, ", ") + } + return x } // params converts a list of fields into their named Go parameters, skipping @@ -3378,6 +3528,9 @@ func (f *Funcs) params(fields []Field, addType bool) string { for _, field := range fields { vals = append(vals, f.param(field, addType)) } + if len(vals) == 0 { + return "" + } return strings.Join(vals, ", ") } @@ -4540,7 +4693,7 @@ func addLegacyFuncs(ctx context.Context, funcs template.FuncMap) { // } // return expr // } - // getstartcount returns a starting count for numbering columsn in queries + // getstartcount returns a starting count for numbering columns in queries funcs["getstartcount"] = func(fields []*Field, pkFields []*Field) int { return len(fields) - len(pkFields) } diff --git 
a/internal/repos/postgresql/xo-templates/schema.xo.go.tpl b/internal/repos/postgresql/xo-templates/schema.xo.go.tpl index f9361ac867..efb58dea4d 100644 --- a/internal/repos/postgresql/xo-templates/schema.xo.go.tpl +++ b/internal/repos/postgresql/xo-templates/schema.xo.go.tpl @@ -92,11 +92,12 @@ func All{{ $e.GoName }}Values() []{{ $e.GoName }} { {{ define "index" }} {{- $i := .Data.Index -}} +{{- $t := $i.Table -}} {{- $tables := .Data.Tables -}} {{- $constraints := .Data.Constraints -}} {{/* TODO: maybe can be init beforehand */}} -{{- $_ := initialize_constraints $i.Table $constraints }} -// {{ func_name_context $i "" }} retrieves a row from '{{ schema $i.Table.SQLName }}' as a {{ $i.Table.GoName }}. +{{- $_ := initialize_constraints $t $constraints }} +// {{ func_name_context $i "" }} retrieves a row from '{{ schema $t.SQLName }}' as a {{ $t.GoName }}. // // Generated from index '{{ $i.SQLName }}'. {{ func_context $i "" "" }} { @@ -140,28 +141,28 @@ func All{{ $e.GoName }}Values() []{{ $e.GoName }} { {{- if $i.IsUnique }} rows, err := {{ db "Query" $i }} if err != nil { - return nil, logerror(fmt.Errorf("{{ $i.Table.SQLName }}/{{ $i.Func }}/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.SQLName }}/{{ $i.Func }}/db.Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } - {{ short $i.Table }}, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[{{$i.Table.GoName}}]) + {{ short $t }}, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $i.Table.SQLName }}/{{ $i.Func }}/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.SQLName }}/{{ $i.Func }}/pgx.CollectOneRow: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } - {{- if $i.Table.PrimaryKeys }} + {{- if $t.PrimaryKeys }} {{ end }} - return &{{ short $i.Table }}, nil + return &{{ short $t }}, nil {{- else }} rows, err := {{ db "Query" $i }} if err != nil { - 
return nil, logerror(fmt.Errorf("{{ $i.Table.GoName }}/{{ $i.Func }}/Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/{{ $i.Func }}/Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } defer rows.Close() // process {{/* might need to use non pointer [] in return if we get a NumField of non-struct type*/}} - res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[{{$i.Table.GoName}}]) + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $i.Table.GoName }}/{{ $i.Func }}/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/{{ $i.Func }}/pgx.CollectRows: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } return res, nil {{- end }} @@ -287,11 +288,11 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up {{ logf $t }} rows, err := {{ db_prefix "Query" false false $t }} if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/db.Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } new{{ short $t }}, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } {{- else -}} // insert (primary key generated and returned by database) @@ -301,11 +302,11 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up rows, err := {{ db_prefix "Query" false false $t }} if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/db.Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", 
Err: err })) } new{{ short $t }}, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } {{ end }} *{{ short $t }} = new{{ short $t }} @@ -326,11 +327,11 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up rows, err := {{ db_update "Query" $t }} if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Update/db.Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } new{{ short $t }}, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Update/pgx.CollectOneRow: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } *{{ short $t }} = new{{ short $t }} @@ -339,7 +340,7 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up // {{ func_name_context "Upsert" "" }} upserts a {{ $t.GoName }} in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
{{ recv_context $t "Upsert" "" }} { var err error @@ -352,11 +353,11 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err }) } {{ short $t }}, err = {{ short $t }}.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err }) } } } @@ -417,7 +418,7 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up {{ short $t }}.DeletedAt = nil new{{ short $t }}, err:= {{ short $t }}.Update(ctx,db) if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Restore/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Restore/pgx.CollectRows: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } return new{{ short $t }}, nil } @@ -468,11 +469,11 @@ func ({{ short $t }} *{{ $t.GoName }}) SetUpdateParams(params *{{ $t.GoName }}Up rows, err := {{ db_paginated "Query" $t $cursor_fields }} if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Paginated/{{ $order }}/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Paginated/{{ $order }}/db.Query: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[{{$t.GoName}}]) if err != nil { - return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Paginated/{{ $order }}/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("{{ $t.GoName }}/Paginated/{{ $order }}/pgx.CollectRows: %w", &XoError{Entity: "{{ sentence_case $t.SQLName }}", Err: err })) } return res, nil } diff --git 
a/internal/repos/postgresql/xo-templates/tests/got/book.xo.go b/internal/repos/postgresql/xo-templates/tests/got/book.xo.go index d26dbd8c2b..a48c4fd85b 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/book.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/book.xo.go @@ -223,11 +223,11 @@ func (b *Book) Insert(ctx context.Context, db DB) (*Book, error) { rows, err := db.Query(ctx, sqlstr, b.Name) if err != nil { - return nil, logerror(fmt.Errorf("Book/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Insert/db.Query: %w", &XoError{Entity: "Book", Err: err})) } newb, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Book/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } *b = newb @@ -247,11 +247,11 @@ func (b *Book) Update(ctx context.Context, db DB) (*Book, error) { rows, err := db.Query(ctx, sqlstr, b.Name, b.BookID) if err != nil { - return nil, logerror(fmt.Errorf("Book/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Update/db.Query: %w", &XoError{Entity: "Book", Err: err})) } newb, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Book/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } *b = newb @@ -259,7 +259,7 @@ func (b *Book) Update(ctx context.Context, db DB) (*Book, error) { } // Upsert upserts a Book in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (b *Book) Upsert(ctx context.Context, db DB, params *BookCreateParams) (*Book, error) { var err error @@ -270,11 +270,11 @@ func (b *Book) Upsert(ctx context.Context, db DB, params *BookCreateParams) (*Bo var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book", Err: err}) } b, err = b.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book", Err: err}) } } } @@ -377,11 +377,11 @@ func BookPaginatedByBookIDAsc(ctx context.Context, db DB, bookID int, opts ...Bo rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book", Err: err})) } return res, nil } @@ -469,11 +469,11 @@ func BookPaginatedByBookIDDesc(ctx context.Context, db DB, bookID int, opts ...B rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book", Err: err})) } return res, nil } @@ -563,11 +563,11 @@ func BookByBookID(ctx context.Context, db DB, bookID int, opts ...BookSelectConf // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("books/BookByBookID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("books/BookByBookID/db.Query: %w", &XoError{Entity: "Book", Err: err})) } b, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("books/BookByBookID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("books/BookByBookID/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } return &b, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/bookauthor.xo.go b/internal/repos/postgresql/xo-templates/tests/got/bookauthor.xo.go index 7a7adaef55..896e310990 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/bookauthor.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/bookauthor.xo.go @@ -193,11 +193,11 @@ func (ba *BookAuthor) Insert(ctx context.Context, db DB) (*BookAuthor, error) { logf(sqlstr, ba.BookID, ba.AuthorID, ba.Pseudonym) rows, err := db.Query(ctx, sqlstr, ba.BookID, ba.AuthorID, ba.Pseudonym) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Insert/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } newba, err := pgx.CollectOneRow(rows, 
pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } *ba = newba @@ -216,11 +216,11 @@ func (ba *BookAuthor) Update(ctx context.Context, db DB) (*BookAuthor, error) { rows, err := db.Query(ctx, sqlstr, ba.Pseudonym, ba.BookID, ba.AuthorID) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Update/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } newba, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } *ba = newba @@ -228,7 +228,7 @@ func (ba *BookAuthor) Update(ctx context.Context, db DB) (*BookAuthor, error) { } // Upsert upserts a BookAuthor in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (ba *BookAuthor) Upsert(ctx context.Context, db DB, params *BookAuthorCreateParams) (*BookAuthor, error) { var err error @@ -241,11 +241,11 @@ func (ba *BookAuthor) Upsert(ctx context.Context, db DB, params *BookAuthorCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book author", Err: err}) } ba, err = ba.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book author", Err: err}) } } } @@ -339,11 +339,11 @@ func BookAuthorByBookIDAuthorID(ctx context.Context, db DB, bookID int, authorID // logf(sqlstr, bookID, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, authorID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } ba, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } return &ba, nil @@ -423,14 +423,14 @@ func BookAuthorsByBookID(ctx context.Context, db DB, bookID int, opts ...BookAut // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", &XoError{Entity: "Book author", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book author", Err: err})) } return res, nil } @@ -509,14 +509,14 @@ func BookAuthorsByAuthorID(ctx context.Context, db DB, authorID uuid.UUID, opts // logf(sqlstr, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{authorID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", &XoError{Entity: "Book author", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book author", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/got/bookauthorssurrogatekey.xo.go b/internal/repos/postgresql/xo-templates/tests/got/bookauthorssurrogatekey.xo.go index 4eb644ad31..99e6cce72d 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/bookauthorssurrogatekey.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/bookauthorssurrogatekey.xo.go @@ -194,11 +194,11 @@ func (bask *BookAuthorsSurrogateKey) Insert(ctx context.Context, db DB) (*BookAu rows, err := db.Query(ctx, sqlstr, bask.BookID, bask.AuthorID, 
bask.Pseudonym) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } newbask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } *bask = newbask @@ -218,11 +218,11 @@ func (bask *BookAuthorsSurrogateKey) Update(ctx context.Context, db DB) (*BookAu rows, err := db.Query(ctx, sqlstr, bask.BookID, bask.AuthorID, bask.Pseudonym, bask.BookAuthorsSurrogateKeyID) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } newbask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } *bask = newbask @@ -230,7 +230,7 @@ func (bask *BookAuthorsSurrogateKey) Update(ctx context.Context, db DB) (*BookAu } // Upsert upserts a BookAuthorsSurrogateKey in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (bask *BookAuthorsSurrogateKey) Upsert(ctx context.Context, db DB, params *BookAuthorsSurrogateKeyCreateParams) (*BookAuthorsSurrogateKey, error) { var err error @@ -243,11 +243,11 @@ func (bask *BookAuthorsSurrogateKey) Upsert(ctx context.Context, db DB, params * var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book authors surrogate key", Err: err}) } bask, err = bask.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book authors surrogate key", Err: err}) } } } @@ -340,11 +340,11 @@ func BookAuthorsSurrogateKeyPaginatedByBookAuthorsSurrogateKeyIDAsc(ctx context. rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -422,11 +422,11 @@ func BookAuthorsSurrogateKeyPaginatedByBookAuthorsSurrogateKeyIDDesc(ctx context rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -506,11 +506,11 @@ func BookAuthorsSurrogateKeyByBookIDAuthorID(ctx context.Context, db DB, bookID // logf(sqlstr, bookID, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, authorID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } bask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return &bask, nil @@ -591,14 +591,14 @@ func BookAuthorsSurrogateKeysByBookID(ctx context.Context, db DB, bookID int, op // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -678,14 +678,14 @@ func BookAuthorsSurrogateKeysByAuthorID(ctx context.Context, db DB, authorID uui // logf(sqlstr, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{authorID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -765,11 +765,11 @@ func BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID(ctx context.Context, db // logf(sqlstr, bookAuthorsSurrogateKeyID) rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } bask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return &bask, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/bookreview.xo.go b/internal/repos/postgresql/xo-templates/tests/got/bookreview.xo.go index 68b50ce602..e8e72ed977 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/bookreview.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/bookreview.xo.go @@ -145,11 +145,11 @@ func (br *BookReview) Insert(ctx context.Context, db DB) (*BookReview, error) { rows, err := db.Query(ctx, sqlstr, br.BookID, br.Reviewer) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Insert/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } newbr, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } *br = newbr @@ -169,11 +169,11 @@ func (br *BookReview) Update(ctx context.Context, db DB) (*BookReview, error) { rows, err := db.Query(ctx, sqlstr, br.BookID, br.Reviewer, br.BookReviewID) if err != nil { - 
return nil, logerror(fmt.Errorf("BookReview/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Update/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } newbr, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } *br = newbr @@ -181,7 +181,7 @@ func (br *BookReview) Update(ctx context.Context, db DB) (*BookReview, error) { } // Upsert upserts a BookReview in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (br *BookReview) Upsert(ctx context.Context, db DB, params *BookReviewCreateParams) (*BookReview, error) { var err error @@ -193,11 +193,11 @@ func (br *BookReview) Upsert(ctx context.Context, db DB, params *BookReviewCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book review", Err: err}) } br, err = br.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book review", Err: err}) } } } @@ -289,11 +289,11 @@ func BookReviewPaginatedByBookReviewIDAsc(ctx context.Context, db DB, bookReview rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -370,11 +370,11 @@ func BookReviewPaginatedByBookIDAsc(ctx context.Context, db DB, bookID int, opts rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -451,11 +451,11 @@ func BookReviewPaginatedByBookReviewIDDesc(ctx context.Context, db DB, bookRevie rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -532,11 +532,11 @@ func BookReviewPaginatedByBookIDDesc(ctx context.Context, db DB, bookID int, opt rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -615,11 +615,11 @@ func BookReviewByBookReviewID(ctx context.Context, db DB, bookReviewID int, opts // logf(sqlstr, bookReviewID) rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } br, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } return &br, nil @@ -699,11 +699,11 @@ func BookReviewByReviewerBookID(ctx context.Context, db DB, reviewer uuid.UUID, // logf(sqlstr, reviewer, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{reviewer, bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } br, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } return &br, nil @@ -783,14 +783,14 @@ func BookReviewsByReviewer(ctx context.Context, db DB, reviewer uuid.UUID, opts // logf(sqlstr, reviewer) rows, err := db.Query(ctx, sqlstr, append([]any{reviewer}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", &XoError{Entity: "Book review", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -869,14 +869,14 @@ func BookReviewsByBookID(ctx context.Context, db DB, bookID int, opts ...BookRev // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", &XoError{Entity: "Book review", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/got/bookseller.xo.go b/internal/repos/postgresql/xo-templates/tests/got/bookseller.xo.go index 453d4179ca..06a420daf7 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/bookseller.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/bookseller.xo.go @@ -165,11 +165,11 @@ func (bs *BookSeller) Insert(ctx context.Context, db DB) (*BookSeller, error) { logf(sqlstr, bs.BookID, bs.Seller) rows, err := db.Query(ctx, sqlstr, bs.BookID, bs.Seller) if err != nil { - return nil, 
logerror(fmt.Errorf("BookSeller/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/Insert/db.Query: %w", &XoError{Entity: "Book seller", Err: err})) } newbs, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book seller", Err: err})) } *bs = newbs @@ -263,101 +263,18 @@ func BookSellersByBookIDSeller(ctx context.Context, db DB, bookID int, seller uu // logf(sqlstr, bookID, seller) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, seller}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } -// BookSellerByBookIDSeller retrieves a row from 'xo_tests.book_sellers' as a BookSeller. -// -// Generated from index 'book_sellers_pkey'. 
-func BookSellerByBookIDSeller(ctx context.Context, db DB, bookID int, seller uuid.UUID, opts ...BookSellerSelectConfigOption) (*BookSeller, error) { - c := &BookSellerSelectConfig{joins: BookSellerJoins{}, filters: make(map[string][]any)} - - for _, o := range opts { - o(c) - } - - paramStart := 2 - nth := func() string { - paramStart++ - return strconv.Itoa(paramStart) - } - - var filterClauses []string - var filterParams []any - for filterTmpl, params := range c.filters { - filter := filterTmpl - for strings.Contains(filter, "$i") { - filter = strings.Replace(filter, "$i", "$"+nth(), 1) - } - filterClauses = append(filterClauses, filter) - filterParams = append(filterParams, params...) - } - - filters := "" - if len(filterClauses) > 0 { - filters = " AND " + strings.Join(filterClauses, " AND ") + " " - } - - var selectClauses []string - var joinClauses []string - var groupByClauses []string - - if c.joins.Sellers { - selectClauses = append(selectClauses, bookSellerTableSellersSelectSQL) - joinClauses = append(joinClauses, bookSellerTableSellersJoinSQL) - groupByClauses = append(groupByClauses, bookSellerTableSellersGroupBySQL) - } - - if c.joins.BooksSeller { - selectClauses = append(selectClauses, bookSellerTableBooksSellerSelectSQL) - joinClauses = append(joinClauses, bookSellerTableBooksSellerJoinSQL) - groupByClauses = append(groupByClauses, bookSellerTableBooksSellerGroupBySQL) - } - - selects := "" - if len(selectClauses) > 0 { - selects = ", " + strings.Join(selectClauses, " ,\n ") + " " - } - joins := strings.Join(joinClauses, " \n ") + " " - groupbys := "" - if len(groupByClauses) > 0 { - groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " - } - - sqlstr := fmt.Sprintf(`SELECT - book_sellers.book_id, - book_sellers.seller %s - FROM xo_tests.book_sellers %s - WHERE book_sellers.book_id = $1 AND book_sellers.seller = $2 - %s %s -`, selects, joins, filters, groupbys) - sqlstr += c.orderBy - sqlstr += c.limit - sqlstr = "/* 
BookSellerByBookIDSeller */\n" + sqlstr - - // run - // logf(sqlstr, bookID, seller) - rows, err := db.Query(ctx, sqlstr, append([]any{bookID, seller}, filterParams...)...) - if err != nil { - return nil, logerror(fmt.Errorf("book_sellers/BookSellerByBookIDSeller/db.Query: %w", err)) - } - bs, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookSeller]) - if err != nil { - return nil, logerror(fmt.Errorf("book_sellers/BookSellerByBookIDSeller/pgx.CollectOneRow: %w", err)) - } - - return &bs, nil -} - // BookSellersByBookID retrieves a row from 'xo_tests.book_sellers' as a BookSeller. // // Generated from index 'book_sellers_pkey'. @@ -431,14 +348,14 @@ func BookSellersByBookID(ctx context.Context, db DB, bookID int, opts ...BookSel // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } @@ -516,14 +433,14 @@ func BookSellersBySeller(ctx context.Context, db DB, seller uuid.UUID, opts ...B // logf(sqlstr, seller) rows, err := db.Query(ctx, sqlstr, append([]any{seller}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } @@ -601,14 +518,14 @@ func BookSellersBySellerBookID(ctx context.Context, db DB, seller uuid.UUID, boo // logf(sqlstr, seller, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{seller, bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/got/demoworkitem.xo.go b/internal/repos/postgresql/xo-templates/tests/got/demoworkitem.xo.go index 44ed20f232..6f4ed1c557 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/demoworkitem.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/demoworkitem.xo.go @@ -126,11 +126,11 @@ func (dwi *DemoWorkItem) Insert(ctx context.Context, db DB) (*DemoWorkItem, erro logf(sqlstr, dwi.WorkItemID, dwi.Checked) rows, err := db.Query(ctx, sqlstr, dwi.WorkItemID, dwi.Checked) if err != nil 
{ - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -149,11 +149,11 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro rows, err := db.Query(ctx, sqlstr, dwi.Checked, dwi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -161,7 +161,7 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro } // Upsert upserts a DemoWorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItemCreateParams) (*DemoWorkItem, error) { var err error @@ -173,11 +173,11 @@ func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItem var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Demo work item", Err: err}) } dwi, err = dwi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Demo work item", Err: err}) } } } @@ -262,11 +262,11 @@ func DemoWorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -336,11 +336,11 @@ func DemoWorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemI rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -412,11 +412,11 @@ func DemoWorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } dwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } return &dwi, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/dummyjoin.xo.go b/internal/repos/postgresql/xo-templates/tests/got/dummyjoin.xo.go index fd34e8d037..7e97a6bef3 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/dummyjoin.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/dummyjoin.xo.go @@ -108,11 +108,11 @@ func (dj *DummyJoin) Insert(ctx context.Context, db DB) (*DummyJoin, error) { rows, err := db.Query(ctx, sqlstr, dj.Name) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Insert/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("DummyJoin/Insert/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } newdj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } *dj = newdj @@ -132,11 +132,11 @@ func (dj *DummyJoin) Update(ctx context.Context, db DB) (*DummyJoin, error) { rows, err := db.Query(ctx, sqlstr, dj.Name, dj.DummyJoinID) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Update/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } newdj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } *dj = newdj @@ -144,7 +144,7 @@ func (dj *DummyJoin) Update(ctx context.Context, db DB) (*DummyJoin, error) { } // Upsert upserts a DummyJoin in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (dj *DummyJoin) Upsert(ctx context.Context, db DB, params *DummyJoinCreateParams) (*DummyJoin, error) { var err error @@ -155,11 +155,11 @@ func (dj *DummyJoin) Upsert(ctx context.Context, db DB, params *DummyJoinCreateP var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Dummy join", Err: err}) } dj, err = dj.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Dummy join", Err: err}) } } } @@ -238,11 +238,11 @@ func DummyJoinPaginatedByDummyJoinIDAsc(ctx context.Context, db DB, dummyJoinID rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Dummy join", Err: err})) } return res, nil } @@ -306,11 +306,11 @@ func DummyJoinPaginatedByDummyJoinIDDesc(ctx context.Context, db DB, dummyJoinID rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Dummy join", Err: err})) } return res, nil } @@ -376,11 +376,11 @@ func DummyJoinByDummyJoinID(ctx context.Context, db DB, dummyJoinID int, opts .. // logf(sqlstr, dummyJoinID) rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } dj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } return &dj, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/extra.xo.go b/internal/repos/postgresql/xo-templates/tests/got/extra.xo.go index 51a14d2e0d..71dc3967df 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/extra.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/extra.xo.go @@ -2,6 +2,25 @@ package got // Code generated by xo. DO NOT EDIT. +import ( + "fmt" +) + func newPointer[T any](v T) *T { return &v } + +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. +func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. 
+func (err *XoError) Unwrap() error { + return err.Err +} diff --git a/internal/repos/postgresql/xo-templates/tests/got/notification.xo.go b/internal/repos/postgresql/xo-templates/tests/got/notification.xo.go index 38c30ed841..49f7b83d29 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/notification.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/notification.xo.go @@ -152,11 +152,11 @@ func (n *Notification) Insert(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.Body, n.Sender, n.Receiver) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -176,11 +176,11 @@ func (n *Notification) Update(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.Body, n.Sender, n.Receiver, n.NotificationID) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -188,7 +188,7 @@ func (n *Notification) Update(ctx context.Context, db DB) (*Notification, error) } // Upsert upserts a Notification in the database. -// Requires appropiate PK(s) to be set beforehand. 
+// Requires appropriate PK(s) to be set beforehand. func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCreateParams) (*Notification, error) { var err error @@ -201,11 +201,11 @@ func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCr var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Notification", Err: err}) } n, err = n.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Notification", Err: err}) } } } @@ -298,11 +298,11 @@ func NotificationPaginatedByNotificationIDAsc(ctx context.Context, db DB, notifi rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -380,11 +380,11 @@ func NotificationPaginatedByNotificationIDDesc(ctx context.Context, db DB, notif rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -464,11 +464,11 @@ func NotificationByNotificationID(ctx context.Context, db DB, notificationID int // logf(sqlstr, notificationID) rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } n, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } return &n, nil @@ -549,14 +549,14 @@ func NotificationsBySender(ctx context.Context, db DB, sender uuid.UUID, opts .. // logf(sqlstr, sender) rows, err := db.Query(ctx, sqlstr, append([]any{sender}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/Query: %w", &XoError{Entity: "Notification", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/got/pagelement.xo.go b/internal/repos/postgresql/xo-templates/tests/got/pagelement.xo.go index 95303acb70..87c0eea83f 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/pagelement.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/pagelement.xo.go @@ -155,11 +155,11 @@ func (pe *PagElement) Insert(ctx context.Context, db DB) (*PagElement, error) { rows, err := db.Query(ctx, sqlstr, pe.Name, pe.Dummy) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Insert/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } newpe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } *pe = newpe @@ -179,11 +179,11 @@ func (pe *PagElement) Update(ctx context.Context, db DB) (*PagElement, error) { rows, err := db.Query(ctx, sqlstr, pe.Name, pe.Dummy, pe.PaginatedElementID) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Update/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } newpe, err 
:= pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } *pe = newpe @@ -191,7 +191,7 @@ func (pe *PagElement) Update(ctx context.Context, db DB) (*PagElement, error) { } // Upsert upserts a PagElement in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (pe *PagElement) Upsert(ctx context.Context, db DB, params *PagElementCreateParams) (*PagElement, error) { var err error @@ -203,11 +203,11 @@ func (pe *PagElement) Upsert(ctx context.Context, db DB, params *PagElementCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Pag element", Err: err}) } pe, err = pe.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Pag element", Err: err}) } } } @@ -294,11 +294,11 @@ func PagElementPaginatedByCreatedAtAsc(ctx context.Context, db DB, createdAt tim rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Pag element", Err: err})) } return res, nil } @@ -370,11 +370,11 @@ func PagElementPaginatedByCreatedAtDesc(ctx context.Context, db DB, createdAt ti rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Pag element", Err: err})) } return res, nil } @@ -448,11 +448,11 @@ func PagElementByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts // logf(sqlstr, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } pe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } return &pe, nil @@ -527,11 +527,11 @@ func PagElementByPaginatedElementID(ctx context.Context, db DB, paginatedElement // logf(sqlstr, paginatedElementID) rows, err := db.Query(ctx, sqlstr, append([]any{paginatedElementID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } pe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } return &pe, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/user.xo.go b/internal/repos/postgresql/xo-templates/tests/got/user.xo.go index ce03d6a94b..b0f6ae6167 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/user.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/user.xo.go @@ -352,11 +352,11 @@ func (u *User) Insert(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Name, u.APIKeyID, u.DeletedAt) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("User/Insert/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -376,11 +376,11 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Name, u.APIKeyID, u.DeletedAt, u.UserID) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Update/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -388,7 +388,7 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { } // Upsert upserts a User in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*User, error) { var err error @@ -400,11 +400,11 @@ func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*Us var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User", Err: err}) } u, err = u.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User", Err: err}) } } } @@ -445,7 +445,7 @@ func (u *User) Restore(ctx context.Context, db DB) (*User, error) { u.DeletedAt = nil newu, err := u.Update(ctx, db) if err != nil { - return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return newu, nil } @@ -560,11 +560,11 @@ func UserPaginatedByCreatedAtAsc(ctx context.Context, db DB, createdAt time.Time rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -679,11 +679,11 @@ func UserPaginatedByCreatedAtDesc(ctx context.Context, db DB, createdAt time.Tim rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -800,11 +800,11 @@ func UserByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts ...Us // logf(sqlstr, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -922,11 +922,11 @@ func UserByName(ctx context.Context, db DB, name string, opts ...UserSelectConfi // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByName/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByName/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByName/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByName/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1044,11 +1044,11 @@ func UserByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...UserSele // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/userapikey.xo.go b/internal/repos/postgresql/xo-templates/tests/got/userapikey.xo.go index a5dd900370..486f97e1e8 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/userapikey.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/userapikey.xo.go @@ -161,11 +161,11 @@ func (uak *UserAPIKey) Insert(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - 
return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -185,11 +185,11 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID, uak.UserAPIKeyID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -197,7 +197,7 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { } // Upsert upserts a UserAPIKey in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCreateParams) (*UserAPIKey, error) { var err error @@ -210,11 +210,11 @@ func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCrea var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User api key", Err: err}) } uak, err = uak.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User api key", Err: err}) } } } @@ -301,11 +301,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDAsc(ctx context.Context, db DB, userAPIKey rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -377,11 +377,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDDesc(ctx context.Context, db DB, userAPIKe rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -455,11 +455,11 @@ func UserAPIKeyByAPIKey(ctx context.Context, db DB, apiKey string, opts ...UserA // logf(sqlstr, apiKey) rows, err := db.Query(ctx, sqlstr, append([]any{apiKey}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -534,11 +534,11 @@ func UserAPIKeyByUserAPIKeyID(ctx context.Context, db DB, userAPIKeyID int, opts // logf(sqlstr, userAPIKeyID) rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -613,11 +613,11 @@ func UserAPIKeyByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...Us // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil diff --git a/internal/repos/postgresql/xo-templates/tests/got/workitem.xo.go b/internal/repos/postgresql/xo-templates/tests/got/workitem.xo.go index 59b666378f..67d205b45d 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/workitem.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/workitem.xo.go @@ -21,8 +21,9 @@ import ( // - "cardinality": to generate/override joins explicitly. Only O2O is inferred. // - "tags": to append literal struct tag strings. 
type WorkItem struct { - WorkItemID int64 `json:"workItemID" db:"work_item_id" required:"true"` // work_item_id - Title *string `json:"title" db:"title" required:"true"` // title + WorkItemID int64 `json:"workItemID" db:"work_item_id" required:"true"` // work_item_id + Title *string `json:"title" db:"title" required:"true"` // title + Description *string `json:"description" db:"description" required:"true"` // description DemoWorkItemJoin *DemoWorkItem `json:"-" db:"demo_work_item_work_item_id" openapi-go:"ignore"` // O2O demo_work_items (inferred) WorkItemAssignedUsersJoin *[]User__WIAU_WorkItem `json:"-" db:"work_item_assigned_user_assigned_users" openapi-go:"ignore"` // M2M work_item_assigned_user @@ -30,13 +31,15 @@ type WorkItem struct { // WorkItemCreateParams represents insert params for 'xo_tests.work_items'. type WorkItemCreateParams struct { - Title *string `json:"title" required:"true"` // title + Title *string `json:"title" required:"true"` // title + Description *string `json:"description" required:"true"` // description } // CreateWorkItem creates a new WorkItem in the database with the given params. func CreateWorkItem(ctx context.Context, db DB, params *WorkItemCreateParams) (*WorkItem, error) { wi := &WorkItem{ - Title: params.Title, + Title: params.Title, + Description: params.Description, } return wi.Insert(ctx, db) @@ -44,7 +47,8 @@ func CreateWorkItem(ctx context.Context, db DB, params *WorkItemCreateParams) (* // WorkItemUpdateParams represents update params for 'xo_tests.work_items'. type WorkItemUpdateParams struct { - Title **string `json:"title" required:"true"` // title + Title **string `json:"title" required:"true"` // title + Description **string `json:"description" required:"true"` // description } // SetUpdateParams updates xo_tests.work_items struct fields with the specified params. 
@@ -52,6 +56,9 @@ func (wi *WorkItem) SetUpdateParams(params *WorkItemUpdateParams) { if params.Title != nil { wi.Title = *params.Title } + if params.Description != nil { + wi.Description = *params.Description + } } type WorkItemSelectConfig struct { @@ -148,20 +155,20 @@ const workItemTableAssignedUsersGroupBySQL = `work_items.work_item_id, work_item func (wi *WorkItem) Insert(ctx context.Context, db DB) (*WorkItem, error) { // insert (primary key generated and returned by database) sqlstr := `INSERT INTO xo_tests.work_items ( - title + title, description ) VALUES ( - $1 + $1, $2 ) RETURNING * ` // run - logf(sqlstr, wi.Title) + logf(sqlstr, wi.Title, wi.Description) - rows, err := db.Query(ctx, sqlstr, wi.Title) + rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -173,19 +180,19 @@ func (wi *WorkItem) Insert(ctx context.Context, db DB) (*WorkItem, error) { func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { // update with composite primary key sqlstr := `UPDATE xo_tests.work_items SET - title = $1 - WHERE work_item_id = $2 + title = $1, description = $2 + WHERE work_item_id = $3 RETURNING * ` // run - logf(sqlstr, wi.Title, wi.WorkItemID) + logf(sqlstr, wi.Title, wi.Description, wi.WorkItemID) - rows, err := db.Query(ctx, sqlstr, wi.Title, wi.WorkItemID) + rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description, wi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -193,22 +200,23 @@ func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { } // Upsert upserts a WorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wi *WorkItem) Upsert(ctx context.Context, db DB, params *WorkItemCreateParams) (*WorkItem, error) { var err error wi.Title = params.Title + wi.Description = params.Description wi, err = wi.Insert(ctx, db) if err != nil { var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item", Err: err}) } wi, err = wi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item", Err: err}) } } } @@ -286,7 +294,8 @@ func WorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID int sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id > $1 %s %s @@ -299,11 +308,11 @@ func WorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID int rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -366,7 +375,8 @@ func WorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemID in sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id < $1 %s %s @@ -379,11 +389,97 @@ func WorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemID in rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} + +// WorkItems retrieves a row from 'xo_tests.work_items' as a WorkItem. +// +// Generated from index '[xo] base filter query'. 
+func WorkItems(ctx context.Context, db DB, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 0 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description %s + FROM xo_tests.work_items %s + WHERE true + %s %s +`, selects, joins, filters, groupbys) + sqlstr += c.orderBy + sqlstr += c.limit + sqlstr = "/* WorkItems */\n" + sqlstr + + // run + // logf(sqlstr, ) + rows, err := 
db.Query(ctx, sqlstr, append([]any{}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItems/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItems/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -448,7 +544,8 @@ func WorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts ... sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id = $1 %s %s @@ -461,12 +558,98 @@ func WorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts ... // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } wi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } return &wi, nil } + +// WorkItemsByTitle retrieves a row from 'xo_tests.work_items' as a WorkItem. + +// Generated from index 'work_items_title_description_idx1'. 
+func WorkItemsByTitle(ctx context.Context, db DB, title *string, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 1 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description %s + FROM xo_tests.work_items %s + WHERE work_items.title = $1 + %s %s +`, selects, joins, filters, groupbys) + sqlstr += c.orderBy + sqlstr += c.limit + sqlstr = "/* WorkItemsByTitle */\n" + sqlstr + + // 
run + // logf(sqlstr, title) + rows, err := db.Query(ctx, sqlstr, append([]any{title}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} diff --git a/internal/repos/postgresql/xo-templates/tests/got/workitemassigneduser.xo.go b/internal/repos/postgresql/xo-templates/tests/got/workitemassigneduser.xo.go index 96eeb8224a..0f139c79cb 100644 --- a/internal/repos/postgresql/xo-templates/tests/got/workitemassigneduser.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/got/workitemassigneduser.xo.go @@ -193,11 +193,11 @@ func (wiau *WorkItemAssignedUser) Insert(ctx context.Context, db DB) (*WorkItemA logf(sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) rows, err := db.Query(ctx, sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -216,11 +216,11 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA rows, err := db.Query(ctx, sqlstr, wiau.Role, wiau.WorkItemID, wiau.AssignedUser) if err != nil { - return nil, 
logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -228,7 +228,7 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA } // Upsert upserts a WorkItemAssignedUser in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *WorkItemAssignedUserCreateParams) (*WorkItemAssignedUser, error) { var err error @@ -241,11 +241,11 @@ func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *Wor var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item assigned user", Err: err}) } wiau, err = wiau.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item assigned user", Err: err}) } } } @@ -339,14 +339,14 @@ func WorkItemAssignedUsersByAssignedUserWorkItemID(ctx context.Context, db DB, a // logf(sqlstr, assignedUser, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser, workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -425,11 +425,11 @@ func WorkItemAssignedUserByWorkItemIDAssignedUser(ctx context.Context, db DB, wo // logf(sqlstr, workItemID, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID, assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } wiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return &wiau, nil @@ -509,14 +509,14 @@ func WorkItemAssignedUsersByWorkItemID(ctx context.Context, db DB, workItemID in // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -595,14 +595,14 @@ func WorkItemAssignedUsersByAssignedUser(ctx context.Context, db DB, assignedUse // logf(sqlstr, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/query_test.go b/internal/repos/postgresql/xo-templates/tests/query_test.go index aa73570a82..10c0ccf398 100644 --- a/internal/repos/postgresql/xo-templates/tests/query_test.go +++ 
b/internal/repos/postgresql/xo-templates/tests/query_test.go @@ -5,6 +5,7 @@ package tests import ( "context" + "fmt" "testing" "time" @@ -23,6 +24,12 @@ import ( name clash probably needs to be detected between constraints, check M2M-M2O and M2O-O2O at the same time * IMPORTANT: explain analyze to ensure dynamic sql query plans for joins dont do hash joins + +-- TODO: tests for excluded indexes + test trgm queries out with generic func () WithFilters: ilike, etc. +create index on work_items using gin (title gin_trgm_ops); +create index on work_items using gin (description gin_trgm_ops); +create index on work_items using gin (title gin_trgm_ops, description gin_trgm_ops); +create index on work_items using gin (title, description gin_trgm_ops); */ func TestCursorPagination_Timestamp(t *testing.T) { @@ -59,6 +66,17 @@ func Test_Filters(t *testing.T) { assert.Equal(t, ee[1].Name, "element -4 days") } +func TestTrigram_Filters(t *testing.T) { + t.Parallel() + + ctx := context.Background() + + ww, err := db.WorkItems(ctx, testPool, db.WithWorkItemFilters(map[string][]any{"description ILIKE '%' || $1 || '%'": {"rome"}})) + assert.NoError(t, err) + assert.Len(t, ww, 1) + assert.Contains(t, *ww[0].Description, "Rome") +} + func TestM2M_SelectFilter(t *testing.T) { t.Parallel() @@ -233,6 +251,7 @@ func TestCRUD_UniqueIndex(t *testing.T) { assert.NoError(t, err) _, err = db.UserByName(ctx, testPool, u1.Name) + fmt.Printf("err: %v\n", err) assert.ErrorContains(t, err, errNoRows) // test soft delete and restore diff --git a/internal/repos/postgresql/xo-templates/tests/schema.sql b/internal/repos/postgresql/xo-templates/tests/schema.sql index 182f3d3c52..cbacf982a5 100644 --- a/internal/repos/postgresql/xo-templates/tests/schema.sql +++ b/internal/repos/postgresql/xo-templates/tests/schema.sql @@ -2,6 +2,10 @@ create schema if not exists extensions; create extension if not exists pg_stat_statements schema extensions; +create extension if not exists pg_trgm schema extensions; + 
+create extension if not exists btree_gin schema extensions; + -- ensure up to date drop schema if exists xo_tests cascade; @@ -116,8 +120,17 @@ comment on column xo_tests.notifications.receiver is '"cardinality":M2O'; create table xo_tests.work_items ( work_item_id bigserial primary key , title text + , description text ); +create index on xo_tests.work_items using gin (title extensions.gin_trgm_ops); + +create index on xo_tests.work_items using gin (description extensions.gin_trgm_ops); + +create index on xo_tests.work_items using gin (title extensions.gin_trgm_ops , description extensions.gin_trgm_ops); + +create index on xo_tests.work_items using gin (title , description extensions.gin_trgm_ops); + create type xo_tests.work_item_role as ENUM ( 'preparer' , 'reviewer' @@ -204,10 +217,10 @@ begin insert into xo_tests.notifications (body , receiver , sender) values ('body 2' , user_1_id , user_2_id); - insert into xo_tests.work_items (title) - values ('Work Item 1'); - insert into xo_tests.work_items (title) - values ('Work Item 2'); + insert into xo_tests.work_items (title , description) + values ('Work Item 1' , 'Every cloud has a silver lining.'); + insert into xo_tests.work_items (title , description) + values ('Work Item 2' , 'When in Rome, do as the Romans do.'); insert into xo_tests.work_items (title) values ('Work Item 3'); diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/book.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/book.xo.go index d26dbd8c2b..a48c4fd85b 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/book.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/book.xo.go @@ -223,11 +223,11 @@ func (b *Book) Insert(ctx context.Context, db DB) (*Book, error) { rows, err := db.Query(ctx, sqlstr, b.Name) if err != nil { - return nil, logerror(fmt.Errorf("Book/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Insert/db.Query: %w", &XoError{Entity: "Book", Err: err})) } 
newb, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Book/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } *b = newb @@ -247,11 +247,11 @@ func (b *Book) Update(ctx context.Context, db DB) (*Book, error) { rows, err := db.Query(ctx, sqlstr, b.Name, b.BookID) if err != nil { - return nil, logerror(fmt.Errorf("Book/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Update/db.Query: %w", &XoError{Entity: "Book", Err: err})) } newb, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Book/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } *b = newb @@ -259,7 +259,7 @@ func (b *Book) Update(ctx context.Context, db DB) (*Book, error) { } // Upsert upserts a Book in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (b *Book) Upsert(ctx context.Context, db DB, params *BookCreateParams) (*Book, error) { var err error @@ -270,11 +270,11 @@ func (b *Book) Upsert(ctx context.Context, db DB, params *BookCreateParams) (*Bo var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book", Err: err}) } b, err = b.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book", Err: err}) } } } @@ -377,11 +377,11 @@ func BookPaginatedByBookIDAsc(ctx context.Context, db DB, bookID int, opts ...Bo rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book", Err: err})) } return res, nil } @@ -469,11 +469,11 @@ func BookPaginatedByBookIDDesc(ctx context.Context, db DB, bookID int, opts ...B rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("Book/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Book/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book", Err: err})) } return res, nil } @@ -563,11 +563,11 @@ func BookByBookID(ctx context.Context, db DB, bookID int, opts ...BookSelectConf // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("books/BookByBookID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("books/BookByBookID/db.Query: %w", &XoError{Entity: "Book", Err: err})) } b, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Book]) if err != nil { - return nil, logerror(fmt.Errorf("books/BookByBookID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("books/BookByBookID/pgx.CollectOneRow: %w", &XoError{Entity: "Book", Err: err})) } return &b, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthor.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthor.xo.go index 7a7adaef55..896e310990 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthor.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthor.xo.go @@ -193,11 +193,11 @@ func (ba *BookAuthor) Insert(ctx context.Context, db DB) (*BookAuthor, error) { logf(sqlstr, ba.BookID, ba.AuthorID, ba.Pseudonym) rows, err := db.Query(ctx, sqlstr, ba.BookID, ba.AuthorID, ba.Pseudonym) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Insert/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } newba, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } *ba = newba @@ -216,11 +216,11 @@ func (ba *BookAuthor) Update(ctx context.Context, db DB) (*BookAuthor, error) { rows, err := db.Query(ctx, sqlstr, ba.Pseudonym, ba.BookID, ba.AuthorID) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Update/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } newba, err := 
pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } *ba = newba @@ -228,7 +228,7 @@ func (ba *BookAuthor) Update(ctx context.Context, db DB) (*BookAuthor, error) { } // Upsert upserts a BookAuthor in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (ba *BookAuthor) Upsert(ctx context.Context, db DB, params *BookAuthorCreateParams) (*BookAuthor, error) { var err error @@ -241,11 +241,11 @@ func (ba *BookAuthor) Upsert(ctx context.Context, db DB, params *BookAuthorCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book author", Err: err}) } ba, err = ba.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book author", Err: err}) } } } @@ -339,11 +339,11 @@ func BookAuthorByBookIDAuthorID(ctx context.Context, db DB, bookID int, authorID // logf(sqlstr, bookID, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, authorID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/db.Query: %w", &XoError{Entity: "Book author", Err: err})) } ba, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors/BookAuthorByBookIDAuthorID/pgx.CollectOneRow: %w", &XoError{Entity: "Book author", Err: err})) } return &ba, nil @@ -423,14 +423,14 @@ func BookAuthorsByBookID(ctx context.Context, db DB, bookID int, opts ...BookAut // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", &XoError{Entity: "Book author", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book author", Err: err})) } return res, nil } @@ -509,14 +509,14 @@ func BookAuthorsByAuthorID(ctx context.Context, db DB, authorID uuid.UUID, opts // logf(sqlstr, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{authorID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/Query: %w", &XoError{Entity: "Book author", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthor]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthor/BookAuthorByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book author", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthorssurrogatekey.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthorssurrogatekey.xo.go index 4eb644ad31..99e6cce72d 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthorssurrogatekey.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/bookauthorssurrogatekey.xo.go @@ -194,11 +194,11 @@ func (bask *BookAuthorsSurrogateKey) Insert(ctx context.Context, db DB) (*BookAu rows, err := db.Query(ctx, sqlstr, bask.BookID, bask.AuthorID, bask.Pseudonym) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } newbask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } *bask = newbask @@ -218,11 +218,11 @@ func (bask *BookAuthorsSurrogateKey) Update(ctx context.Context, db DB) (*BookAu rows, err := db.Query(ctx, sqlstr, bask.BookID, bask.AuthorID, bask.Pseudonym, 
bask.BookAuthorsSurrogateKeyID) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } newbask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } *bask = newbask @@ -230,7 +230,7 @@ func (bask *BookAuthorsSurrogateKey) Update(ctx context.Context, db DB) (*BookAu } // Upsert upserts a BookAuthorsSurrogateKey in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (bask *BookAuthorsSurrogateKey) Upsert(ctx context.Context, db DB, params *BookAuthorsSurrogateKeyCreateParams) (*BookAuthorsSurrogateKey, error) { var err error @@ -243,11 +243,11 @@ func (bask *BookAuthorsSurrogateKey) Upsert(ctx context.Context, db DB, params * var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book authors surrogate key", Err: err}) } bask, err = bask.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book authors surrogate key", Err: err}) } } } @@ -340,11 +340,11 @@ func BookAuthorsSurrogateKeyPaginatedByBookAuthorsSurrogateKeyIDAsc(ctx context. rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -422,11 +422,11 @@ func BookAuthorsSurrogateKeyPaginatedByBookAuthorsSurrogateKeyIDDesc(ctx context rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -506,11 +506,11 @@ func BookAuthorsSurrogateKeyByBookIDAuthorID(ctx context.Context, db DB, bookID // logf(sqlstr, bookID, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, authorID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } bask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return &bask, nil @@ -591,14 +591,14 @@ func BookAuthorsSurrogateKeysByBookID(ctx context.Context, db DB, bookID int, op // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -678,14 +678,14 @@ func BookAuthorsSurrogateKeysByAuthorID(ctx context.Context, db DB, authorID uui // logf(sqlstr, authorID) rows, err := db.Query(ctx, sqlstr, append([]any{authorID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookAuthorsSurrogateKey/BookAuthorsSurrogateKeyByBookIDAuthorID/pgx.CollectRows: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return res, nil } @@ -765,11 +765,11 @@ func BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID(ctx context.Context, db // logf(sqlstr, bookAuthorsSurrogateKeyID) rows, err := db.Query(ctx, sqlstr, append([]any{bookAuthorsSurrogateKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/db.Query: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } bask, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookAuthorsSurrogateKey]) if err != nil { - return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_authors_surrogate_key/BookAuthorsSurrogateKeyByBookAuthorsSurrogateKeyID/pgx.CollectOneRow: %w", &XoError{Entity: "Book authors surrogate key", Err: err})) } return &bask, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/bookreview.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/bookreview.xo.go index 68b50ce602..e8e72ed977 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/bookreview.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/bookreview.xo.go @@ -145,11 +145,11 @@ func (br *BookReview) Insert(ctx context.Context, db DB) (*BookReview, error) { rows, err := db.Query(ctx, sqlstr, br.BookID, br.Reviewer) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Insert/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } newbr, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } *br = newbr @@ -169,11 +169,11 @@ func (br *BookReview) Update(ctx context.Context, db DB) (*BookReview, error) { rows, err := db.Query(ctx, sqlstr, br.BookID, br.Reviewer, 
br.BookReviewID) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Update/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } newbr, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } *br = newbr @@ -181,7 +181,7 @@ func (br *BookReview) Update(ctx context.Context, db DB) (*BookReview, error) { } // Upsert upserts a BookReview in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (br *BookReview) Upsert(ctx context.Context, db DB, params *BookReviewCreateParams) (*BookReview, error) { var err error @@ -193,11 +193,11 @@ func (br *BookReview) Upsert(ctx context.Context, db DB, params *BookReviewCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Book review", Err: err}) } br, err = br.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Book review", Err: err}) } } } @@ -289,11 +289,11 @@ func BookReviewPaginatedByBookReviewIDAsc(ctx context.Context, db DB, bookReview rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -370,11 +370,11 @@ func BookReviewPaginatedByBookIDAsc(ctx context.Context, db DB, bookID int, opts rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -451,11 +451,11 @@ func BookReviewPaginatedByBookReviewIDDesc(ctx context.Context, db DB, bookRevie rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -532,11 +532,11 @@ func BookReviewPaginatedByBookIDDesc(ctx context.Context, db DB, bookID int, opt rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -615,11 +615,11 @@ func BookReviewByBookReviewID(ctx context.Context, db DB, bookReviewID int, opts // logf(sqlstr, bookReviewID) rows, err := db.Query(ctx, sqlstr, append([]any{bookReviewID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } br, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByBookReviewID/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } return &br, nil @@ -699,11 +699,11 @@ func BookReviewByReviewerBookID(ctx context.Context, db DB, reviewer uuid.UUID, // logf(sqlstr, reviewer, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{reviewer, bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/db.Query: %w", &XoError{Entity: "Book review", Err: err})) } br, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("book_reviews/BookReviewByReviewerBookID/pgx.CollectOneRow: %w", &XoError{Entity: "Book review", Err: err})) } return &br, nil @@ -783,14 +783,14 @@ func BookReviewsByReviewer(ctx context.Context, db DB, reviewer uuid.UUID, opts // logf(sqlstr, reviewer) rows, err := db.Query(ctx, sqlstr, append([]any{reviewer}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", &XoError{Entity: "Book review", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } @@ -869,14 +869,14 @@ func BookReviewsByBookID(ctx context.Context, db DB, bookID int, opts ...BookRev // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/Query: %w", &XoError{Entity: "Book review", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookReview]) if err != nil { - return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookReview/BookReviewByReviewerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book review", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/bookseller.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/bookseller.xo.go index 453d4179ca..06a420daf7 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/bookseller.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/bookseller.xo.go @@ -165,11 +165,11 @@ func (bs *BookSeller) Insert(ctx context.Context, db DB) (*BookSeller, error) { logf(sqlstr, bs.BookID, bs.Seller) rows, err := db.Query(ctx, sqlstr, bs.BookID, bs.Seller) if err != nil { - 
return nil, logerror(fmt.Errorf("BookSeller/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/Insert/db.Query: %w", &XoError{Entity: "Book seller", Err: err})) } newbs, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Book seller", Err: err})) } *bs = newbs @@ -263,101 +263,18 @@ func BookSellersByBookIDSeller(ctx context.Context, db DB, bookID int, seller uu // logf(sqlstr, bookID, seller) rows, err := db.Query(ctx, sqlstr, append([]any{bookID, seller}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } -// BookSellerByBookIDSeller retrieves a row from 'xo_tests.book_sellers' as a BookSeller. -// -// Generated from index 'book_sellers_pkey'. 
-func BookSellerByBookIDSeller(ctx context.Context, db DB, bookID int, seller uuid.UUID, opts ...BookSellerSelectConfigOption) (*BookSeller, error) { - c := &BookSellerSelectConfig{joins: BookSellerJoins{}, filters: make(map[string][]any)} - - for _, o := range opts { - o(c) - } - - paramStart := 2 - nth := func() string { - paramStart++ - return strconv.Itoa(paramStart) - } - - var filterClauses []string - var filterParams []any - for filterTmpl, params := range c.filters { - filter := filterTmpl - for strings.Contains(filter, "$i") { - filter = strings.Replace(filter, "$i", "$"+nth(), 1) - } - filterClauses = append(filterClauses, filter) - filterParams = append(filterParams, params...) - } - - filters := "" - if len(filterClauses) > 0 { - filters = " AND " + strings.Join(filterClauses, " AND ") + " " - } - - var selectClauses []string - var joinClauses []string - var groupByClauses []string - - if c.joins.Sellers { - selectClauses = append(selectClauses, bookSellerTableSellersSelectSQL) - joinClauses = append(joinClauses, bookSellerTableSellersJoinSQL) - groupByClauses = append(groupByClauses, bookSellerTableSellersGroupBySQL) - } - - if c.joins.BooksSeller { - selectClauses = append(selectClauses, bookSellerTableBooksSellerSelectSQL) - joinClauses = append(joinClauses, bookSellerTableBooksSellerJoinSQL) - groupByClauses = append(groupByClauses, bookSellerTableBooksSellerGroupBySQL) - } - - selects := "" - if len(selectClauses) > 0 { - selects = ", " + strings.Join(selectClauses, " ,\n ") + " " - } - joins := strings.Join(joinClauses, " \n ") + " " - groupbys := "" - if len(groupByClauses) > 0 { - groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " - } - - sqlstr := fmt.Sprintf(`SELECT - book_sellers.book_id, - book_sellers.seller %s - FROM xo_tests.book_sellers %s - WHERE book_sellers.book_id = $1 AND book_sellers.seller = $2 - %s %s -`, selects, joins, filters, groupbys) - sqlstr += c.orderBy - sqlstr += c.limit - sqlstr = "/* 
BookSellerByBookIDSeller */\n" + sqlstr - - // run - // logf(sqlstr, bookID, seller) - rows, err := db.Query(ctx, sqlstr, append([]any{bookID, seller}, filterParams...)...) - if err != nil { - return nil, logerror(fmt.Errorf("book_sellers/BookSellerByBookIDSeller/db.Query: %w", err)) - } - bs, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[BookSeller]) - if err != nil { - return nil, logerror(fmt.Errorf("book_sellers/BookSellerByBookIDSeller/pgx.CollectOneRow: %w", err)) - } - - return &bs, nil -} - // BookSellersByBookID retrieves a row from 'xo_tests.book_sellers' as a BookSeller. // // Generated from index 'book_sellers_pkey'. @@ -431,14 +348,14 @@ func BookSellersByBookID(ctx context.Context, db DB, bookID int, opts ...BookSel // logf(sqlstr, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } @@ -516,14 +433,14 @@ func BookSellersBySeller(ctx context.Context, db DB, seller uuid.UUID, opts ...B // logf(sqlstr, seller) rows, err := db.Query(ctx, sqlstr, append([]any{seller}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellerByBookIDSeller/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } @@ -601,14 +518,14 @@ func BookSellersBySellerBookID(ctx context.Context, db DB, seller uuid.UUID, boo // logf(sqlstr, seller, bookID) rows, err := db.Query(ctx, sqlstr, append([]any{seller, bookID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/Query: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/Query: %w", &XoError{Entity: "Book seller", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[BookSeller]) if err != nil { - return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("BookSeller/BookSellersBySellerBookID/pgx.CollectRows: %w", &XoError{Entity: "Book seller", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/demoworkitem.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/demoworkitem.xo.go index 44ed20f232..6f4ed1c557 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/demoworkitem.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/demoworkitem.xo.go @@ -126,11 +126,11 @@ func (dwi *DemoWorkItem) Insert(ctx context.Context, db DB) (*DemoWorkItem, erro logf(sqlstr, dwi.WorkItemID, dwi.Checked) rows, err := db.Query(ctx, sqlstr, dwi.WorkItemID, 
dwi.Checked) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -149,11 +149,11 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro rows, err := db.Query(ctx, sqlstr, dwi.Checked, dwi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } newdwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } *dwi = newdwi @@ -161,7 +161,7 @@ func (dwi *DemoWorkItem) Update(ctx context.Context, db DB) (*DemoWorkItem, erro } // Upsert upserts a DemoWorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItemCreateParams) (*DemoWorkItem, error) { var err error @@ -173,11 +173,11 @@ func (dwi *DemoWorkItem) Upsert(ctx context.Context, db DB, params *DemoWorkItem var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Demo work item", Err: err}) } dwi, err = dwi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Demo work item", Err: err}) } } } @@ -262,11 +262,11 @@ func DemoWorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -336,11 +336,11 @@ func DemoWorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemI rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DemoWorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Demo work item", Err: err})) } return res, nil } @@ -412,11 +412,11 @@ func DemoWorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Demo work item", Err: err})) } dwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DemoWorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("demo_work_items/DemoWorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Demo work item", Err: err})) } return &dwi, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/dummyjoin.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/dummyjoin.xo.go index fd34e8d037..7e97a6bef3 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/dummyjoin.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/dummyjoin.xo.go @@ -108,11 +108,11 @@ func (dj *DummyJoin) Insert(ctx context.Context, db DB) (*DummyJoin, error) { rows, err := db.Query(ctx, sqlstr, dj.Name) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Insert/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("DummyJoin/Insert/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } newdj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } *dj = newdj @@ -132,11 +132,11 @@ func (dj *DummyJoin) Update(ctx context.Context, db DB) (*DummyJoin, error) { rows, err := db.Query(ctx, sqlstr, dj.Name, dj.DummyJoinID) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Update/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } newdj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } *dj = newdj @@ -144,7 +144,7 @@ func (dj *DummyJoin) Update(ctx context.Context, db DB) (*DummyJoin, error) { } // Upsert upserts a DummyJoin in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (dj *DummyJoin) Upsert(ctx context.Context, db DB, params *DummyJoinCreateParams) (*DummyJoin, error) { var err error @@ -155,11 +155,11 @@ func (dj *DummyJoin) Upsert(ctx context.Context, db DB, params *DummyJoinCreateP var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Dummy join", Err: err}) } dj, err = dj.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Dummy join", Err: err}) } } } @@ -238,11 +238,11 @@ func DummyJoinPaginatedByDummyJoinIDAsc(ctx context.Context, db DB, dummyJoinID rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Dummy join", Err: err})) } return res, nil } @@ -306,11 +306,11 @@ func DummyJoinPaginatedByDummyJoinIDDesc(ctx context.Context, db DB, dummyJoinID rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("DummyJoin/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Dummy join", Err: err})) } return res, nil } @@ -376,11 +376,11 @@ func DummyJoinByDummyJoinID(ctx context.Context, db DB, dummyJoinID int, opts .. // logf(sqlstr, dummyJoinID) rows, err := db.Query(ctx, sqlstr, append([]any{dummyJoinID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/db.Query: %w", &XoError{Entity: "Dummy join", Err: err})) } dj, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[DummyJoin]) if err != nil { - return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("dummy_join/DummyJoinByDummyJoinID/pgx.CollectOneRow: %w", &XoError{Entity: "Dummy join", Err: err})) } return &dj, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/extra.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/extra.xo.go index 51a14d2e0d..71dc3967df 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/extra.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/extra.xo.go @@ -2,6 +2,25 @@ package got // Code generated by xo. DO NOT EDIT. +import ( + "fmt" +) + func newPointer[T any](v T) *T { return &v } + +type XoError struct { + Entity string + Err error +} + +// Error satisfies the error interface. 
+func (e *XoError) Error() string { + return fmt.Sprintf("%s %v", e.Entity, e.Err) +} + +// Unwrap satisfies the unwrap interface. +func (err *XoError) Unwrap() error { + return err.Err +} diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/notification.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/notification.xo.go index 38c30ed841..49f7b83d29 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/notification.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/notification.xo.go @@ -152,11 +152,11 @@ func (n *Notification) Insert(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.Body, n.Sender, n.Receiver) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -176,11 +176,11 @@ func (n *Notification) Update(ctx context.Context, db DB) (*Notification, error) rows, err := db.Query(ctx, sqlstr, n.Body, n.Sender, n.Receiver, n.NotificationID) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } newn, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } *n = newn @@ -188,7 +188,7 @@ func (n *Notification) Update(ctx 
context.Context, db DB) (*Notification, error) } // Upsert upserts a Notification in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCreateParams) (*Notification, error) { var err error @@ -201,11 +201,11 @@ func (n *Notification) Upsert(ctx context.Context, db DB, params *NotificationCr var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Notification", Err: err}) } n, err = n.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Notification", Err: err}) } } } @@ -298,11 +298,11 @@ func NotificationPaginatedByNotificationIDAsc(ctx context.Context, db DB, notifi rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -380,11 +380,11 @@ func NotificationPaginatedByNotificationIDDesc(ctx context.Context, db DB, notif rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } @@ -464,11 +464,11 @@ func NotificationByNotificationID(ctx context.Context, db DB, notificationID int // logf(sqlstr, notificationID) rows, err := db.Query(ctx, sqlstr, append([]any{notificationID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/db.Query: %w", &XoError{Entity: "Notification", Err: err})) } n, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("notifications/NotificationByNotificationID/pgx.CollectOneRow: %w", &XoError{Entity: "Notification", Err: err})) } return &n, nil @@ -549,14 +549,14 @@ func NotificationsBySender(ctx context.Context, db DB, sender uuid.UUID, opts .. // logf(sqlstr, sender) rows, err := db.Query(ctx, sqlstr, append([]any{sender}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/Query: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/Query: %w", &XoError{Entity: "Notification", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[Notification]) if err != nil { - return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("Notification/NotificationsBySender/pgx.CollectRows: %w", &XoError{Entity: "Notification", Err: err})) } return res, nil } diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/pagelement.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/pagelement.xo.go index 95303acb70..87c0eea83f 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/pagelement.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/pagelement.xo.go @@ -155,11 +155,11 @@ func (pe *PagElement) Insert(ctx context.Context, db DB) (*PagElement, error) { rows, err := db.Query(ctx, sqlstr, pe.Name, pe.Dummy) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Insert/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } newpe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } *pe = newpe @@ -179,11 +179,11 @@ func (pe *PagElement) Update(ctx context.Context, db DB) (*PagElement, error) { rows, err := db.Query(ctx, sqlstr, pe.Name, pe.Dummy, pe.PaginatedElementID) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Update/db.Query: %w", &XoError{Entity: "Pag element", Err: 
err})) } newpe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } *pe = newpe @@ -191,7 +191,7 @@ func (pe *PagElement) Update(ctx context.Context, db DB) (*PagElement, error) { } // Upsert upserts a PagElement in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (pe *PagElement) Upsert(ctx context.Context, db DB, params *PagElementCreateParams) (*PagElement, error) { var err error @@ -203,11 +203,11 @@ func (pe *PagElement) Upsert(ctx context.Context, db DB, params *PagElementCreat var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Pag element", Err: err}) } pe, err = pe.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Pag element", Err: err}) } } } @@ -294,11 +294,11 @@ func PagElementPaginatedByCreatedAtAsc(ctx context.Context, db DB, createdAt tim rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Pag element", Err: err})) } return res, nil } @@ -370,11 +370,11 @@ func PagElementPaginatedByCreatedAtDesc(ctx context.Context, db DB, createdAt ti rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("PagElement/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Pag element", Err: err})) } return res, nil } @@ -448,11 +448,11 @@ func PagElementByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts // logf(sqlstr, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } pe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByCreatedAt/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } return &pe, nil @@ -527,11 +527,11 @@ func PagElementByPaginatedElementID(ctx context.Context, db DB, paginatedElement // logf(sqlstr, paginatedElementID) rows, err := db.Query(ctx, sqlstr, append([]any{paginatedElementID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/db.Query: %w", &XoError{Entity: "Pag element", Err: err})) } pe, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[PagElement]) if err != nil { - return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("pag_element/PagElementByPaginatedElementID/pgx.CollectOneRow: %w", &XoError{Entity: "Pag element", Err: err})) } return &pe, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/user.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/user.xo.go index ce03d6a94b..b0f6ae6167 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/user.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/user.xo.go @@ -352,11 +352,11 @@ func (u *User) Insert(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Name, u.APIKeyID, u.DeletedAt) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/db.Query: %w", err)) + return 
nil, logerror(fmt.Errorf("User/Insert/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -376,11 +376,11 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { rows, err := db.Query(ctx, sqlstr, u.Name, u.APIKeyID, u.DeletedAt, u.UserID) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Update/db.Query: %w", &XoError{Entity: "User", Err: err})) } newu, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("User/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } *u = newu @@ -388,7 +388,7 @@ func (u *User) Update(ctx context.Context, db DB) (*User, error) { } // Upsert upserts a User in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*User, error) { var err error @@ -400,11 +400,11 @@ func (u *User) Upsert(ctx context.Context, db DB, params *UserCreateParams) (*Us var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User", Err: err}) } u, err = u.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User", Err: err}) } } } @@ -445,7 +445,7 @@ func (u *User) Restore(ctx context.Context, db DB) (*User, error) { u.DeletedAt = nil newu, err := u.Update(ctx, db) if err != nil { - return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Restore/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return newu, nil } @@ -560,11 +560,11 @@ func UserPaginatedByCreatedAtAsc(ctx context.Context, db DB, createdAt time.Time rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -679,11 +679,11 @@ func UserPaginatedByCreatedAtDesc(ctx context.Context, db DB, createdAt time.Tim rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/db.Query: %w", &XoError{Entity: "User", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("User/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User", Err: err})) } return res, nil } @@ -800,11 +800,11 @@ func UserByCreatedAt(ctx context.Context, db DB, createdAt time.Time, opts ...Us // logf(sqlstr, createdAt) rows, err := db.Query(ctx, sqlstr, append([]any{createdAt}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByCreatedAt/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -922,11 +922,11 @@ func UserByName(ctx context.Context, db DB, name string, opts ...UserSelectConfi // logf(sqlstr, name) rows, err := db.Query(ctx, sqlstr, append([]any{name}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("users/UserByName/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByName/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByName/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByName/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil @@ -1044,11 +1044,11 @@ func UserByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...UserSele // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/db.Query: %w", &XoError{Entity: "User", Err: err})) } u, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[User]) if err != nil { - return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("users/UserByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User", Err: err})) } return &u, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/userapikey.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/userapikey.xo.go index a5dd900370..486f97e1e8 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/userapikey.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/userapikey.xo.go @@ -161,11 +161,11 @@ func (uak *UserAPIKey) Insert(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) 
if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -185,11 +185,11 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { rows, err := db.Query(ctx, sqlstr, uak.APIKey, uak.ExpiresOn, uak.UserID, uak.UserAPIKeyID) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } newuak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Update/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } *uak = newuak @@ -197,7 +197,7 @@ func (uak *UserAPIKey) Update(ctx context.Context, db DB) (*UserAPIKey, error) { } // Upsert upserts a UserAPIKey in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. 
func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCreateParams) (*UserAPIKey, error) { var err error @@ -210,11 +210,11 @@ func (uak *UserAPIKey) Upsert(ctx context.Context, db DB, params *UserAPIKeyCrea var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "User api key", Err: err}) } uak, err = uak.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "User api key", Err: err}) } } } @@ -301,11 +301,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDAsc(ctx context.Context, db DB, userAPIKey rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -377,11 +377,11 @@ func UserAPIKeyPaginatedByUserAPIKeyIDDesc(ctx context.Context, db DB, userAPIKe rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("UserAPIKey/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "User api key", Err: err})) } return res, nil } @@ -455,11 +455,11 @@ func UserAPIKeyByAPIKey(ctx context.Context, db DB, apiKey string, opts ...UserA // logf(sqlstr, apiKey) rows, err := db.Query(ctx, sqlstr, append([]any{apiKey}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByAPIKey/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -534,11 +534,11 @@ func UserAPIKeyByUserAPIKeyID(ctx context.Context, db DB, userAPIKeyID int, opts // logf(sqlstr, userAPIKeyID) rows, err := db.Query(ctx, sqlstr, append([]any{userAPIKeyID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserAPIKeyID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil @@ -613,11 +613,11 @@ func UserAPIKeyByUserID(ctx context.Context, db DB, userID uuid.UUID, opts ...Us // logf(sqlstr, userID) rows, err := db.Query(ctx, sqlstr, append([]any{userID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/db.Query: %w", &XoError{Entity: "User api key", Err: err})) } uak, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[UserAPIKey]) if err != nil { - return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("user_api_keys/UserAPIKeyByUserID/pgx.CollectOneRow: %w", &XoError{Entity: "User api key", Err: err})) } return &uak, nil diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/workitem.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/workitem.xo.go index 59b666378f..67d205b45d 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/workitem.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/workitem.xo.go @@ -21,8 +21,9 @@ import ( // - "cardinality": to generate/override joins explicitly. Only O2O is inferred. // - "tags": to append literal struct tag strings. 
type WorkItem struct { - WorkItemID int64 `json:"workItemID" db:"work_item_id" required:"true"` // work_item_id - Title *string `json:"title" db:"title" required:"true"` // title + WorkItemID int64 `json:"workItemID" db:"work_item_id" required:"true"` // work_item_id + Title *string `json:"title" db:"title" required:"true"` // title + Description *string `json:"description" db:"description" required:"true"` // description DemoWorkItemJoin *DemoWorkItem `json:"-" db:"demo_work_item_work_item_id" openapi-go:"ignore"` // O2O demo_work_items (inferred) WorkItemAssignedUsersJoin *[]User__WIAU_WorkItem `json:"-" db:"work_item_assigned_user_assigned_users" openapi-go:"ignore"` // M2M work_item_assigned_user @@ -30,13 +31,15 @@ type WorkItem struct { // WorkItemCreateParams represents insert params for 'xo_tests.work_items'. type WorkItemCreateParams struct { - Title *string `json:"title" required:"true"` // title + Title *string `json:"title" required:"true"` // title + Description *string `json:"description" required:"true"` // description } // CreateWorkItem creates a new WorkItem in the database with the given params. func CreateWorkItem(ctx context.Context, db DB, params *WorkItemCreateParams) (*WorkItem, error) { wi := &WorkItem{ - Title: params.Title, + Title: params.Title, + Description: params.Description, } return wi.Insert(ctx, db) @@ -44,7 +47,8 @@ func CreateWorkItem(ctx context.Context, db DB, params *WorkItemCreateParams) (* // WorkItemUpdateParams represents update params for 'xo_tests.work_items'. type WorkItemUpdateParams struct { - Title **string `json:"title" required:"true"` // title + Title **string `json:"title" required:"true"` // title + Description **string `json:"description" required:"true"` // description } // SetUpdateParams updates xo_tests.work_items struct fields with the specified params. 
@@ -52,6 +56,9 @@ func (wi *WorkItem) SetUpdateParams(params *WorkItemUpdateParams) { if params.Title != nil { wi.Title = *params.Title } + if params.Description != nil { + wi.Description = *params.Description + } } type WorkItemSelectConfig struct { @@ -148,20 +155,20 @@ const workItemTableAssignedUsersGroupBySQL = `work_items.work_item_id, work_item func (wi *WorkItem) Insert(ctx context.Context, db DB) (*WorkItem, error) { // insert (primary key generated and returned by database) sqlstr := `INSERT INTO xo_tests.work_items ( - title + title, description ) VALUES ( - $1 + $1, $2 ) RETURNING * ` // run - logf(sqlstr, wi.Title) + logf(sqlstr, wi.Title, wi.Description) - rows, err := db.Query(ctx, sqlstr, wi.Title) + rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -173,19 +180,19 @@ func (wi *WorkItem) Insert(ctx context.Context, db DB) (*WorkItem, error) { func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { // update with composite primary key sqlstr := `UPDATE xo_tests.work_items SET - title = $1 - WHERE work_item_id = $2 + title = $1, description = $2 + WHERE work_item_id = $3 RETURNING * ` // run - logf(sqlstr, wi.Title, wi.WorkItemID) + logf(sqlstr, wi.Title, wi.Description, wi.WorkItemID) - rows, err := db.Query(ctx, sqlstr, wi.Title, wi.WorkItemID) + rows, err := db.Query(ctx, sqlstr, wi.Title, wi.Description, wi.WorkItemID) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", err)) + return nil, 
logerror(fmt.Errorf("WorkItem/Update/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } newwi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } *wi = newwi @@ -193,22 +200,23 @@ func (wi *WorkItem) Update(ctx context.Context, db DB) (*WorkItem, error) { } // Upsert upserts a WorkItem in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wi *WorkItem) Upsert(ctx context.Context, db DB, params *WorkItemCreateParams) (*WorkItem, error) { var err error wi.Title = params.Title + wi.Description = params.Description wi, err = wi.Insert(ctx, db) if err != nil { var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item", Err: err}) } wi, err = wi.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item", Err: err}) } } } @@ -286,7 +294,8 @@ func WorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID int sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id > $1 %s %s @@ -299,11 +308,11 @@ func WorkItemPaginatedByWorkItemIDAsc(ctx context.Context, db DB, workItemID int rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Asc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -366,7 +375,8 @@ func WorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemID in sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id < $1 %s %s @@ -379,11 +389,97 @@ func WorkItemPaginatedByWorkItemIDDesc(ctx context.Context, db DB, workItemID in rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItem/Paginated/Desc/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} + +// WorkItems retrieves a row from 'xo_tests.work_items' as a WorkItem. +// +// Generated from index '[xo] base filter query'. 
+func WorkItems(ctx context.Context, db DB, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 0 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description %s + FROM xo_tests.work_items %s + WHERE true + %s %s +`, selects, joins, filters, groupbys) + sqlstr += c.orderBy + sqlstr += c.limit + sqlstr = "/* WorkItems */\n" + sqlstr + + // run + // logf(sqlstr, ) + rows, err := 
db.Query(ctx, sqlstr, append([]any{}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDescription/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByDescription/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) } return res, nil } @@ -448,7 +544,8 @@ func WorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts ... sqlstr := fmt.Sprintf(`SELECT work_items.work_item_id, - work_items.title %s + work_items.title, + work_items.description %s FROM xo_tests.work_items %s WHERE work_items.work_item_id = $1 %s %s @@ -461,12 +558,98 @@ func WorkItemByWorkItemID(ctx context.Context, db DB, workItemID int64, opts ... // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/db.Query: %w", &XoError{Entity: "Work item", Err: err})) } wi, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItem]) if err != nil { - return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_items/WorkItemByWorkItemID/pgx.CollectOneRow: %w", &XoError{Entity: "Work item", Err: err})) } return &wi, nil } + +// WorkItemsByTitle retrieves a row from 'xo_tests.work_items' as a WorkItem. +// +// Generated from index 'work_items_title_description_idx1'. 
+func WorkItemsByTitle(ctx context.Context, db DB, title *string, opts ...WorkItemSelectConfigOption) ([]WorkItem, error) { + c := &WorkItemSelectConfig{joins: WorkItemJoins{}, filters: make(map[string][]any)} + + for _, o := range opts { + o(c) + } + + paramStart := 1 + nth := func() string { + paramStart++ + return strconv.Itoa(paramStart) + } + + var filterClauses []string + var filterParams []any + for filterTmpl, params := range c.filters { + filter := filterTmpl + for strings.Contains(filter, "$i") { + filter = strings.Replace(filter, "$i", "$"+nth(), 1) + } + filterClauses = append(filterClauses, filter) + filterParams = append(filterParams, params...) + } + + filters := "" + if len(filterClauses) > 0 { + filters = " AND " + strings.Join(filterClauses, " AND ") + " " + } + + var selectClauses []string + var joinClauses []string + var groupByClauses []string + + if c.joins.DemoWorkItem { + selectClauses = append(selectClauses, workItemTableDemoWorkItemSelectSQL) + joinClauses = append(joinClauses, workItemTableDemoWorkItemJoinSQL) + groupByClauses = append(groupByClauses, workItemTableDemoWorkItemGroupBySQL) + } + + if c.joins.AssignedUsers { + selectClauses = append(selectClauses, workItemTableAssignedUsersSelectSQL) + joinClauses = append(joinClauses, workItemTableAssignedUsersJoinSQL) + groupByClauses = append(groupByClauses, workItemTableAssignedUsersGroupBySQL) + } + + selects := "" + if len(selectClauses) > 0 { + selects = ", " + strings.Join(selectClauses, " ,\n ") + " " + } + joins := strings.Join(joinClauses, " \n ") + " " + groupbys := "" + if len(groupByClauses) > 0 { + groupbys = "GROUP BY " + strings.Join(groupByClauses, " ,\n ") + " " + } + + sqlstr := fmt.Sprintf(`SELECT + work_items.work_item_id, + work_items.title, + work_items.description %s + FROM xo_tests.work_items %s + WHERE work_items.title = $1 + %s %s +`, selects, joins, filters, groupbys) + sqlstr += c.orderBy + sqlstr += c.limit + sqlstr = "/* WorkItemsByTitle */\n" + sqlstr + + // 
run + // logf(sqlstr, title) + rows, err := db.Query(ctx, sqlstr, append([]any{title}, filterParams...)...) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/Query: %w", &XoError{Entity: "Work item", Err: err})) + } + defer rows.Close() + // process + + res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItem]) + if err != nil { + return nil, logerror(fmt.Errorf("WorkItem/WorkItemsByTitleDescription/pgx.CollectRows: %w", &XoError{Entity: "Work item", Err: err})) + } + return res, nil +} diff --git a/internal/repos/postgresql/xo-templates/tests/snapshot/workitemassigneduser.xo.go b/internal/repos/postgresql/xo-templates/tests/snapshot/workitemassigneduser.xo.go index 96eeb8224a..0f139c79cb 100644 --- a/internal/repos/postgresql/xo-templates/tests/snapshot/workitemassigneduser.xo.go +++ b/internal/repos/postgresql/xo-templates/tests/snapshot/workitemassigneduser.xo.go @@ -193,11 +193,11 @@ func (wiau *WorkItemAssignedUser) Insert(ctx context.Context, db DB) (*WorkItemA logf(sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) rows, err := db.Query(ctx, sqlstr, wiau.WorkItemID, wiau.AssignedUser, wiau.Role) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Insert/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -216,11 +216,11 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA rows, err := db.Query(ctx, sqlstr, wiau.Role, wiau.WorkItemID, wiau.AssignedUser) if err != nil { - return nil, 
logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } newwiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/Update/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } *wiau = newwiau @@ -228,7 +228,7 @@ func (wiau *WorkItemAssignedUser) Update(ctx context.Context, db DB) (*WorkItemA } // Upsert upserts a WorkItemAssignedUser in the database. -// Requires appropiate PK(s) to be set beforehand. +// Requires appropriate PK(s) to be set beforehand. func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *WorkItemAssignedUserCreateParams) (*WorkItemAssignedUser, error) { var err error @@ -241,11 +241,11 @@ func (wiau *WorkItemAssignedUser) Upsert(ctx context.Context, db DB, params *Wor var pgErr *pgconn.PgError if errors.As(err, &pgErr) { if pgErr.Code != pgerrcode.UniqueViolation { - return nil, fmt.Errorf("UpsertUser/Insert: %w", err) + return nil, fmt.Errorf("UpsertUser/Insert: %w", &XoError{Entity: "Work item assigned user", Err: err}) } wiau, err = wiau.Update(ctx, db) if err != nil { - return nil, fmt.Errorf("UpsertUser/Update: %w", err) + return nil, fmt.Errorf("UpsertUser/Update: %w", &XoError{Entity: "Work item assigned user", Err: err}) } } } @@ -339,14 +339,14 @@ func WorkItemAssignedUsersByAssignedUserWorkItemID(ctx context.Context, db DB, a // logf(sqlstr, assignedUser, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser, workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByAssignedUserWorkItemID/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -425,11 +425,11 @@ func WorkItemAssignedUserByWorkItemIDAssignedUser(ctx context.Context, db DB, wo // logf(sqlstr, workItemID, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID, assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/db.Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } wiau, err := pgx.CollectOneRow(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", err)) + return nil, logerror(fmt.Errorf("work_item_assigned_user/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectOneRow: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return &wiau, nil @@ -509,14 +509,14 @@ func WorkItemAssignedUsersByWorkItemID(ctx context.Context, db DB, workItemID in // logf(sqlstr, workItemID) rows, err := db.Query(ctx, sqlstr, append([]any{workItemID}, filterParams...)...) 
if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } @@ -595,14 +595,14 @@ func WorkItemAssignedUsersByAssignedUser(ctx context.Context, db DB, assignedUse // logf(sqlstr, assignedUser) rows, err := db.Query(ctx, sqlstr, append([]any{assignedUser}, filterParams...)...) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/Query: %w", &XoError{Entity: "Work item assigned user", Err: err})) } defer rows.Close() // process res, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[WorkItemAssignedUser]) if err != nil { - return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", err)) + return nil, logerror(fmt.Errorf("WorkItemAssignedUser/WorkItemAssignedUserByWorkItemIDAssignedUser/pgx.CollectRows: %w", &XoError{Entity: "Work item assigned user", Err: err})) } return res, nil } diff --git a/internal/rest/api_admin_test.go b/internal/rest/api_admin_test.go index bedf3fd5f4..c952caa0f1 100644 --- a/internal/rest/api_admin_test.go +++ b/internal/rest/api_admin_test.go @@ -4,10 +4,10 @@ import ( "context" "net/http" "net/http/httptest" 
- "os" "testing" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/models" + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/rest/resttestutil" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/services/servicetestutil" "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" @@ -35,7 +35,7 @@ func TestAdminPingRoute(t *testing.T) { defer srv.Close() resp := httptest.NewRecorder() - req, _ := http.NewRequest(http.MethodGet, os.Getenv("API_VERSION")+"/admin/ping", nil) + req, _ := http.NewRequest(http.MethodGet, resttestutil.MustConstructInternalPath("/admin/ping"), nil) req.Header.Add("x-api-key", ufixture.APIKey.APIKey) srv.Handler.ServeHTTP(resp, req) diff --git a/internal/rest/api_default.go b/internal/rest/api_default.go index ea87e2cfe3..f5a29f9aaa 100644 --- a/internal/rest/api_default.go +++ b/internal/rest/api_default.go @@ -7,12 +7,11 @@ import ( "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal" "github.com/gin-gonic/gin" + "github.com/jackc/pgx/v5" ) // OpenapiYamlGet returns this very openapi spec. func (h *Handlers) OpenapiYamlGet(c *gin.Context) { - c.Set(skipResponseValidation, true) - oas, err := os.ReadFile(h.specPath) if err != nil { panic("openapi spec not found") @@ -24,5 +23,15 @@ func (h *Handlers) OpenapiYamlGet(c *gin.Context) { // Ping ping pongs. 
func (h *Handlers) Ping(c *gin.Context) { fmt.Printf("internal.Config.AppEnv: %v\n", internal.Config.AppEnv) + + ctx := c.Request.Context() + tx, err := h.pool.BeginTx(ctx, pgx.TxOptions{}) + if err != nil { + renderErrorResponse(c, "database error", internal.WrapErrorf(err, internal.ErrorCodePrivate, "could not begin tx")) + + return + } + defer tx.Rollback(ctx) + c.String(http.StatusOK, "pong") } diff --git a/internal/rest/api_default_test.go b/internal/rest/api_default_test.go index 4ec596215d..7b0e28d548 100644 --- a/internal/rest/api_default_test.go +++ b/internal/rest/api_default_test.go @@ -3,9 +3,9 @@ package rest import ( "net/http" "net/http/httptest" - "os" "testing" + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/rest/resttestutil" "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" ) @@ -19,7 +19,7 @@ func TestPingRoute(t *testing.T) { } defer srv.Close() - req, _ := http.NewRequest(http.MethodGet, os.Getenv("API_VERSION")+"/ping", nil) + req, _ := http.NewRequest(http.MethodGet, resttestutil.MustConstructInternalPath("/ping"), nil) resp := httptest.NewRecorder() srv.Handler.ServeHTTP(resp, req) diff --git a/internal/rest/api_events.go b/internal/rest/api_events.go index 095be9d05a..bc3d81015f 100644 --- a/internal/rest/api_events.go +++ b/internal/rest/api_events.go @@ -167,10 +167,8 @@ channel use cases,etc: // Events represents server events. // TODO requires query param projectId=... 
// to subscribe to the current project's topics only -func (h *Handlers) Events(c *gin.Context) { +func (h *Handlers) Events(c *gin.Context, params models.EventsParams) { c.Set(skipRequestValidation, true) - c.Set(skipResponseValidation, true) - clientChan, ok := c.Value("clientChan").(ClientChan) if !ok { return diff --git a/internal/rest/api_events_test.go b/internal/rest/api_events_test.go index b7d37e6a4c..fcc89003a8 100644 --- a/internal/rest/api_events_test.go +++ b/internal/rest/api_events_test.go @@ -6,12 +6,12 @@ import ( "context" "net/http" "net/http/httptest" - "os" "strings" "testing" "time" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/models" + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/rest/resttestutil" "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" ) @@ -64,7 +64,7 @@ func TestSSEStream(t *testing.T) { t.Parallel() res := NewStreamRecorder() - req := httptest.NewRequest(http.MethodGet, os.Getenv("API_VERSION")+"/events", nil) + req := httptest.NewRequest(http.MethodGet, resttestutil.MustConstructInternalPath("/events", resttestutil.WithQueryParams(models.EventsParams{ProjectName: models.ProjectDemo})), nil) ctx, cancel := context.WithCancel(context.Background()) req = req.WithContext(ctx) @@ -94,12 +94,14 @@ func TestSSEStream(t *testing.T) { // TODO also test 2 clients concurrently receive, and when one leaves, the other still receives. // ff - assert.Eventually(t, func() bool { + if !assert.Eventually(t, func() bool { body := strings.ReplaceAll(res.Body.String(), " ", "") return strings.Count(body, "event:"+string(models.TopicsGlobalAlerts)) == 1 && strings.Count(body, "event:test-event") == 1 - }, 10*time.Second, 100*time.Millisecond) + }, 10*time.Second, 100*time.Millisecond) { + t.Fatalf("did not receive event") + } cancel() // handler should be stopped before reading body snapshot. 
to not have an arbitrary time sleep diff --git a/internal/rest/api_user.go b/internal/rest/api_user.go index 11f4f520f6..81cc619af3 100644 --- a/internal/rest/api_user.go +++ b/internal/rest/api_user.go @@ -51,7 +51,7 @@ func (h *Handlers) UpdateUser(c *gin.Context, id string) { tx, err := h.pool.BeginTx(ctx, pgx.TxOptions{}) if err != nil { - renderErrorResponse(c, "database error", err) + renderErrorResponse(c, "database error", internal.WrapErrorf(err, internal.ErrorCodePrivate, "could not begin tx")) return } @@ -67,29 +67,28 @@ caller := getUserFromCtx(c) if caller == nil { - renderErrorResponse(c, "Could not get user from context.", nil) + renderErrorResponse(c, "Could not get current user", nil) return } user, err := h.usvc.Update(c, tx, id, caller, body) if err != nil { - renderErrorResponse(c, "err: ", err) + renderErrorResponse(c, "Could not update user", err) return } err = tx.Commit(ctx) if err != nil { - renderErrorResponse(c, "could not save changes", err) + renderErrorResponse(c, "Could not save changes", err) return } role, ok := h.authzsvc.RoleByRank(user.RoleRank) if !ok { - msg := fmt.Sprintf("role with rank %d not found", user.RoleRank) - renderErrorResponse(c, msg, errors.New(msg)) + renderErrorResponse(c, fmt.Sprintf("Role with rank %d not found", user.RoleRank), nil) return } @@ -109,7 +108,7 @@ func (h *Handlers) UpdateUserAuthorization(c *gin.Context, id string) { tx, err := h.pool.BeginTx(ctx, pgx.TxOptions{}) if err != nil { - renderErrorResponse(c, "database error", err) + renderErrorResponse(c, "database error", internal.WrapErrorf(err, internal.ErrorCodePrivate, "could not begin tx")) return } @@ -125,7 +124,7 @@ caller := getUserFromCtx(c) if caller == nil { - renderErrorResponse(c, "Could not get user from context.", nil) + renderErrorResponse(c, "Could not get current user", nil) return } diff --git 
a/internal/rest/api_user_test.go b/internal/rest/api_user_test.go index fdc0c2ea5a..01d1a2ef5d 100644 --- a/internal/rest/api_user_test.go +++ b/internal/rest/api_user_test.go @@ -7,10 +7,10 @@ import ( "fmt" "net/http" "net/http/httptest" - "os" "testing" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/models" + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/rest/resttestutil" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/services/servicetestutil" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/utils/format" "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal/utils/pointers" @@ -47,7 +47,7 @@ func TestGetUserRoute(t *testing.T) { t.Fatalf("ff.CreateUser: %s", err) } - req, err := http.NewRequest(http.MethodGet, os.Getenv("API_VERSION")+"/user/me", &bytes.Buffer{}) + req, err := http.NewRequest(http.MethodGet, resttestutil.MustConstructInternalPath("/user/me"), &bytes.Buffer{}) if err != nil { t.Errorf("%v", err) } @@ -114,7 +114,7 @@ func TestUpdateUserRoute(t *testing.T) { t.Errorf("unexpected error %v", err) } - path := os.Getenv("API_VERSION") + fmt.Sprintf("/user/%s/authorization", normalUser.User.UserID) + path := resttestutil.MustConstructInternalPath(fmt.Sprintf("/user/%s/authorization", normalUser.User.UserID)) req, err := http.NewRequest(http.MethodPatch, path, &buf) if err != nil { t.Errorf("unexpected error %v", err) @@ -150,7 +150,7 @@ func TestUpdateUserRoute(t *testing.T) { t.Errorf("unexpected error %v", err) } - path := os.Getenv("API_VERSION") + fmt.Sprintf("/user/%s", normalUser.User.UserID) + path := resttestutil.MustConstructInternalPath(fmt.Sprintf("/user/%s", normalUser.User.UserID)) req, err := http.NewRequest(http.MethodPatch, path, &buf) if err != nil { t.Errorf("unexpected error %v", err) diff --git a/internal/rest/context.go b/internal/rest/context.go index 01f4034fcf..601c66bd86 100644 --- a/internal/rest/context.go +++ b/internal/rest/context.go @@ -8,13 +8,13 @@ import ( ) 
const ( - userCtxKey = "user" - userInfoCtxKey = "user-info" - responseWriteCtxKey = "response-writer" - ginContextKey = "middleware.openapi/gin-context" - userDataKey = "middleware.openapi/user-data" - skipResponseValidation = "skip-response-validation" - skipRequestValidation = "skip-request-validation" + userCtxKey = "user" + userInfoCtxKey = "user-info" + responseWriteCtxKey = "response-writer" + ginContextKey = "middleware.openapi/gin-context" + userDataKey = "middleware.openapi/user-data" + validateResponse = "skip-response-validation" + skipRequestValidation = "skip-request-validation" ) func getSkipRequestValidationFromCtx(c *gin.Context) bool { @@ -26,8 +26,8 @@ func getSkipRequestValidationFromCtx(c *gin.Context) bool { return skip } -func getSkipResponseValidationFromCtx(c *gin.Context) bool { - skip, ok := c.Value(skipResponseValidation).(bool) +func getValidateResponseFromCtx(c *gin.Context) bool { + skip, ok := c.Value(validateResponse).(bool) if !ok { return false } diff --git a/internal/rest/middleware.auth.go b/internal/rest/middleware.auth.go index 11a765c52e..cb1a8866c5 100644 --- a/internal/rest/middleware.auth.go +++ b/internal/rest/middleware.auth.go @@ -93,7 +93,7 @@ func (a *authMiddleware) EnsureAuthorized(config AuthRestriction) gin.HandlerFun return func(c *gin.Context) { user := getUserFromCtx(c) if user == nil { - renderErrorResponse(c, "Could not get user from context.", nil) + renderErrorResponse(c, "Could not get current user.", nil) c.Abort() return diff --git a/internal/rest/middleware.openapi.go b/internal/rest/middleware.openapi.go index a66498e29c..d05ac8b1bb 100644 --- a/internal/rest/middleware.openapi.go +++ b/internal/rest/middleware.openapi.go @@ -92,7 +92,7 @@ func (o *openapiMiddleware) RequestValidatorWithOptions(options *OAValidatorOpti c.Next() // handle actual endpoint - if !options.ValidateResponse || getSkipResponseValidationFromCtx(c) { + if !options.ValidateResponse && !getValidateResponseFromCtx(c) { 
rbw.ResponseWriter.Write(rbw.body.Bytes()) return diff --git a/internal/rest/openapi_server.gen.go b/internal/rest/openapi_server.gen.go index 045ed56b4c..e005dca376 100644 --- a/internal/rest/openapi_server.gen.go +++ b/internal/rest/openapi_server.gen.go @@ -25,7 +25,7 @@ type ServerInterface interface { MyProviderLogin(c *gin.Context) // (GET /events) - Events(c *gin.Context) + Events(c *gin.Context, params externalRef0.EventsParams) // Returns this very OpenAPI spec. // (GET /openapi.yaml) OpenapiYamlGet(c *gin.Context) @@ -117,7 +117,27 @@ func (siw *ServerInterfaceWrapper) MyProviderLogin(c *gin.Context) { // Events operation with its own middleware. func (siw *ServerInterfaceWrapper) Events(c *gin.Context) { - siw.Handler.Events(c) + var err error + + // Parameter object where we will unmarshal all parameters from the context + var params externalRef0.EventsParams + + // ------------- Required query parameter "projectName" ------------- + + if paramValue := c.Query("projectName"); paramValue != "" { + + } else { + c.JSON(http.StatusBadRequest, gin.H{"msg": "Query argument projectName is required, but not found"}) + return + } + + err = runtime.BindQueryParameter("form", true, true, "projectName", c.Request.URL.Query(), ¶ms.ProjectName) + if err != nil { + c.JSON(http.StatusBadRequest, gin.H{"msg": fmt.Sprintf("Invalid format for parameter projectName: %s", err)}) + return + } + + siw.Handler.Events(c, params) } // OpenapiYamlGet operation with its own middleware. diff --git a/internal/rest/responses.go b/internal/rest/responses.go index 010222a4ab..13c4713d28 100644 --- a/internal/rest/responses.go +++ b/internal/rest/responses.go @@ -15,51 +15,86 @@ import ( // ErrorResponse represents a response containing an error message. 
type ErrorResponse struct { - Error string `json:"error"` - Message string `json:"message"` - ValidationError models.HTTPValidationError `json:"validationError,omitempty"` + Title string `json:"title"` + Detail string `json:"detail"` + Status int `json:"status"` + Error string `json:"error"` + Type string `json:"type"` + ValidationError *models.HTTPValidationError `json:"validationError,omitempty"` } -// renderErrorResponse writes an error response from message and error. -func renderErrorResponse(c *gin.Context, msg string, err error) { - resp := ErrorResponse{Error: msg} - status := http.StatusInternalServerError +// renderErrorResponse writes an error response from title and error. +// Inspired by https://www.rfc-editor.org/rfc/rfc7807. +func renderErrorResponse(c *gin.Context, title string, err error) { + resp := ErrorResponse{ + Title: title, Error: err.Error(), + Type: internal.ErrorCodeUnknown.String(), + Status: http.StatusInternalServerError, + } + + /** + * + * + * + + o "type" (string) - A URI reference [RFC3986] that identifies the + problem type. This specification encourages that, when + dereferenced, it provide human-readable documentation for the + problem type (e.g., using HTML [W3C.REC-html5-20141028]). When + this member is not present, its value is assumed to be + "about:blank". + TODO: simple html page with fragments, generated from mapping + ErrCode to go tmpl + : /problems#NotFound + + o "title" (string) - A short, human-readable summary of the problem + type. It SHOULD NOT change from occurrence to occurrence of the + problem, except for purposes of localization (e.g., using + proactive content negotiation; see [RFC7231], Section 3.4). + + o "status" (number) - The HTTP status code ([RFC7231], Section 6) + generated by the origin server for this occurrence of the problem. + + o "detail" (string) - A human-readable explanation specific to this + occurrence of the problem. 
+ */ var ierr *internal.Error if !errors.As(err, &ierr) { - resp.Error = "internal error" - resp.Message = msg + resp.Title = "internal error" + resp.Detail = title } else { - resp.Message = ierr.Cause().Error() // do we really want cause only + resp.Type = ierr.Code().String() + resp.Detail = ierr.Cause().Error() switch ierr.Code() { case internal.ErrorCodeNotFound: - status = http.StatusNotFound + resp.Status = http.StatusNotFound case internal.ErrorCodeInvalidArgument: - status = http.StatusBadRequest - case internal.ErrorCodeInvalidRole, internal.ErrorCodeInvalidScope: - status = http.StatusBadRequest + resp.Status = http.StatusBadRequest + case internal.ErrorCodeInvalidRole, internal.ErrorCodeInvalidScope, internal.ErrorCodeInvalidUUID: + resp.Status = http.StatusBadRequest case internal.ErrorCodeRequestValidation: - status = http.StatusBadRequest - resp.Message = "OpenAPI request validation failed" + resp.Status = http.StatusBadRequest + resp.Detail = "OpenAPI request validation failed" resp.ValidationError = extractValidationError(err, "request") case internal.ErrorCodeResponseValidation: - status = http.StatusInternalServerError - resp.Message = "OpenAPI response validation failed" + resp.Status = http.StatusInternalServerError + resp.Detail = "OpenAPI response validation failed" resp.ValidationError = extractValidationError(err, "response") case internal.ErrorCodeAlreadyExists: - status = http.StatusConflict + resp.Status = http.StatusConflict case internal.ErrorCodeUnauthorized: - status = http.StatusForbidden + resp.Status = http.StatusForbidden case internal.ErrorCodeUnauthenticated: - status = http.StatusUnauthorized + resp.Status = http.StatusUnauthorized case internal.ErrorCodePrivate: - resp.Message = "internal error" - resp.Error = "internal error" + resp = ErrorResponse{Title: "internal error", Detail: "internal error"} + fallthrough case internal.ErrorCodeUnknown: fallthrough default: - status = http.StatusInternalServerError + resp.Status = 
http.StatusInternalServerError } } @@ -70,10 +105,10 @@ func renderErrorResponse(c *gin.Context, msg string, err error) { span.RecordError(err) } - renderResponse(c, resp, status) + renderResponse(c, resp, resp.Status) } -func extractValidationError(err error, typ string) models.HTTPValidationError { +func extractValidationError(err error, typ string) *models.HTTPValidationError { var origErrs []string var vErrs []models.ValidationError @@ -148,7 +183,7 @@ func extractValidationError(err error, typ string) models.HTTPValidationError { Messages: slices.RemoveEmptyString(origErrs), } - return httpValidationError + return &httpValidationError } func renderResponse(c *gin.Context, res any, status int) { diff --git a/internal/rest/resttestutil/url.go b/internal/rest/resttestutil/url.go new file mode 100644 index 0000000000..756c6f5644 --- /dev/null +++ b/internal/rest/resttestutil/url.go @@ -0,0 +1,96 @@ +package resttestutil + +import ( + "fmt" + "net/url" + "reflect" + "strings" + + "github.com/danicc097/openapi-go-gin-postgres-sqlc/internal" +) + +type constructURLOptions struct { + params any +} + +// ConstructURLOption is the type for options that can be passed to ConstructInternalPath. +type ConstructURLOption func(*constructURLOptions) + +// WithQueryParams specifies the struct containing the query parameters. +func WithQueryParams(params any) ConstructURLOption { + return func(o *constructURLOptions) { + o.params = params + } +} + +// ConstructInternalPath constructs a URL with encoded parameters based +// on the non-nil fields of the provided struct via the form tag. +// Required path prefixes are added automatically. 
+func ConstructInternalPath(subpath string, options ...ConstructURLOption) (string, error) { + cleanSubpath := strings.TrimPrefix(strings.TrimPrefix(subpath, internal.Config.APIVersion), "/") + u, err := url.Parse(internal.Config.APIVersion + "/" + cleanSubpath) + if err != nil { + return "", fmt.Errorf("could not parse URL: %w", err) + } + + query := u.Query() + + // Process options + opts := &constructURLOptions{} + for _, opt := range options { + opt(opts) + } + + if opts.params == nil { + return u.String(), nil + } + + v := reflect.ValueOf(opts.params) + t := reflect.TypeOf(opts.params) + + if t.Kind() == reflect.Ptr { + t = t.Elem() + v = v.Elem() + } + + if t.Kind() != reflect.Struct { + return "", fmt.Errorf("params must be a struct") + } + + for i := 0; i < t.NumField(); i++ { + field := t.Field(i) + value := v.Field(i) + + if value.Kind() == reflect.Ptr { + if value.IsNil() { + continue + } + + value = value.Elem() + } + + fieldName := field.Name + formTag := field.Tag.Get("form") + if formTag != "" { + fieldName = formTag + } + + fieldValue := fmt.Sprintf("%v", value.Interface()) + query.Add(fieldName, fieldValue) + } + + u.RawQuery = query.Encode() + + return u.String(), nil +} + +// MustConstructInternalPath constructs a URL with encoded parameters based on the non-nil fields of the provided struct. +// Required path prefixes are added automatically. It panics if an error occurs during URL construction. +func MustConstructInternalPath(subpath string, options ...ConstructURLOption) string { + url, err := ConstructInternalPath(subpath, options...) 
+ if err != nil { + panic(fmt.Errorf("could not construct URL: %w", err)) + } + + return url +} diff --git a/internal/rest/server.go b/internal/rest/server.go index 230d95a6ea..dee9f16bbb 100644 --- a/internal/rest/server.go +++ b/internal/rest/server.go @@ -276,7 +276,7 @@ func NewServer(conf Config, opts ...ServerOption) (*server, error) { // Run configures a server and underlying services with the given configuration. // NewServer takes its own config as is now -func Run(env, address, specPath, rolePolicyPath, scopePolicyPath string) (<-chan error, error) { +func Run(env, specPath, rolePolicyPath, scopePolicyPath string) (<-chan error, error) { var err error if err = envvar.Load(env); err != nil { @@ -288,7 +288,7 @@ func Run(env, address, specPath, rolePolicyPath, scopePolicyPath string) (<-chan var logger *zap.Logger // XXX there's work being done in https://github.com/uptrace/opentelemetry-go-extra/tree/main/otelzap switch cfg.AppEnv { - case "prod", "e2e": + case "prod": logger, err = zap.NewProduction() default: logger, err = zap.NewDevelopment() @@ -328,7 +328,7 @@ func Run(env, address, specPath, rolePolicyPath, scopePolicyPath string) (<-chan } srv, err := NewServer(Config{ - Address: address, + Address: ":" + strings.TrimPrefix(cfg.APIPort, ":"), Pool: pool, SQLPool: sqlpool, Redis: rdb, @@ -383,7 +383,7 @@ func Run(env, address, specPath, rolePolicyPath, scopePolicyPath string) (<-chan }() go func() { - logger.Info("Listening and serving", zap.String("address", address)) + logger.Info("Listening and serving", zap.String("address", cfg.APIPort)) // "ListenAndServe always returns a non-nil error. After Shutdown or Close, the returned error is // ErrServerClosed." 
@@ -421,7 +421,7 @@ func createOpenAPIValidatorOptions() OAValidatorOptions { oafilterOpts.WithCustomSchemaErrorFunc(CustomSchemaErrorFunc) oaOptions := OAValidatorOptions{ - ValidateResponse: true, + ValidateResponse: os.Getenv("IS_TESTING") != "", Options: oafilterOpts, } diff --git a/internal/rest/server_test.go b/internal/rest/server_test.go index 701cb19091..436e9447d4 100644 --- a/internal/rest/server_test.go +++ b/internal/rest/server_test.go @@ -46,7 +46,7 @@ func TestValidationErrorsResponse(t *testing.T) { req, _ := http.NewRequest(http.MethodGet, "/validation_errors", nil) engine.ServeHTTP(resp, req) - jsonErr := "{\"error\":\"invalid response\",\"message\":\"OpenAPI response validation failed\",\"validationError\":{\"detail\":[{\"detail\":{\"schema\":{\"type\":\"integer\"},\"value\":\"\\\"a_wrong_id\\\"\"},\"loc\":[\"id\"],\"msg\":\"value must be an integer\",\"type\":\"response_validation\"}],\"messages\":[\"response body error\"]}}" + jsonErr := "{\"title\":\"invalid response\",\"detail\":\"OpenAPI response validation failed\",\"status\":500,\"error\":\"OpenAPI response validation failed: response body doesn't match schema: $$$${\\\"detail\\\":{\\\"schema\\\":{\\\"type\\\":\\\"integer\\\"},\\\"value\\\":\\\"\\\\\\\"a_wrong_id\\\\\\\"\\\"},\\\"loc\\\":[\\\"id\\\"],\\\"msg\\\":\\\"value must be an integer\\\",\\\"type\\\":\\\"unknown\\\"}\",\"type\":\"ResponseValidation\",\"validationError\":{\"detail\":[{\"detail\":{\"schema\":{\"type\":\"integer\"},\"value\":\"\\\"a_wrong_id\\\"\"},\"loc\":[\"id\"],\"msg\":\"value must be an integer\",\"type\":\"response_validation\"}],\"messages\":[\"response body error\"]}}" assert.Equal(t, jsonErr, resp.Body.String()) assert.Equal(t, http.StatusInternalServerError, resp.Code) @@ -90,7 +90,7 @@ func TestValidationErrorsResponse(t *testing.T) { req.Header.Add("Content-Type", "application/json") engine.ServeHTTP(resp, req) - jsonErr := "{\"error\":\"invalid request\",\"message\":\"OpenAPI request validation 
failed\",\"validationError\":{\"detail\":[{\"detail\":{\"schema\":{\"type\":\"integer\"},\"value\":\"\\\"a_wrong_id\\\"\"},\"loc\":[\"id\"],\"msg\":\"value must be an integer\",\"type\":\"request_validation\"},{\"detail\":{\"schema\":{\"type\":\"string\"},\"value\":\"1234\"},\"loc\":[\"name\"],\"msg\":\"value must be a string\",\"type\":\"request_validation\"},{\"detail\":{\"schema\":{\"properties\":{\"color\":{\"type\":\"string\"},\"nestedProperty\":{\"type\":\"string\"}},\"required\":[\"nestedProperty\"],\"type\":\"object\"},\"value\":\"{\\\"color\\\":\\\"color\\\"}\"},\"loc\":[\"nested\",\"nestedProperty\"],\"msg\":\"property \\\"nestedProperty\\\" is missing\",\"type\":\"request_validation\"}],\"messages\":[\"request body has an error: doesn't match schema\"]}}" + jsonErr := "{\"title\":\"invalid request\",\"detail\":\"OpenAPI request validation failed\",\"status\":400,\"error\":\"OpenAPI request validation failed: validation errors encountered: request body has an error: doesn't match schema: $$$${\\\"detail\\\":{\\\"schema\\\":{\\\"type\\\":\\\"integer\\\"},\\\"value\\\":\\\"\\\\\\\"a_wrong_id\\\\\\\"\\\"},\\\"loc\\\":[\\\"id\\\"],\\\"msg\\\":\\\"value must be an integer\\\",\\\"type\\\":\\\"unknown\\\"} | $$$${\\\"detail\\\":{\\\"schema\\\":{\\\"type\\\":\\\"string\\\"},\\\"value\\\":\\\"1234\\\"},\\\"loc\\\":[\\\"name\\\"],\\\"msg\\\":\\\"value must be a string\\\",\\\"type\\\":\\\"unknown\\\"} | $$$${\\\"detail\\\":{\\\"schema\\\":{\\\"properties\\\":{\\\"color\\\":{\\\"type\\\":\\\"string\\\"},\\\"nestedProperty\\\":{\\\"type\\\":\\\"string\\\"}},\\\"required\\\":[\\\"nestedProperty\\\"],\\\"type\\\":\\\"object\\\"},\\\"value\\\":\\\"{\\\\\\\"color\\\\\\\":\\\\\\\"color\\\\\\\"}\\\"},\\\"loc\\\":[\\\"nested\\\",\\\"nestedProperty\\\"],\\\"msg\\\":\\\"property \\\\\\\"nestedProperty\\\\\\\" is 
missing\\\",\\\"type\\\":\\\"unknown\\\"}\",\"type\":\"RequestValidation\",\"validationError\":{\"detail\":[{\"detail\":{\"schema\":{\"type\":\"integer\"},\"value\":\"\\\"a_wrong_id\\\"\"},\"loc\":[\"id\"],\"msg\":\"value must be an integer\",\"type\":\"request_validation\"},{\"detail\":{\"schema\":{\"type\":\"string\"},\"value\":\"1234\"},\"loc\":[\"name\"],\"msg\":\"value must be a string\",\"type\":\"request_validation\"},{\"detail\":{\"schema\":{\"properties\":{\"color\":{\"type\":\"string\"},\"nestedProperty\":{\"type\":\"string\"}},\"required\":[\"nestedProperty\"],\"type\":\"object\"},\"value\":\"{\\\"color\\\":\\\"color\\\"}\"},\"loc\":[\"nested\",\"nestedProperty\"],\"msg\":\"property \\\"nestedProperty\\\" is missing\",\"type\":\"request_validation\"}],\"messages\":[\"request body has an error: doesn't match schema\"]}}" assert.Equal(t, jsonErr, resp.Body.String()) assert.Equal(t, http.StatusBadRequest, resp.Code) diff --git a/internal/services/project.go b/internal/services/project.go index 5db03b3d9c..3826162fab 100644 --- a/internal/services/project.go +++ b/internal/services/project.go @@ -3,6 +3,7 @@ package services import ( "context" "encoding/json" + "errors" "fmt" "reflect" "strings" @@ -65,8 +66,8 @@ func (p *Project) ByName(ctx context.Context, d db.DBTX, name models.Project) (* // we are not typing the update to save ourselves from manually adding a migration to change projects.board_config // when _any_ field changes. we generate a new config the way it must be and merge with whatever was in db's board_config there at app startup. // the endpoint to update it will be validated by openapi libs as usual. 
-func (p *Project) MergeConfigFields(ctx context.Context, d db.DBTX, projectID int, update map[string]any) (*models.ProjectConfig, error) { - project, err := p.projectRepo.ByID(ctx, d, projectID) +func (p *Project) MergeConfigFields(ctx context.Context, d db.DBTX, projectName models.Project, update map[string]any) (*models.ProjectConfig, error) { + project, err := p.projectRepo.ByName(ctx, d, projectName) if err != nil { return nil, internal.NewErrorf(internal.ErrorCodeNotFound, "project not found") } @@ -76,14 +77,18 @@ func (p *Project) MergeConfigFields(ctx context.Context, d db.DBTX, projectID in fmt.Printf("project.BoardConfig: %v\n", project.BoardConfig) var workItem any - switch project.Name { + // explicitly initialize what we want to allow an admin to edit in project config ui + switch projectName { case models.ProjectDemo: - // explicitly initialize what we want to allow an admin to edit in project config ui workItem = &models.RestDemoWorkItemsResponse{DemoWorkItem: models.DbDemoWorkItem{}, Closed: pointers.New(time.Now())} // workItem = structs.InitializeFields(reflect.ValueOf(workItem), 1).Interface() // we want very specific fields to be editable in config so it doesn't clutter it fmt.Printf("workItem: %+v\n", workItem) + case models.ProjectDemoTwo: + fallthrough + default: + return nil, errors.New("not implemented") } - pathKeys := structs.GetKeys(workItem, "") + pathKeys := structs.GetKeys("json", workItem, "") // index ProjectConfig.Fields by path for simpler logic for _, path := range pathKeys { @@ -94,8 +99,10 @@ func (p *Project) MergeConfigFields(ctx context.Context, d db.DBTX, projectID in fj, _ := json.Marshal(project.BoardConfig) json.Unmarshal(fj, &boardConfigMap) + // update default config with current db config and merge updated config on top + // merge with default config is necessary for project init, + // but merge with existing db config isn't really necessary. 
p.mergeFieldsMap(fieldsMap, boardConfigMap) - p.mergeFieldsMap(fieldsMap, update) project.BoardConfig.Fields = make([]models.ProjectConfigField, 0, len(fieldsMap)) diff --git a/internal/services/project_test.go b/internal/services/project_test.go index 66479f1d6b..3a283dbbe1 100644 --- a/internal/services/project_test.go +++ b/internal/services/project_test.go @@ -18,30 +18,35 @@ import ( func Test_MergeConfigFields(t *testing.T) { t.Parallel() - fakeProjectRepo := &repostesting.FakeProject{} - fakeProjectRepo.ByIDStub = func(ctx context.Context, d db.DBTX, i int) (*db.Project, error) { - return &db.Project{ - Name: models.ProjectDemo, - BoardConfig: models.ProjectConfig{ - Header: []string{"demoProject.ref", "workItemType"}, - Fields: []models.ProjectConfigField{ - { - IsEditable: true, - ShowCollapsed: true, - IsVisible: true, - Path: "demoWorkItem", - Name: "Demo project", - }, - { - IsEditable: true, - ShowCollapsed: true, - IsVisible: true, - Path: "demoWorkItem.ref", - Name: "Reference", - }, + proj := &db.Project{ + Name: models.ProjectDemo, + BoardConfig: models.ProjectConfig{ + Header: []string{"demoProject.ref", "workItemType"}, + Fields: []models.ProjectConfigField{ + { + IsEditable: true, + ShowCollapsed: true, + IsVisible: true, + Path: "demoWorkItem", + Name: "Demo project", + }, + { + IsEditable: true, + ShowCollapsed: true, + IsVisible: true, + Path: "demoWorkItem.ref", + Name: "Reference", }, }, - }, nil + }, + } + + fakeProjectRepo := &repostesting.FakeProject{} + fakeProjectRepo.ByIDStub = func(ctx context.Context, d db.DBTX, i int) (*db.Project, error) { + return proj, nil + } + fakeProjectRepo.ByNameStub = func(ctx context.Context, d db.DBTX, p models.Project) (*db.Project, error) { + return proj, nil } fakeTeamRepo := &repostesting.FakeTeam{} p := services.NewProject(zaptest.NewLogger(t).Sugar(), fakeProjectRepo, fakeTeamRepo) @@ -93,7 +98,7 @@ func Test_MergeConfigFields(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - got, 
err := p.MergeConfigFields(context.Background(), &pgxpool.Pool{}, 1, tc.args.update) + got, err := p.MergeConfigFields(context.Background(), &pgxpool.Pool{}, models.ProjectDemo, tc.args.update) if (err != nil) && tc.error == "" { t.Fatalf("unexpected error = %v", err) } diff --git a/internal/tracing/attributes.go b/internal/tracing/attributes.go index de55669de2..859464a4a6 100644 --- a/internal/tracing/attributes.go +++ b/internal/tracing/attributes.go @@ -5,6 +5,7 @@ import ( ) // Filterable with user-id="..." -// In frontend we would have something unique (and not personally identifiable) +// In frontend we would have a combination of user-id and random string // to each navigation to correlate user interaction, fetch and document load traces +// from each open instance. const UserIDAttribute = attribute.Key("user-id") diff --git a/internal/utils/structs/structs.go b/internal/utils/structs/structs.go index 03984dde86..971d75bfb6 100644 --- a/internal/utils/structs/structs.go +++ b/internal/utils/structs/structs.go @@ -5,8 +5,8 @@ import ( "strings" ) -// GetKeys returns a slice of json keys extracted from an initialized struct's tags. -func GetKeys(s any, parent string) []string { +// GetKeys returns a slice of tag values extracted from an initialized struct. 
+func GetKeys(tag string, s any, parent string) []string { keys := []string{} if s == nil { @@ -24,7 +24,7 @@ func GetKeys(s any, parent string) []string { if val.Kind() == reflect.Slice || val.Kind() == reflect.Array { for j := 0; j < val.Len(); j++ { elem := val.Index(j).Interface() - subkeys := GetKeys(elem, "") + subkeys := GetKeys(tag, elem, "") for _, subkey := range subkeys { keys = append(keys, parent+"."+subkey) } @@ -34,11 +34,11 @@ func GetKeys(s any, parent string) []string { if val.Kind() == reflect.Struct { for idx := 0; idx < val.NumField(); idx++ { typeField := val.Type().Field(idx) - jsonTag := typeField.Tag.Get("json") - if jsonTag == "" { + tagValue := typeField.Tag.Get(tag) + if tagValue == "" { continue } - key := strings.Split(jsonTag, ",")[0] + key := strings.Split(tagValue, ",")[0] switch typeField.Type.Kind() { case reflect.Array, reflect.Slice: @@ -47,14 +47,14 @@ func GetKeys(s any, parent string) []string { } for j := 0; j < val.Field(idx).Len(); j++ { elem := val.Field(idx).Index(j).Interface() - subkeys := GetKeys(elem, key) + subkeys := GetKeys(tag, elem, key) for _, subkey := range subkeys { keys = append(keys, key+"."+subkey) } } case reflect.Struct: keys = append(keys, key) - subkeys := GetKeys(val.Field(idx).Interface(), key) + subkeys := GetKeys(tag, val.Field(idx).Interface(), key) for _, subkey := range subkeys { keys = append(keys, key+"."+subkey) } @@ -63,7 +63,7 @@ func GetKeys(s any, parent string) []string { continue } keys = append(keys, key) - subkeys := GetKeys(val.Field(idx).Interface(), key) + subkeys := GetKeys(tag, val.Field(idx).Interface(), key) for _, subkey := range subkeys { keys = append(keys, key+"."+subkey) } diff --git a/internal/utils/structs/structs_test.go b/internal/utils/structs/structs_test.go index 59535c3100..6eb0bb3b77 100644 --- a/internal/utils/structs/structs_test.go +++ b/internal/utils/structs/structs_test.go @@ -22,7 +22,7 @@ func TestGetKeys(t *testing.T) { 
"nestedStruct.nestedStruct2.nestedKey3", } - if diff := cmp.Diff(want, structs.GetKeys(ex, "")); diff != "" { + if diff := cmp.Diff(want, structs.GetKeys("json", ex, "")); diff != "" { t.Errorf("GetKeys() mismatch (-want +got):\n%s", diff) } }) @@ -51,7 +51,7 @@ func TestGetKeys(t *testing.T) { "nestedStruct.nestedStruct2.nestedKey3", } - if diff := cmp.Diff(want, structs.GetKeys(ex, "")); diff != "" { + if diff := cmp.Diff(want, structs.GetKeys("json", ex, "")); diff != "" { t.Errorf("GetKeys() mismatch (-want +got):\n%s", diff) } }) @@ -77,7 +77,7 @@ func TestGetKeys(t *testing.T) { "stringArray", } - if diff := cmp.Diff(want, structs.GetKeys(ex, "")); diff != "" { + if diff := cmp.Diff(want, structs.GetKeys("json", ex, "")); diff != "" { t.Errorf("GetKeys() mismatch (-want +got):\n%s", diff) } }) diff --git a/openapi-go.code-workspace.example b/openapi-go.code-workspace.example index 0631ce8dc2..1451d3ce58 100644 --- a/openapi-go.code-workspace.example +++ b/openapi-go.code-workspace.example @@ -2,6 +2,9 @@ "folders": [ { "path": "." + }, + { + "path": "e2e" } ], "settings": { diff --git a/openapi.yaml b/openapi.yaml index 08f345522d..e806535895 100644 --- a/openapi.yaml +++ b/openapi.yaml @@ -56,6 +56,12 @@ paths: /events: get: operationId: Events + parameters: + - name: projectName + required: true + schema: + $ref: '#/components/schemas/Project' + in: query responses: "200": description: events diff --git a/vendor/github.com/jackc/pgx/v5/CHANGELOG.md b/vendor/github.com/jackc/pgx/v5/CHANGELOG.md index ec90631cd5..f923fd8ca4 100644 --- a/vendor/github.com/jackc/pgx/v5/CHANGELOG.md +++ b/vendor/github.com/jackc/pgx/v5/CHANGELOG.md @@ -1,3 +1,26 @@ +# 5.4.0 (June 14, 2023) + +* Replace platform specific syscalls for non-blocking IO with more traditional goroutines and deadlines. This returns to the v4 approach with some additional improvements and fixes. 
This restores the ability to use a pgx.Conn over an ssh.Conn as well as other non-TCP or Unix socket connections. In addition, it is a significantly simpler implementation that is less likely to have cross platform issues. +* Optimization: The default type registrations are now shared among all connections. This saves about 100KB of memory per connection. `pgtype.Type` and `pgtype.Codec` values are now required to be immutable after registration. This was already necessary in most cases but wasn't documented until now. (Lev Zakharov) +* Fix: Ensure pgxpool.Pool.QueryRow.Scan releases connection on panic +* CancelRequest: don't try to read the reply (Nicola Murino) +* Fix: correctly handle bool type aliases (Wichert Akkerman) +* Fix: pgconn.CancelRequest: Fix unix sockets: don't use RemoteAddr() +* Fix: pgx.Conn memory leak with prepared statement caching (Evan Jones) +* Add BeforeClose to pgxpool.Pool (Evan Cordell) +* Fix: various hstore fixes and optimizations (Evan Jones) +* Fix: RowToStructByPos with embedded unexported struct +* Support different bool string representations (Lev Zakharov) +* Fix: error when using BatchResults.Exec on a select that returns an error after some rows. +* Fix: pipelineBatchResults.Exec() not returning error from ResultReader +* Fix: pipeline batch results not closing pipeline when error occurs while reading directly from results instead of using + a callback. +* Fix: scanning a table type into a struct +* Fix: scan array of record to pointer to slice of struct +* Fix: handle null for json (Cemre Mengu) +* Batch Query callback is called even when there is an error +* Add RowTo(AddrOf)StructByNameLax (Audi P. 
Risa P) + # 5.3.1 (February 27, 2023) * Fix: Support v4 and v5 stdlib in same program (Tomáš Procházka) diff --git a/vendor/github.com/jackc/pgx/v5/README.md b/vendor/github.com/jackc/pgx/v5/README.md index ab3f5ea7f0..14327f2c65 100644 --- a/vendor/github.com/jackc/pgx/v5/README.md +++ b/vendor/github.com/jackc/pgx/v5/README.md @@ -132,9 +132,15 @@ These adapters can be used with the tracelog package. * [github.com/jackc/pgx-logrus](https://github.com/jackc/pgx-logrus) * [github.com/jackc/pgx-zap](https://github.com/jackc/pgx-zap) * [github.com/jackc/pgx-zerolog](https://github.com/jackc/pgx-zerolog) +* [github.com/mcosta74/pgx-slog](https://github.com/mcosta74/pgx-slog) ## 3rd Party Libraries with PGX Support +### [github.com/pashagolub/pgxmock](https://github.com/pashagolub/pgxmock) + +pgxmock is a mock library implementing pgx interfaces. +pgxmock has one and only purpose - to simulate pgx behavior in tests, without needing a real database connection. + ### [github.com/georgysavva/scany](https://github.com/georgysavva/scany) Library for scanning data from a database into Go structs and more. diff --git a/vendor/github.com/jackc/pgx/v5/conn.go b/vendor/github.com/jackc/pgx/v5/conn.go index dc98603593..a609d10020 100644 --- a/vendor/github.com/jackc/pgx/v5/conn.go +++ b/vendor/github.com/jackc/pgx/v5/conn.go @@ -178,7 +178,7 @@ func ParseConfigWithOptions(connString string, options ParseConfigOptions) (*Con case "simple_protocol": defaultQueryExecMode = QueryExecModeSimpleProtocol default: - return nil, fmt.Errorf("invalid default_query_exec_mode: %v", err) + return nil, fmt.Errorf("invalid default_query_exec_mode: %s", s) } } @@ -382,11 +382,9 @@ func quoteIdentifier(s string) string { return `"` + strings.ReplaceAll(s, `"`, `""`) + `"` } -// Ping executes an empty sql statement against the *Conn -// If the sql returns without error, the database Ping is considered successful, otherwise, the error is returned. 
+// Ping delegates to the underlying *pgconn.PgConn.Ping. func (c *Conn) Ping(ctx context.Context) error { - _, err := c.Exec(ctx, ";") - return err + return c.pgConn.Ping(ctx) } // PgConn returns the underlying *pgconn.PgConn. This is an escape hatch method that allows lower level access to the @@ -585,8 +583,10 @@ const ( QueryExecModeCacheDescribe // Get the statement description on every execution. This uses the extended protocol. Queries require two round trips - // to execute. It does not use prepared statements (allowing usage with most connection poolers) and is safe even - // when the the database schema is modified concurrently. + // to execute. It does not use named prepared statements. But it does use the unnamed prepared statement to get the + // statement description on the first round trip and then uses it to execute the query on the second round trip. This + // may cause problems with connection poolers that switch the underlying connection between round trips. It is safe + // even when the the database schema is modified concurrently. QueryExecModeDescribeExec // Assume the PostgreSQL query parameter types based on the Go type of the arguments. This uses the extended protocol @@ -648,6 +648,9 @@ type QueryRewriter interface { // returned Rows even if an error is returned. The error will be the available in rows.Err() after rows are closed. It // is allowed to ignore the error returned from Query and handle it in Rows. // +// It is possible for a call of FieldDescriptions on the returned Rows to return nil even if the Query call did not +// return an error. +// // It is possible for a query to return one or more rows before encountering an error. In most cases the rows should be // collected before processing rather than processed while receiving each row. This avoids the possibility of the // application processing rows from a query that the server rejected. The CollectRows function is useful here. 
@@ -1326,6 +1329,7 @@ func (c *Conn) deallocateInvalidatedCachedStatements(ctx context.Context) error for _, sd := range invalidatedStatements { pipeline.SendDeallocate(sd.Name) + delete(c.preparedStatements, sd.Name) } err := pipeline.Sync() diff --git a/vendor/github.com/jackc/pgx/v5/internal/nbconn/bufferqueue.go b/vendor/github.com/jackc/pgx/v5/internal/nbconn/bufferqueue.go deleted file mode 100644 index 4bf25481c5..0000000000 --- a/vendor/github.com/jackc/pgx/v5/internal/nbconn/bufferqueue.go +++ /dev/null @@ -1,70 +0,0 @@ -package nbconn - -import ( - "sync" -) - -const minBufferQueueLen = 8 - -type bufferQueue struct { - lock sync.Mutex - queue []*[]byte - r, w int -} - -func (bq *bufferQueue) pushBack(buf *[]byte) { - bq.lock.Lock() - defer bq.lock.Unlock() - - if bq.w >= len(bq.queue) { - bq.growQueue() - } - bq.queue[bq.w] = buf - bq.w++ -} - -func (bq *bufferQueue) pushFront(buf *[]byte) { - bq.lock.Lock() - defer bq.lock.Unlock() - - if bq.w >= len(bq.queue) { - bq.growQueue() - } - copy(bq.queue[bq.r+1:bq.w+1], bq.queue[bq.r:bq.w]) - bq.queue[bq.r] = buf - bq.w++ -} - -func (bq *bufferQueue) popFront() *[]byte { - bq.lock.Lock() - defer bq.lock.Unlock() - - if bq.r == bq.w { - return nil - } - - buf := bq.queue[bq.r] - bq.queue[bq.r] = nil // Clear reference so it can be garbage collected. 
- bq.r++ - - if bq.r == bq.w { - bq.r = 0 - bq.w = 0 - if len(bq.queue) > minBufferQueueLen { - bq.queue = make([]*[]byte, minBufferQueueLen) - } - } - - return buf -} - -func (bq *bufferQueue) growQueue() { - desiredLen := (len(bq.queue) + 1) * 3 / 2 - if desiredLen < minBufferQueueLen { - desiredLen = minBufferQueueLen - } - - newQueue := make([]*[]byte, desiredLen) - copy(newQueue, bq.queue) - bq.queue = newQueue -} diff --git a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn.go b/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn.go deleted file mode 100644 index 38489a74b6..0000000000 --- a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn.go +++ /dev/null @@ -1,549 +0,0 @@ -// Package nbconn implements a non-blocking net.Conn wrapper. -// -// It is designed to solve three problems. -// -// The first is resolving the deadlock that can occur when both sides of a connection are blocked writing because all -// buffers between are full. See https://github.com/jackc/pgconn/issues/27 for discussion. -// -// The second is the inability to use a write deadline with a TLS.Conn without killing the connection. -// -// The third is to efficiently check if a connection has been closed via a non-blocking read. -package nbconn - -import ( - "crypto/tls" - "errors" - "net" - "os" - "sync" - "sync/atomic" - "syscall" - "time" - - "github.com/jackc/pgx/v5/internal/iobufpool" -) - -var errClosed = errors.New("closed") -var ErrWouldBlock = new(wouldBlockError) - -const fakeNonblockingWriteWaitDuration = 100 * time.Millisecond -const minNonblockingReadWaitDuration = time.Microsecond -const maxNonblockingReadWaitDuration = 100 * time.Millisecond - -// NonBlockingDeadline is a magic value that when passed to Set[Read]Deadline places the connection in non-blocking read -// mode. 
-var NonBlockingDeadline = time.Date(1900, 1, 1, 0, 0, 0, 608536336, time.UTC) - -// disableSetDeadlineDeadline is a magic value that when passed to Set[Read|Write]Deadline causes those methods to -// ignore all future calls. -var disableSetDeadlineDeadline = time.Date(1900, 1, 1, 0, 0, 0, 968549727, time.UTC) - -// wouldBlockError implements net.Error so tls.Conn will recognize ErrWouldBlock as a temporary error. -type wouldBlockError struct{} - -func (*wouldBlockError) Error() string { - return "would block" -} - -func (*wouldBlockError) Timeout() bool { return true } -func (*wouldBlockError) Temporary() bool { return true } - -// Conn is a net.Conn where Write never blocks and always succeeds. Flush or Read must be called to actually write to -// the underlying connection. -type Conn interface { - net.Conn - - // Flush flushes any buffered writes. - Flush() error - - // BufferReadUntilBlock reads and buffers any successfully read bytes until the read would block. - BufferReadUntilBlock() error -} - -// NetConn is a non-blocking net.Conn wrapper. It implements net.Conn. -type NetConn struct { - // 64 bit fields accessed with atomics must be at beginning of struct to guarantee alignment for certain 32-bit - // architectures. See BUGS section of https://pkg.go.dev/sync/atomic and https://github.com/jackc/pgx/issues/1288 and - // https://github.com/jackc/pgx/issues/1307. Only access with atomics - closed int64 // 0 = not closed, 1 = closed - - conn net.Conn - rawConn syscall.RawConn - - readQueue bufferQueue - writeQueue bufferQueue - - readFlushLock sync.Mutex - // non-blocking writes with syscall.RawConn are done with a callback function. By using these fields instead of the - // callback functions closure to pass the buf argument and receive the n and err results we avoid some allocations. 
- nonblockWriteFunc func(fd uintptr) (done bool) - nonblockWriteBuf []byte - nonblockWriteErr error - nonblockWriteN int - - // non-blocking reads with syscall.RawConn are done with a callback function. By using these fields instead of the - // callback functions closure to pass the buf argument and receive the n and err results we avoid some allocations. - nonblockReadFunc func(fd uintptr) (done bool) - nonblockReadBuf []byte - nonblockReadErr error - nonblockReadN int - - readDeadlineLock sync.Mutex - readDeadline time.Time - readNonblocking bool - fakeNonBlockingShortReadCount int - fakeNonblockingReadWaitDuration time.Duration - - writeDeadlineLock sync.Mutex - writeDeadline time.Time - - // nbOperCnt Tracks how many operations performing simultaneously - nbOperCnt int - // nbOperMu Used to prevent concurrent SetBlockingMode calls - nbOperMu sync.Mutex -} - -func NewNetConn(conn net.Conn, fakeNonBlockingIO bool) *NetConn { - nc := &NetConn{ - conn: conn, - fakeNonblockingReadWaitDuration: maxNonblockingReadWaitDuration, - } - - if !fakeNonBlockingIO { - if sc, ok := conn.(syscall.Conn); ok { - if rawConn, err := sc.SyscallConn(); err == nil { - nc.rawConn = rawConn - } - } - } - - return nc -} - -// Read implements io.Reader. -func (c *NetConn) Read(b []byte) (n int, err error) { - if c.isClosed() { - return 0, errClosed - } - - c.readFlushLock.Lock() - defer c.readFlushLock.Unlock() - - err = c.flush() - if err != nil { - return 0, err - } - - for n < len(b) { - buf := c.readQueue.popFront() - if buf == nil { - break - } - copiedN := copy(b[n:], *buf) - if copiedN < len(*buf) { - *buf = (*buf)[copiedN:] - c.readQueue.pushFront(buf) - } else { - iobufpool.Put(buf) - } - n += copiedN - } - - // If any bytes were already buffered return them without trying to do a Read. Otherwise, when the caller is trying to - // Read up to len(b) bytes but all available bytes have already been buffered the underlying Read would block. 
- if n > 0 { - return n, nil - } - - var readNonblocking bool - c.readDeadlineLock.Lock() - readNonblocking = c.readNonblocking - c.readDeadlineLock.Unlock() - - var readN int - if readNonblocking { - if setSockModeErr := c.SetBlockingMode(false); setSockModeErr != nil { - return n, setSockModeErr - } - - defer func() { - _ = c.SetBlockingMode(true) - }() - - readN, err = c.nonblockingRead(b[n:]) - } else { - readN, err = c.conn.Read(b[n:]) - } - n += readN - return n, err -} - -// Write implements io.Writer. It never blocks due to buffering all writes. It will only return an error if the Conn is -// closed. Call Flush to actually write to the underlying connection. -func (c *NetConn) Write(b []byte) (n int, err error) { - if c.isClosed() { - return 0, errClosed - } - - buf := iobufpool.Get(len(b)) - copy(*buf, b) - c.writeQueue.pushBack(buf) - return len(b), nil -} - -func (c *NetConn) Close() (err error) { - swapped := atomic.CompareAndSwapInt64(&c.closed, 0, 1) - if !swapped { - return errClosed - } - - defer func() { - closeErr := c.conn.Close() - if err == nil { - err = closeErr - } - }() - - c.readFlushLock.Lock() - defer c.readFlushLock.Unlock() - err = c.flush() - if err != nil { - return err - } - - return nil -} - -func (c *NetConn) LocalAddr() net.Addr { - return c.conn.LocalAddr() -} - -func (c *NetConn) RemoteAddr() net.Addr { - return c.conn.RemoteAddr() -} - -// SetDeadline is the equivalent of calling SetReadDealine(t) and SetWriteDeadline(t). -func (c *NetConn) SetDeadline(t time.Time) error { - err := c.SetReadDeadline(t) - if err != nil { - return err - } - return c.SetWriteDeadline(t) -} - -// SetReadDeadline sets the read deadline as t. If t == NonBlockingDeadline then future reads will be non-blocking. 
-func (c *NetConn) SetReadDeadline(t time.Time) error { - if c.isClosed() { - return errClosed - } - - c.readDeadlineLock.Lock() - defer c.readDeadlineLock.Unlock() - if c.readDeadline == disableSetDeadlineDeadline { - return nil - } - if t == disableSetDeadlineDeadline { - c.readDeadline = t - return nil - } - - if t == NonBlockingDeadline { - c.readNonblocking = true - t = time.Time{} - } else { - c.readNonblocking = false - } - - c.readDeadline = t - - return c.conn.SetReadDeadline(t) -} - -func (c *NetConn) SetWriteDeadline(t time.Time) error { - if c.isClosed() { - return errClosed - } - - c.writeDeadlineLock.Lock() - defer c.writeDeadlineLock.Unlock() - if c.writeDeadline == disableSetDeadlineDeadline { - return nil - } - if t == disableSetDeadlineDeadline { - c.writeDeadline = t - return nil - } - - c.writeDeadline = t - - return c.conn.SetWriteDeadline(t) -} - -func (c *NetConn) Flush() error { - if c.isClosed() { - return errClosed - } - - c.readFlushLock.Lock() - defer c.readFlushLock.Unlock() - return c.flush() -} - -// flush does the actual work of flushing the writeQueue. readFlushLock must already be held. -func (c *NetConn) flush() error { - var stopChan chan struct{} - var errChan chan error - - if err := c.SetBlockingMode(false); err != nil { - return err - } - - defer func() { - _ = c.SetBlockingMode(true) - }() - - defer func() { - if stopChan != nil { - select { - case stopChan <- struct{}{}: - case <-errChan: - } - } - }() - - for buf := c.writeQueue.popFront(); buf != nil; buf = c.writeQueue.popFront() { - remainingBuf := *buf - for len(remainingBuf) > 0 { - n, err := c.nonblockingWrite(remainingBuf) - remainingBuf = remainingBuf[n:] - if err != nil { - if !errors.Is(err, ErrWouldBlock) { - *buf = (*buf)[:len(remainingBuf)] - copy(*buf, remainingBuf) - c.writeQueue.pushFront(buf) - return err - } - - // Writing was blocked. Reading might unblock it. 
- if stopChan == nil { - stopChan, errChan = c.bufferNonblockingRead() - } - - select { - case err := <-errChan: - stopChan = nil - return err - default: - } - - } - } - iobufpool.Put(buf) - } - - return nil -} - -func (c *NetConn) BufferReadUntilBlock() error { - if err := c.SetBlockingMode(false); err != nil { - return err - } - - defer func() { - _ = c.SetBlockingMode(true) - }() - - for { - buf := iobufpool.Get(8 * 1024) - n, err := c.nonblockingRead(*buf) - if n > 0 { - *buf = (*buf)[:n] - c.readQueue.pushBack(buf) - } else if n == 0 { - iobufpool.Put(buf) - } - - if err != nil { - if errors.Is(err, ErrWouldBlock) { - return nil - } else { - return err - } - } - } -} - -func (c *NetConn) bufferNonblockingRead() (stopChan chan struct{}, errChan chan error) { - stopChan = make(chan struct{}) - errChan = make(chan error, 1) - - go func() { - for { - err := c.BufferReadUntilBlock() - if err != nil { - errChan <- err - return - } - - select { - case <-stopChan: - return - default: - } - } - }() - - return stopChan, errChan -} - -func (c *NetConn) isClosed() bool { - closed := atomic.LoadInt64(&c.closed) - return closed == 1 -} - -func (c *NetConn) nonblockingWrite(b []byte) (n int, err error) { - if c.rawConn == nil { - return c.fakeNonblockingWrite(b) - } else { - return c.realNonblockingWrite(b) - } -} - -func (c *NetConn) fakeNonblockingWrite(b []byte) (n int, err error) { - c.writeDeadlineLock.Lock() - defer c.writeDeadlineLock.Unlock() - - deadline := time.Now().Add(fakeNonblockingWriteWaitDuration) - if c.writeDeadline.IsZero() || deadline.Before(c.writeDeadline) { - err = c.conn.SetWriteDeadline(deadline) - if err != nil { - return 0, err - } - defer func() { - // Ignoring error resetting deadline as there is nothing that can reasonably be done if it fails. 
- c.conn.SetWriteDeadline(c.writeDeadline) - - if err != nil { - if errors.Is(err, os.ErrDeadlineExceeded) { - err = ErrWouldBlock - } - } - }() - } - - return c.conn.Write(b) -} - -func (c *NetConn) nonblockingRead(b []byte) (n int, err error) { - if c.rawConn == nil { - return c.fakeNonblockingRead(b) - } else { - return c.realNonblockingRead(b) - } -} - -func (c *NetConn) fakeNonblockingRead(b []byte) (n int, err error) { - c.readDeadlineLock.Lock() - defer c.readDeadlineLock.Unlock() - - // The first 5 reads only read 1 byte at a time. This should give us 4 chances to read when we are sure the bytes are - // already in Go or the OS's receive buffer. - if c.fakeNonBlockingShortReadCount < 5 && len(b) > 0 && c.fakeNonblockingReadWaitDuration < minNonblockingReadWaitDuration { - b = b[:1] - } - - startTime := time.Now() - deadline := startTime.Add(c.fakeNonblockingReadWaitDuration) - if c.readDeadline.IsZero() || deadline.Before(c.readDeadline) { - err = c.conn.SetReadDeadline(deadline) - if err != nil { - return 0, err - } - defer func() { - // If the read was successful and the wait duration is not already the minimum - if err == nil && c.fakeNonblockingReadWaitDuration > minNonblockingReadWaitDuration { - endTime := time.Now() - - if n > 0 && c.fakeNonBlockingShortReadCount < 5 { - c.fakeNonBlockingShortReadCount++ - } - - // The wait duration should be 2x the fastest read that has occurred. This should give reasonable assurance that - // a Read deadline will not block a read before it has a chance to read data already in Go or the OS's receive - // buffer. - proposedWait := endTime.Sub(startTime) * 2 - if proposedWait < minNonblockingReadWaitDuration { - proposedWait = minNonblockingReadWaitDuration - } - if proposedWait < c.fakeNonblockingReadWaitDuration { - c.fakeNonblockingReadWaitDuration = proposedWait - } - } - - // Ignoring error resetting deadline as there is nothing that can reasonably be done if it fails. 
- c.conn.SetReadDeadline(c.readDeadline) - - if err != nil { - if errors.Is(err, os.ErrDeadlineExceeded) { - err = ErrWouldBlock - } - } - }() - } - - return c.conn.Read(b) -} - -// syscall.Conn is interface - -// TLSClient establishes a TLS connection as a client over conn using config. -// -// To avoid the first Read on the returned *TLSConn also triggering a Write due to the TLS handshake and thereby -// potentially causing a read and write deadlines to behave unexpectedly, Handshake is called explicitly before the -// *TLSConn is returned. -func TLSClient(conn *NetConn, config *tls.Config) (*TLSConn, error) { - tc := tls.Client(conn, config) - err := tc.Handshake() - if err != nil { - return nil, err - } - - // Ensure last written part of Handshake is actually sent. - err = conn.Flush() - if err != nil { - return nil, err - } - - return &TLSConn{ - tlsConn: tc, - nbConn: conn, - }, nil -} - -// TLSConn is a TLS wrapper around a *Conn. It works around a temporary write error (such as a timeout) being fatal to a -// tls.Conn. -type TLSConn struct { - tlsConn *tls.Conn - nbConn *NetConn -} - -func (tc *TLSConn) Read(b []byte) (n int, err error) { return tc.tlsConn.Read(b) } -func (tc *TLSConn) Write(b []byte) (n int, err error) { return tc.tlsConn.Write(b) } -func (tc *TLSConn) BufferReadUntilBlock() error { return tc.nbConn.BufferReadUntilBlock() } -func (tc *TLSConn) Flush() error { return tc.nbConn.Flush() } -func (tc *TLSConn) LocalAddr() net.Addr { return tc.tlsConn.LocalAddr() } -func (tc *TLSConn) RemoteAddr() net.Addr { return tc.tlsConn.RemoteAddr() } - -func (tc *TLSConn) Close() error { - // tls.Conn.closeNotify() sets a 5 second deadline to avoid blocking, sends a TLS alert close notification, and then - // sets the deadline to now. This causes NetConn's Close not to be able to flush the write buffer. Instead we set our - // own 5 second deadline then make all set deadlines no-op. 
- tc.tlsConn.SetDeadline(time.Now().Add(time.Second * 5)) - tc.tlsConn.SetDeadline(disableSetDeadlineDeadline) - - return tc.tlsConn.Close() -} - -func (tc *TLSConn) SetDeadline(t time.Time) error { return tc.tlsConn.SetDeadline(t) } -func (tc *TLSConn) SetReadDeadline(t time.Time) error { return tc.tlsConn.SetReadDeadline(t) } -func (tc *TLSConn) SetWriteDeadline(t time.Time) error { return tc.tlsConn.SetWriteDeadline(t) } diff --git a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_fake_non_block.go b/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_fake_non_block.go deleted file mode 100644 index 71c7388dd0..0000000000 --- a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_fake_non_block.go +++ /dev/null @@ -1,11 +0,0 @@ -//go:build !unix && !windows - -package nbconn - -func (c *NetConn) realNonblockingWrite(b []byte) (n int, err error) { - return c.fakeNonblockingWrite(b) -} - -func (c *NetConn) realNonblockingRead(b []byte) (n int, err error) { - return c.fakeNonblockingRead(b) -} diff --git a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block.go b/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block.go deleted file mode 100644 index 863b86ad47..0000000000 --- a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block.go +++ /dev/null @@ -1,86 +0,0 @@ -//go:build unix - -package nbconn - -import ( - "errors" - "io" - "syscall" -) - -// realNonblockingWrite does a non-blocking write. readFlushLock must already be held. -func (c *NetConn) realNonblockingWrite(b []byte) (n int, err error) { - if c.nonblockWriteFunc == nil { - c.nonblockWriteFunc = func(fd uintptr) (done bool) { - c.nonblockWriteN, c.nonblockWriteErr = syscall.Write(int(fd), c.nonblockWriteBuf) - return true - } - } - c.nonblockWriteBuf = b - c.nonblockWriteN = 0 - c.nonblockWriteErr = nil - - err = c.rawConn.Write(c.nonblockWriteFunc) - n = c.nonblockWriteN - c.nonblockWriteBuf = nil // ensure that no reference to b is kept. 
- if err == nil && c.nonblockWriteErr != nil { - if errors.Is(c.nonblockWriteErr, syscall.EWOULDBLOCK) { - err = ErrWouldBlock - } else { - err = c.nonblockWriteErr - } - } - if err != nil { - // n may be -1 when an error occurs. - if n < 0 { - n = 0 - } - - return n, err - } - - return n, nil -} - -func (c *NetConn) realNonblockingRead(b []byte) (n int, err error) { - if c.nonblockReadFunc == nil { - c.nonblockReadFunc = func(fd uintptr) (done bool) { - c.nonblockReadN, c.nonblockReadErr = syscall.Read(int(fd), c.nonblockReadBuf) - return true - } - } - c.nonblockReadBuf = b - c.nonblockReadN = 0 - c.nonblockReadErr = nil - - err = c.rawConn.Read(c.nonblockReadFunc) - n = c.nonblockReadN - c.nonblockReadBuf = nil // ensure that no reference to b is kept. - if err == nil && c.nonblockReadErr != nil { - if errors.Is(c.nonblockReadErr, syscall.EWOULDBLOCK) { - err = ErrWouldBlock - } else { - err = c.nonblockReadErr - } - } - if err != nil { - // n may be -1 when an error occurs. - if n < 0 { - n = 0 - } - - return n, err - } - - // syscall read did not return an error and 0 bytes were read means EOF. 
- if n == 0 { - return 0, io.EOF - } - - return n, nil -} - -func (c *NetConn) SetBlockingMode(blocking bool) error { - // Do nothing on UNIX systems - return nil -} diff --git a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block_windows.go b/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block_windows.go deleted file mode 100644 index fdf628f4ca..0000000000 --- a/vendor/github.com/jackc/pgx/v5/internal/nbconn/nbconn_real_non_block_windows.go +++ /dev/null @@ -1,227 +0,0 @@ -//go:build windows - -package nbconn - -import ( - "errors" - "fmt" - "golang.org/x/sys/windows" - "io" - "syscall" - "time" - "unsafe" -) - -var dll = syscall.MustLoadDLL("ws2_32.dll") - -// int ioctlsocket( -// -// [in] SOCKET s, -// [in] long cmd, -// [in, out] u_long *argp -// -// ); -var ioctlsocket = dll.MustFindProc("ioctlsocket") - -var deadlineExpErr = errors.New("i/o timeout") - -type sockMode int - -const ( - FIONBIO uint32 = 0x8004667e - sockModeBlocking sockMode = 0 - sockModeNonBlocking sockMode = 1 -) - -func setSockMode(fd uintptr, mode sockMode) error { - res, _, err := ioctlsocket.Call(fd, uintptr(FIONBIO), uintptr(unsafe.Pointer(&mode))) - // Upon successful completion, the ioctlsocket returns zero. - if res != 0 && err != nil { - return err - } - - return nil -} - -func (c *NetConn) isDeadlineSet(dl time.Time) bool { - return !dl.IsZero() && !dl.Equal(NonBlockingDeadline) && !dl.Equal(disableSetDeadlineDeadline) -} - -func (c *NetConn) isWriteDeadlineExpired() bool { - c.writeDeadlineLock.Lock() - defer c.writeDeadlineLock.Unlock() - - return c.isDeadlineSet(c.writeDeadline) && !time.Now().Before(c.writeDeadline) -} - -func (c *NetConn) isReadDeadlineExpired() bool { - c.readDeadlineLock.Lock() - defer c.readDeadlineLock.Unlock() - - return c.isDeadlineSet(c.readDeadline) && !time.Now().Before(c.readDeadline) -} - -// realNonblockingWrite does a non-blocking write. readFlushLock must already be held. 
-func (c *NetConn) realNonblockingWrite(b []byte) (n int, err error) { - if c.nonblockWriteFunc == nil { - c.nonblockWriteFunc = func(fd uintptr) (done bool) { - var written uint32 - var buf syscall.WSABuf - buf.Buf = &c.nonblockWriteBuf[0] - buf.Len = uint32(len(c.nonblockWriteBuf)) - c.nonblockWriteErr = syscall.WSASend(syscall.Handle(fd), &buf, 1, &written, 0, nil, nil) - c.nonblockWriteN = int(written) - - return true - } - } - c.nonblockWriteBuf = b - c.nonblockWriteN = 0 - c.nonblockWriteErr = nil - - if c.isWriteDeadlineExpired() { - c.nonblockWriteErr = deadlineExpErr - - return 0, c.nonblockWriteErr - } - - err = c.rawConn.Write(c.nonblockWriteFunc) - n = c.nonblockWriteN - c.nonblockWriteBuf = nil // ensure that no reference to b is kept. - if err == nil && c.nonblockWriteErr != nil { - if errors.Is(c.nonblockWriteErr, windows.WSAEWOULDBLOCK) { - err = ErrWouldBlock - } else { - err = c.nonblockWriteErr - } - } - if err != nil { - // n may be -1 when an error occurs. - if n < 0 { - n = 0 - } - - return n, err - } - - return n, nil -} - -func (c *NetConn) realNonblockingRead(b []byte) (n int, err error) { - if c.nonblockReadFunc == nil { - c.nonblockReadFunc = func(fd uintptr) (done bool) { - var read uint32 - var flags uint32 - var buf syscall.WSABuf - buf.Buf = &c.nonblockReadBuf[0] - buf.Len = uint32(len(c.nonblockReadBuf)) - c.nonblockReadErr = syscall.WSARecv(syscall.Handle(fd), &buf, 1, &read, &flags, nil, nil) - c.nonblockReadN = int(read) - - return true - } - } - c.nonblockReadBuf = b - c.nonblockReadN = 0 - c.nonblockReadErr = nil - - if c.isReadDeadlineExpired() { - c.nonblockReadErr = deadlineExpErr - - return 0, c.nonblockReadErr - } - - err = c.rawConn.Read(c.nonblockReadFunc) - n = c.nonblockReadN - c.nonblockReadBuf = nil // ensure that no reference to b is kept. 
- if err == nil && c.nonblockReadErr != nil { - if errors.Is(c.nonblockReadErr, windows.WSAEWOULDBLOCK) { - err = ErrWouldBlock - } else { - err = c.nonblockReadErr - } - } - if err != nil { - // n may be -1 when an error occurs. - if n < 0 { - n = 0 - } - - return n, err - } - - // syscall read did not return an error and 0 bytes were read means EOF. - if n == 0 { - return 0, io.EOF - } - - return n, nil -} - -func (c *NetConn) SetBlockingMode(blocking bool) error { - // Fake non-blocking I/O is ignored - if c.rawConn == nil { - return nil - } - - // Prevent concurrent SetBlockingMode calls - c.nbOperMu.Lock() - defer c.nbOperMu.Unlock() - - // Guard against negative value (which should never happen in practice) - if c.nbOperCnt < 0 { - c.nbOperCnt = 0 - } - - if blocking { - // Socket is already in blocking mode - if c.nbOperCnt == 0 { - return nil - } - - c.nbOperCnt-- - - // Not ready to exit from non-blocking mode, there is pending non-blocking operations - if c.nbOperCnt > 0 { - return nil - } - } else { - c.nbOperCnt++ - - // Socket is already in non-blocking mode - if c.nbOperCnt > 1 { - return nil - } - } - - mode := sockModeNonBlocking - if blocking { - mode = sockModeBlocking - } - - var ctrlErr, err error - - ctrlErr = c.rawConn.Control(func(fd uintptr) { - err = setSockMode(fd, mode) - }) - - if ctrlErr != nil || err != nil { - retErr := ctrlErr - if retErr == nil { - retErr = err - } - - // Revert counters inc/dec in case of error - if blocking { - c.nbOperCnt++ - - return fmt.Errorf("cannot set socket to blocking mode: %w", retErr) - } else { - c.nbOperCnt-- - - return fmt.Errorf("cannot set socket to non-blocking mode: %w", retErr) - } - } - - return nil -} diff --git a/vendor/github.com/jackc/pgx/v5/pgconn/auth_scram.go b/vendor/github.com/jackc/pgx/v5/pgconn/auth_scram.go index 6ca9e33791..8c4b2de3cb 100644 --- a/vendor/github.com/jackc/pgx/v5/pgconn/auth_scram.go +++ b/vendor/github.com/jackc/pgx/v5/pgconn/auth_scram.go @@ -42,7 +42,7 @@ func (c 
*PgConn) scramAuth(serverAuthMechanisms []string) error { Data: sc.clientFirstMessage(), } c.frontend.Send(saslInitialResponse) - err = c.frontend.Flush() + err = c.flushWithPotentialWriteReadDeadlock() if err != nil { return err } @@ -62,7 +62,7 @@ func (c *PgConn) scramAuth(serverAuthMechanisms []string) error { Data: []byte(sc.clientFinalMessage()), } c.frontend.Send(saslResponse) - err = c.frontend.Flush() + err = c.flushWithPotentialWriteReadDeadlock() if err != nil { return err } diff --git a/vendor/github.com/jackc/pgx/v5/pgconn/internal/bgreader/bgreader.go b/vendor/github.com/jackc/pgx/v5/pgconn/internal/bgreader/bgreader.go new file mode 100644 index 0000000000..aa1a3d39c8 --- /dev/null +++ b/vendor/github.com/jackc/pgx/v5/pgconn/internal/bgreader/bgreader.go @@ -0,0 +1,132 @@ +// Package bgreader provides a io.Reader that can optionally buffer reads in the background. +package bgreader + +import ( + "io" + "sync" + + "github.com/jackc/pgx/v5/internal/iobufpool" +) + +const ( + bgReaderStatusStopped = iota + bgReaderStatusRunning + bgReaderStatusStopping +) + +// BGReader is an io.Reader that can optionally buffer reads in the background. It is safe for concurrent use. +type BGReader struct { + r io.Reader + + cond *sync.Cond + bgReaderStatus int32 + readResults []readResult +} + +type readResult struct { + buf *[]byte + err error +} + +// Start starts the backgrounder reader. If the background reader is already running this is a no-op. The background +// reader will stop automatically when the underlying reader returns an error. +func (r *BGReader) Start() { + r.cond.L.Lock() + defer r.cond.L.Unlock() + + switch r.bgReaderStatus { + case bgReaderStatusStopped: + r.bgReaderStatus = bgReaderStatusRunning + go r.bgRead() + case bgReaderStatusRunning: + // no-op + case bgReaderStatusStopping: + r.bgReaderStatus = bgReaderStatusRunning + } +} + +// Stop tells the background reader to stop after the in progress Read returns. 
It is safe to call Stop when the +// background reader is not running. +func (r *BGReader) Stop() { + r.cond.L.Lock() + defer r.cond.L.Unlock() + + switch r.bgReaderStatus { + case bgReaderStatusStopped: + // no-op + case bgReaderStatusRunning: + r.bgReaderStatus = bgReaderStatusStopping + case bgReaderStatusStopping: + // no-op + } +} + +func (r *BGReader) bgRead() { + keepReading := true + for keepReading { + buf := iobufpool.Get(8192) + n, err := r.r.Read(*buf) + *buf = (*buf)[:n] + + r.cond.L.Lock() + r.readResults = append(r.readResults, readResult{buf: buf, err: err}) + if r.bgReaderStatus == bgReaderStatusStopping || err != nil { + r.bgReaderStatus = bgReaderStatusStopped + keepReading = false + } + r.cond.L.Unlock() + r.cond.Broadcast() + } +} + +// Read implements the io.Reader interface. +func (r *BGReader) Read(p []byte) (int, error) { + r.cond.L.Lock() + defer r.cond.L.Unlock() + + if len(r.readResults) > 0 { + return r.readFromReadResults(p) + } + + // There are no unread background read results and the background reader is stopped. + if r.bgReaderStatus == bgReaderStatusStopped { + return r.r.Read(p) + } + + // Wait for results from the background reader + for len(r.readResults) == 0 { + r.cond.Wait() + } + return r.readFromReadResults(p) +} + +// readBackgroundResults reads a result previously read by the background reader. r.cond.L must be held. 
+func (r *BGReader) readFromReadResults(p []byte) (int, error) { + buf := r.readResults[0].buf + var err error + + n := copy(p, *buf) + if n == len(*buf) { + err = r.readResults[0].err + iobufpool.Put(buf) + if len(r.readResults) == 1 { + r.readResults = nil + } else { + r.readResults = r.readResults[1:] + } + } else { + *buf = (*buf)[n:] + r.readResults[0].buf = buf + } + + return n, err +} + +func New(r io.Reader) *BGReader { + return &BGReader{ + r: r, + cond: &sync.Cond{ + L: &sync.Mutex{}, + }, + } +} diff --git a/vendor/github.com/jackc/pgx/v5/pgconn/krb5.go b/vendor/github.com/jackc/pgx/v5/pgconn/krb5.go index 969675fd27..3c1af34773 100644 --- a/vendor/github.com/jackc/pgx/v5/pgconn/krb5.go +++ b/vendor/github.com/jackc/pgx/v5/pgconn/krb5.go @@ -63,7 +63,7 @@ func (c *PgConn) gssAuth() error { Data: nextData, } c.frontend.Send(gssResponse) - err = c.frontend.Flush() + err = c.flushWithPotentialWriteReadDeadlock() if err != nil { return err } diff --git a/vendor/github.com/jackc/pgx/v5/pgconn/pgconn.go b/vendor/github.com/jackc/pgx/v5/pgconn/pgconn.go index 8656ea5180..9f84605fef 100644 --- a/vendor/github.com/jackc/pgx/v5/pgconn/pgconn.go +++ b/vendor/github.com/jackc/pgx/v5/pgconn/pgconn.go @@ -13,11 +13,12 @@ import ( "net" "strconv" "strings" + "sync" "time" "github.com/jackc/pgx/v5/internal/iobufpool" - "github.com/jackc/pgx/v5/internal/nbconn" "github.com/jackc/pgx/v5/internal/pgio" + "github.com/jackc/pgx/v5/pgconn/internal/bgreader" "github.com/jackc/pgx/v5/pgconn/internal/ctxwatch" "github.com/jackc/pgx/v5/pgproto3" ) @@ -65,17 +66,24 @@ type NotificationHandler func(*PgConn, *Notification) // PgConn is a low-level PostgreSQL connection handle. It is not safe for concurrent usage. 
type PgConn struct { - conn nbconn.Conn // the non-blocking wrapper for the underlying TCP or unix domain socket connection + conn net.Conn pid uint32 // backend pid secretKey uint32 // key to use to send a cancel query message to the server parameterStatuses map[string]string // parameters that have been reported by the server txStatus byte frontend *pgproto3.Frontend + bgReader *bgreader.BGReader + slowWriteTimer *time.Timer config *Config status byte // One of connStatus* constants + bufferingReceive bool + bufferingReceiveMux sync.Mutex + bufferingReceiveMsg pgproto3.BackendMessage + bufferingReceiveErr error + peekedMsg pgproto3.BackendMessage // Reusable / preallocated resources @@ -266,14 +274,13 @@ func connect(ctx context.Context, config *Config, fallbackConfig *FallbackConfig if err != nil { return nil, &connectError{config: config, msg: "dial error", err: normalizeTimeoutError(ctx, err)} } - nbNetConn := nbconn.NewNetConn(netConn, false) - pgConn.conn = nbNetConn - pgConn.contextWatcher = newContextWatcher(nbNetConn) + pgConn.conn = netConn + pgConn.contextWatcher = newContextWatcher(netConn) pgConn.contextWatcher.Watch(ctx) if fallbackConfig.TLSConfig != nil { - nbTLSConn, err := startTLS(nbNetConn, fallbackConfig.TLSConfig) + nbTLSConn, err := startTLS(netConn, fallbackConfig.TLSConfig) pgConn.contextWatcher.Unwatch() // Always unwatch `netConn` after TLS. 
if err != nil { netConn.Close() @@ -289,7 +296,9 @@ func connect(ctx context.Context, config *Config, fallbackConfig *FallbackConfig pgConn.parameterStatuses = make(map[string]string) pgConn.status = connStatusConnecting - pgConn.frontend = config.BuildFrontend(pgConn.conn, pgConn.conn) + pgConn.bgReader = bgreader.New(pgConn.conn) + pgConn.slowWriteTimer = time.AfterFunc(time.Duration(math.MaxInt64), pgConn.bgReader.Start) + pgConn.frontend = config.BuildFrontend(pgConn.bgReader, pgConn.conn) startupMsg := pgproto3.StartupMessage{ ProtocolVersion: pgproto3.ProtocolVersionNumber, @@ -307,9 +316,9 @@ func connect(ctx context.Context, config *Config, fallbackConfig *FallbackConfig } pgConn.frontend.Send(&startupMsg) - if err := pgConn.frontend.Flush(); err != nil { + if err := pgConn.flushWithPotentialWriteReadDeadlock(); err != nil { pgConn.conn.Close() - return nil, &connectError{config: config, msg: "failed to write startup message", err: err} + return nil, &connectError{config: config, msg: "failed to write startup message", err: normalizeTimeoutError(ctx, err)} } for { @@ -392,7 +401,7 @@ func newContextWatcher(conn net.Conn) *ctxwatch.ContextWatcher { ) } -func startTLS(conn *nbconn.NetConn, tlsConfig *tls.Config) (*nbconn.TLSConn, error) { +func startTLS(conn net.Conn, tlsConfig *tls.Config) (net.Conn, error) { err := binary.Write(conn, binary.BigEndian, []int32{8, 80877103}) if err != nil { return nil, err @@ -407,17 +416,12 @@ func startTLS(conn *nbconn.NetConn, tlsConfig *tls.Config) (*nbconn.TLSConn, err return nil, errors.New("server refused TLS connection") } - tlsConn, err := nbconn.TLSClient(conn, tlsConfig) - if err != nil { - return nil, err - } - - return tlsConn, nil + return tls.Client(conn, tlsConfig), nil } func (pgConn *PgConn) txPasswordMessage(password string) (err error) { pgConn.frontend.Send(&pgproto3.PasswordMessage{Password: password}) - return pgConn.frontend.Flush() + return pgConn.flushWithPotentialWriteReadDeadlock() } func hexMD5(s 
string) string { @@ -426,6 +430,24 @@ func hexMD5(s string) string { return hex.EncodeToString(hash.Sum(nil)) } +func (pgConn *PgConn) signalMessage() chan struct{} { + if pgConn.bufferingReceive { + panic("BUG: signalMessage when already in progress") + } + + pgConn.bufferingReceive = true + pgConn.bufferingReceiveMux.Lock() + + ch := make(chan struct{}) + go func() { + pgConn.bufferingReceiveMsg, pgConn.bufferingReceiveErr = pgConn.frontend.Receive() + pgConn.bufferingReceiveMux.Unlock() + close(ch) + }() + + return ch +} + // ReceiveMessage receives one wire protocol message from the PostgreSQL server. It must only be used when the // connection is not busy. e.g. It is an error to call ReceiveMessage while reading the result of a query. The messages // are still handled by the core pgconn message handling system so receiving a NotificationResponse will still trigger @@ -465,13 +487,25 @@ func (pgConn *PgConn) peekMessage() (pgproto3.BackendMessage, error) { return pgConn.peekedMsg, nil } - msg, err := pgConn.frontend.Receive() - - if err != nil { - if errors.Is(err, nbconn.ErrWouldBlock) { - return nil, err + var msg pgproto3.BackendMessage + var err error + if pgConn.bufferingReceive { + pgConn.bufferingReceiveMux.Lock() + msg = pgConn.bufferingReceiveMsg + err = pgConn.bufferingReceiveErr + pgConn.bufferingReceiveMux.Unlock() + pgConn.bufferingReceive = false + + // If a timeout error happened in the background try the read again. 
+ var netErr net.Error + if errors.As(err, &netErr) && netErr.Timeout() { + msg, err = pgConn.frontend.Receive() } + } else { + msg, err = pgConn.frontend.Receive() + } + if err != nil { // Close on anything other than timeout error - everything else is fatal var netErr net.Error isNetErr := errors.As(err, &netErr) @@ -582,7 +616,7 @@ func (pgConn *PgConn) Close(ctx context.Context) error { // // See https://github.com/jackc/pgx/issues/637 pgConn.frontend.Send(&pgproto3.Terminate{}) - pgConn.frontend.Flush() + pgConn.flushWithPotentialWriteReadDeadlock() return pgConn.conn.Close() } @@ -609,7 +643,7 @@ func (pgConn *PgConn) asyncClose() { pgConn.conn.SetDeadline(deadline) pgConn.frontend.Send(&pgproto3.Terminate{}) - pgConn.frontend.Flush() + pgConn.flushWithPotentialWriteReadDeadlock() }() } @@ -784,7 +818,7 @@ func (pgConn *PgConn) Prepare(ctx context.Context, name, sql string, paramOIDs [ pgConn.frontend.SendParse(&pgproto3.Parse{Name: name, Query: sql, ParameterOIDs: paramOIDs}) pgConn.frontend.SendDescribe(&pgproto3.Describe{ObjectType: 'S', Name: name}) pgConn.frontend.SendSync(&pgproto3.Sync{}) - err := pgConn.frontend.Flush() + err := pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() return nil, err @@ -857,9 +891,28 @@ func (pgConn *PgConn) CancelRequest(ctx context.Context) error { // the connection config. This is important in high availability configurations where fallback connections may be // specified or DNS may be used to load balance. serverAddr := pgConn.conn.RemoteAddr() - cancelConn, err := pgConn.config.DialFunc(ctx, serverAddr.Network(), serverAddr.String()) + var serverNetwork string + var serverAddress string + if serverAddr.Network() == "unix" { + // for unix sockets, RemoteAddr() calls getpeername() which returns the name the + // server passed to bind(). For Postgres, this is always a relative path "./.s.PGSQL.5432" + // so connecting to it will fail. 
Fall back to the config's value + serverNetwork, serverAddress = NetworkAddress(pgConn.config.Host, pgConn.config.Port) + } else { + serverNetwork, serverAddress = serverAddr.Network(), serverAddr.String() + } + cancelConn, err := pgConn.config.DialFunc(ctx, serverNetwork, serverAddress) if err != nil { - return err + // In case of unix sockets, RemoteAddr() returns only the file part of the path. If the + // first connect failed, try the config. + if serverAddr.Network() != "unix" { + return err + } + serverNetwork, serverAddr := NetworkAddress(pgConn.config.Host, pgConn.config.Port) + cancelConn, err = pgConn.config.DialFunc(ctx, serverNetwork, serverAddr) + if err != nil { + return err + } } defer cancelConn.Close() @@ -877,17 +930,11 @@ func (pgConn *PgConn) CancelRequest(ctx context.Context) error { binary.BigEndian.PutUint32(buf[4:8], 80877102) binary.BigEndian.PutUint32(buf[8:12], uint32(pgConn.pid)) binary.BigEndian.PutUint32(buf[12:16], uint32(pgConn.secretKey)) + // Postgres will process the request and close the connection + // so when don't need to read the reply + // https://www.postgresql.org/docs/current/protocol-flow.html#id-1.10.6.7.10 _, err = cancelConn.Write(buf) - if err != nil { - return err - } - - _, err = cancelConn.Read(buf) - if err != io.EOF { - return err - } - - return nil + return err } // WaitForNotification waits for a LISTON/NOTIFY message to be received. 
It returns an error if a notification was not @@ -953,7 +1000,7 @@ func (pgConn *PgConn) Exec(ctx context.Context, sql string) *MultiResultReader { } pgConn.frontend.SendQuery(&pgproto3.Query{String: sql}) - err := pgConn.frontend.Flush() + err := pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() pgConn.contextWatcher.Unwatch() @@ -1064,7 +1111,7 @@ func (pgConn *PgConn) execExtendedSuffix(result *ResultReader) { pgConn.frontend.SendExecute(&pgproto3.Execute{}) pgConn.frontend.SendSync(&pgproto3.Sync{}) - err := pgConn.frontend.Flush() + err := pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() result.concludeCommand(CommandTag{}, err) @@ -1097,7 +1144,7 @@ func (pgConn *PgConn) CopyTo(ctx context.Context, w io.Writer, sql string) (Comm // Send copy to command pgConn.frontend.SendQuery(&pgproto3.Query{String: sql}) - err := pgConn.frontend.Flush() + err := pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() pgConn.unlock() @@ -1153,85 +1200,91 @@ func (pgConn *PgConn) CopyFrom(ctx context.Context, r io.Reader, sql string) (Co defer pgConn.contextWatcher.Unwatch() } - // Send copy to command + // Send copy from query pgConn.frontend.SendQuery(&pgproto3.Query{String: sql}) - err := pgConn.frontend.Flush() - if err != nil { - pgConn.asyncClose() - return CommandTag{}, err - } - - err = pgConn.conn.SetReadDeadline(nbconn.NonBlockingDeadline) + err := pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() return CommandTag{}, err } - nonblocking := true - defer func() { - if nonblocking { - pgConn.conn.SetReadDeadline(time.Time{}) - } - }() - buf := iobufpool.Get(65536) - defer iobufpool.Put(buf) - (*buf)[0] = 'd' + // Send copy data + abortCopyChan := make(chan struct{}) + copyErrChan := make(chan error, 1) + signalMessageChan := pgConn.signalMessage() + var wg sync.WaitGroup + wg.Add(1) - var readErr, pgErr error - for pgErr == nil { - // Read chunk from 
r. - var n int - n, readErr = r.Read((*buf)[5:cap(*buf)]) + go func() { + defer wg.Done() + buf := iobufpool.Get(65536) + defer iobufpool.Put(buf) + (*buf)[0] = 'd' - // Send chunk to PostgreSQL. - if n > 0 { - *buf = (*buf)[0 : n+5] - pgio.SetInt32((*buf)[1:], int32(n+4)) + for { + n, readErr := r.Read((*buf)[5:cap(*buf)]) + if n > 0 { + *buf = (*buf)[0 : n+5] + pgio.SetInt32((*buf)[1:], int32(n+4)) + + writeErr := pgConn.frontend.SendUnbufferedEncodedCopyData(*buf) + if writeErr != nil { + // Write errors are always fatal, but we can't use asyncClose because we are in a different goroutine. Not + // setting pgConn.status or closing pgConn.cleanupDone for the same reason. + pgConn.conn.Close() - writeErr := pgConn.frontend.SendUnbufferedEncodedCopyData(*buf) - if writeErr != nil { - pgConn.asyncClose() - return CommandTag{}, err + copyErrChan <- writeErr + return + } + } + if readErr != nil { + copyErrChan <- readErr + return } - } - // Abort loop if there was a read error. - if readErr != nil { - break + select { + case <-abortCopyChan: + return + default: + } } + }() - // Read messages until error or none available. - for pgErr == nil { - msg, err := pgConn.receiveMessage() - if err != nil { - if errors.Is(err, nbconn.ErrWouldBlock) { - break - } - pgConn.asyncClose() + var pgErr error + var copyErr error + for copyErr == nil && pgErr == nil { + select { + case copyErr = <-copyErrChan: + case <-signalMessageChan: + // If pgConn.receiveMessage encounters an error it will call pgConn.asyncClose. But that is a race condition with + // the goroutine. So instead check pgConn.bufferingReceiveErr which will have been set by the signalMessage. If an + // error is found then forcibly close the connection without sending the Terminate message. 
+ if err := pgConn.bufferingReceiveErr; err != nil { + pgConn.status = connStatusClosed + pgConn.conn.Close() + close(pgConn.cleanupDone) return CommandTag{}, normalizeTimeoutError(ctx, err) } + msg, _ := pgConn.receiveMessage() switch msg := msg.(type) { case *pgproto3.ErrorResponse: pgErr = ErrorResponseToPgError(msg) - break + default: + signalMessageChan = pgConn.signalMessage() } } } + close(abortCopyChan) + // Make sure io goroutine finishes before writing. + wg.Wait() - err = pgConn.conn.SetReadDeadline(time.Time{}) - if err != nil { - pgConn.asyncClose() - return CommandTag{}, err - } - nonblocking = false - - if readErr == io.EOF || pgErr != nil { + if copyErr == io.EOF || pgErr != nil { pgConn.frontend.Send(&pgproto3.CopyDone{}) } else { - pgConn.frontend.Send(&pgproto3.CopyFail{Message: readErr.Error()}) + pgConn.frontend.Send(&pgproto3.CopyFail{Message: copyErr.Error()}) } - err = pgConn.frontend.Flush() + err = pgConn.flushWithPotentialWriteReadDeadlock() if err != nil { pgConn.asyncClose() return CommandTag{}, err @@ -1426,7 +1479,8 @@ func (rr *ResultReader) NextRow() bool { } // FieldDescriptions returns the field descriptions for the current result set. The returned slice is only valid until -// the ResultReader is closed. +// the ResultReader is closed. It may return nil (for example, if the query did not return a result set or an error was +// encountered.) 
func (rr *ResultReader) FieldDescriptions() []FieldDescription { return rr.fieldDescriptions } @@ -1592,7 +1646,9 @@ func (pgConn *PgConn) ExecBatch(ctx context.Context, batch *Batch) *MultiResultR batch.buf = (&pgproto3.Sync{}).Encode(batch.buf) + pgConn.enterPotentialWriteReadDeadlock() _, err := pgConn.conn.Write(batch.buf) + pgConn.exitPotentialWriteReadDeadlock() if err != nil { multiResult.closed = true multiResult.err = err @@ -1620,29 +1676,72 @@ func (pgConn *PgConn) EscapeString(s string) (string, error) { return strings.Replace(s, "'", "''", -1), nil } -// CheckConn checks the underlying connection without writing any bytes. This is currently implemented by reading and -// buffering until the read would block or an error occurs. This can be used to check if the server has closed the -// connection. If this is done immediately before sending a query it reduces the chances a query will be sent that fails +// CheckConn checks the underlying connection without writing any bytes. This is currently implemented by doing a read +// with a very short deadline. This can be useful because a TCP connection can be broken such that a write will appear +// to succeed even though it will never actually reach the server. Reading immediately before a write will detect this +// condition. If this is done immediately before sending a query it reduces the chances a query will be sent that fails // without the client knowing whether the server received it or not. +// +// Deprecated: CheckConn is deprecated in favor of Ping. CheckConn cannot detect all types of broken connections where +// the write would still appear to succeed. Prefer Ping unless on a high latency connection. 
func (pgConn *PgConn) CheckConn() error { - err := pgConn.conn.BufferReadUntilBlock() - if err != nil && !errors.Is(err, nbconn.ErrWouldBlock) { - return err + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Millisecond) + defer cancel() + + _, err := pgConn.ReceiveMessage(ctx) + if err != nil { + if !Timeout(err) { + return err + } } + return nil } +// Ping pings the server. This can be useful because a TCP connection can be broken such that a write will appear to +// succeed even though it will never actually reach the server. Pinging immediately before sending a query reduces the +// chances a query will be sent that fails without the client knowing whether the server received it or not. +func (pgConn *PgConn) Ping(ctx context.Context) error { + return pgConn.Exec(ctx, "-- ping").Close() +} + // makeCommandTag makes a CommandTag. It does not retain a reference to buf or buf's underlying memory. func (pgConn *PgConn) makeCommandTag(buf []byte) CommandTag { return CommandTag{s: string(buf)} } +// enterPotentialWriteReadDeadlock must be called before a write that could deadlock if the server is simultaneously +// blocked writing to us. +func (pgConn *PgConn) enterPotentialWriteReadDeadlock() { + // The time to wait is somewhat arbitrary. A Write should only take as long as the syscall and memcpy to the OS + // outbound network buffer unless the buffer is full (which potentially is a block). It needs to be long enough for + // the normal case, but short enough not to kill performance if a block occurs. + // + // In addition, on Windows the default timer resolution is 15.6ms. So setting the timer to less than that is + // ineffective. + pgConn.slowWriteTimer.Reset(15 * time.Millisecond) +} + +// exitPotentialWriteReadDeadlock must be called after a call to enterPotentialWriteReadDeadlock. 
+func (pgConn *PgConn) exitPotentialWriteReadDeadlock() { + if !pgConn.slowWriteTimer.Reset(time.Duration(math.MaxInt64)) { + pgConn.slowWriteTimer.Stop() + } +} + +func (pgConn *PgConn) flushWithPotentialWriteReadDeadlock() error { + pgConn.enterPotentialWriteReadDeadlock() + err := pgConn.frontend.Flush() + pgConn.exitPotentialWriteReadDeadlock() + return err +} + // HijackedConn is the result of hijacking a connection. // // Due to the necessary exposure of internal implementation details, it is not covered by the semantic versioning // compatibility. type HijackedConn struct { - Conn nbconn.Conn // the non-blocking wrapper of the underlying TCP or unix domain socket connection + Conn net.Conn PID uint32 // backend pid SecretKey uint32 // key to use to send a cancel query message to the server ParameterStatuses map[string]string // parameters that have been reported by the server @@ -1695,6 +1794,8 @@ func Construct(hc *HijackedConn) (*PgConn, error) { } pgConn.contextWatcher = newContextWatcher(pgConn.conn) + pgConn.bgReader = bgreader.New(pgConn.conn) + pgConn.slowWriteTimer = time.AfterFunc(time.Duration(math.MaxInt64), pgConn.bgReader.Start) return pgConn, nil } @@ -1817,7 +1918,7 @@ func (p *Pipeline) Flush() error { return errors.New("pipeline closed") } - err := p.conn.frontend.Flush() + err := p.conn.flushWithPotentialWriteReadDeadlock() if err != nil { err = normalizeTimeoutError(p.ctx, err) diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/bool.go b/vendor/github.com/jackc/pgx/v5/pgtype/bool.go index e7be27e2d6..71caffa74e 100644 --- a/vendor/github.com/jackc/pgx/v5/pgtype/bool.go +++ b/vendor/github.com/jackc/pgx/v5/pgtype/bool.go @@ -1,10 +1,12 @@ package pgtype import ( + "bytes" "database/sql/driver" "encoding/json" "fmt" "strconv" + "strings" ) type BoolScanner interface { @@ -264,8 +266,8 @@ func (scanPlanTextAnyToBool) Scan(src []byte, dst any) error { return fmt.Errorf("cannot scan NULL into %T", dst) } - if len(src) != 1 { - return 
fmt.Errorf("invalid length for bool: %v", len(src)) + if len(src) == 0 { + return fmt.Errorf("cannot scan empty string into %T", dst) } p, ok := (dst).(*bool) @@ -273,7 +275,12 @@ func (scanPlanTextAnyToBool) Scan(src []byte, dst any) error { return ErrScanTargetTypeChanged } - *p = src[0] == 't' + v, err := planTextToBool(src) + if err != nil { + return err + } + + *p = v return nil } @@ -309,9 +316,28 @@ func (scanPlanTextAnyToBoolScanner) Scan(src []byte, dst any) error { return s.ScanBool(Bool{}) } - if len(src) != 1 { - return fmt.Errorf("invalid length for bool: %v", len(src)) + if len(src) == 0 { + return fmt.Errorf("cannot scan empty string into %T", dst) } - return s.ScanBool(Bool{Bool: src[0] == 't', Valid: true}) + v, err := planTextToBool(src) + if err != nil { + return err + } + + return s.ScanBool(Bool{Bool: v, Valid: true}) +} + +// https://www.postgresql.org/docs/11/datatype-boolean.html +func planTextToBool(src []byte) (bool, error) { + s := string(bytes.ToLower(bytes.TrimSpace(src))) + + switch { + case strings.HasPrefix("true", s), strings.HasPrefix("yes", s), s == "on", s == "1": + return true, nil + case strings.HasPrefix("false", s), strings.HasPrefix("no", s), strings.HasPrefix("off", s), s == "0": + return false, nil + default: + return false, fmt.Errorf("unknown boolean string representation %q", src) + } } diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/convert.go b/vendor/github.com/jackc/pgx/v5/pgtype/convert.go index 4eb8014ac6..7fddeaa8a9 100644 --- a/vendor/github.com/jackc/pgx/v5/pgtype/convert.go +++ b/vendor/github.com/jackc/pgx/v5/pgtype/convert.go @@ -64,6 +64,9 @@ func underlyingNumberType(val any) (any, bool) { case reflect.String: convVal := refVal.String() return convVal, reflect.TypeOf(convVal) != refVal.Type() + case reflect.Bool: + convVal := refVal.Bool() + return convVal, reflect.TypeOf(convVal) != refVal.Type() } return nil, false diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/hstore.go 
b/vendor/github.com/jackc/pgx/v5/pgtype/hstore.go index 4743643e5a..e4695819a5 100644 --- a/vendor/github.com/jackc/pgx/v5/pgtype/hstore.go +++ b/vendor/github.com/jackc/pgx/v5/pgtype/hstore.go @@ -6,7 +6,6 @@ import ( "encoding/binary" "errors" "fmt" - "strings" "unicode" "unicode/utf8" @@ -43,7 +42,7 @@ func (h *Hstore) Scan(src any) error { switch src := src.(type) { case string: - return scanPlanTextAnyToHstoreScanner{}.Scan([]byte(src), h) + return scanPlanTextAnyToHstoreScanner{}.scanString(src, h) } return fmt.Errorf("cannot scan %T", src) @@ -137,13 +136,20 @@ func (encodePlanHstoreCodecText) Encode(value any, buf []byte) (newBuf []byte, e buf = append(buf, ',') } - buf = append(buf, quoteHstoreElementIfNeeded(k)...) + // unconditionally quote hstore keys/values like Postgres does + // this avoids a Mac OS X Postgres hstore parsing bug: + // https://www.postgresql.org/message-id/CA%2BHWA9awUW0%2BRV_gO9r1ABZwGoZxPztcJxPy8vMFSTbTfi4jig%40mail.gmail.com + buf = append(buf, '"') + buf = append(buf, quoteArrayReplacer.Replace(k)...) + buf = append(buf, '"') buf = append(buf, "=>"...) if v == nil { buf = append(buf, "NULL"...) } else { - buf = append(buf, quoteHstoreElementIfNeeded(*v)...) + buf = append(buf, '"') + buf = append(buf, quoteArrayReplacer.Replace(*v)...) 
+ buf = append(buf, '"') } } @@ -174,7 +180,7 @@ func (scanPlanBinaryHstoreToHstoreScanner) Scan(src []byte, dst any) error { scanner := (dst).(HstoreScanner) if src == nil { - return scanner.ScanHstore(Hstore{}) + return scanner.ScanHstore(Hstore(nil)) } rp := 0 @@ -230,14 +236,18 @@ func (scanPlanBinaryHstoreToHstoreScanner) Scan(src []byte, dst any) error { type scanPlanTextAnyToHstoreScanner struct{} -func (scanPlanTextAnyToHstoreScanner) Scan(src []byte, dst any) error { +func (s scanPlanTextAnyToHstoreScanner) Scan(src []byte, dst any) error { scanner := (dst).(HstoreScanner) if src == nil { - return scanner.ScanHstore(Hstore{}) + return scanner.ScanHstore(Hstore(nil)) } + return s.scanString(string(src), scanner) +} - keys, values, err := parseHstore(string(src)) +// scanString does not return nil hstore values because string cannot be nil. +func (scanPlanTextAnyToHstoreScanner) scanString(src string, scanner HstoreScanner) error { + keys, values, err := parseHstore(src) if err != nil { return err } @@ -271,19 +281,6 @@ func (c HstoreCodec) DecodeValue(m *Map, oid uint32, format int16, src []byte) ( return hstore, nil } -var quoteHstoreReplacer = strings.NewReplacer(`\`, `\\`, `"`, `\"`) - -func quoteHstoreElement(src string) string { - return `"` + quoteArrayReplacer.Replace(src) + `"` -} - -func quoteHstoreElementIfNeeded(src string) string { - if src == "" || (len(src) == 4 && strings.ToLower(src) == "null") || strings.ContainsAny(src, ` {},"\=>`) { - return quoteArrayElement(src) - } - return src -} - const ( hsPre = iota hsKey @@ -434,12 +431,21 @@ func parseHstore(s string) (k []string, v []Text, err error) { r, end = p.Consume() switch { case end: - err = errors.New("Found EOS after ',', expcting space") + err = errors.New("Found EOS after ',', expecting space") case (unicode.IsSpace(r)): + // after space is a doublequote to start the key r, end = p.Consume() + if end { + err = errors.New("Found EOS after space, expecting \"") + return + } + if r != 
'"' { + err = fmt.Errorf("Invalid character '%c' after space, expecting \"", r) + return + } state = hsKey default: - err = fmt.Errorf("Invalid character '%c' after ', ', expecting \"", r) + err = fmt.Errorf("Invalid character '%c' after ',', expecting space", r) } } else { err = fmt.Errorf("Invalid character '%c' after value, expecting ','", r) diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/pgtype.go b/vendor/github.com/jackc/pgx/v5/pgtype/pgtype.go index eb9526725d..2672ba005c 100644 --- a/vendor/github.com/jackc/pgx/v5/pgtype/pgtype.go +++ b/vendor/github.com/jackc/pgx/v5/pgtype/pgtype.go @@ -147,7 +147,7 @@ const ( BinaryFormatCode = 1 ) -// A Codec converts between Go and PostgreSQL values. +// A Codec converts between Go and PostgreSQL values. A Codec must not be mutated after it is registered with a Map. type Codec interface { // FormatSupported returns true if the format is supported. FormatSupported(int16) bool @@ -178,6 +178,7 @@ func (e *nullAssignmentError) Error() string { return fmt.Sprintf("cannot assign NULL to %T", e.dst) } +// Type represents a PostgreSQL data type. It must not be mutated after it is registered with a Map. 
type Type struct { Codec Codec Name string @@ -211,7 +212,9 @@ type Map struct { } func NewMap() *Map { - m := &Map{ + defaultMapInitOnce.Do(initDefaultMap) + + return &Map{ oidToType: make(map[uint32]*Type), nameToType: make(map[string]*Type), reflectTypeToName: make(map[reflect.Type]string), @@ -240,184 +243,9 @@ func NewMap() *Map { TryWrapPtrArrayScanPlan, }, } - - // Base types - m.RegisterType(&Type{Name: "aclitem", OID: ACLItemOID, Codec: &TextFormatOnlyCodec{TextCodec{}}}) - m.RegisterType(&Type{Name: "bit", OID: BitOID, Codec: BitsCodec{}}) - m.RegisterType(&Type{Name: "bool", OID: BoolOID, Codec: BoolCodec{}}) - m.RegisterType(&Type{Name: "box", OID: BoxOID, Codec: BoxCodec{}}) - m.RegisterType(&Type{Name: "bpchar", OID: BPCharOID, Codec: TextCodec{}}) - m.RegisterType(&Type{Name: "bytea", OID: ByteaOID, Codec: ByteaCodec{}}) - m.RegisterType(&Type{Name: "char", OID: QCharOID, Codec: QCharCodec{}}) - m.RegisterType(&Type{Name: "cid", OID: CIDOID, Codec: Uint32Codec{}}) - m.RegisterType(&Type{Name: "cidr", OID: CIDROID, Codec: InetCodec{}}) - m.RegisterType(&Type{Name: "circle", OID: CircleOID, Codec: CircleCodec{}}) - m.RegisterType(&Type{Name: "date", OID: DateOID, Codec: DateCodec{}}) - m.RegisterType(&Type{Name: "float4", OID: Float4OID, Codec: Float4Codec{}}) - m.RegisterType(&Type{Name: "float8", OID: Float8OID, Codec: Float8Codec{}}) - m.RegisterType(&Type{Name: "inet", OID: InetOID, Codec: InetCodec{}}) - m.RegisterType(&Type{Name: "int2", OID: Int2OID, Codec: Int2Codec{}}) - m.RegisterType(&Type{Name: "int4", OID: Int4OID, Codec: Int4Codec{}}) - m.RegisterType(&Type{Name: "int8", OID: Int8OID, Codec: Int8Codec{}}) - m.RegisterType(&Type{Name: "interval", OID: IntervalOID, Codec: IntervalCodec{}}) - m.RegisterType(&Type{Name: "json", OID: JSONOID, Codec: JSONCodec{}}) - m.RegisterType(&Type{Name: "jsonb", OID: JSONBOID, Codec: JSONBCodec{}}) - m.RegisterType(&Type{Name: "jsonpath", OID: JSONPathOID, Codec: &TextFormatOnlyCodec{TextCodec{}}}) - 
m.RegisterType(&Type{Name: "line", OID: LineOID, Codec: LineCodec{}}) - m.RegisterType(&Type{Name: "lseg", OID: LsegOID, Codec: LsegCodec{}}) - m.RegisterType(&Type{Name: "macaddr", OID: MacaddrOID, Codec: MacaddrCodec{}}) - m.RegisterType(&Type{Name: "name", OID: NameOID, Codec: TextCodec{}}) - m.RegisterType(&Type{Name: "numeric", OID: NumericOID, Codec: NumericCodec{}}) - m.RegisterType(&Type{Name: "oid", OID: OIDOID, Codec: Uint32Codec{}}) - m.RegisterType(&Type{Name: "path", OID: PathOID, Codec: PathCodec{}}) - m.RegisterType(&Type{Name: "point", OID: PointOID, Codec: PointCodec{}}) - m.RegisterType(&Type{Name: "polygon", OID: PolygonOID, Codec: PolygonCodec{}}) - m.RegisterType(&Type{Name: "record", OID: RecordOID, Codec: RecordCodec{}}) - m.RegisterType(&Type{Name: "text", OID: TextOID, Codec: TextCodec{}}) - m.RegisterType(&Type{Name: "tid", OID: TIDOID, Codec: TIDCodec{}}) - m.RegisterType(&Type{Name: "time", OID: TimeOID, Codec: TimeCodec{}}) - m.RegisterType(&Type{Name: "timestamp", OID: TimestampOID, Codec: TimestampCodec{}}) - m.RegisterType(&Type{Name: "timestamptz", OID: TimestamptzOID, Codec: TimestamptzCodec{}}) - m.RegisterType(&Type{Name: "unknown", OID: UnknownOID, Codec: TextCodec{}}) - m.RegisterType(&Type{Name: "uuid", OID: UUIDOID, Codec: UUIDCodec{}}) - m.RegisterType(&Type{Name: "varbit", OID: VarbitOID, Codec: BitsCodec{}}) - m.RegisterType(&Type{Name: "varchar", OID: VarcharOID, Codec: TextCodec{}}) - m.RegisterType(&Type{Name: "xid", OID: XIDOID, Codec: Uint32Codec{}}) - - // Range types - m.RegisterType(&Type{Name: "daterange", OID: DaterangeOID, Codec: &RangeCodec{ElementType: m.oidToType[DateOID]}}) - m.RegisterType(&Type{Name: "int4range", OID: Int4rangeOID, Codec: &RangeCodec{ElementType: m.oidToType[Int4OID]}}) - m.RegisterType(&Type{Name: "int8range", OID: Int8rangeOID, Codec: &RangeCodec{ElementType: m.oidToType[Int8OID]}}) - m.RegisterType(&Type{Name: "numrange", OID: NumrangeOID, Codec: &RangeCodec{ElementType: 
m.oidToType[NumericOID]}}) - m.RegisterType(&Type{Name: "tsrange", OID: TsrangeOID, Codec: &RangeCodec{ElementType: m.oidToType[TimestampOID]}}) - m.RegisterType(&Type{Name: "tstzrange", OID: TstzrangeOID, Codec: &RangeCodec{ElementType: m.oidToType[TimestamptzOID]}}) - - // Multirange types - m.RegisterType(&Type{Name: "datemultirange", OID: DatemultirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[DaterangeOID]}}) - m.RegisterType(&Type{Name: "int4multirange", OID: Int4multirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[Int4rangeOID]}}) - m.RegisterType(&Type{Name: "int8multirange", OID: Int8multirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[Int8rangeOID]}}) - m.RegisterType(&Type{Name: "nummultirange", OID: NummultirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[NumrangeOID]}}) - m.RegisterType(&Type{Name: "tsmultirange", OID: TsmultirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[TsrangeOID]}}) - m.RegisterType(&Type{Name: "tstzmultirange", OID: TstzmultirangeOID, Codec: &MultirangeCodec{ElementType: m.oidToType[TstzrangeOID]}}) - - // Array types - m.RegisterType(&Type{Name: "_aclitem", OID: ACLItemArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[ACLItemOID]}}) - m.RegisterType(&Type{Name: "_bit", OID: BitArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[BitOID]}}) - m.RegisterType(&Type{Name: "_bool", OID: BoolArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[BoolOID]}}) - m.RegisterType(&Type{Name: "_box", OID: BoxArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[BoxOID]}}) - m.RegisterType(&Type{Name: "_bpchar", OID: BPCharArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[BPCharOID]}}) - m.RegisterType(&Type{Name: "_bytea", OID: ByteaArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[ByteaOID]}}) - m.RegisterType(&Type{Name: "_char", OID: QCharArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[QCharOID]}}) - m.RegisterType(&Type{Name: "_cid", OID: CIDArrayOID, Codec: 
&ArrayCodec{ElementType: m.oidToType[CIDOID]}}) - m.RegisterType(&Type{Name: "_cidr", OID: CIDRArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[CIDROID]}}) - m.RegisterType(&Type{Name: "_circle", OID: CircleArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[CircleOID]}}) - m.RegisterType(&Type{Name: "_date", OID: DateArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[DateOID]}}) - m.RegisterType(&Type{Name: "_daterange", OID: DaterangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[DaterangeOID]}}) - m.RegisterType(&Type{Name: "_float4", OID: Float4ArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Float4OID]}}) - m.RegisterType(&Type{Name: "_float8", OID: Float8ArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Float8OID]}}) - m.RegisterType(&Type{Name: "_inet", OID: InetArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[InetOID]}}) - m.RegisterType(&Type{Name: "_int2", OID: Int2ArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Int2OID]}}) - m.RegisterType(&Type{Name: "_int4", OID: Int4ArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Int4OID]}}) - m.RegisterType(&Type{Name: "_int4range", OID: Int4rangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Int4rangeOID]}}) - m.RegisterType(&Type{Name: "_int8", OID: Int8ArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Int8OID]}}) - m.RegisterType(&Type{Name: "_int8range", OID: Int8rangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[Int8rangeOID]}}) - m.RegisterType(&Type{Name: "_interval", OID: IntervalArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[IntervalOID]}}) - m.RegisterType(&Type{Name: "_json", OID: JSONArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[JSONOID]}}) - m.RegisterType(&Type{Name: "_jsonb", OID: JSONBArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[JSONBOID]}}) - m.RegisterType(&Type{Name: "_jsonpath", OID: JSONPathArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[JSONPathOID]}}) - m.RegisterType(&Type{Name: "_line", OID: 
LineArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[LineOID]}}) - m.RegisterType(&Type{Name: "_lseg", OID: LsegArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[LsegOID]}}) - m.RegisterType(&Type{Name: "_macaddr", OID: MacaddrArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[MacaddrOID]}}) - m.RegisterType(&Type{Name: "_name", OID: NameArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[NameOID]}}) - m.RegisterType(&Type{Name: "_numeric", OID: NumericArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[NumericOID]}}) - m.RegisterType(&Type{Name: "_numrange", OID: NumrangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[NumrangeOID]}}) - m.RegisterType(&Type{Name: "_oid", OID: OIDArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[OIDOID]}}) - m.RegisterType(&Type{Name: "_path", OID: PathArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[PathOID]}}) - m.RegisterType(&Type{Name: "_point", OID: PointArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[PointOID]}}) - m.RegisterType(&Type{Name: "_polygon", OID: PolygonArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[PolygonOID]}}) - m.RegisterType(&Type{Name: "_record", OID: RecordArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[RecordOID]}}) - m.RegisterType(&Type{Name: "_text", OID: TextArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TextOID]}}) - m.RegisterType(&Type{Name: "_tid", OID: TIDArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TIDOID]}}) - m.RegisterType(&Type{Name: "_time", OID: TimeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TimeOID]}}) - m.RegisterType(&Type{Name: "_timestamp", OID: TimestampArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TimestampOID]}}) - m.RegisterType(&Type{Name: "_timestamptz", OID: TimestamptzArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TimestamptzOID]}}) - m.RegisterType(&Type{Name: "_tsrange", OID: TsrangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TsrangeOID]}}) - m.RegisterType(&Type{Name: 
"_tstzrange", OID: TstzrangeArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[TstzrangeOID]}}) - m.RegisterType(&Type{Name: "_uuid", OID: UUIDArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[UUIDOID]}}) - m.RegisterType(&Type{Name: "_varbit", OID: VarbitArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[VarbitOID]}}) - m.RegisterType(&Type{Name: "_varchar", OID: VarcharArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[VarcharOID]}}) - m.RegisterType(&Type{Name: "_xid", OID: XIDArrayOID, Codec: &ArrayCodec{ElementType: m.oidToType[XIDOID]}}) - - // Integer types that directly map to a PostgreSQL type - registerDefaultPgTypeVariants[int16](m, "int2") - registerDefaultPgTypeVariants[int32](m, "int4") - registerDefaultPgTypeVariants[int64](m, "int8") - - // Integer types that do not have a direct match to a PostgreSQL type - registerDefaultPgTypeVariants[int8](m, "int8") - registerDefaultPgTypeVariants[int](m, "int8") - registerDefaultPgTypeVariants[uint8](m, "int8") - registerDefaultPgTypeVariants[uint16](m, "int8") - registerDefaultPgTypeVariants[uint32](m, "int8") - registerDefaultPgTypeVariants[uint64](m, "numeric") - registerDefaultPgTypeVariants[uint](m, "numeric") - - registerDefaultPgTypeVariants[float32](m, "float4") - registerDefaultPgTypeVariants[float64](m, "float8") - - registerDefaultPgTypeVariants[bool](m, "bool") - registerDefaultPgTypeVariants[time.Time](m, "timestamptz") - registerDefaultPgTypeVariants[time.Duration](m, "interval") - registerDefaultPgTypeVariants[string](m, "text") - registerDefaultPgTypeVariants[[]byte](m, "bytea") - - registerDefaultPgTypeVariants[net.IP](m, "inet") - registerDefaultPgTypeVariants[net.IPNet](m, "cidr") - registerDefaultPgTypeVariants[netip.Addr](m, "inet") - registerDefaultPgTypeVariants[netip.Prefix](m, "cidr") - - // pgtype provided structs - registerDefaultPgTypeVariants[Bits](m, "varbit") - registerDefaultPgTypeVariants[Bool](m, "bool") - registerDefaultPgTypeVariants[Box](m, "box") - 
registerDefaultPgTypeVariants[Circle](m, "circle") - registerDefaultPgTypeVariants[Date](m, "date") - registerDefaultPgTypeVariants[Range[Date]](m, "daterange") - registerDefaultPgTypeVariants[Multirange[Range[Date]]](m, "datemultirange") - registerDefaultPgTypeVariants[Float4](m, "float4") - registerDefaultPgTypeVariants[Float8](m, "float8") - registerDefaultPgTypeVariants[Range[Float8]](m, "numrange") // There is no PostgreSQL builtin float8range so map it to numrange. - registerDefaultPgTypeVariants[Multirange[Range[Float8]]](m, "nummultirange") // There is no PostgreSQL builtin float8multirange so map it to nummultirange. - registerDefaultPgTypeVariants[Int2](m, "int2") - registerDefaultPgTypeVariants[Int4](m, "int4") - registerDefaultPgTypeVariants[Range[Int4]](m, "int4range") - registerDefaultPgTypeVariants[Multirange[Range[Int4]]](m, "int4multirange") - registerDefaultPgTypeVariants[Int8](m, "int8") - registerDefaultPgTypeVariants[Range[Int8]](m, "int8range") - registerDefaultPgTypeVariants[Multirange[Range[Int8]]](m, "int8multirange") - registerDefaultPgTypeVariants[Interval](m, "interval") - registerDefaultPgTypeVariants[Line](m, "line") - registerDefaultPgTypeVariants[Lseg](m, "lseg") - registerDefaultPgTypeVariants[Numeric](m, "numeric") - registerDefaultPgTypeVariants[Range[Numeric]](m, "numrange") - registerDefaultPgTypeVariants[Multirange[Range[Numeric]]](m, "nummultirange") - registerDefaultPgTypeVariants[Path](m, "path") - registerDefaultPgTypeVariants[Point](m, "point") - registerDefaultPgTypeVariants[Polygon](m, "polygon") - registerDefaultPgTypeVariants[TID](m, "tid") - registerDefaultPgTypeVariants[Text](m, "text") - registerDefaultPgTypeVariants[Time](m, "time") - registerDefaultPgTypeVariants[Timestamp](m, "timestamp") - registerDefaultPgTypeVariants[Timestamptz](m, "timestamptz") - registerDefaultPgTypeVariants[Range[Timestamp]](m, "tsrange") - registerDefaultPgTypeVariants[Multirange[Range[Timestamp]]](m, "tsmultirange") - 
registerDefaultPgTypeVariants[Range[Timestamptz]](m, "tstzrange") - registerDefaultPgTypeVariants[Multirange[Range[Timestamptz]]](m, "tstzmultirange") - registerDefaultPgTypeVariants[UUID](m, "uuid") - - return m } +// RegisterType registers a data type with the Map. t must not be mutated after it is registered. func (m *Map) RegisterType(t *Type) { m.oidToType[t.OID] = t m.nameToType[t.Name] = t @@ -449,13 +277,22 @@ func (m *Map) RegisterDefaultPgType(value any, name string) { } } +// TypeForOID returns the Type registered for the given OID. The returned Type must not be mutated. func (m *Map) TypeForOID(oid uint32) (*Type, bool) { - dt, ok := m.oidToType[oid] + if dt, ok := m.oidToType[oid]; ok { + return dt, true + } + + dt, ok := defaultMap.oidToType[oid] return dt, ok } +// TypeForName returns the Type registered for the given name. The returned Type must not be mutated. func (m *Map) TypeForName(name string) (*Type, bool) { - dt, ok := m.nameToType[name] + if dt, ok := m.nameToType[name]; ok { + return dt, true + } + dt, ok := defaultMap.nameToType[name] return dt, ok } @@ -463,30 +300,43 @@ func (m *Map) buildReflectTypeToType() { m.reflectTypeToType = make(map[reflect.Type]*Type) for reflectType, name := range m.reflectTypeToName { - if dt, ok := m.nameToType[name]; ok { + if dt, ok := m.TypeForName(name); ok { m.reflectTypeToType[reflectType] = dt } } } // TypeForValue finds a data type suitable for v. Use RegisterType to register types that can encode and decode -// themselves. Use RegisterDefaultPgType to register that can be handled by a registered data type. +// themselves. Use RegisterDefaultPgType to register that can be handled by a registered data type. The returned Type +// must not be mutated. 
func (m *Map) TypeForValue(v any) (*Type, bool) { if m.reflectTypeToType == nil { m.buildReflectTypeToType() } - dt, ok := m.reflectTypeToType[reflect.TypeOf(v)] + if dt, ok := m.reflectTypeToType[reflect.TypeOf(v)]; ok { + return dt, true + } + + if defaultMap.reflectTypeToType == nil { + defaultMap.buildReflectTypeToType() + } + + dt, ok := defaultMap.reflectTypeToType[reflect.TypeOf(v)] return dt, ok } // FormatCodeForOID returns the preferred format code for type oid. If the type is not registered it returns the text // format code. func (m *Map) FormatCodeForOID(oid uint32) int16 { - fc, ok := m.oidToFormatCode[oid] - if ok { + if fc, ok := m.oidToFormatCode[oid]; ok { + return fc + } + + if fc, ok := defaultMap.oidToFormatCode[oid]; ok { return fc } + return TextFormatCode } @@ -587,6 +437,14 @@ func (plan *scanPlanFail) Scan(src []byte, dst any) error { return plan.Scan(src, dst) } } + for oid := range defaultMap.oidToType { + if _, ok := plan.m.oidToType[oid]; !ok { + plan := plan.m.planScan(oid, plan.formatCode, dst) + if _, ok := plan.(*scanPlanFail); !ok { + return plan.Scan(src, dst) + } + } + } } var format string @@ -600,7 +458,7 @@ func (plan *scanPlanFail) Scan(src []byte, dst any) error { } var dataTypeName string - if t, ok := plan.m.oidToType[plan.oid]; ok { + if t, ok := plan.m.TypeForOID(plan.oid); ok { dataTypeName = t.Name } else { dataTypeName = "unknown type" @@ -666,6 +524,7 @@ var elemKindToPointerTypes map[reflect.Kind]reflect.Type = map[reflect.Kind]refl reflect.Float32: reflect.TypeOf(new(float32)), reflect.Float64: reflect.TypeOf(new(float64)), reflect.String: reflect.TypeOf(new(string)), + reflect.Bool: reflect.TypeOf(new(bool)), } type underlyingTypeScanPlan struct { @@ -1519,6 +1378,7 @@ var kindToTypes map[reflect.Kind]reflect.Type = map[reflect.Kind]reflect.Type{ reflect.Float32: reflect.TypeOf(float32(0)), reflect.Float64: reflect.TypeOf(float64(0)), reflect.String: reflect.TypeOf(""), + reflect.Bool: reflect.TypeOf(false), } 
type underlyingTypeEncodePlan struct { @@ -2044,7 +1904,7 @@ func newEncodeError(value any, m *Map, oid uint32, formatCode int16, err error) } var dataTypeName string - if t, ok := m.oidToType[oid]; ok { + if t, ok := m.TypeForOID(oid); ok { dataTypeName = t.Name } else { dataTypeName = "unknown type" diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/pgtype_default.go b/vendor/github.com/jackc/pgx/v5/pgtype/pgtype_default.go new file mode 100644 index 0000000000..7526efc604 --- /dev/null +++ b/vendor/github.com/jackc/pgx/v5/pgtype/pgtype_default.go @@ -0,0 +1,221 @@ +package pgtype + +import ( + "net" + "net/netip" + "reflect" + "sync" + "time" +) + +var ( + // defaultMap contains default mappings between PostgreSQL server types and Go type handling logic. + defaultMap *Map + defaultMapInitOnce = sync.Once{} +) + +func initDefaultMap() { + defaultMap = &Map{ + oidToType: make(map[uint32]*Type), + nameToType: make(map[string]*Type), + reflectTypeToName: make(map[reflect.Type]string), + oidToFormatCode: make(map[uint32]int16), + + memoizedScanPlans: make(map[uint32]map[reflect.Type][2]ScanPlan), + memoizedEncodePlans: make(map[uint32]map[reflect.Type][2]EncodePlan), + + TryWrapEncodePlanFuncs: []TryWrapEncodePlanFunc{ + TryWrapDerefPointerEncodePlan, + TryWrapBuiltinTypeEncodePlan, + TryWrapFindUnderlyingTypeEncodePlan, + TryWrapStructEncodePlan, + TryWrapSliceEncodePlan, + TryWrapMultiDimSliceEncodePlan, + TryWrapArrayEncodePlan, + }, + + TryWrapScanPlanFuncs: []TryWrapScanPlanFunc{ + TryPointerPointerScanPlan, + TryWrapBuiltinTypeScanPlan, + TryFindUnderlyingTypeScanPlan, + TryWrapStructScanPlan, + TryWrapPtrSliceScanPlan, + TryWrapPtrMultiDimSliceScanPlan, + TryWrapPtrArrayScanPlan, + }, + } + + // Base types + defaultMap.RegisterType(&Type{Name: "aclitem", OID: ACLItemOID, Codec: &TextFormatOnlyCodec{TextCodec{}}}) + defaultMap.RegisterType(&Type{Name: "bit", OID: BitOID, Codec: BitsCodec{}}) + defaultMap.RegisterType(&Type{Name: "bool", OID: BoolOID, Codec: 
BoolCodec{}}) + defaultMap.RegisterType(&Type{Name: "box", OID: BoxOID, Codec: BoxCodec{}}) + defaultMap.RegisterType(&Type{Name: "bpchar", OID: BPCharOID, Codec: TextCodec{}}) + defaultMap.RegisterType(&Type{Name: "bytea", OID: ByteaOID, Codec: ByteaCodec{}}) + defaultMap.RegisterType(&Type{Name: "char", OID: QCharOID, Codec: QCharCodec{}}) + defaultMap.RegisterType(&Type{Name: "cid", OID: CIDOID, Codec: Uint32Codec{}}) + defaultMap.RegisterType(&Type{Name: "cidr", OID: CIDROID, Codec: InetCodec{}}) + defaultMap.RegisterType(&Type{Name: "circle", OID: CircleOID, Codec: CircleCodec{}}) + defaultMap.RegisterType(&Type{Name: "date", OID: DateOID, Codec: DateCodec{}}) + defaultMap.RegisterType(&Type{Name: "float4", OID: Float4OID, Codec: Float4Codec{}}) + defaultMap.RegisterType(&Type{Name: "float8", OID: Float8OID, Codec: Float8Codec{}}) + defaultMap.RegisterType(&Type{Name: "inet", OID: InetOID, Codec: InetCodec{}}) + defaultMap.RegisterType(&Type{Name: "int2", OID: Int2OID, Codec: Int2Codec{}}) + defaultMap.RegisterType(&Type{Name: "int4", OID: Int4OID, Codec: Int4Codec{}}) + defaultMap.RegisterType(&Type{Name: "int8", OID: Int8OID, Codec: Int8Codec{}}) + defaultMap.RegisterType(&Type{Name: "interval", OID: IntervalOID, Codec: IntervalCodec{}}) + defaultMap.RegisterType(&Type{Name: "json", OID: JSONOID, Codec: JSONCodec{}}) + defaultMap.RegisterType(&Type{Name: "jsonb", OID: JSONBOID, Codec: JSONBCodec{}}) + defaultMap.RegisterType(&Type{Name: "jsonpath", OID: JSONPathOID, Codec: &TextFormatOnlyCodec{TextCodec{}}}) + defaultMap.RegisterType(&Type{Name: "line", OID: LineOID, Codec: LineCodec{}}) + defaultMap.RegisterType(&Type{Name: "lseg", OID: LsegOID, Codec: LsegCodec{}}) + defaultMap.RegisterType(&Type{Name: "macaddr", OID: MacaddrOID, Codec: MacaddrCodec{}}) + defaultMap.RegisterType(&Type{Name: "name", OID: NameOID, Codec: TextCodec{}}) + defaultMap.RegisterType(&Type{Name: "numeric", OID: NumericOID, Codec: NumericCodec{}}) + 
defaultMap.RegisterType(&Type{Name: "oid", OID: OIDOID, Codec: Uint32Codec{}}) + defaultMap.RegisterType(&Type{Name: "path", OID: PathOID, Codec: PathCodec{}}) + defaultMap.RegisterType(&Type{Name: "point", OID: PointOID, Codec: PointCodec{}}) + defaultMap.RegisterType(&Type{Name: "polygon", OID: PolygonOID, Codec: PolygonCodec{}}) + defaultMap.RegisterType(&Type{Name: "record", OID: RecordOID, Codec: RecordCodec{}}) + defaultMap.RegisterType(&Type{Name: "text", OID: TextOID, Codec: TextCodec{}}) + defaultMap.RegisterType(&Type{Name: "tid", OID: TIDOID, Codec: TIDCodec{}}) + defaultMap.RegisterType(&Type{Name: "time", OID: TimeOID, Codec: TimeCodec{}}) + defaultMap.RegisterType(&Type{Name: "timestamp", OID: TimestampOID, Codec: TimestampCodec{}}) + defaultMap.RegisterType(&Type{Name: "timestamptz", OID: TimestamptzOID, Codec: TimestamptzCodec{}}) + defaultMap.RegisterType(&Type{Name: "unknown", OID: UnknownOID, Codec: TextCodec{}}) + defaultMap.RegisterType(&Type{Name: "uuid", OID: UUIDOID, Codec: UUIDCodec{}}) + defaultMap.RegisterType(&Type{Name: "varbit", OID: VarbitOID, Codec: BitsCodec{}}) + defaultMap.RegisterType(&Type{Name: "varchar", OID: VarcharOID, Codec: TextCodec{}}) + defaultMap.RegisterType(&Type{Name: "xid", OID: XIDOID, Codec: Uint32Codec{}}) + + // Range types + defaultMap.RegisterType(&Type{Name: "daterange", OID: DaterangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[DateOID]}}) + defaultMap.RegisterType(&Type{Name: "int4range", OID: Int4rangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[Int4OID]}}) + defaultMap.RegisterType(&Type{Name: "int8range", OID: Int8rangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[Int8OID]}}) + defaultMap.RegisterType(&Type{Name: "numrange", OID: NumrangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[NumericOID]}}) + defaultMap.RegisterType(&Type{Name: "tsrange", OID: TsrangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[TimestampOID]}}) + 
defaultMap.RegisterType(&Type{Name: "tstzrange", OID: TstzrangeOID, Codec: &RangeCodec{ElementType: defaultMap.oidToType[TimestamptzOID]}}) + + // Multirange types + defaultMap.RegisterType(&Type{Name: "datemultirange", OID: DatemultirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[DaterangeOID]}}) + defaultMap.RegisterType(&Type{Name: "int4multirange", OID: Int4multirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[Int4rangeOID]}}) + defaultMap.RegisterType(&Type{Name: "int8multirange", OID: Int8multirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[Int8rangeOID]}}) + defaultMap.RegisterType(&Type{Name: "nummultirange", OID: NummultirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[NumrangeOID]}}) + defaultMap.RegisterType(&Type{Name: "tsmultirange", OID: TsmultirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[TsrangeOID]}}) + defaultMap.RegisterType(&Type{Name: "tstzmultirange", OID: TstzmultirangeOID, Codec: &MultirangeCodec{ElementType: defaultMap.oidToType[TstzrangeOID]}}) + + // Array types + defaultMap.RegisterType(&Type{Name: "_aclitem", OID: ACLItemArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[ACLItemOID]}}) + defaultMap.RegisterType(&Type{Name: "_bit", OID: BitArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[BitOID]}}) + defaultMap.RegisterType(&Type{Name: "_bool", OID: BoolArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[BoolOID]}}) + defaultMap.RegisterType(&Type{Name: "_box", OID: BoxArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[BoxOID]}}) + defaultMap.RegisterType(&Type{Name: "_bpchar", OID: BPCharArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[BPCharOID]}}) + defaultMap.RegisterType(&Type{Name: "_bytea", OID: ByteaArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[ByteaOID]}}) + defaultMap.RegisterType(&Type{Name: "_char", OID: QCharArrayOID, Codec: 
&ArrayCodec{ElementType: defaultMap.oidToType[QCharOID]}}) + defaultMap.RegisterType(&Type{Name: "_cid", OID: CIDArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[CIDOID]}}) + defaultMap.RegisterType(&Type{Name: "_cidr", OID: CIDRArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[CIDROID]}}) + defaultMap.RegisterType(&Type{Name: "_circle", OID: CircleArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[CircleOID]}}) + defaultMap.RegisterType(&Type{Name: "_date", OID: DateArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[DateOID]}}) + defaultMap.RegisterType(&Type{Name: "_daterange", OID: DaterangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[DaterangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_float4", OID: Float4ArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Float4OID]}}) + defaultMap.RegisterType(&Type{Name: "_float8", OID: Float8ArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Float8OID]}}) + defaultMap.RegisterType(&Type{Name: "_inet", OID: InetArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[InetOID]}}) + defaultMap.RegisterType(&Type{Name: "_int2", OID: Int2ArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Int2OID]}}) + defaultMap.RegisterType(&Type{Name: "_int4", OID: Int4ArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Int4OID]}}) + defaultMap.RegisterType(&Type{Name: "_int4range", OID: Int4rangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Int4rangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_int8", OID: Int8ArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Int8OID]}}) + defaultMap.RegisterType(&Type{Name: "_int8range", OID: Int8rangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[Int8rangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_interval", OID: IntervalArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[IntervalOID]}}) + 
defaultMap.RegisterType(&Type{Name: "_json", OID: JSONArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[JSONOID]}}) + defaultMap.RegisterType(&Type{Name: "_jsonb", OID: JSONBArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[JSONBOID]}}) + defaultMap.RegisterType(&Type{Name: "_jsonpath", OID: JSONPathArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[JSONPathOID]}}) + defaultMap.RegisterType(&Type{Name: "_line", OID: LineArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[LineOID]}}) + defaultMap.RegisterType(&Type{Name: "_lseg", OID: LsegArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[LsegOID]}}) + defaultMap.RegisterType(&Type{Name: "_macaddr", OID: MacaddrArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[MacaddrOID]}}) + defaultMap.RegisterType(&Type{Name: "_name", OID: NameArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[NameOID]}}) + defaultMap.RegisterType(&Type{Name: "_numeric", OID: NumericArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[NumericOID]}}) + defaultMap.RegisterType(&Type{Name: "_numrange", OID: NumrangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[NumrangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_oid", OID: OIDArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[OIDOID]}}) + defaultMap.RegisterType(&Type{Name: "_path", OID: PathArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[PathOID]}}) + defaultMap.RegisterType(&Type{Name: "_point", OID: PointArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[PointOID]}}) + defaultMap.RegisterType(&Type{Name: "_polygon", OID: PolygonArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[PolygonOID]}}) + defaultMap.RegisterType(&Type{Name: "_record", OID: RecordArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[RecordOID]}}) + defaultMap.RegisterType(&Type{Name: "_text", OID: TextArrayOID, Codec: &ArrayCodec{ElementType: 
defaultMap.oidToType[TextOID]}}) + defaultMap.RegisterType(&Type{Name: "_tid", OID: TIDArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TIDOID]}}) + defaultMap.RegisterType(&Type{Name: "_time", OID: TimeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TimeOID]}}) + defaultMap.RegisterType(&Type{Name: "_timestamp", OID: TimestampArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TimestampOID]}}) + defaultMap.RegisterType(&Type{Name: "_timestamptz", OID: TimestamptzArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TimestamptzOID]}}) + defaultMap.RegisterType(&Type{Name: "_tsrange", OID: TsrangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TsrangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_tstzrange", OID: TstzrangeArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[TstzrangeOID]}}) + defaultMap.RegisterType(&Type{Name: "_uuid", OID: UUIDArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[UUIDOID]}}) + defaultMap.RegisterType(&Type{Name: "_varbit", OID: VarbitArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[VarbitOID]}}) + defaultMap.RegisterType(&Type{Name: "_varchar", OID: VarcharArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[VarcharOID]}}) + defaultMap.RegisterType(&Type{Name: "_xid", OID: XIDArrayOID, Codec: &ArrayCodec{ElementType: defaultMap.oidToType[XIDOID]}}) + + // Integer types that directly map to a PostgreSQL type + registerDefaultPgTypeVariants[int16](defaultMap, "int2") + registerDefaultPgTypeVariants[int32](defaultMap, "int4") + registerDefaultPgTypeVariants[int64](defaultMap, "int8") + + // Integer types that do not have a direct match to a PostgreSQL type + registerDefaultPgTypeVariants[int8](defaultMap, "int8") + registerDefaultPgTypeVariants[int](defaultMap, "int8") + registerDefaultPgTypeVariants[uint8](defaultMap, "int8") + registerDefaultPgTypeVariants[uint16](defaultMap, "int8") + 
registerDefaultPgTypeVariants[uint32](defaultMap, "int8") + registerDefaultPgTypeVariants[uint64](defaultMap, "numeric") + registerDefaultPgTypeVariants[uint](defaultMap, "numeric") + + registerDefaultPgTypeVariants[float32](defaultMap, "float4") + registerDefaultPgTypeVariants[float64](defaultMap, "float8") + + registerDefaultPgTypeVariants[bool](defaultMap, "bool") + registerDefaultPgTypeVariants[time.Time](defaultMap, "timestamptz") + registerDefaultPgTypeVariants[time.Duration](defaultMap, "interval") + registerDefaultPgTypeVariants[string](defaultMap, "text") + registerDefaultPgTypeVariants[[]byte](defaultMap, "bytea") + + registerDefaultPgTypeVariants[net.IP](defaultMap, "inet") + registerDefaultPgTypeVariants[net.IPNet](defaultMap, "cidr") + registerDefaultPgTypeVariants[netip.Addr](defaultMap, "inet") + registerDefaultPgTypeVariants[netip.Prefix](defaultMap, "cidr") + + // pgtype provided structs + registerDefaultPgTypeVariants[Bits](defaultMap, "varbit") + registerDefaultPgTypeVariants[Bool](defaultMap, "bool") + registerDefaultPgTypeVariants[Box](defaultMap, "box") + registerDefaultPgTypeVariants[Circle](defaultMap, "circle") + registerDefaultPgTypeVariants[Date](defaultMap, "date") + registerDefaultPgTypeVariants[Range[Date]](defaultMap, "daterange") + registerDefaultPgTypeVariants[Multirange[Range[Date]]](defaultMap, "datemultirange") + registerDefaultPgTypeVariants[Float4](defaultMap, "float4") + registerDefaultPgTypeVariants[Float8](defaultMap, "float8") + registerDefaultPgTypeVariants[Range[Float8]](defaultMap, "numrange") // There is no PostgreSQL builtin float8range so map it to numrange. + registerDefaultPgTypeVariants[Multirange[Range[Float8]]](defaultMap, "nummultirange") // There is no PostgreSQL builtin float8multirange so map it to nummultirange. 
+ registerDefaultPgTypeVariants[Int2](defaultMap, "int2") + registerDefaultPgTypeVariants[Int4](defaultMap, "int4") + registerDefaultPgTypeVariants[Range[Int4]](defaultMap, "int4range") + registerDefaultPgTypeVariants[Multirange[Range[Int4]]](defaultMap, "int4multirange") + registerDefaultPgTypeVariants[Int8](defaultMap, "int8") + registerDefaultPgTypeVariants[Range[Int8]](defaultMap, "int8range") + registerDefaultPgTypeVariants[Multirange[Range[Int8]]](defaultMap, "int8multirange") + registerDefaultPgTypeVariants[Interval](defaultMap, "interval") + registerDefaultPgTypeVariants[Line](defaultMap, "line") + registerDefaultPgTypeVariants[Lseg](defaultMap, "lseg") + registerDefaultPgTypeVariants[Numeric](defaultMap, "numeric") + registerDefaultPgTypeVariants[Range[Numeric]](defaultMap, "numrange") + registerDefaultPgTypeVariants[Multirange[Range[Numeric]]](defaultMap, "nummultirange") + registerDefaultPgTypeVariants[Path](defaultMap, "path") + registerDefaultPgTypeVariants[Point](defaultMap, "point") + registerDefaultPgTypeVariants[Polygon](defaultMap, "polygon") + registerDefaultPgTypeVariants[TID](defaultMap, "tid") + registerDefaultPgTypeVariants[Text](defaultMap, "text") + registerDefaultPgTypeVariants[Time](defaultMap, "time") + registerDefaultPgTypeVariants[Timestamp](defaultMap, "timestamp") + registerDefaultPgTypeVariants[Timestamptz](defaultMap, "timestamptz") + registerDefaultPgTypeVariants[Range[Timestamp]](defaultMap, "tsrange") + registerDefaultPgTypeVariants[Multirange[Range[Timestamp]]](defaultMap, "tsmultirange") + registerDefaultPgTypeVariants[Range[Timestamptz]](defaultMap, "tstzrange") + registerDefaultPgTypeVariants[Multirange[Range[Timestamptz]]](defaultMap, "tstzmultirange") + registerDefaultPgTypeVariants[UUID](defaultMap, "uuid") +} diff --git a/vendor/github.com/jackc/pgx/v5/pgtype/timestamp.go b/vendor/github.com/jackc/pgx/v5/pgtype/timestamp.go index 9f3de2c592..35d7395660 100644 --- a/vendor/github.com/jackc/pgx/v5/pgtype/timestamp.go +++ 
b/vendor/github.com/jackc/pgx/v5/pgtype/timestamp.go @@ -3,6 +3,7 @@ package pgtype import ( "database/sql/driver" "encoding/binary" + "encoding/json" "fmt" "strings" "time" @@ -66,6 +67,55 @@ func (ts Timestamp) Value() (driver.Value, error) { return ts.Time, nil } +func (ts Timestamp) MarshalJSON() ([]byte, error) { + if !ts.Valid { + return []byte("null"), nil + } + + var s string + + switch ts.InfinityModifier { + case Finite: + s = ts.Time.Format(time.RFC3339Nano) + case Infinity: + s = "infinity" + case NegativeInfinity: + s = "-infinity" + } + + return json.Marshal(s) +} + +func (ts *Timestamp) UnmarshalJSON(b []byte) error { + var s *string + err := json.Unmarshal(b, &s) + if err != nil { + return err + } + + if s == nil { + *ts = Timestamp{} + return nil + } + + switch *s { + case "infinity": + *ts = Timestamp{Valid: true, InfinityModifier: Infinity} + case "-infinity": + *ts = Timestamp{Valid: true, InfinityModifier: -Infinity} + default: + // PostgreSQL uses ISO 8601 for to_json function and casting from a string to timestamptz + tim, err := time.Parse(time.RFC3339Nano, *s) + if err != nil { + return err + } + + *ts = Timestamp{Time: tim, Valid: true} + } + + return nil +} + type TimestampCodec struct{} func (TimestampCodec) FormatSupported(format int16) bool { diff --git a/vendor/github.com/jackc/pgx/v5/pgxpool/pool.go b/vendor/github.com/jackc/pgx/v5/pgxpool/pool.go index 7649488edd..f4c3a30cab 100644 --- a/vendor/github.com/jackc/pgx/v5/pgxpool/pool.go +++ b/vendor/github.com/jackc/pgx/v5/pgxpool/pool.go @@ -85,6 +85,7 @@ type Pool struct { afterConnect func(context.Context, *pgx.Conn) error beforeAcquire func(context.Context, *pgx.Conn) bool afterRelease func(*pgx.Conn) bool + beforeClose func(*pgx.Conn) minConns int32 maxConns int32 maxConnLifetime time.Duration @@ -111,7 +112,7 @@ type Config struct { AfterConnect func(context.Context, *pgx.Conn) error // BeforeAcquire is called before a connection is acquired from the pool. 
It must return true to allow the - // acquision or false to indicate that the connection should be destroyed and a different connection should be + // acquisition or false to indicate that the connection should be destroyed and a different connection should be // acquired. BeforeAcquire func(context.Context, *pgx.Conn) bool @@ -119,6 +120,9 @@ type Config struct { // return the connection to the pool or false to destroy the connection. AfterRelease func(*pgx.Conn) bool + // BeforeClose is called right before a connection is closed and removed from the pool. + BeforeClose func(*pgx.Conn) + // MaxConnLifetime is the duration since creation after which a connection will be automatically closed. MaxConnLifetime time.Duration @@ -180,6 +184,7 @@ func NewWithConfig(ctx context.Context, config *Config) (*Pool, error) { afterConnect: config.AfterConnect, beforeAcquire: config.BeforeAcquire, afterRelease: config.AfterRelease, + beforeClose: config.BeforeClose, minConns: config.MinConns, maxConns: config.MaxConns, maxConnLifetime: config.MaxConnLifetime, @@ -236,6 +241,9 @@ func NewWithConfig(ctx context.Context, config *Config) (*Pool, error) { Destructor: func(value *connResource) { ctx, cancel := context.WithTimeout(context.Background(), 15*time.Second) conn := value.conn + if p.beforeClose != nil { + p.beforeClose(conn) + } conn.Close(ctx) select { case <-conn.PgConn().CleanupDone(): @@ -496,7 +504,7 @@ func (p *Pool) Acquire(ctx context.Context) (*Conn, error) { cr := res.Value() if res.IdleDuration() > time.Second { - err := cr.conn.PgConn().CheckConn() + err := cr.conn.Ping(ctx) if err != nil { res.Destroy() continue diff --git a/vendor/github.com/jackc/pgx/v5/pgxpool/rows.go b/vendor/github.com/jackc/pgx/v5/pgxpool/rows.go index 2b11ecd366..f834b7ec30 100644 --- a/vendor/github.com/jackc/pgx/v5/pgxpool/rows.go +++ b/vendor/github.com/jackc/pgx/v5/pgxpool/rows.go @@ -101,7 +101,14 @@ func (row *poolRow) Scan(dest ...any) error { return row.err } + panicked := true + 
defer func() { + if panicked && row.c != nil { + row.c.Release() + } + }() err := row.r.Scan(dest...) + panicked = false if row.c != nil { row.c.Release() } diff --git a/vendor/github.com/jackc/pgx/v5/rows.go b/vendor/github.com/jackc/pgx/v5/rows.go index fc3a2f3491..cdd72a25f3 100644 --- a/vendor/github.com/jackc/pgx/v5/rows.go +++ b/vendor/github.com/jackc/pgx/v5/rows.go @@ -28,12 +28,16 @@ type Rows interface { // to call Close after rows is already closed. Close() - // Err returns any error that occurred while reading. + // Err returns any error that occurred while reading. Err must only be called after the Rows is closed (either by + // calling Close or by Next returning false). If it is called early it may return nil even if there was an error + // executing the query. Err() error // CommandTag returns the command tag from this query. It is only available after Rows is closed. CommandTag() pgconn.CommandTag + // FieldDescriptions returns the field descriptions of the columns. It may return nil. In particular this can occur + // when there was an error executing the query. FieldDescriptions() []pgconn.FieldDescription // Next prepares the next row for reading. It returns true if there is another @@ -533,13 +537,11 @@ func (rs *positionalStructRowScanner) appendScanTargets(dstElemValue reflect.Val for i := 0; i < dstElemType.NumField(); i++ { sf := dstElemType.Field(i) - if sf.PkgPath == "" { - // Handle anonymous struct embedding, but do not try to handle embedded pointers. - if sf.Anonymous && sf.Type.Kind() == reflect.Struct { - scanTargets = rs.appendScanTargets(dstElemValue.Field(i), scanTargets) - } else { - scanTargets = append(scanTargets, dstElemValue.Field(i).Addr().Interface()) - } + // Handle anonymous struct embedding, but do not try to handle embedded pointers. 
+ if sf.Anonymous && sf.Type.Kind() == reflect.Struct { + scanTargets = rs.appendScanTargets(dstElemValue.Field(i), scanTargets) + } else if sf.PkgPath == "" { + scanTargets = append(scanTargets, dstElemValue.Field(i).Addr().Interface()) } } diff --git a/vendor/modules.txt b/vendor/modules.txt index b30d917f6a..07d8d0220d 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -249,16 +249,16 @@ github.com/jackc/pgservicefile # github.com/jackc/pgx-zap v0.0.0-20221202020421-94b1cb2f889f ## explicit; go 1.17 github.com/jackc/pgx-zap -# github.com/jackc/pgx/v5 v5.3.2-0.20230421024359-6defa2a607fd +# github.com/jackc/pgx/v5 v5.4.0 ## explicit; go 1.19 github.com/jackc/pgx/v5 github.com/jackc/pgx/v5/internal/anynil github.com/jackc/pgx/v5/internal/iobufpool -github.com/jackc/pgx/v5/internal/nbconn github.com/jackc/pgx/v5/internal/pgio github.com/jackc/pgx/v5/internal/sanitize github.com/jackc/pgx/v5/internal/stmtcache github.com/jackc/pgx/v5/pgconn +github.com/jackc/pgx/v5/pgconn/internal/bgreader github.com/jackc/pgx/v5/pgconn/internal/ctxwatch github.com/jackc/pgx/v5/pgproto3 github.com/jackc/pgx/v5/pgtype