Compare commits

...

89 Commits

Author SHA1 Message Date
Elias Schneider
96aa2ce043 Merge branch 'breaking/v2' into v2/use-item-component 2025-11-29 18:42:28 +01:00
Kyle Mendell
e06538a101 feat: remove DbProvider env variable and calculate it dynamically (#1114) 2025-11-27 12:38:06 -08:00
Kyle Mendell
ddff3a2975 fixes 2025-11-26 13:55:49 -06:00
Kyle Mendell
a738d9fe88 refactor: update forms and other areas to use new shadcn components 2025-11-26 12:58:44 -06:00
Elias Schneider
39c1f93756 feat: add CLI command for importing and exporting Pocket ID data (#998)
Co-authored-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-26 10:38:15 +01:00
ItalyPaleAle
e306d6eb58 Merge remote-tracking branch 'origin' into breaking/v2 2025-11-25 20:48:03 -08:00
Elias Schneider
f523f39483 tests: fix Dutch validation message 2025-11-25 22:51:20 +01:00
Elias Schneider
4bde271b47 chore: upgrade dependencies 2025-11-25 22:30:28 +01:00
Elias Schneider
a3c968758a feat: add option to disable S3 integrity check 2025-11-25 22:14:44 +01:00
Elias Schneider
ca888b3dd2 chore(translations): add Finish files 2025-11-25 20:46:48 +01:00
Elias Schneider
ce88686c5f chore(translations): update translations via Crowdin (#1111) 2025-11-25 20:43:47 +01:00
Elias Schneider
a9b6635126 chore(translations): update translations via Crowdin (#1101) 2025-11-23 17:10:24 +01:00
dependabot[bot]
e817f042ec chore(deps): bump golang.org/x/crypto from 0.43.0 to 0.45.0 in /backend in the go_modules group across 1 directory (#1107)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-23 17:10:10 +01:00
Elias Schneider
46793fe68a Merge branch 'main' into v2 2025-11-17 08:39:36 +01:00
Alessandro (Ale) Segala
c56afe016e feat: adding/removing passkeys creates an entry in audit logs (#1099)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-11-16 14:51:38 -08:00
Alessandro (Ale) Segala
a54b867105 refactor: use constants for AppEnv values (#1098) 2025-11-16 18:25:06 +01:00
Alessandro (Ale) Segala
29a1d3b778 feat: add database storage backend (#1091)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-11-16 18:23:46 +01:00
Elias Schneider
e22822890f feat!: drop support for storing JWK on the filesystem (#1088) 2025-11-14 12:02:51 +01:00
Elias Schneider
2694d79add Merge branch 'main' into v2 2025-11-14 11:08:20 +01:00
Elias Schneider
98cf1f66c3 fix!: rename LDAP_ATTRIBUTE_ADMIN_GROUP env variable to LDAP_ADMIN_GROUP_NAME (#1089) 2025-11-12 11:51:01 +01:00
Elias Schneider
12125713a2 feat: add support for WEBP profile pictures (#1090) 2025-11-11 10:56:20 -06:00
Elias Schneider
ab9c0f9ac0 ci/cd: run checks on PR to breaking/** branches 2025-11-11 11:21:39 +01:00
Elias Schneider
42b872d6b2 chore(translations): update translations via Crowdin (#1085) 2025-11-10 14:46:48 +01:00
Elias Schneider
bfd71d090c feat: add support for S3 storage backend (#1080)
Co-authored-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com>
2025-11-10 09:02:25 +00:00
Kyle Mendell
d5e0cfd4a6 feat: light/dark/system mode switcher (#1081) 2025-11-09 13:28:58 -06:00
Kyle Mendell
9981304b4b chore(deps): update pnpm to 10.20 (#1082) 2025-11-08 13:09:05 -06:00
Elias Schneider
5cf73e9309 fix: use quoted-printable encoding for mails to prevent line limitation 2025-11-08 17:34:43 +01:00
Elias Schneider
f125cf0dad release: 1.15.0 2025-11-06 15:49:39 +01:00
Elias Schneider
6a038fcf9a fix: remove redundant indexes in Postgres 2025-11-06 15:02:39 +01:00
Elias Schneider
76e0192cee fix: disabled property gets ignored when creating an user 2025-11-06 12:28:07 +01:00
Elias Schneider
3ebf94dd84 chore(translations): update translations via Crowdin (#1059) 2025-11-05 11:28:14 +01:00
dai
7ec57437ac fix: replace %lang% placeholder in html lang (#1071)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-11-04 13:01:12 +00:00
Elias Schneider
ed2c7b2303 feat: add ability to set default profile picture (#1061)
Co-authored-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com>
2025-11-04 13:40:00 +01:00
Elias Schneider
e03270eb9d fix: sorting by PKCE and re-auth of OIDC clients 2025-11-04 13:27:05 +01:00
Elias Schneider
d683d18d91 chore: add support for OpenBSD binaries 2025-11-01 16:26:43 +01:00
Elias Schneider
f184120890 feat: open edit page on table row click 2025-10-29 10:46:03 +01:00
Elias Schneider
04d8500910 release: 1.14.2 2025-10-29 09:25:28 +01:00
Mufeed Ali
93639dddb2 fix: dark oidc client icons not saved on client creation (#1057) 2025-10-28 12:35:56 +01:00
Elias Schneider
a190529117 chore(translations): add Turkish language files 2025-10-28 09:31:32 +01:00
Elias Schneider
73392b5837 release: 1.14.1 2025-10-27 14:07:34 +01:00
Elias Schneider
65616f65e5 fix: ignore trailing slashes in APP_URL 2025-10-27 10:48:27 +01:00
Elias Schneider
98a99fbb0a chore(translations): update translations via Crowdin (#1048) 2025-10-27 09:48:39 +01:00
Quentin L'Hours
3f3b6b88fd fix: use credProps to save passkey on firefox android (#1055) 2025-10-27 09:48:24 +01:00
Mufeed Ali
8f98d8c0b4 fix: Prevent blinding FOUC in dark mode (#1054) 2025-10-26 20:40:25 +01:00
Elias Schneider
c9308472a9 release: 1.14.0 2025-10-24 13:47:58 +02:00
Elias Schneider
6362ff9861 chore: upgrade dependencies 2025-10-24 12:18:38 +02:00
Elias Schneider
10d640385f fix: prevent page flickering on redirection based on auth state 2025-10-24 12:12:42 +02:00
Elias Schneider
47927d1574 fix: make pkce requirement visible in the oidc form if client is public 2025-10-24 10:57:44 +02:00
Elias Schneider
b356cef766 fix: only animate login background on initial page load 2025-10-24 10:53:51 +02:00
Elias Schneider
9fc45930a8 chore(translations): update translations via Crowdin (#1033) 2025-10-24 09:58:00 +02:00
Kyle Mendell
028d1c858e feat: add support for dark mode oidc client icons (#1039)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-10-24 09:57:12 +02:00
Alessandro (Ale) Segala
eb3963d0fc fix: use constant time comparisons when validating PKCE challenges (#1047) 2025-10-24 08:30:50 +02:00
dependabot[bot]
35d913f905 chore(deps-dev): bump vite from 7.0.7 to 7.0.8 in the npm_and_yarn group across 1 directory (#1042)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-21 18:04:28 -05:00
github-actions[bot]
32485f4c7c chore: update AAGUIDs (#1041)
Co-authored-by: stonith404 <58886915+stonith404@users.noreply.github.com>
2025-10-20 08:43:27 +02:00
Elias Schneider
ceb38b0825 chore(translations): update translations via Crowdin (#1025) 2025-10-16 08:28:13 +02:00
dependabot[bot]
c0b6ede5be chore(deps): bump sveltekit-superforms from 2.27.1 to 2.27.4 in the npm_and_yarn group across 1 directory (#1031)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-10-16 08:27:52 +02:00
Elias Schneider
c20e93b55c feat: add various improvements to the table component (#961)
Co-authored-by: Kyle Mendell <kmendell@ofkm.us>
2025-10-13 09:12:55 +00:00
Elias Schneider
24ca6a106d chore(translations): update translations via Crowdin (#1014) 2025-10-12 09:58:22 -05:00
Elias Schneider
9f0aa55be6 fix: ignore trailing slash in URL 2025-10-09 20:27:15 +02:00
Kyle Mendell
068fcc65a6 chore(translations): add Japanese files 2025-10-07 18:29:13 -05:00
Elias Schneider
f2dfb3da5d release: 1.13.1 2025-10-07 08:21:41 +02:00
Elias Schneider
cbf0e3117d fix: mark any callback url as valid if they contain a wildcard (#1006) 2025-10-07 08:18:53 +02:00
CzBiX
694f266dea fix: uploading a client logo with an URL fails (#1008) 2025-10-06 10:37:43 -05:00
Kyle Mendell
29fc185376 chore: cleanup root of repo, update workflow actions (#1003)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-10-05 14:49:06 -05:00
Elias Schneider
781be37416 release: 1.13.0 2025-10-05 17:30:47 +02:00
Elias Schneider
b1f97e05a1 chore(translations): update translations via Crowdin (#999) 2025-10-05 17:30:08 +02:00
Elias Schneider
2c74865173 feat: add link to API docs on API key page 2025-10-04 23:45:37 +02:00
Elias Schneider
ad8a90c839 fix: uploading a client logo with an URL fails if folder doesn't exist 2025-10-04 23:44:37 +02:00
Elias Schneider
f9839a978c release: 1.12.0 2025-10-03 11:59:38 +02:00
Elias Schneider
b81de45166 fix: date locale can't be loaded if locale is en 2025-10-03 11:54:07 +02:00
Elias Schneider
22f4254932 fix: allow any image source but disallow base64 2025-10-03 11:50:39 +02:00
Elias Schneider
507f9490fa feat: add the ability to make email optional (#994) 2025-10-03 11:24:53 +02:00
Elias Schneider
043cce615d feat: add required indicator for required inputs (#993) 2025-10-01 13:44:17 +02:00
Elias Schneider
69e2083722 chore(translations): update translations via Crowdin (#992) 2025-09-30 23:04:54 -05:00
Elias Schneider
d47b20326f fix: improve back button handling on auth pages 2025-09-30 14:44:08 +02:00
Elias Schneider
fc9939d1f1 fix: prevent endless effect loop in login wrapper 2025-09-30 14:06:13 +02:00
Elias Schneider
2c1c67b5e4 fix: include port in OIDC client details 2025-09-30 12:18:44 +02:00
Elias Schneider
d010be4c88 feat: hide alternative sign in methods page if email login disabled 2025-09-30 12:15:08 +02:00
Elias Schneider
01db8c0a46 fix: make logo and oidc client images sizes consistent 2025-09-30 12:12:37 +02:00
Alessandro (Ale) Segala
fe5917d96d fix: tokens issued with refresh token flow don't contain groups (#989)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-09-30 09:44:38 +00:00
Elias Schneider
4f0b434c54 chore(translations): update translations via Crowdin (#973) 2025-09-30 11:39:47 +02:00
Kyle Mendell
6bdf5fa37a feat: support for url based icons (#840)
Co-authored-by: Elias Schneider <login@eliasschneider.com>
2025-09-29 15:07:55 +00:00
Caian Benedicto
47bd5ba1ba fix: remove previous socket file to prevent bind error (#979) 2025-09-24 10:10:05 +00:00
Elias Schneider
b746ac0835 chore(email): remove unnecessary logo fallback 2025-09-24 12:08:24 +02:00
Elias Schneider
79989fb176 fix(email): display login location correctly if country or city is not present 2025-09-24 12:01:20 +02:00
Clément Contini
ecc7e224e9 fix: show only country in audit log location if no city instead of Unknown (#977) 2025-09-23 18:41:54 -05:00
Alessandro (Ale) Segala
549d219f44 fix(unit-tests): do not use cache=shared for in-memory SQLite (#971) 2025-09-21 19:32:41 -05:00
github-actions[bot]
ffe18db2fb chore: update AAGUIDs (#972)
Co-authored-by: stonith404 <58886915+stonith404@users.noreply.github.com>
2025-09-21 19:31:44 -05:00
Elias Schneider
e8b172f1c3 chore(release notes): fix whitespace after commit message 2025-09-20 22:54:55 +02:00
339 changed files with 14793 additions and 10422 deletions

View File

@@ -1,5 +1,17 @@
# See the documentation for more information: https://pocket-id.org/docs/configuration/environment-variables
# These variables must be configured for your deployment:
APP_URL=https://your-pocket-id-domain.com
# Encryption key (choose one method):
# Method 1: Direct key (simple but less secure)
# Generate with: openssl rand -base64 32
ENCRYPTION_KEY=
# Method 2: File-based key (recommended)
# Put the base64 key in a file and point to it here.
# ENCRYPTION_KEY_FILE=/path/to/encryption_key
# These variables are optional but recommended to review:
TRUST_PROXY=false
MAXMIND_LICENSE_KEY=
PUID=1000
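
For illustration, a minimal sketch of the file-based key method described in the hunk above; the /etc/pocket-id path and the chmod step are assumptions, not part of the repository:

# Generate a base64 key and store it outside the repository (assumed path)
openssl rand -base64 32 > /etc/pocket-id/encryption_key
chmod 600 /etc/pocket-id/encryption_key
# Then point the .env file at it instead of setting ENCRYPTION_KEY directly:
# ENCRYPTION_KEY_FILE=/etc/pocket-id/encryption_key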

View File

@@ -2,11 +2,11 @@ name: Run Backend Linter
on:
push:
branches: [main]
branches: [main, breaking/**]
paths:
- "backend/**"
pull_request:
branches: [main]
branches: [main, breaking/**]
paths:
- "backend/**"
@@ -24,10 +24,10 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Set up Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version-file: backend/go.mod

View File

@@ -19,22 +19,20 @@ jobs:
attestations: write
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Setup pnpm
uses: pnpm/action-setup@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v5
with:
node-version: 22
cache: 'pnpm'
cache-dependency-path: pnpm-lock.yaml
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v6
with:
go-version-file: 'backend/go.mod'
go-version-file: "backend/go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
@@ -74,7 +72,7 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.DOCKER_IMAGE_NAME }}:next
file: Dockerfile-prebuilt
file: docker/Dockerfile-prebuilt
- name: Build and push container image (distroless)
uses: docker/build-push-action@v6
id: container-build-push-distroless
@@ -83,16 +81,16 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ env.DOCKER_IMAGE_NAME }}:next-distroless
file: Dockerfile-distroless
file: docker/Dockerfile-distroless
- name: Container image attestation
uses: actions/attest-build-provenance@v2
with:
subject-name: '${{ env.DOCKER_IMAGE_NAME }}'
subject-name: "${{ env.DOCKER_IMAGE_NAME }}"
subject-digest: ${{ steps.build-push-image.outputs.digest }}
push-to-registry: true
- name: Container image attestation (distroless)
uses: actions/attest-build-provenance@v2
with:
subject-name: '${{ env.DOCKER_IMAGE_NAME }}'
subject-name: "${{ env.DOCKER_IMAGE_NAME }}"
subject-digest: ${{ steps.container-build-push-distroless.outputs.digest }}
push-to-registry: true

View File

@@ -1,17 +1,17 @@
name: E2E Tests
on:
push:
branches: [main]
branches: [main, breaking/**]
paths-ignore:
- 'docs/**'
- '**.md'
- '.github/**'
- "docs/**"
- "**.md"
- ".github/**"
pull_request:
branches: [main]
branches: [main, breaking/**]
paths-ignore:
- 'docs/**'
- '**.md'
- '.github/**'
- "docs/**"
- "**.md"
- ".github/**"
jobs:
build:
@@ -22,7 +22,7 @@ jobs:
actions: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -30,6 +30,8 @@ jobs:
- name: Build and export
uses: docker/build-push-action@v6
with:
context: .
file: docker/Dockerfile
push: false
load: false
tags: pocket-id:test
@@ -55,58 +57,98 @@ jobs:
strategy:
fail-fast: false
matrix:
db: [sqlite, postgres]
include:
- db: sqlite
storage: fs
- db: postgres
storage: fs
- db: sqlite
storage: s3
- db: sqlite
storage: database
- db: postgres
storage: database
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5
- name: Setup pnpm
uses: pnpm/action-setup@v4
- uses: actions/setup-node@v4
- name: Setup Node.js
uses: actions/setup-node@v5
with:
node-version: 22
cache: 'pnpm'
cache-dependency-path: pnpm-lock.yaml
- name: Cache Playwright Browsers
uses: actions/cache@v3
uses: actions/cache@v4
id: playwright-cache
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-${{ hashFiles('pnpm-lock.yaml') }}
- name: Cache PostgreSQL Docker image
if: matrix.db == 'postgres'
uses: actions/cache@v3
uses: actions/cache@v4
id: postgres-cache
with:
path: /tmp/postgres-image.tar
key: postgres-17-${{ runner.os }}
- name: Pull and save PostgreSQL image
if: matrix.db == 'postgres' && steps.postgres-cache.outputs.cache-hit != 'true'
run: |
docker pull postgres:17
docker save postgres:17 > /tmp/postgres-image.tar
- name: Load PostgreSQL image from cache
- name: Load PostgreSQL image
if: matrix.db == 'postgres' && steps.postgres-cache.outputs.cache-hit == 'true'
run: docker load < /tmp/postgres-image.tar
- name: Cache LLDAP Docker image
uses: actions/cache@v3
uses: actions/cache@v4
id: lldap-cache
with:
path: /tmp/lldap-image.tar
key: lldap-stable-${{ runner.os }}
- name: Pull and save LLDAP image
if: steps.lldap-cache.outputs.cache-hit != 'true'
run: |
docker pull nitnelave/lldap:stable
docker save nitnelave/lldap:stable > /tmp/lldap-image.tar
- name: Load LLDAP image from cache
docker pull lldap/lldap:2025-05-19
docker save lldap/lldap:2025-05-19 > /tmp/lldap-image.tar
- name: Load LLDAP image
if: steps.lldap-cache.outputs.cache-hit == 'true'
run: docker load < /tmp/lldap-image.tar
- name: Cache Localstack S3 Docker image
if: matrix.storage == 's3'
uses: actions/cache@v4
id: s3-cache
with:
path: /tmp/localstack-s3-image.tar
key: localstack-s3-latest-${{ runner.os }}
- name: Pull and save Localstack S3 image
if: matrix.storage == 's3' && steps.s3-cache.outputs.cache-hit != 'true'
run: |
docker pull localstack/localstack:s3-latest
docker save localstack/localstack:s3-latest > /tmp/localstack-s3-image.tar
- name: Load Localstack S3 image
if: matrix.storage == 's3' && steps.s3-cache.outputs.cache-hit == 'true'
run: docker load < /tmp/localstack-s3-image.tar
- name: Cache AWS CLI Docker image
if: matrix.storage == 's3'
uses: actions/cache@v4
id: aws-cli-cache
with:
path: /tmp/aws-cli-image.tar
key: aws-cli-latest-${{ runner.os }}
- name: Pull and save AWS CLI image
if: matrix.storage == 's3' && steps.aws-cli-cache.outputs.cache-hit != 'true'
run: |
docker pull amazon/aws-cli:latest
docker save amazon/aws-cli:latest > /tmp/aws-cli-image.tar
- name: Load AWS CLI image
if: matrix.storage == 's3' && steps.aws-cli-cache.outputs.cache-hit == 'true'
run: docker load < /tmp/aws-cli-image.tar
- name: Download Docker image artifact
uses: actions/download-artifact@v4
with:
@@ -123,19 +165,34 @@ jobs:
working-directory: ./tests
if: steps.playwright-cache.outputs.cache-hit != 'true'
run: pnpm exec playwright install --with-deps chromium
- name: Run Docker Container (sqlite) with LDAP
if: matrix.db == 'sqlite'
working-directory: ./tests/setup
run: |
docker compose up -d
docker compose logs -f pocket-id &> /tmp/backend.log &
- name: Run Docker Container (postgres) with LDAP
if: matrix.db == 'postgres'
- name: Run Docker containers
working-directory: ./tests/setup
run: |
docker compose -f docker-compose-postgres.yml up -d
docker compose -f docker-compose-postgres.yml logs -f pocket-id &> /tmp/backend.log &
DOCKER_COMPOSE_FILE=docker-compose.yml
echo "FILE_BACKEND=${{ matrix.storage }}" > .env
if [ "${{ matrix.db }}" = "postgres" ]; then
DOCKER_COMPOSE_FILE=docker-compose-postgres.yml
elif [ "${{ matrix.storage }}" = "s3" ]; then
DOCKER_COMPOSE_FILE=docker-compose-s3.yml
fi
docker compose -f "$DOCKER_COMPOSE_FILE" up -d
{
LOG_FILE="/tmp/backend.log"
while true; do
CID=$(docker compose -f "$DOCKER_COMPOSE_FILE" ps -q pocket-id)
if [ -n "$CID" ]; then
echo "[$(date)] Attaching logs for $CID" >> "$LOG_FILE"
docker logs -f --since=0 "$CID" >> "$LOG_FILE" 2>&1
else
echo "[$(date)] Container not yet running…" >> "$LOG_FILE"
fi
sleep 1
done
} &
- name: Run Playwright tests
working-directory: ./tests
@@ -145,7 +202,7 @@ jobs:
uses: actions/upload-artifact@v4
if: always() && github.event.pull_request.head.ref != 'i18n_crowdin'
with:
name: playwright-report-${{ matrix.db }}
name: playwright-report-${{ matrix.db }}-${{ matrix.storage }}
path: tests/.report
include-hidden-files: true
retention-days: 15
@@ -154,7 +211,7 @@ jobs:
uses: actions/upload-artifact@v4
if: always() && github.event.pull_request.head.ref != 'i18n_crowdin'
with:
name: backend-${{ matrix.db }}
name: backend-${{ matrix.db }}-${{ matrix.storage }}
path: /tmp/backend.log
include-hidden-files: true
retention-days: 15

View File

@@ -3,7 +3,7 @@ name: Release
on:
push:
tags:
- 'v*.*.*'
- "v*.*.*"
jobs:
build:
@@ -19,14 +19,12 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v5
with:
node-version: 22
cache: 'pnpm'
cache-dependency-path: pnpm-lock.yaml
- uses: actions/setup-go@v5
- uses: actions/setup-go@v6
with:
go-version-file: 'backend/go.mod'
go-version-file: "backend/go.mod"
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
@@ -81,7 +79,7 @@ jobs:
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
file: Dockerfile-prebuilt
file: docker/Dockerfile-prebuilt
- name: Build and push container image (distroless)
uses: docker/build-push-action@v6
id: container-build-push-distroless
@@ -91,21 +89,21 @@ jobs:
push: true
tags: ${{ steps.meta-distroless.outputs.tags }}
labels: ${{ steps.meta-distroless.outputs.labels }}
file: Dockerfile-distroless
file: docker/Dockerfile-distroless
- name: Binary attestation
uses: actions/attest-build-provenance@v2
with:
subject-path: 'backend/.bin/pocket-id-**'
subject-path: "backend/.bin/pocket-id-**"
- name: Container image attestation
uses: actions/attest-build-provenance@v2
with:
subject-name: '${{ env.DOCKER_IMAGE_NAME }}'
subject-name: "${{ env.DOCKER_IMAGE_NAME }}"
subject-digest: ${{ steps.container-build-push.outputs.digest }}
push-to-registry: true
- name: Container image attestation (distroless)
uses: actions/attest-build-provenance@v2
with:
subject-name: '${{ env.DOCKER_IMAGE_NAME }}'
subject-name: "${{ env.DOCKER_IMAGE_NAME }}"
subject-digest: ${{ steps.container-build-push-distroless.outputs.digest }}
push-to-registry: true
- name: Upload binaries to release
@@ -122,6 +120,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Mark release as published
run: gh release edit ${{ github.ref_name }} --draft=false

View File

@@ -2,23 +2,23 @@ name: Svelte Check
on:
push:
branches: [main]
branches: [main, breaking/**]
paths:
- 'frontend/src/**'
- '.github/svelte-check-matcher.json'
- 'frontend/package.json'
- 'frontend/package-lock.json'
- 'frontend/tsconfig.json'
- 'frontend/svelte.config.js'
- "frontend/src/**"
- ".github/svelte-check-matcher.json"
- "frontend/package.json"
- "frontend/package-lock.json"
- "frontend/tsconfig.json"
- "frontend/svelte.config.js"
pull_request:
branches: [main]
paths:
- 'frontend/src/**'
- '.github/svelte-check-matcher.json'
- 'frontend/package.json'
- 'frontend/package-lock.json'
- 'frontend/tsconfig.json'
- 'frontend/svelte.config.js'
- "frontend/src/**"
- ".github/svelte-check-matcher.json"
- "frontend/package.json"
- "frontend/package-lock.json"
- "frontend/tsconfig.json"
- "frontend/svelte.config.js"
workflow_dispatch:
jobs:
@@ -34,17 +34,15 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Setup pnpm
uses: pnpm/action-setup@v4
- name: Setup Node.js
uses: actions/setup-node@v4
uses: actions/setup-node@v5
with:
node-version: 22
cache: 'pnpm'
cache-dependency-path: pnpm-lock.yaml
- name: Install dependencies
run: pnpm --filter pocket-id-frontend install --frozen-lockfile

View File

@@ -1,7 +1,7 @@
name: Unit Tests
on:
push:
branches: [main]
branches: [main, breaking/**]
paths:
- "backend/**"
pull_request:
@@ -16,8 +16,8 @@ jobs:
actions: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- uses: actions/checkout@v5
- uses: actions/setup-go@v6
with:
go-version-file: "backend/go.mod"
cache-dependency-path: "backend/go.sum"

View File

@@ -15,7 +15,7 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4
uses: actions/checkout@v5
- name: Fetch JSON data
run: |

.gitignore (5 changed lines, vendored)
View File

@@ -1,8 +1,12 @@
# JetBrains
**/.idea
# Node
node_modules
# PNPM
.pnpm-store/
# Output
.output
.vercel
@@ -11,6 +15,7 @@ node_modules
/backend/bin
pocket-id
/tests/test-results/*.json
.tmp/
# OS
.DS_Store

View File

@@ -1 +1 @@
1.11.2
1.15.0

File diff suppressed because it is too large

View File

@@ -1,9 +1,12 @@
package main
import (
"fmt"
"os"
_ "time/tzdata"
"github.com/pocket-id/pocket-id/backend/internal/cmds"
"github.com/pocket-id/pocket-id/backend/internal/common"
)
// @title Pocket ID API
@@ -11,5 +14,9 @@ import (
// @description.markdown
func main() {
if err := common.ValidateEnvConfig(&common.EnvConfig); err != nil {
fmt.Fprintf(os.Stderr, "config error: %v\n", err)
os.Exit(1)
}
cmds.Execute()
}

View File

@@ -32,10 +32,6 @@ func init() {
panic(fmt.Errorf("failed to read index.html: %w", iErr))
}
// Get the position of the first <script> tag
idx := bytes.Index(index, []byte(scriptTag))
// Create writeIndexFn, which adds the CSP tag to the script tag if needed
writeIndexFn = func(w io.Writer, nonce string) (err error) {
// If there's no nonce, write the index as-is
if nonce == "" {
@@ -43,23 +39,16 @@ func init() {
return err
}
// We have a nonce, so first write the index until the <script> tag
// Then we write the modified script tag
// Finally, the rest of the index
_, err = w.Write(index[0:idx])
if err != nil {
return err
}
_, err = w.Write([]byte(`<script nonce="` + nonce + `">`))
if err != nil {
return err
}
_, err = w.Write(index[(idx + len(scriptTag)):])
if err != nil {
return err
}
// Add nonce to all <script> tags
// We replace "<script" with `<script nonce="..."` everywhere it appears
modified := bytes.ReplaceAll(
index,
[]byte(scriptTag),
[]byte(`<script nonce="`+nonce+`">`),
)
return nil
_, err = w.Write(modified)
return err
}
}
@@ -75,6 +64,11 @@ func RegisterFrontend(router *gin.Engine) error {
router.NoRoute(func(c *gin.Context) {
path := strings.TrimPrefix(c.Request.URL.Path, "/")
if strings.HasSuffix(path, "/") {
c.Redirect(http.StatusMovedPermanently, strings.TrimRight(c.Request.URL.String(), "/"))
return
}
if strings.HasPrefix(path, "api/") {
c.JSON(http.StatusNotFound, gin.H{"error": "API endpoint not found"})
return
@@ -94,13 +88,9 @@ func RegisterFrontend(router *gin.Engine) error {
c.Header("Content-Type", "text/html; charset=utf-8")
c.Header("Cache-Control", "no-store")
c.Status(http.StatusOK)
err = writeIndexFn(c.Writer, nonce)
if err != nil {
if err := writeIndexFn(c.Writer, nonce); err != nil {
_ = c.Error(fmt.Errorf("failed to write index.html file: %w", err))
return
}
return
}

View File

@@ -3,88 +3,109 @@ module github.com/pocket-id/pocket-id/backend
go 1.25
require (
github.com/aws/aws-sdk-go-v2 v1.40.0
github.com/aws/aws-sdk-go-v2/config v1.32.2
github.com/aws/aws-sdk-go-v2/credentials v1.19.2
github.com/aws/aws-sdk-go-v2/service/s3 v1.92.1
github.com/aws/smithy-go v1.23.2
github.com/caarlos0/env/v11 v11.3.1
github.com/cenkalti/backoff/v5 v5.0.3
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
github.com/disintegration/imaging v1.6.2
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21
github.com/emersion/go-smtp v0.21.3
github.com/fxamacker/cbor/v2 v2.9.0
github.com/gin-contrib/slog v1.1.0
github.com/gin-gonic/gin v1.10.1
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6
github.com/emersion/go-smtp v0.24.0
github.com/gin-contrib/slog v1.2.0
github.com/gin-gonic/gin v1.11.0
github.com/glebarez/go-sqlite v1.22.0
github.com/glebarez/sqlite v1.11.0
github.com/go-co-op/gocron/v2 v2.16.3
github.com/go-ldap/ldap/v3 v3.4.10
github.com/go-playground/validator/v10 v10.27.0
github.com/go-webauthn/webauthn v0.11.2
github.com/golang-migrate/migrate/v4 v4.18.3
github.com/go-co-op/gocron/v2 v2.18.1
github.com/go-ldap/ldap/v3 v3.4.12
github.com/go-playground/validator/v10 v10.28.0
github.com/go-webauthn/webauthn v0.15.0
github.com/golang-migrate/migrate/v4 v4.19.0
github.com/google/uuid v1.6.0
github.com/hashicorp/go-uuid v1.0.3
github.com/jinzhu/copier v0.4.0
github.com/joho/godotenv v1.5.1
github.com/lestrrat-go/httprc/v3 v3.0.0
github.com/lestrrat-go/jwx/v3 v3.0.10
github.com/lestrrat-go/httprc/v3 v3.0.1
github.com/lestrrat-go/jwx/v3 v3.0.12
github.com/lmittmann/tint v1.1.2
github.com/mattn/go-isatty v0.0.20
github.com/mileusna/useragent v1.3.5
github.com/orandin/slog-gorm v1.4.0
github.com/oschwald/maxminddb-golang/v2 v2.0.0-beta.8
github.com/spf13/cobra v1.9.1
github.com/stretchr/testify v1.10.0
go.opentelemetry.io/contrib/bridges/otelslog v0.12.0
go.opentelemetry.io/contrib/exporters/autoexport v0.59.0
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.60.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0
go.opentelemetry.io/otel v1.37.0
go.opentelemetry.io/otel/log v0.13.0
go.opentelemetry.io/otel/metric v1.37.0
go.opentelemetry.io/otel/sdk v1.35.0
go.opentelemetry.io/otel/sdk/log v0.10.0
go.opentelemetry.io/otel/sdk/metric v1.35.0
go.opentelemetry.io/otel/trace v1.37.0
golang.org/x/crypto v0.41.0
golang.org/x/image v0.30.0
golang.org/x/sync v0.16.0
golang.org/x/text v0.28.0
golang.org/x/time v0.12.0
github.com/oschwald/maxminddb-golang/v2 v2.1.0
github.com/spf13/cobra v1.10.1
github.com/stretchr/testify v1.11.1
go.opentelemetry.io/contrib/bridges/otelslog v0.13.0
go.opentelemetry.io/contrib/exporters/autoexport v0.63.0
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.63.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
go.opentelemetry.io/otel v1.38.0
go.opentelemetry.io/otel/log v0.14.0
go.opentelemetry.io/otel/metric v1.38.0
go.opentelemetry.io/otel/sdk v1.38.0
go.opentelemetry.io/otel/sdk/log v0.14.0
go.opentelemetry.io/otel/sdk/metric v1.38.0
go.opentelemetry.io/otel/trace v1.38.0
golang.org/x/crypto v0.45.0
golang.org/x/image v0.33.0
golang.org/x/sync v0.18.0
golang.org/x/text v0.31.0
golang.org/x/time v0.14.0
gorm.io/driver/postgres v1.6.0
gorm.io/gorm v1.30.1
gorm.io/gorm v1.31.1
)
require (
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/Azure/go-ntlmssp v0.1.0 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.5 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.14 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.5 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.10 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/sonic v1.14.0 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.2 // indirect
github.com/bytedance/sonic/loader v0.4.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
github.com/disintegration/gift v1.1.2 // indirect
github.com/disintegration/gift v1.2.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
github.com/fxamacker/cbor/v2 v2.9.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.11 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.7 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-webauthn/x v0.1.23 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/go-webauthn/x v0.1.26 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/golang-jwt/jwt/v5 v5.2.3 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
github.com/google/go-github/v39 v39.2.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/go-tpm v0.9.5 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 // indirect
github.com/google/go-tpm v0.9.7 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgx/v5 v5.7.5 // indirect
github.com/jackc/pgx/v5 v5.7.6 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.5 // indirect
@@ -93,57 +114,61 @@ require (
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/lestrrat-go/blackmagic v1.0.4 // indirect
github.com/lestrrat-go/dsig v1.0.0 // indirect
github.com/lestrrat-go/dsig-secp256k1 v1.0.0 // indirect
github.com/lestrrat-go/httpcc v1.0.1 // indirect
github.com/lestrrat-go/option v1.0.1 // indirect
github.com/lestrrat-go/option/v2 v2.0.0 // indirect
github.com/lib/pq v1.10.9 // indirect
github.com/mattn/go-sqlite3 v1.14.24 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mattn/go-sqlite3 v1.14.32 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_golang v1.22.0 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.62.0 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/prometheus/client_golang v1.23.2 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.67.4 // indirect
github.com/prometheus/otlptranslator v1.0.0 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/quic-go/qpack v0.6.0 // indirect
github.com/quic-go/quic-go v0.57.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/spf13/pflag v1.0.6 // indirect
github.com/segmentio/asm v1.2.1 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.0 // indirect
github.com/ugorji/go/codec v1.3.1 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
github.com/x448/float16 v0.8.4 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/contrib/bridges/prometheus v0.59.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.10.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.10.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.57.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.10.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.35.0 // indirect
go.opentelemetry.io/proto/otlp v1.5.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
golang.org/x/arch v0.20.0 // indirect
golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 // indirect
golang.org/x/net v0.43.0 // indirect
golang.org/x/oauth2 v0.27.0 // indirect
golang.org/x/sys v0.35.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a // indirect
google.golang.org/grpc v1.71.0 // indirect
google.golang.org/protobuf v1.36.7 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/contrib/bridges/prometheus v0.63.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.60.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
golang.org/x/arch v0.23.0 // indirect
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/oauth2 v0.33.0 // indirect
golang.org/x/sys v0.38.0 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846 // indirect
google.golang.org/grpc v1.77.0 // indirect
google.golang.org/protobuf v1.36.10 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.66.7 // indirect
modernc.org/libc v1.67.1 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/sqlite v1.38.2 // indirect
modernc.org/sqlite v1.40.1 // indirect
)

View File

@@ -1,77 +1,120 @@
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 h1:mFRzDkZVAjdal+s7s0MwaRv9igoPqLRdzOLzw/8Xvq8=
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
github.com/Azure/go-ntlmssp v0.1.0 h1:DjFo6YtWzNqNvQdrwEyr/e4nhU3vRiwenz5QX7sFz+A=
github.com/Azure/go-ntlmssp v0.1.0/go.mod h1:NYqdhxd/8aAct/s4qSYZEerdPuH1liG2/X9DiVTbhpk=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa h1:LHTHcTQiSGT7VVbI0o4wBRNQIgn917usHWOd6VAffYI=
github.com/alexbrainman/sspi v0.0.0-20231016080023-1a75b4708caa/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e h1:4dAU9FXIyQktpoUAgOJK3OTFc/xug0PCXYCqU0FgDKI=
github.com/alexbrainman/sspi v0.0.0-20250919150558-7d374ff0d59e/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrKlwc=
github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 h1:DHctwEM8P8iTXFxC/QK0MRjwEpWQeM9yzidCRjldUz0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3/go.mod h1:xdCzcZEtnSTKVDOmUZs4l/j3pSV6rpo1WXl5ugNsL8Y=
github.com/aws/aws-sdk-go-v2/config v1.32.2 h1:4liUsdEpUUPZs5WVapsJLx5NPmQhQdez7nYFcovrytk=
github.com/aws/aws-sdk-go-v2/config v1.32.2/go.mod h1:l0hs06IFz1eCT+jTacU/qZtC33nvcnLADAPL/XyrkZI=
github.com/aws/aws-sdk-go-v2/credentials v1.19.2 h1:qZry8VUyTK4VIo5aEdUcBjPZHL2v4FyQ3QEOaWcFLu4=
github.com/aws/aws-sdk-go-v2/credentials v1.19.2/go.mod h1:YUqm5a1/kBnoK+/NY5WEiMocZihKSo15/tJdmdXnM5g=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 h1:WZVR5DbDgxzA0BJeudId89Kmgy6DIU4ORpxwsVHz0qA=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14/go.mod h1:Dadl9QO0kHgbrH1GRqGiZdYtW5w+IXXaBNCHTIaheM4=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.14 h1:ITi7qiDSv/mSGDSWNpZ4k4Ve0DQR6Ug2SJQ8zEHoDXg=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.14/go.mod h1:k1xtME53H1b6YpZt74YmwlONMWf4ecM+lut1WQLAF/U=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.5 h1:Hjkh7kE6D81PgrHlE/m9gx+4TyyeLHuY8xJs7yXN5C4=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.5/go.mod h1:nPRXgyCfAurhyaTMoBMwRBYBhaHI4lNPAnJmjM0Tslc=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 h1:FIouAnCE46kyYqyhs0XEBDFFSREtdnr8HQuLPQPLCrY=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14/go.mod h1:UTwDc5COa5+guonQU8qBikJo1ZJ4ln2r1MkF7Dqag1E=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.14 h1:FzQE21lNtUor0Fb7QNgnEyiRCBlolLTX/Z1j65S7teM=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.14/go.mod h1:s1ydyWG9pm3ZwmmYN21HKyG9WzAZhYVW85wMHs5FV6w=
github.com/aws/aws-sdk-go-v2/service/s3 v1.92.1 h1:OgQy/+0+Kc3khtqiEOk23xQAglXi3Tj0y5doOxbi5tg=
github.com/aws/aws-sdk-go-v2/service/s3 v1.92.1/go.mod h1:wYNqY3L02Z3IgRYxOBPH9I1zD9Cjh9hI5QOy/eOjQvw=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.2 h1:MxMBdKTYBjPQChlJhi4qlEueqB1p1KcbTEa7tD5aqPs=
github.com/aws/aws-sdk-go-v2/service/signin v1.0.2/go.mod h1:iS6EPmNeqCsGo+xQmXv0jIMjyYtQfnwg36zl2FwEouk=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.5 h1:ksUT5KtgpZd3SAiFJNJ0AFEJVva3gjBmN7eXUZjzUwQ=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.5/go.mod h1:av+ArJpoYf3pgyrj6tcehSFW+y9/QvAY8kMooR9bZCw=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.10 h1:GtsxyiF3Nd3JahRBJbxLCCdYW9ltGQYrFWg8XdkGDd8=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.10/go.mod h1:/j67Z5XBVDx8nZVp9EuFM9/BS5dvBznbqILGuu73hug=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.2 h1:a5UTtD4mHBU3t0o6aHQZFJTNKVfxFWfPX7J0Lr7G+uY=
github.com/aws/aws-sdk-go-v2/service/sts v1.41.2/go.mod h1:6TxbXoDSgBQ225Qd8Q+MbxUxUh6TtNKwbRt/EPS9xso=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bytedance/sonic v1.14.0 h1:/OfKt8HFw0kh2rj8N0F6C/qPGRESq0BbaNZgcNXXzQQ=
github.com/bytedance/sonic v1.14.0/go.mod h1:WoEbx8WTcFJfzCe0hbmyTGrfjt8PzNEBdxlNUO24NhA=
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA=
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI=
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM=
github.com/bytedance/sonic v1.14.2 h1:k1twIoe97C1DtYUo+fZQy865IuHia4PR5RPiuGPPIIE=
github.com/bytedance/sonic v1.14.2/go.mod h1:T80iDELeHiHKSc0C9tubFygiuXoGzrkjKzX2quAx980=
github.com/bytedance/sonic/loader v0.4.0 h1:olZ7lEqcxtZygCK9EKYKADnpQoYkRQxaeY2NYzevs+o=
github.com/bytedance/sonic/loader v0.4.0/go.mod h1:AR4NYCk5DdzZizZ5djGqQ92eEhCCcdf5x77udYiSJRo=
github.com/caarlos0/env/v11 v11.3.1 h1:cArPWC15hWmEt+gWk7YBi7lEXTXCvpaSdCiZE2X5mCA=
github.com/caarlos0/env/v11 v11.3.1/go.mod h1:qupehSf/Y0TUTsxKywqRt/vJjN5nz6vauiYEUUr8P4U=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M=
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU=
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 h1:NMZiJj8QnKe1LgsbDayM4UoHwbvwDRwnI3hwNaAHRnc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0/go.mod h1:ZXNYxsqcloTdSy/rNShjYzMhyjf0LaoftYK0p+A3h40=
github.com/dhui/dktest v0.4.5 h1:uUfYBIVREmj/Rw6MvgmqNAYzTiKOHJak+enB5Di73MM=
github.com/dhui/dktest v0.4.5/go.mod h1:tmcyeHDKagvlDrz7gDKq4UAJOLIfVZYkfD5OnHDwcCo=
github.com/disintegration/gift v1.1.2 h1:9ZyHJr+kPamiH10FX3Pynt1AxFUob812bU9Wt4GMzhs=
github.com/dhui/dktest v0.4.6 h1:+DPKyScKSEp3VLtbMDHcUq6V5Lm5zfZZVb0Sk7Ahom4=
github.com/dhui/dktest v0.4.6/go.mod h1:JHTSYDtKkvFNFHJKqCzVzqXecyv+tKt8EzceOmQOgbU=
github.com/disintegration/gift v1.1.2/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
github.com/disintegration/gift v1.2.1 h1:Y005a1X4Z7Uc+0gLpSAsKhWi4qLtsdEcMIbbdvdZ6pc=
github.com/disintegration/gift v1.2.1/go.mod h1:Jh2i7f7Q2BM7Ezno3PhfezbR1xpUg9dUg3/RlKGr4HI=
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec h1:YrB6aVr9touOt75I9O1SiancmR2GMg45U9UYf0gtgWg=
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec/go.mod h1:K0KBFIr1gWu/C1Gp10nFAcAE4hsB7JxE6OgLijrJ8Sk=
github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/docker/docker v27.2.0+incompatible h1:Rk9nIVdfH3+Vz4cyI/uhbINhEZ/oLmc+CBXmH6fbNk4=
github.com/docker/docker v27.2.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v28.3.3+incompatible h1:Dypm25kh4rmk49v1eiVbsAtpAsYURjYkaKubwuBdxEI=
github.com/docker/docker v28.3.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21 h1:OJyUGMJTzHTd1XQp98QTaHernxMYzRaOasRir9hUlFQ=
github.com/emersion/go-sasl v0.0.0-20200509203442-7bfe0ed36a21/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ=
github.com/emersion/go-smtp v0.21.3 h1:7uVwagE8iPYE48WhNsng3RRpCUpFvNl39JGNSIyGVMY=
github.com/emersion/go-smtp v0.21.3/go.mod h1:qm27SGYgoIPRot6ubfQ/GpiPy/g3PaZAVRxiO/sDUgQ=
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6 h1:oP4q0fw+fOSWn3DfFi4EXdT+B+gTtzx8GC9xsc26Znk=
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ=
github.com/emersion/go-smtp v0.24.0 h1:g6AfoF140mvW0vLNPD/LuCBLEAdlxOjIXqbIkJIS6Wk=
github.com/emersion/go-smtp v0.24.0/go.mod h1:ZtRRkbTyp2XTHCA+BmyTFTrj8xY4I+b4McvHxCU2gsQ=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fxamacker/cbor/v2 v2.9.0 h1:NpKPmjDBgUfBms6tr6JZkTHtfFGcMKsw3eGcmD/sapM=
github.com/fxamacker/cbor/v2 v2.9.0/go.mod h1:vM4b+DJCtHn+zz7h3FFp/hDAI9WNWCsZj23V5ytsSxQ=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/gin-contrib/slog v1.1.0 h1:K9MVNrETT6r/C3u2Aheer/gxwVeVqrGL0hXlsmv3fm4=
github.com/gin-contrib/slog v1.1.0/go.mod h1:PvNXQVXcVOAaaiJR84LV1/xlQHIaXi9ygEXyBkmjdkY=
github.com/gabriel-vasile/mimetype v1.4.11 h1:AQvxbp830wPhHTqc1u7nzoLT+ZFxGY7emj5DR5DYFik=
github.com/gabriel-vasile/mimetype v1.4.11/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/gin-contrib/slog v1.2.0 h1:vAxZfr7knD1ZYK5+pMJLP52sZXIkJXkcRPa/0dx9hSk=
github.com/gin-contrib/slog v1.2.0/go.mod h1:vYK6YltmpsEFkO0zfRMLTKHrWS3DwUSn0TMpT+kMagI=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM=
github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ=
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/gin-gonic/gin v1.11.0 h1:OW/6PLjyusp2PPXtyxKHU0RbX6I/l28FTdDlae5ueWk=
github.com/gin-gonic/gin v1.11.0/go.mod h1:+iq/FyxlGzII0KHiBGjuNn4UNENUlKbGlNmc+W50Dls=
github.com/glebarez/go-sqlite v1.22.0 h1:uAcMJhaA6r3LHMTFgP0SifzgXg46yJkgxqyuyec+ruQ=
github.com/glebarez/go-sqlite v1.22.0/go.mod h1:PlBIdHe0+aUEFn+r2/uthrWq4FxbzugL0L8Li6yQJbc=
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
github.com/go-asn1-ber/asn1-ber v1.5.7 h1:DTX+lbVTWaTw1hQ+PbZPlnDZPEIs0SS/GCZAl535dDk=
github.com/go-asn1-ber/asn1-ber v1.5.7/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-co-op/gocron/v2 v2.16.3 h1:kYqukZqBa8RC2+AFAHnunmKcs9GRTjwBo8WRF3I6cbI=
github.com/go-co-op/gocron/v2 v2.16.3/go.mod h1:aTf7/+5Jo2E+cyAqq625UQ6DzpkV96b22VHIUAt6l3c=
github.com/go-ldap/ldap/v3 v3.4.10 h1:ot/iwPOhfpNVgB1o+AVXljizWZ9JTp7YF5oeyONmcJU=
github.com/go-ldap/ldap/v3 v3.4.10/go.mod h1:JXh4Uxgi40P6E9rdsYqpUtbW46D9UTjJ9QSwGRznplY=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 h1:BP4M0CvQ4S3TGls2FvczZtj5Re/2ZzkV9VwqPHH/3Bo=
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-co-op/gocron/v2 v2.18.1 h1:VVxgAghLW1Q6VHi/rc+B0ZSpFoUVlWgkw09Yximvn58=
github.com/go-co-op/gocron/v2 v2.18.1/go.mod h1:Zii6he+Zfgy5W9B+JKk/KwejFOW0kZTFvHtwIpR4aBI=
github.com/go-ldap/ldap/v3 v3.4.12 h1:1b81mv7MagXZ7+1r7cLTWmyuTqVqdwbtJSjC0DAp9s4=
github.com/go-ldap/ldap/v3 v3.4.12/go.mod h1:+SPAGcTtOfmGsCb3h1RFiq4xpp4N636G75OEace8lNo=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
@@ -83,60 +126,62 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-webauthn/webauthn v0.11.2 h1:Fgx0/wlmkClTKlnOsdOQ+K5HcHDsDcYIvtYmfhEOSUc=
github.com/go-webauthn/webauthn v0.11.2/go.mod h1:aOtudaF94pM71g3jRwTYYwQTG1KyTILTcZqN1srkmD0=
github.com/go-webauthn/x v0.1.23 h1:9lEO0s+g8iTyz5Vszlg/rXTGrx3CjcD0RZQ1GPZCaxI=
github.com/go-webauthn/x v0.1.23/go.mod h1:AJd3hI7NfEp/4fI6T4CHD753u91l510lglU7/NMN6+E=
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-webauthn/webauthn v0.15.0 h1:LR1vPv62E0/6+sTenX35QrCmpMCzLeVAcnXeH4MrbJY=
github.com/go-webauthn/webauthn v0.15.0/go.mod h1:hcAOhVChPRG7oqG7Xj6XKN1mb+8eXTGP/B7zBLzkX5A=
github.com/go-webauthn/x v0.1.26 h1:eNzreFKnwNLDFoywGh9FA8YOMebBWTUNlNSdolQRebs=
github.com/go-webauthn/x v0.1.26/go.mod h1:jmf/phPV6oIsF6hmdVre+ovHkxjDOmNH0t6fekWUxvg=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v5 v5.2.3 h1:kkGXqQOBSDDWRhWNXTFpqGSCMyh/PLnqUvMGJPDJDs0=
github.com/golang-jwt/jwt/v5 v5.2.3/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-migrate/migrate/v4 v4.18.3 h1:EYGkoOsvgHHfm5U/naS1RP/6PL/Xv3S4B/swMiAmDLs=
github.com/golang-migrate/migrate/v4 v4.18.3/go.mod h1:99BKpIi6ruaaXRM1A77eqZ+FWPQ3cfRa+ZVy5bmWMaY=
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang-migrate/migrate/v4 v4.19.0 h1:RcjOnCGz3Or6HQYEJ/EEVLfWnmw9KnoigPSjzhCuaSE=
github.com/golang-migrate/migrate/v4 v4.19.0/go.mod h1:9dyEcu+hO+G9hPSw8AIg50yg622pXJsoHItQnDGZkI0=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/go-github/v39 v39.2.0 h1:rNNM311XtPOz5rDdsJXAp2o8F67X9FnROXTvto3aSnQ=
github.com/google/go-github/v39 v39.2.0/go.mod h1:C1s8C5aCC9L+JXIYpJM5GYytdX52vC1bLvHEF1IhBrE=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/go-tpm v0.9.5 h1:ocUmnDebX54dnW+MQWGQRbdaAcJELsa6PqZhJ48KwVU=
github.com/google/go-tpm v0.9.5/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY=
github.com/google/go-tpm v0.9.7 h1:u89J4tUUeDTlH8xxC3CTW7OHZjbjKoHdQ9W7gCUhtxA=
github.com/google/go-tpm v0.9.7/go.mod h1:h9jEsEECg7gtLis0upRBQU+GhYVH6jMjrFxI8u6bVUY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e h1:ijClszYn+mADRFY17kjQEVQ1XRhq2/JR1M3sGqeJoxs=
github.com/google/pprof v0.0.0-20250317173921-a4b03ec1a45e/go.mod h1:boTsfXsheKC2y+lKOCMpSfarhxDeIzfZG1jqGcPl3cA=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1 h1:e9Rjr40Z98/clHv5Yg79Is0NtosR5LXRvdr7o/6NwbA=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.1/go.mod h1:tIxuGz/9mpox++sgp9fJjHO0+q1X9/UOWd798aAm22M=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 h1:NmZ1PKzSTQbuGHw9DGPFomqkkLWMC+vZCkfs+FHv1Vg=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3/go.mod h1:zQrxl1YP88HQlA6i9c63DSVPFklWpGX4OWAc9bFuaH4=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs=
github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
@@ -177,12 +222,16 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lestrrat-go/blackmagic v1.0.4 h1:IwQibdnf8l2KoO+qC3uT4OaTWsW7tuRQXy9TRN9QanA=
github.com/lestrrat-go/blackmagic v1.0.4/go.mod h1:6AWFyKNNj0zEXQYfTMPfZrAXUWUfTIZ5ECEUEJaijtw=
github.com/lestrrat-go/dsig v1.0.0 h1:OE09s2r9Z81kxzJYRn07TFM9XA4akrUdoMwr0L8xj38=
github.com/lestrrat-go/dsig v1.0.0/go.mod h1:dEgoOYYEJvW6XGbLasr8TFcAxoWrKlbQvmJgCR0qkDo=
github.com/lestrrat-go/dsig-secp256k1 v1.0.0 h1:JpDe4Aybfl0soBvoVwjqDbp+9S1Y2OM7gcrVVMFPOzY=
github.com/lestrrat-go/dsig-secp256k1 v1.0.0/go.mod h1:CxUgAhssb8FToqbL8NjSPoGQlnO4w3LG1P0qPWQm/NU=
github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
github.com/lestrrat-go/httprc/v3 v3.0.0 h1:nZUx/zFg5uc2rhlu1L1DidGr5Sj02JbXvGSpnY4LMrc=
github.com/lestrrat-go/httprc/v3 v3.0.0/go.mod h1:k2U1QIiyVqAKtkffbg+cUmsyiPGQsb9aAfNQiNFuQ9Q=
github.com/lestrrat-go/jwx/v3 v3.0.10 h1:XuoCBhZBncRIjMQ32HdEc76rH0xK/Qv2wq5TBouYJDw=
github.com/lestrrat-go/jwx/v3 v3.0.10/go.mod h1:kNMedLgTpHvPJkK5EMVa1JFz+UVyY2dMmZKu3qjl/Pk=
github.com/lestrrat-go/httprc/v3 v3.0.1 h1:3n7Es68YYGZb2Jf+k//llA4FTZMl3yCwIjFIk4ubevI=
github.com/lestrrat-go/httprc/v3 v3.0.1/go.mod h1:2uAvmbXE4Xq8kAUjVrZOq1tZVYYYs5iP62Cmtru00xk=
github.com/lestrrat-go/jwx/v3 v3.0.12 h1:p25r68Y4KrbBdYjIsQweYxq794CtGCzcrc5dGzJIRjg=
github.com/lestrrat-go/jwx/v3 v3.0.12/go.mod h1:HiUSaNmMLXgZ08OmGBaPVvoZQgJVOQphSrGr5zMamS8=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lestrrat-go/option/v2 v2.0.0 h1:XxrcaJESE1fokHy3FpaQ/cXW8ZsIdWcdFzzLOcID3Ss=
@@ -193,12 +242,10 @@ github.com/lmittmann/tint v1.1.2 h1:2CQzrL6rslrsyjqLDwD11bZ5OpLBPU+g3G/r5LSfS8w=
github.com/lmittmann/tint v1.1.2/go.mod h1:HIS3gSy7qNwGCj+5oRjAutErFBl4BzdQP6cJZ0NfMwE=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mattn/go-sqlite3 v1.14.32 h1:JD12Ag3oLy1zQA+BNn74xRgaBbdhbNIDYvQUEuuErjs=
github.com/mattn/go-sqlite3 v1.14.32/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mileusna/useragent v1.3.5 h1:SJM5NzBmh/hO+4LGeATKpaEX9+b4vcGg2qXGLiNGDws=
github.com/mileusna/useragent v1.3.5/go.mod h1:3d8TOmwL/5I8pJjyVDteHtgDGcefrFUX4ccGOMKNYYc=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
@@ -212,247 +259,206 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/go-strftime v1.0.0 h1:HMFp8mLCTPp341M/ZnA4qaf7ZlsbTc+miZjCLOFAw7w=
github.com/ncruces/go-strftime v1.0.0/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
github.com/orandin/slog-gorm v1.4.0 h1:FgA8hJufF9/jeNSYoEXmHPPBwET2gwlF3B85JdpsTUU=
github.com/orandin/slog-gorm v1.4.0/go.mod h1:MoZ51+b7xE9lwGNPYEhxcUtRNrYzjdcKvA8QXQQGEPA=
github.com/oschwald/maxminddb-golang/v2 v2.0.0-beta.8 h1:aM1/rO6p+XV+l+seD7UCtFZgsOefDTrFVLvPoZWjXZs=
github.com/oschwald/maxminddb-golang/v2 v2.0.0-beta.8/go.mod h1:Jts8ztuE0PkUwY7VCJyp6B68ujQfr6G9P5Dn3Yx9u6w=
github.com/oschwald/maxminddb-golang/v2 v2.1.0 h1:2Iv7lmG9XtxuZA/jFAsd7LnZaC1E59pFsj5O/nU15pw=
github.com/oschwald/maxminddb-golang/v2 v2.1.0/go.mod h1:gG4V88LsawPEqtbL1Veh1WRh+nVSYwXzJ1P5Fcn77g0=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.62.0 h1:xasJaQlnWAeyHdUBeGjXmutelfJHWMRr+Fg4QszZ2Io=
github.com/prometheus/common v0.62.0/go.mod h1:vyBcEuLSvWos9B1+CyL7JZ2up+uFzXhkqml0W5zIY1I=
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.67.4 h1:yR3NqWO1/UyO1w2PhUvXlGQs/PtFmoveVO0KZ4+Lvsc=
github.com/prometheus/common v0.67.4/go.mod h1:gP0fq6YjjNCLssJCQp0yk4M8W6ikLURwkdd/YKtTbyI=
github.com/prometheus/otlptranslator v1.0.0 h1:s0LJW/iN9dkIH+EnhiD3BlkkP5QVIUVEoIwkU+A6qos=
github.com/prometheus/otlptranslator v1.0.0/go.mod h1:vRYWnXvI6aWGpsdY/mOT/cbeVRBlPWtBNDb7kGR3uKM=
github.com/prometheus/procfs v0.19.2 h1:zUMhqEW66Ex7OXIiDkll3tl9a1ZdilUOd/F6ZXw4Vws=
github.com/prometheus/procfs v0.19.2/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw=
github.com/quic-go/qpack v0.6.0 h1:g7W+BMYynC1LbYLSqRt8PBg5Tgwxn214ZZR34VIOjz8=
github.com/quic-go/qpack v0.6.0/go.mod h1:lUpLKChi8njB4ty2bFLX2x4gzDqXwUpaO1DP9qMDZII=
github.com/quic-go/quic-go v0.57.0 h1:AsSSrrMs4qI/hLrKlTH/TGQeTMY0ib1pAOX7vA3AdqE=
github.com/quic-go/quic-go v0.57.0/go.mod h1:ly4QBAjHA2VhdnxhojRsCUOeJwKYg+taDlos92xb1+s=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/segmentio/asm v1.2.1 h1:DTNbBqs57ioxAD4PrArqftgypG4/qNpXoJx8TVXxPR0=
github.com/segmentio/asm v1.2.1/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA=
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
github.com/ugorji/go/codec v1.3.1 h1:waO7eEiFDwidsBN6agj1vJQ4AG7lh2yqXyOXqhgQuyY=
github.com/ugorji/go/codec v1.3.1/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4=
github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ=
github.com/valyala/fastjson v1.6.4/go.mod h1:CLCAqky6SMuOcxStkYQvblddUtoRxhYMGLrsQns1aXY=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/contrib/bridges/otelslog v0.12.0 h1:lFM7SZo8Ce01RzRfnUFQZEYeWRf/MtOA3A5MobOqk2g=
go.opentelemetry.io/contrib/bridges/otelslog v0.12.0/go.mod h1:Dw05mhFtrKAYu72Tkb3YBYeQpRUJ4quDgo2DQw3No5A=
go.opentelemetry.io/contrib/bridges/prometheus v0.59.0 h1:HY2hJ7yn3KuEBBBsKxvF3ViSmzLwsgeNvD+0utRMgzc=
go.opentelemetry.io/contrib/bridges/prometheus v0.59.0/go.mod h1:H4H7vs8766kwFnOZVEGMJFVF+phpBSmTckvvNRdJeDI=
go.opentelemetry.io/contrib/exporters/autoexport v0.59.0 h1:dKhAFwh7SSoOw+gwMtSv+XLkUGTFAwAGMT3X3XSE4FA=
go.opentelemetry.io/contrib/exporters/autoexport v0.59.0/go.mod h1:fPl+qlrhRdRntIpPs9JoQ0iBKAsnH5VkgppU1f9kyF4=
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.60.0 h1:jj/B7eX95/mOxim9g9laNZkOHKz/XCHG0G410SntRy4=
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.60.0/go.mod h1:ZvRTVaYYGypytG0zRp2A60lpj//cMq3ZnxYdZaljVBM=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.10.0 h1:5dTKu4I5Dn4P2hxyW3l3jTaZx9ACgg0ECos1eAVrheY=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.10.0/go.mod h1:P5HcUI8obLrCCmM3sbVBohZFH34iszk/+CPWuakZWL8=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.10.0 h1:q/heq5Zh8xV1+7GoMGJpTxM2Lhq5+bFxB29tshuRuw0=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.10.0/go.mod h1:leO2CSTg0Y+LyvmR7Wm4pUxE8KAmaM2GCVx7O+RATLA=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0 h1:QcFwRrZLc82r8wODjvyCbP7Ifp3UANaBSmhDSFjnqSc=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.35.0/go.mod h1:CXIWhUomyWBG/oY2/r/kLp6K/cmx9e/7DLpBuuGdLCA=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.35.0 h1:0NIXxOCFx+SKbhCVxwl3ETG8ClLPAa0KuKV6p3yhxP8=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.35.0/go.mod h1:ChZSJbbfbl/DcRZNc9Gqh6DYGlfjw4PvO1pEOZH1ZsE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0 h1:1fTNlAIJZGWLP5FVu0fikVry1IsiUnXjf7QFvoNN3Xw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.35.0/go.mod h1:zjPK58DtkqQFn+YUMbx0M2XV3QgKU0gS9LeGohREyK4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 h1:m639+BofXTvcY1q8CGs4ItwQarYtJPOWmVobfM1HpVI=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0/go.mod h1:LjReUci/F4BUyv+y4dwnq3h/26iNOeC3wAIqgvTIZVo=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0 h1:xJ2qHD0C1BeYVTLLR9sX12+Qb95kfeD/byKj6Ky1pXg=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0/go.mod h1:u5BF1xyjstDowA1R5QAO9JHzqK+ublenEW/dyqTjBVk=
go.opentelemetry.io/otel/exporters/prometheus v0.57.0 h1:AHh/lAP1BHrY5gBwk8ncc25FXWm/gmmY3BX258z5nuk=
go.opentelemetry.io/otel/exporters/prometheus v0.57.0/go.mod h1:QpFWz1QxqevfjwzYdbMb4Y1NnlJvqSGwyuU0B4iuc9c=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.10.0 h1:GKCEAZLEpEf78cUvudQdTg0aET2ObOZRB2HtXA0qPAI=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.10.0/go.mod h1:9/zqSWLCmHT/9Jo6fYeUDRRogOLL60ABLsHWS99lF8s=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0 h1:PB3Zrjs1sG1GBX51SXyTSoOTqcDglmsk7nT6tkKPb/k=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.35.0/go.mod h1:U2R3XyVPzn0WX7wOIypPuptulsMcPDPs/oiSVOMVnHY=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.35.0 h1:T0Ec2E+3YZf5bgTNQVet8iTDW7oIk03tXHq+wkwIDnE=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.35.0/go.mod h1:30v2gqH+vYGJsesLWFov8u47EpYTcIQcBjKpI6pJThg=
go.opentelemetry.io/otel/log v0.13.0 h1:yoxRoIZcohB6Xf0lNv9QIyCzQvrtGZklVbdCoyb7dls=
go.opentelemetry.io/otel/log v0.13.0/go.mod h1:INKfG4k1O9CL25BaM1qLe0zIedOpvlS5Z7XgSbmN83E=
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY=
go.opentelemetry.io/otel/sdk v1.35.0/go.mod h1:+ga1bZliga3DxJ3CQGg3updiaAJoNECOgJREo9KHGQg=
go.opentelemetry.io/otel/sdk/log v0.10.0 h1:lR4teQGWfeDVGoute6l0Ou+RpFqQ9vaPdrNJlST0bvw=
go.opentelemetry.io/otel/sdk/log v0.10.0/go.mod h1:A+V1UTWREhWAittaQEG4bYm4gAZa6xnvVu+xKrIRkzo=
go.opentelemetry.io/otel/sdk/metric v1.35.0 h1:1RriWBmCKgkeHEhM7a2uMjMUfP7MsOF5JpUCaEqEI9o=
go.opentelemetry.io/otel/sdk/metric v1.35.0/go.mod h1:is6XYCUMpcKi+ZsOvfluY5YstFnhW0BidkR+gL+qN+w=
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
go.opentelemetry.io/proto/otlp v1.5.0 h1:xJvq7gMzB31/d406fB8U5CBdyQGw4P399D1aQWU/3i4=
go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/bridges/otelslog v0.13.0 h1:bwnLpizECbPr1RrQ27waeY2SPIPeccCx/xLuoYADZ9s=
go.opentelemetry.io/contrib/bridges/otelslog v0.13.0/go.mod h1:3nWlOiiqA9UtUnrcNk82mYasNxD8ehOspL0gOfEo6Y4=
go.opentelemetry.io/contrib/bridges/prometheus v0.63.0 h1:/Rij/t18Y7rUayNg7Id6rPrEnHgorxYabm2E6wUdPP4=
go.opentelemetry.io/contrib/bridges/prometheus v0.63.0/go.mod h1:AdyDPn6pkbkt2w01n3BubRVk7xAsCRq1Yg1mpfyA/0E=
go.opentelemetry.io/contrib/exporters/autoexport v0.63.0 h1:NLnZybb9KkfMXPwZhd5diBYJoVxiO9Qa06dacEA7ySY=
go.opentelemetry.io/contrib/exporters/autoexport v0.63.0/go.mod h1:OvRg7gm5WRSCtxzGSsrFHbDLToYlStHNZQ+iPNIyD6g=
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.63.0 h1:5kSIJ0y8ckZZKoDhZHdVtcyjVi6rXyAwyaR8mp4zLbg=
go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.63.0/go.mod h1:i+fIMHvcSQtsIY82/xgiVWRklrNt/O6QriHLjzGeY+s=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0 h1:RbKq8BG0FI8OiXhBfcRtqqHcZcka+gU3cskNuf05R18=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg=
go.opentelemetry.io/contrib/propagators/b3 v1.38.0 h1:uHsCCOSKl0kLrV2dLkFK+8Ywk9iKa/fptkytc6aFFEo=
go.opentelemetry.io/contrib/propagators/b3 v1.38.0/go.mod h1:wMRSZJZcY8ya9mApLLhwIMjqmApy2o/Ml+62lhvxyHU=
go.opentelemetry.io/otel v1.38.0 h1:RkfdswUDRimDg0m2Az18RKOsnI8UDzppJAtj01/Ymk8=
go.opentelemetry.io/otel v1.38.0/go.mod h1:zcmtmQ1+YmQM9wrNsTGV/q/uyusom3P8RxwExxkZhjM=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 h1:OMqPldHt79PqWKOMYIAQs3CxAi7RLgPxwfFSwr4ZxtM=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0/go.mod h1:1biG4qiqTxKiUCtoWDPpL3fB3KxVwCiGw81j3nKMuHE=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 h1:QQqYw3lkrzwVsoEX0w//EhH/TCnpRdEenKBOOEIMjWc=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0/go.mod h1:gSVQcr17jk2ig4jqJ2DX30IdWH251JcNAecvrqTxH1s=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 h1:vl9obrcoWVKp/lwl8tRE33853I8Xru9HFbw/skNeLs8=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0/go.mod h1:GAXRxmLJcVM3u22IjTg74zWBrRCKq8BnOqUVLodpcpw=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 h1:Oe2z/BCg5q7k4iXC3cqJxKYg0ieRiOqF0cecFYdPTwk=
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0/go.mod h1:ZQM5lAJpOsKnYagGg/zV2krVqTtaVdYdDkhMoX6Oalg=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 h1:GqRJVj7UmLjCVyVJ3ZFLdPRmhDUp2zFmQe3RHIOsw24=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0/go.mod h1:ri3aaHSmCTVYu2AWv44YMauwAQc0aqI9gHKIcSbI1pU=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 h1:lwI4Dc5leUqENgGuQImwLo4WnuXFPetmPpkLi2IrX54=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0/go.mod h1:Kz/oCE7z5wuyhPxsXDuaPteSWqjSBD5YaSdbxZYGbGk=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 h1:aTL7F04bJHUlztTsNGJ2l+6he8c+y/b//eR0jjjemT4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0/go.mod h1:kldtb7jDTeol0l3ewcmd8SDvx3EmIE7lyvqbasU3QC4=
go.opentelemetry.io/otel/exporters/prometheus v0.60.0 h1:cGtQxGvZbnrWdC2GyjZi0PDKVSLWP/Jocix3QWfXtbo=
go.opentelemetry.io/otel/exporters/prometheus v0.60.0/go.mod h1:hkd1EekxNo69PTV4OWFGZcKQiIqg0RfuWExcPKFvepk=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 h1:B/g+qde6Mkzxbry5ZZag0l7QrQBCtVm7lVjaLgmpje8=
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0/go.mod h1:mOJK8eMmgW6ocDJn6Bn11CcZ05gi3P8GylBXEkZtbgA=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0 h1:wm/Q0GAAykXv83wzcKzGGqAnnfLFyFe7RslekZuv+VI=
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0/go.mod h1:ra3Pa40+oKjvYh+ZD3EdxFZZB0xdMfuileHAm4nNN7w=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 h1:kJxSDN4SgWWTjG/hPp3O7LCGLcHXFlvS2/FFOrwL+SE=
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0/go.mod h1:mgIOzS7iZeKJdeB8/NYHrJ48fdGc71Llo5bJ1J4DWUE=
go.opentelemetry.io/otel/log v0.14.0 h1:2rzJ+pOAZ8qmZ3DDHg73NEKzSZkhkGIua9gXtxNGgrM=
go.opentelemetry.io/otel/log v0.14.0/go.mod h1:5jRG92fEAgx0SU/vFPxmJvhIuDU9E1SUnEQrMlJpOno=
go.opentelemetry.io/otel/metric v1.38.0 h1:Kl6lzIYGAh5M159u9NgiRkmoMKjvbsKtYRwgfrA6WpA=
go.opentelemetry.io/otel/metric v1.38.0/go.mod h1:kB5n/QoRM8YwmUahxvI3bO34eVtQf2i4utNVLr9gEmI=
go.opentelemetry.io/otel/sdk v1.38.0 h1:l48sr5YbNf2hpCUj/FoGhW9yDkl+Ma+LrVl8qaM5b+E=
go.opentelemetry.io/otel/sdk v1.38.0/go.mod h1:ghmNdGlVemJI3+ZB5iDEuk4bWA3GkTpW+DOoZMYBVVg=
go.opentelemetry.io/otel/sdk/log v0.14.0 h1:JU/U3O7N6fsAXj0+CXz21Czg532dW2V4gG1HE/e8Zrg=
go.opentelemetry.io/otel/sdk/log v0.14.0/go.mod h1:imQvII+0ZylXfKU7/wtOND8Hn4OpT3YUoIgqJVksUkM=
go.opentelemetry.io/otel/sdk/log/logtest v0.14.0 h1:Ijbtz+JKXl8T2MngiwqBlPaHqc4YCaP/i13Qrow6gAM=
go.opentelemetry.io/otel/sdk/log/logtest v0.14.0/go.mod h1:dCU8aEL6q+L9cYTqcVOk8rM9Tp8WdnHOPLiBgp0SGOA=
go.opentelemetry.io/otel/sdk/metric v1.38.0 h1:aSH66iL0aZqo//xXzQLYozmWrXxyFkBJ6qT5wthqPoM=
go.opentelemetry.io/otel/sdk/metric v1.38.0/go.mod h1:dg9PBnW9XdQ1Hd6ZnRz689CbtrUp0wMMs9iPcgT9EZA=
go.opentelemetry.io/otel/trace v1.38.0 h1:Fxk5bKrDZJUH+AMyyIXGcFAPah0oRcT+LuNtJrmcNLE=
go.opentelemetry.io/otel/trace v1.38.0/go.mod h1:j1P9ivuFsTceSWe1oY+EeW3sc+Pp42sO++GHkg4wwhs=
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
golang.org/x/arch v0.20.0 h1:dx1zTU0MAE98U+TQ8BLl7XsJbgze2WnNKF/8tGp/Q6c=
golang.org/x/arch v0.20.0/go.mod h1:bdwinDaKcfZUGpH09BB7ZmOfhalA8lQdzl62l8gGWsk=
go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y=
go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU=
go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
golang.org/x/arch v0.23.0 h1:lKF64A2jF6Zd8L0knGltUnegD62JMFBiCPBmQpToHhg=
golang.org/x/arch v0.23.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc=
golang.org/x/exp v0.0.0-20250813145105-42675adae3e6 h1:SbTAbRFnd5kjQXbczszQ0hdk3ctwYf3qBNH9jIsGclE=
golang.org/x/exp v0.0.0-20250813145105-42675adae3e6/go.mod h1:4QTo5u+SEIbbKW1RacMZq1YEfOBqeXa19JeshGi+zc4=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 h1:DHNhtq3sNNzrvduZZIiFyXWOL9IWaDPHqTnLJp+rCBY=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.30.0 h1:jD5RhkmVAnjqaCUXfbGBrn3lpxbknfN9w2UhHHU+5B4=
golang.org/x/image v0.30.0/go.mod h1:SAEUTxCCMWSrJcCy/4HwavEsfZZJlYxeHLc6tTiAe/c=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ=
golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc=
golang.org/x/image v0.33.0 h1:LXRZRnv1+zGd5XBUVRFmYEphyyKJjQjCRiOuAP3sZfQ=
golang.org/x/image v0.33.0/go.mod h1:DD3OsTYT9chzuzTQt+zMcOlBHgfoKQb1gry8p76Y1sc=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/oauth2 v0.33.0 h1:4Q+qn+E5z8gPRJfmRy7C2gGG3T4jIprK6aSYgTXGRpo=
golang.org/x/oauth2 v0.33.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg=
golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a h1:nwKuGPlUAt+aR+pcrkfFRrTU1BVrSmYyYMxYbUIVHr0=
google.golang.org/genproto/googleapis/api v0.0.0-20250218202821-56aae31c358a/go.mod h1:3kWAYMk1I75K4vykHtKt2ycnOgpA6974V7bREqbsenU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a h1:51aaUVRocpvUOSQKM6Q7VuoaktNIaMCLuhZB6DKksq4=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250218202821-56aae31c358a/go.mod h1:uRxBH1mhmO8PGhU89cMcHaXKZqO+OfakD8QQO0oYwlQ=
google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg=
google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec=
google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A=
google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846 h1:ZdyUkS9po3H7G0tuh955QVyyotWvOD4W0aEapeGeUYk=
google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846/go.mod h1:Fk4kyraUvqD7i5H6S43sj2W98fbZa75lpZz/eUyhfO0=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846 h1:Wgl1rcDNThT+Zn47YyCXOXyX/COgMTIdhJ717F0l4xk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/grpc v1.77.0 h1:wVVY6/8cGA6vvffn+wWK5ToddbgdU3d8MNENr4evgXM=
google.golang.org/grpc v1.77.0/go.mod h1:z0BY1iVj0q8E1uSQCjL9cppRj+gnZjzDnzV0dHhrNig=
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.6.0 h1:2dxzU8xJ+ivvqTRph34QX+WrRaJlmfyPqXmoGVjMBa4=
gorm.io/driver/postgres v1.6.0/go.mod h1:vUw0mrGgrTK+uPHEhAdV4sfFELrByKVGnaVRkXDhtWo=
gorm.io/gorm v1.30.1 h1:lSHg33jJTBxs2mgJRfRZeLDG+WZaHYCk3Wtfl6Ngzo4=
gorm.io/gorm v1.30.1/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
modernc.org/cc/v4 v4.26.3 h1:yEN8dzrkRFnn4PUUKXLYIqVf2PJYAEjMTFjO3BDGc3I=
modernc.org/cc/v4 v4.26.3/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.28.0 h1:rjznn6WWehKq7dG4JtLRKxb52Ecv8OUGah8+Z/SfpNU=
modernc.org/ccgo/v4 v4.28.0/go.mod h1:JygV3+9AV6SmPhDasu4JgquwU81XAKLd3OKTUDNOiKE=
modernc.org/fileutil v1.3.15 h1:rJAXTP6ilMW/1+kzDiqmBlHLWszheUFXIyGQIAvjJpY=
modernc.org/fileutil v1.3.15/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
gorm.io/gorm v1.31.1 h1:7CA8FTFz/gRfgqgpeKIBcervUn3xSyPUmr6B2WXJ7kg=
gorm.io/gorm v1.31.1/go.mod h1:XyQVbO2k6YkOis7C2437jSit3SsDK72s7n7rsSHd+Gs=
modernc.org/cc/v4 v4.27.1 h1:9W30zRlYrefrDV2JE2O8VDtJ1yPGownxciz5rrbQZis=
modernc.org/cc/v4 v4.27.1/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.30.1 h1:4r4U1J6Fhj98NKfSjnPUN7Ze2c6MnAdL0hWw6+LrJpc=
modernc.org/ccgo/v4 v4.30.1/go.mod h1:bIOeI1JL54Utlxn+LwrFyjCx2n2RDiYEaJVSrgdrRfM=
modernc.org/fileutil v1.3.40 h1:ZGMswMNc9JOCrcrakF1HrvmergNLAmxOPjizirpfqBA=
modernc.org/fileutil v1.3.40/go.mod h1:HxmghZSZVAz/LXcMNwZPA/DRrQZEVP9VX0V4LQGQFOc=
modernc.org/gc/v2 v2.6.5 h1:nyqdV8q46KvTpZlsw66kWqwXRHdjIlJOhG6kxiV/9xI=
modernc.org/gc/v2 v2.6.5/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/gc/v3 v3.1.1 h1:k8T3gkXWY9sEiytKhcgyiZ2L0DTyCQ/nvX+LoCljoRE=
modernc.org/gc/v3 v3.1.1/go.mod h1:HFK/6AGESC7Ex+EZJhJ2Gni6cTaYpSMmU/cT9RmlfYY=
modernc.org/goabi0 v0.2.0 h1:HvEowk7LxcPd0eq6mVOAEMai46V+i7Jrj13t4AzuNks=
modernc.org/goabi0 v0.2.0/go.mod h1:CEFRnnJhKvWT1c1JTI3Avm+tgOWbkOu5oPA8eH8LnMI=
modernc.org/libc v1.66.7 h1:rjhZ8OSCybKWxS1CJr0hikpEi6Vg+944Ouyrd+bQsoY=
modernc.org/libc v1.66.7/go.mod h1:ln6tbWX0NH+mzApEoDRvilBvAWFt1HX7AUA4VDdVDPM=
modernc.org/libc v1.67.1 h1:bFaqOaa5/zbWYJo8aW0tXPX21hXsngG2M7mckCnFSVk=
modernc.org/libc v1.67.1/go.mod h1:QvvnnJ5P7aitu0ReNpVIEyesuhmDLQ8kaEoyMjIFZJA=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI=
@@ -461,8 +467,8 @@ modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.38.2 h1:Aclu7+tgjgcQVShZqim41Bbw9Cho0y/7WzYptXqkEek=
modernc.org/sqlite v1.38.2/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E=
modernc.org/sqlite v1.40.1 h1:VfuXcxcUWWKRBuP8+BR9L7VnmusMgBNNnBYGEe9w/iY=
modernc.org/sqlite v1.40.1/go.mod h1:9fjQZ0mB1LLP0GYrp39oOJXx/I2sxEnZtzCmEQIKvGE=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=

@@ -2,68 +2,76 @@ package bootstrap
import (
"bytes"
"context"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"io"
"io/fs"
"log/slog"
"os"
"path"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"github.com/pocket-id/pocket-id/backend/resources"
)
// initApplicationImages copies the images from the images directory to the application-images directory
// initApplicationImages copies the images from the embedded directory to the storage backend
// and returns a map containing the detected file extensions in the application-images directory.
func initApplicationImages() (map[string]string, error) {
func initApplicationImages(ctx context.Context, fileStorage storage.FileStorage) (map[string]string, error) {
// Previous versions of images
// If these are found, they are deleted
legacyImageHashes := imageHashMap{
"background.jpg": mustDecodeHex("138d510030ed845d1d74de34658acabff562d306476454369a60ab8ade31933f"),
}
dirPath := common.EnvConfig.UploadPath + "/application-images"
sourceFiles, err := resources.FS.ReadDir("images")
if err != nil && !os.IsNotExist(err) {
return nil, fmt.Errorf("failed to read directory: %w", err)
}
destinationFiles, err := os.ReadDir(dirPath)
if err != nil && !os.IsNotExist(err) {
return nil, fmt.Errorf("failed to read directory: %w", err)
destinationFiles, err := fileStorage.List(ctx, "application-images")
if err != nil {
if storage.IsNotExist(err) {
destinationFiles = []storage.ObjectInfo{}
} else {
return nil, fmt.Errorf("failed to list application images: %w", err)
}
}
dstNameToExt := make(map[string]string, len(destinationFiles))
for _, f := range destinationFiles {
if f.IsDir() {
continue
}
name := f.Name()
nameWithoutExt, ext := utils.SplitFileName(name)
destFilePath := path.Join(dirPath, name)
// Skip directories
if f.IsDir() {
_, name := path.Split(f.Path)
if name == "" {
continue
}
h, err := utils.CreateSha256FileHash(destFilePath)
nameWithoutExt, ext := utils.SplitFileName(name)
reader, _, err := fileStorage.Open(ctx, f.Path)
if err != nil {
slog.Warn("Failed to get hash for file", slog.String("name", name), slog.Any("error", err))
if errors.Is(err, fs.ErrNotExist) {
continue
}
slog.Warn("Failed to open application image for hashing", slog.String("name", name), slog.Any("error", err))
continue
}
hash, err := hashStream(reader)
reader.Close()
if err != nil {
slog.Warn("Failed to hash application image", slog.String("name", name), slog.Any("error", err))
continue
}
// Check if the file is a legacy one - if so, delete it
if legacyImageHashes.Contains(h) {
if legacyImageHashes.Contains(hash) {
slog.Info("Found legacy application image that will be removed", slog.String("name", name))
err = os.Remove(destFilePath)
if err != nil {
if err := fileStorage.Delete(ctx, f.Path); err != nil {
return nil, fmt.Errorf("failed to remove legacy file '%s': %w", name, err)
}
continue
}
// Track existing files
dstNameToExt[nameWithoutExt] = ext
}
@@ -76,21 +84,21 @@ func initApplicationImages() (map[string]string, error) {
name := sourceFile.Name()
nameWithoutExt, ext := utils.SplitFileName(name)
srcFilePath := path.Join("images", name)
destFilePath := path.Join(dirPath, name)
// Skip if there's already an image at the path
// We do not check the extension because users could have uploaded a different one
if _, exists := dstNameToExt[nameWithoutExt]; exists {
continue
}
slog.Info("Writing new application image", slog.String("name", name))
err := utils.CopyEmbeddedFileToDisk(srcFilePath, destFilePath)
srcFile, err := resources.FS.Open(srcFilePath)
if err != nil {
return nil, fmt.Errorf("failed to copy file: %w", err)
return nil, fmt.Errorf("failed to open embedded file '%s': %w", name, err)
}
// Track the newly copied file so it can be included in the extensions map later
if err := fileStorage.Save(ctx, path.Join("application-images", name), srcFile); err != nil {
srcFile.Close()
return nil, fmt.Errorf("failed to store application image '%s': %w", name, err)
}
srcFile.Close()
dstNameToExt[nameWithoutExt] = ext
}
@@ -118,3 +126,11 @@ func mustDecodeHex(str string) []byte {
}
return b
}
func hashStream(r io.Reader) ([]byte, error) {
h := sha256.New()
if _, err := io.Copy(h, r); err != nil {
return nil, err
}
return h.Sum(nil), nil
}
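
The hunks above call List, Open, Save and Delete on the new storage.FileStorage abstraction, but its definition is not part of this diff. The sketch below is only inferred from those call sites: the ObjectInfo fields beyond Path, the second return value of Open, and the body of IsNotExist are assumptions, not the project's actual code.

package storage

import (
    "context"
    "errors"
    "io"
    "io/fs"
)

// ObjectInfo describes a stored object; Path carries the full key,
// e.g. "application-images/background.jpg". Size is an assumption.
type ObjectInfo struct {
    Path string
    Size int64
}

// FileStorage is the surface exercised by initApplicationImages above.
type FileStorage interface {
    List(ctx context.Context, prefix string) ([]ObjectInfo, error)
    // The second return value is discarded by the caller above; treating it
    // as object metadata is an assumption.
    Open(ctx context.Context, path string) (io.ReadCloser, ObjectInfo, error)
    Save(ctx context.Context, path string, r io.Reader) error
    Delete(ctx context.Context, path string) error
}

// IsNotExist reports whether err means the listed prefix or object is absent.
// Mapping it to fs.ErrNotExist mirrors the errors.Is check in the caller, but
// the real implementation may differ.
func IsNotExist(err error) bool {
    return errors.Is(err, fs.ErrNotExist)
}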

@@ -7,13 +7,25 @@ import (
"time"
_ "github.com/golang-migrate/migrate/v4/source/file"
"gorm.io/gorm"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/job"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
func Bootstrap(ctx context.Context) error {
var shutdownFns []utils.Service
defer func() { //nolint:contextcheck
// Invoke all shutdown functions on exit
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if err := utils.NewServiceRunner(shutdownFns...).Run(shutdownCtx); err != nil {
slog.Error("Error during graceful shutdown", "error", err)
}
}()
// Initialize the observability stack, including the logger, distributed tracing, and metrics
shutdownFns, httpClient, err := initObservability(ctx, common.EnvConfig.MetricsEnabled, common.EnvConfig.TracingEnabled)
if err != nil {
@@ -21,23 +33,47 @@ func Bootstrap(ctx context.Context) error {
}
slog.InfoContext(ctx, "Pocket ID is starting")
imageExtensions, err := initApplicationImages()
if err != nil {
return fmt.Errorf("failed to initialize application images: %w", err)
}
// Connect to the database
db, err := NewDatabase()
if err != nil {
return fmt.Errorf("failed to initialize database: %w", err)
}
fileStorage, err := InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize file storage (backend: %s): %w", common.EnvConfig.FileBackend, err)
}
imageExtensions, err := initApplicationImages(ctx, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize application images: %w", err)
}
// Create all services
svc, err := initServices(ctx, db, httpClient, imageExtensions)
svc, err := initServices(ctx, db, httpClient, imageExtensions, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize services: %w", err)
}
waitUntil, err := svc.appLockService.Acquire(ctx, false)
if err != nil {
return fmt.Errorf("failed to acquire application lock: %w", err)
}
select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(time.Until(waitUntil)):
}
shutdownFn := func(shutdownCtx context.Context) error {
sErr := svc.appLockService.Release(shutdownCtx)
if sErr != nil {
return fmt.Errorf("failed to release application lock: %w", sErr)
}
return nil
}
shutdownFns = append(shutdownFns, shutdownFn)
// Init the job scheduler
scheduler, err := job.NewScheduler()
if err != nil {
@@ -49,28 +85,51 @@ func Bootstrap(ctx context.Context) error {
}
// Init the router
router := initRouter(db, svc)
router, err := initRouter(db, svc)
if err != nil {
return fmt.Errorf("failed to initialize router: %w", err)
}
// Run all background services
// This call blocks until the context is canceled
err = utils.
NewServiceRunner(router, scheduler.Run).
Run(ctx)
services := []utils.Service{svc.appLockService.RunRenewal, router}
if common.EnvConfig.AppEnv != "test" {
services = append(services, scheduler.Run)
}
err = utils.NewServiceRunner(services...).Run(ctx)
if err != nil {
return fmt.Errorf("failed to run services: %w", err)
}
// Invoke all shutdown functions
// We give these a timeout of 5s
// Note: we use a background context because the run context has been canceled already
shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 5*time.Second)
defer shutdownCancel()
err = utils.
NewServiceRunner(shutdownFns...).
Run(shutdownCtx) //nolint:contextcheck
if err != nil {
slog.Error("Error shutting down services", slog.Any("error", err))
}
return nil
}
func InitStorage(ctx context.Context, db *gorm.DB) (fileStorage storage.FileStorage, err error) {
switch common.EnvConfig.FileBackend {
case storage.TypeFileSystem:
fileStorage, err = storage.NewFilesystemStorage(common.EnvConfig.UploadPath)
case storage.TypeDatabase:
fileStorage, err = storage.NewDatabaseStorage(db)
case storage.TypeS3:
s3Cfg := storage.S3Config{
Bucket: common.EnvConfig.S3Bucket,
Region: common.EnvConfig.S3Region,
Endpoint: common.EnvConfig.S3Endpoint,
AccessKeyID: common.EnvConfig.S3AccessKeyID,
SecretAccessKey: common.EnvConfig.S3SecretAccessKey,
ForcePathStyle: common.EnvConfig.S3ForcePathStyle,
DisableDefaultIntegrityChecks: common.EnvConfig.S3DisableDefaultIntegrityChecks,
Root: common.EnvConfig.UploadPath,
}
fileStorage, err = storage.NewS3Storage(ctx, s3Cfg)
default:
err = fmt.Errorf("unknown file storage backend: %s", common.EnvConfig.FileBackend)
}
if err != nil {
return fileStorage, err
}
return fileStorage, nil
}
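
utils.Service and utils.NewServiceRunner are used throughout Bootstrap but not defined in this diff. Because method values such as scheduler.Run and svc.appLockService.RunRenewal are passed in directly, Service is presumably a plain function type; the runner below is only a plausible reading of those semantics (errgroup-style fan-out, using the golang.org/x/sync dependency listed in go.sum), not the project's implementation.

package utils

import (
    "context"

    "golang.org/x/sync/errgroup"
)

// Service is assumed to be a blocking run function that honors ctx cancellation.
type Service func(ctx context.Context) error

type ServiceRunner struct {
    services []Service
}

func NewServiceRunner(services ...Service) *ServiceRunner {
    return &ServiceRunner{services: services}
}

// Run starts every service and blocks until all return, canceling the shared
// context as soon as one of them fails.
func (r *ServiceRunner) Run(ctx context.Context) error {
    g, ctx := errgroup.WithContext(ctx)
    for _, svc := range r.services {
        svc := svc // not needed on Go 1.22+, kept for older toolchains
        g.Go(func() error {
            return svc(ctx)
        })
    }
    return g.Wait()
}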

@@ -1,6 +1,7 @@
package bootstrap
import (
"database/sql"
"errors"
"fmt"
"log/slog"
@@ -11,12 +12,7 @@ import (
"time"
"github.com/glebarez/sqlite"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database"
postgresMigrate "github.com/golang-migrate/migrate/v4/database/postgres"
sqliteMigrate "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/github"
"github.com/golang-migrate/migrate/v4/source/iofs"
slogGorm "github.com/orandin/slog-gorm"
"gorm.io/driver/postgres"
"gorm.io/gorm"
@@ -25,11 +21,10 @@ import (
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/utils"
sqliteutil "github.com/pocket-id/pocket-id/backend/internal/utils/sqlite"
"github.com/pocket-id/pocket-id/backend/resources"
)
func NewDatabase() (db *gorm.DB, err error) {
db, err = connectDatabase()
db, err = ConnectDatabase()
if err != nil {
return nil, fmt.Errorf("failed to connect to database: %w", err)
}
@@ -38,108 +33,19 @@ func NewDatabase() (db *gorm.DB, err error) {
return nil, fmt.Errorf("failed to get sql.DB: %w", err)
}
// Choose the correct driver for the database provider
var driver database.Driver
switch common.EnvConfig.DbProvider {
case common.DbProviderSqlite:
driver, err = sqliteMigrate.WithInstance(sqlDb, &sqliteMigrate.Config{
NoTxWrap: true,
})
case common.DbProviderPostgres:
driver, err = postgresMigrate.WithInstance(sqlDb, &postgresMigrate.Config{})
default:
// Should never happen at this point
return nil, fmt.Errorf("unsupported database provider: %s", common.EnvConfig.DbProvider)
}
if err != nil {
return nil, fmt.Errorf("failed to create migration driver: %w", err)
}
// Run migrations
if err := migrateDatabase(driver); err != nil {
if err := utils.MigrateDatabase(sqlDb); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}
return db, nil
}
func migrateDatabase(driver database.Driver) error {
// Embedded migrations via iofs
path := "migrations/" + string(common.EnvConfig.DbProvider)
source, err := iofs.New(resources.FS, path)
if err != nil {
return fmt.Errorf("failed to create embedded migration source: %w", err)
}
m, err := migrate.NewWithInstance("iofs", source, "pocket-id", driver)
if err != nil {
return fmt.Errorf("failed to create migration instance: %w", err)
}
requiredVersion, err := getRequiredMigrationVersion(path)
if err != nil {
return fmt.Errorf("failed to get last migration version: %w", err)
}
currentVersion, _, _ := m.Version()
if currentVersion > requiredVersion {
slog.Warn("Database version is newer than the application supports, possible downgrade detected", slog.Uint64("db_version", uint64(currentVersion)), slog.Uint64("app_version", uint64(requiredVersion)))
if !common.EnvConfig.AllowDowngrade {
return fmt.Errorf("database version (%d) is newer than application version (%d), downgrades are not allowed (set ALLOW_DOWNGRADE=true to enable)", currentVersion, requiredVersion)
}
slog.Info("Fetching migrations from GitHub to handle possible downgrades")
return migrateDatabaseFromGitHub(driver, requiredVersion)
}
if err := m.Migrate(requiredVersion); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply embedded migrations: %w", err)
}
return nil
}
func migrateDatabaseFromGitHub(driver database.Driver, version uint) error {
srcURL := "github://pocket-id/pocket-id/backend/resources/migrations/" + string(common.EnvConfig.DbProvider)
m, err := migrate.NewWithDatabaseInstance(srcURL, "pocket-id", driver)
if err != nil {
return fmt.Errorf("failed to create GitHub migration instance: %w", err)
}
if err := m.Migrate(version); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply GitHub migrations: %w", err)
}
return nil
}
// getRequiredMigrationVersion reads the embedded migration files and returns the highest version number found.
func getRequiredMigrationVersion(path string) (uint, error) {
entries, err := resources.FS.ReadDir(path)
if err != nil {
return 0, fmt.Errorf("failed to read migration directory: %w", err)
}
var maxVersion uint
for _, entry := range entries {
if entry.IsDir() {
continue
}
name := entry.Name()
var version uint
n, err := fmt.Sscanf(name, "%d_", &version)
if err == nil && n == 1 {
if version > maxVersion {
maxVersion = version
}
}
}
return maxVersion, nil
}
func connectDatabase() (db *gorm.DB, err error) {
func ConnectDatabase() (db *gorm.DB, err error) {
var dialector gorm.Dialector
// Choose the correct database provider
var onConnFn func(conn *sql.DB)
switch common.EnvConfig.DbProvider {
case common.DbProviderSqlite:
if common.EnvConfig.DbConnectionString == "" {
@@ -148,7 +54,7 @@ func connectDatabase() (db *gorm.DB, err error) {
sqliteutil.RegisterSqliteFunctions()
connString, dbPath, err := parseSqliteConnectionString(common.EnvConfig.DbConnectionString)
connString, dbPath, isMemoryDB, err := parseSqliteConnectionString(common.EnvConfig.DbConnectionString)
if err != nil {
return nil, err
}
@@ -159,6 +65,14 @@ func connectDatabase() (db *gorm.DB, err error) {
return nil, err
}
if isMemoryDB {
// For in-memory SQLite databases, we must limit to 1 open connection at a time, or separate connections won't see the same data
// The other workaround, using a shared cache, doesn't work well when multiple write transactions try to run at once
onConnFn = func(conn *sql.DB) {
conn.SetMaxOpenConns(1)
}
}
dialector = sqlite.Open(connString)
case common.DbProviderPostgres:
if common.EnvConfig.DbConnectionString == "" {
@@ -176,6 +90,16 @@ func connectDatabase() (db *gorm.DB, err error) {
})
if err == nil {
slog.Info("Connected to database", slog.String("provider", string(common.EnvConfig.DbProvider)))
if onConnFn != nil {
conn, err := db.DB()
if err != nil {
slog.Warn("Failed to get database connection, will retry in 3s", slog.Int("attempt", i), slog.String("provider", string(common.EnvConfig.DbProvider)), slog.Any("error", err))
time.Sleep(3 * time.Second)
continue
}
onConnFn(conn)
}
return db, nil
}
@@ -188,18 +112,18 @@ func connectDatabase() (db *gorm.DB, err error) {
return nil, err
}
func parseSqliteConnectionString(connString string) (parsedConnString string, dbPath string, err error) {
func parseSqliteConnectionString(connString string) (parsedConnString string, dbPath string, isMemoryDB bool, err error) {
if !strings.HasPrefix(connString, "file:") {
connString = "file:" + connString
}
// Check if we're using an in-memory database
isMemoryDB := isSqliteInMemory(connString)
isMemoryDB = isSqliteInMemory(connString)
// Parse the connection string
connStringUrl, err := url.Parse(connString)
if err != nil {
return "", "", fmt.Errorf("failed to parse SQLite connection string: %w", err)
return "", "", false, fmt.Errorf("failed to parse SQLite connection string: %w", err)
}
// Convert options for the old SQLite driver to the new one
@@ -208,7 +132,7 @@ func parseSqliteConnectionString(connString string) (parsedConnString string, db
// Add the default and required params
err = addSqliteDefaultParameters(connStringUrl, isMemoryDB)
if err != nil {
return "", "", fmt.Errorf("invalid SQLite connection string: %w", err)
return "", "", false, fmt.Errorf("invalid SQLite connection string: %w", err)
}
// Get the absolute path to the database
@@ -217,10 +141,10 @@ func parseSqliteConnectionString(connString string) (parsedConnString string, db
idx := strings.IndexRune(parsedConnString, '?')
dbPath, err = filepath.Abs(parsedConnString[len("file:"):idx])
if err != nil {
return "", "", fmt.Errorf("failed to determine absolute path to the database: %w", err)
return "", "", false, fmt.Errorf("failed to determine absolute path to the database: %w", err)
}
return parsedConnString, dbPath, nil
return parsedConnString, dbPath, isMemoryDB, nil
}
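A small self-contained sketch of how an in-memory SQLite DSN can be detected; the actual isSqliteInMemory implementation is not part of this diff, so the check below is an assumption based on common SQLite conventions (":memory:" or mode=memory).

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// looksInMemory is a sketch of what isSqliteInMemory presumably checks:
// a DSN is in-memory if it targets ":memory:" or carries mode=memory.
func looksInMemory(connString string) bool {
	if strings.Contains(connString, ":memory:") {
		return true
	}
	u, err := url.Parse(connString)
	if err != nil {
		return false
	}
	return u.Query().Get("mode") == "memory"
}

func main() {
	for _, dsn := range []string{
		"file::memory:",
		"file:test.db?mode=memory",
		"file:data/pocket-id.db?_pragma=busy_timeout(2500)",
	} {
		fmt.Printf("%-55s in-memory: %v\n", dsn, looksInMemory(dsn))
	}
	// For in-memory databases the bootstrap above calls conn.SetMaxOpenConns(1)
	// on the *sql.DB, so every query shares the single connection that owns the data.
}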
// The official C implementation of SQLite allows some additional properties in the connection string
@@ -272,11 +196,6 @@ func addSqliteDefaultParameters(connStringUrl *url.URL, isMemoryDB bool) error {
qs = make(url.Values, 2)
}
// If the database is in-memory, we must ensure that cache=shared is set
if isMemoryDB {
qs["cache"] = []string{"shared"}
}
// Check if the database is read-only or immutable
isReadOnly := false
if len(qs["mode"]) > 0 {

View File

@@ -205,7 +205,7 @@ func TestAddSqliteDefaultParameters(t *testing.T) {
name: "in-memory database",
input: "file::memory:",
isMemoryDB: true,
expected: "file::memory:?_pragma=busy_timeout%282500%29&_pragma=foreign_keys%281%29&_pragma=journal_mode%28MEMORY%29&_txlock=immediate&cache=shared",
expected: "file::memory:?_pragma=busy_timeout%282500%29&_pragma=foreign_keys%281%29&_pragma=journal_mode%28MEMORY%29&_txlock=immediate",
},
{
name: "read-only database with mode=ro",
@@ -249,12 +249,6 @@ func TestAddSqliteDefaultParameters(t *testing.T) {
isMemoryDB: false,
expected: "file:test.db?_pragma=busy_timeout%283000%29&_pragma=foreign_keys%281%29&_pragma=journal_mode%28TRUNCATE%29&_pragma=synchronous%28NORMAL%29&_txlock=immediate",
},
{
name: "in-memory database with cache already set",
input: "file::memory:?cache=private",
isMemoryDB: true,
expected: "file::memory:?_pragma=busy_timeout%282500%29&_pragma=foreign_keys%281%29&_pragma=journal_mode%28MEMORY%29&_txlock=immediate&cache=shared",
},
{
name: "database with mode=rw (not read-only)",
input: "file:test.db?mode=rw",

View File

@@ -17,7 +17,7 @@ import (
func init() {
registerTestControllers = []func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services){
func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services) {
testService, err := service.NewTestService(db, svc.appConfigService, svc.jwtService, svc.ldapService)
testService, err := service.NewTestService(db, svc.appConfigService, svc.jwtService, svc.ldapService, svc.appLockService, svc.fileStorage)
if err != nil {
slog.Error("Failed to initialize test service", slog.Any("error", err))
os.Exit(1)

View File

@@ -29,23 +29,14 @@ import (
// This is used to register additional controllers for tests
var registerTestControllers []func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services)
func initRouter(db *gorm.DB, svc *services) utils.Service {
runner, err := initRouterInternal(db, svc)
if err != nil {
slog.Error("Failed to init router", "error", err)
os.Exit(1)
}
return runner
}
func initRouterInternal(db *gorm.DB, svc *services) (utils.Service, error) {
func initRouter(db *gorm.DB, svc *services) (utils.Service, error) {
// Set the appropriate Gin mode based on the environment
switch common.EnvConfig.AppEnv {
case "production":
case common.AppEnvProduction:
gin.SetMode(gin.ReleaseMode)
case "development":
case common.AppEnvDevelopment:
gin.SetMode(gin.DebugMode)
case "test":
case common.AppEnvTest:
gin.SetMode(gin.TestMode)
}
@@ -92,7 +83,7 @@ func initRouterInternal(db *gorm.DB, svc *services) (utils.Service, error) {
controller.NewVersionController(apiGroup, svc.versionService)
// Add test controller in non-production environments
if common.EnvConfig.AppEnv != "production" {
if !common.EnvConfig.AppEnv.IsProduction() {
for _, f := range registerTestControllers {
f(apiGroup, db, svc)
}
@@ -119,6 +110,7 @@ func initRouterInternal(db *gorm.DB, svc *services) (utils.Service, error) {
if common.EnvConfig.UnixSocket != "" {
network = "unix"
addr = common.EnvConfig.UnixSocket
os.Remove(addr) // remove the dangling socket file to avoid a file-exists error
}
listener, err := net.Listen(network, addr) //nolint:noctx

View File

@@ -23,7 +23,7 @@ func registerScheduledJobs(ctx context.Context, db *gorm.DB, svc *services, http
if err != nil {
return fmt.Errorf("failed to register DB cleanup jobs in scheduler: %w", err)
}
err = scheduler.RegisterFileCleanupJobs(ctx, db)
err = scheduler.RegisterFileCleanupJobs(ctx, db, svc.fileStorage)
if err != nil {
return fmt.Errorf("failed to register file cleanup jobs in scheduler: %w", err)
}

View File

@@ -8,6 +8,7 @@ import (
"gorm.io/gorm"
"github.com/pocket-id/pocket-id/backend/internal/service"
"github.com/pocket-id/pocket-id/backend/internal/storage"
)
type services struct {
@@ -25,10 +26,12 @@ type services struct {
ldapService *service.LdapService
apiKeyService *service.ApiKeyService
versionService *service.VersionService
fileStorage storage.FileStorage
appLockService *service.AppLockService
}
// Initializes all services
func initServices(ctx context.Context, db *gorm.DB, httpClient *http.Client, imageExtensions map[string]string) (svc *services, err error) {
func initServices(ctx context.Context, db *gorm.DB, httpClient *http.Client, imageExtensions map[string]string, fileStorage storage.FileStorage) (svc *services, err error) {
svc = &services{}
svc.appConfigService, err = service.NewAppConfigService(ctx, db)
@@ -36,7 +39,9 @@ func initServices(ctx context.Context, db *gorm.DB, httpClient *http.Client, ima
return nil, fmt.Errorf("failed to create app config service: %w", err)
}
svc.appImagesService = service.NewAppImagesService(imageExtensions)
svc.fileStorage = fileStorage
svc.appImagesService = service.NewAppImagesService(imageExtensions, fileStorage)
svc.appLockService = service.NewAppLockService(db)
svc.emailService, err = service.NewEmailService(db, svc.appConfigService)
if err != nil {
@@ -56,14 +61,14 @@ func initServices(ctx context.Context, db *gorm.DB, httpClient *http.Client, ima
return nil, fmt.Errorf("failed to create WebAuthn service: %w", err)
}
svc.oidcService, err = service.NewOidcService(ctx, db, svc.jwtService, svc.appConfigService, svc.auditLogService, svc.customClaimService, svc.webauthnService)
svc.oidcService, err = service.NewOidcService(ctx, db, svc.jwtService, svc.appConfigService, svc.auditLogService, svc.customClaimService, svc.webauthnService, httpClient, fileStorage)
if err != nil {
return nil, fmt.Errorf("failed to create OIDC service: %w", err)
}
svc.userGroupService = service.NewUserGroupService(db, svc.appConfigService)
svc.userService = service.NewUserService(db, svc.jwtService, svc.auditLogService, svc.emailService, svc.appConfigService, svc.customClaimService)
svc.ldapService = service.NewLdapService(db, httpClient, svc.appConfigService, svc.userService, svc.userGroupService)
svc.userService = service.NewUserService(db, svc.jwtService, svc.auditLogService, svc.emailService, svc.appConfigService, svc.customClaimService, svc.appImagesService, fileStorage)
svc.ldapService = service.NewLdapService(db, httpClient, svc.appConfigService, svc.userService, svc.userGroupService, fileStorage)
svc.apiKeyService = service.NewApiKeyService(db, svc.emailService)
svc.versionService = service.NewVersionService(httpClient)

View File

@@ -0,0 +1,70 @@
package cmds
import (
"context"
"fmt"
"io"
"os"
"github.com/pocket-id/pocket-id/backend/internal/bootstrap"
"github.com/pocket-id/pocket-id/backend/internal/service"
"github.com/spf13/cobra"
)
type exportFlags struct {
Path string
}
func init() {
var flags exportFlags
exportCmd := &cobra.Command{
Use: "export",
Short: "Exports all data of Pocket ID into a ZIP file",
RunE: func(cmd *cobra.Command, args []string) error {
return runExport(cmd.Context(), flags)
},
}
exportCmd.Flags().StringVarP(&flags.Path, "path", "p", "pocket-id-export.zip", "Path to the ZIP file to export the data to, or '-' to write to stdout")
rootCmd.AddCommand(exportCmd)
}
// runExport orchestrates the export flow
func runExport(ctx context.Context, flags exportFlags) error {
db, err := bootstrap.NewDatabase()
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}
storage, err := bootstrap.InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize storage: %w", err)
}
exportService := service.NewExportService(db, storage)
var w io.Writer
if flags.Path == "-" {
w = os.Stdout
} else {
file, err := os.Create(flags.Path)
if err != nil {
return fmt.Errorf("failed to create export file: %w", err)
}
defer file.Close()
w = file
}
if err := exportService.ExportToZip(ctx, w); err != nil {
return fmt.Errorf("failed to export data: %w", err)
}
if flags.Path != "-" {
fmt.Printf("Exported data to %s\n", flags.Path)
}
return nil
}
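To illustrate why the export can target either a file or stdout, here is a minimal sketch that streams a ZIP archive to any io.Writer using archive/zip; the entry names are hypothetical and the real ExportService layout may differ.

package main

import (
	"archive/zip"
	"fmt"
	"io"
	"os"
)

// writeArchive streams a ZIP to any io.Writer, which is what lets the export
// command write either to a file or to stdout ("-") interchangeably.
// The entry names below are made up for illustration.
func writeArchive(w io.Writer) error {
	zw := zip.NewWriter(w)
	for name, content := range map[string]string{
		"database.json": `{"users": []}`,
		"uploads/logo":  "placeholder data",
	} {
		f, err := zw.Create(name)
		if err != nil {
			return fmt.Errorf("failed to create zip entry %q: %w", name, err)
		}
		if _, err := io.WriteString(f, content); err != nil {
			return fmt.Errorf("failed to write zip entry %q: %w", name, err)
		}
	}
	return zw.Close()
}

func main() {
	if err := writeArchive(os.Stdout); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}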

View File

@@ -0,0 +1,191 @@
package cmds
import (
"archive/zip"
"context"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"time"
"github.com/spf13/cobra"
"gorm.io/gorm"
"github.com/pocket-id/pocket-id/backend/internal/bootstrap"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/service"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
type importFlags struct {
Path string
Yes bool
ForcefullyAcquireLock bool
}
func init() {
var flags importFlags
importCmd := &cobra.Command{
Use: "import",
Short: "Imports all data of Pocket ID from a ZIP file",
RunE: func(cmd *cobra.Command, args []string) error {
return runImport(cmd.Context(), flags)
},
}
importCmd.Flags().StringVarP(&flags.Path, "path", "p", "pocket-id-export.zip", "Path to the ZIP file to import the data from, or '-' to read from stdin")
importCmd.Flags().BoolVarP(&flags.Yes, "yes", "y", false, "Skip confirmation prompts")
importCmd.Flags().BoolVarP(&flags.ForcefullyAcquireLock, "forcefully-acquire-lock", "", false, "Forcefully acquire the application lock by terminating the Pocket ID instance")
rootCmd.AddCommand(importCmd)
}
// runImport handles the high-level orchestration of the import process
func runImport(ctx context.Context, flags importFlags) error {
if !flags.Yes {
ok, err := askForConfirmation()
if err != nil {
return fmt.Errorf("failed to get confirmation: %w", err)
}
if !ok {
fmt.Println("Aborted")
os.Exit(1)
}
}
var (
zipReader *zip.ReadCloser
cleanup func()
err error
)
if flags.Path == "-" {
zipReader, cleanup, err = readZipFromStdin()
defer cleanup()
} else {
zipReader, err = zip.OpenReader(flags.Path)
}
if err != nil {
return fmt.Errorf("failed to open zip: %w", err)
}
defer zipReader.Close()
db, err := bootstrap.ConnectDatabase()
if err != nil {
return err
}
err = acquireImportLock(ctx, db, flags.ForcefullyAcquireLock)
if err != nil {
return err
}
storage, err := bootstrap.InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize storage: %w", err)
}
importService := service.NewImportService(db, storage)
err = importService.ImportFromZip(ctx, &zipReader.Reader)
if err != nil {
return fmt.Errorf("failed to import data from zip: %w", err)
}
fmt.Println("Import completed successfully.")
return nil
}
func acquireImportLock(ctx context.Context, db *gorm.DB, force bool) error {
// Check if the kv table exists, in case we are starting from an empty database
exists, err := utils.DBTableExists(db, "kv")
if err != nil {
return fmt.Errorf("failed to check if kv table exists: %w", err)
}
if !exists {
// This means either that the database is empty, or that we're importing into an old version of Pocket ID that doesn't support locks
// In either case, there's no lock to acquire
fmt.Println("Could not acquire a lock because the 'kv' table does not exist. This is fine if you're importing into a new database, but make sure that there isn't an instance of Pocket ID currently running and using the same database.")
return nil
}
// Note that we do not call a deferred Release if the data was imported
// This is because we are overriding the contents of the database, so the lock is automatically lost
appLockService := service.NewAppLockService(db)
opCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
waitUntil, err := appLockService.Acquire(opCtx, force)
if err != nil {
if errors.Is(err, service.ErrLockUnavailable) {
//nolint:staticcheck
return errors.New("Pocket ID must be stopped before importing data; please stop the running instance or run with --forcefully-acquire-lock to terminate the other instance")
}
return fmt.Errorf("failed to acquire application lock: %w", err)
}
select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(time.Until(waitUntil)):
}
return nil
}
func askForConfirmation() (bool, error) {
fmt.Println("WARNING: This feature is experimental and may not work correctly. Please create a backup before proceeding and report any issues you encounter.")
fmt.Println()
fmt.Println("WARNING: Import will erase all existing data at the following locations:")
fmt.Printf("Database: %s\n", absolutePathOrOriginal(common.EnvConfig.DbConnectionString))
fmt.Printf("Uploads Path: %s\n", absolutePathOrOriginal(common.EnvConfig.UploadPath))
ok, err := utils.PromptForConfirmation("Do you want to continue?")
if err != nil {
return false, err
}
return ok, nil
}
// absolutePathOrOriginal returns the absolute path of the given path, or the original if it fails
func absolutePathOrOriginal(path string) string {
abs, err := filepath.Abs(path)
if err != nil {
return path
}
return abs
}
func readZipFromStdin() (*zip.ReadCloser, func(), error) {
tmpFile, err := os.CreateTemp("", "pocket-id-import-*.zip")
if err != nil {
return nil, nil, fmt.Errorf("failed to create temporary file: %w", err)
}
cleanup := func() {
_ = os.Remove(tmpFile.Name())
}
if _, err := io.Copy(tmpFile, os.Stdin); err != nil {
tmpFile.Close()
cleanup()
return nil, nil, fmt.Errorf("failed to read data from stdin: %w", err)
}
if err := tmpFile.Close(); err != nil {
cleanup()
return nil, nil, fmt.Errorf("failed to close temporary file: %w", err)
}
r, err := zip.OpenReader(tmpFile.Name())
if err != nil {
cleanup()
return nil, nil, err
}
return r, cleanup, nil
}
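The temp-file spool above exists because a ZIP archive cannot be read as a pure stream: zip.OpenReader and zip.NewReader need random access (io.ReaderAt) plus the total size, since the central directory sits at the end of the file. A minimal in-memory alternative, assuming the archive fits in RAM, would look like this:

package main

import (
	"archive/zip"
	"bytes"
	"fmt"
	"io"
	"os"
)

func main() {
	// Buffer stdin into memory instead of a temp file; simpler, but it holds
	// the whole archive in RAM.
	data, err := io.ReadAll(os.Stdin)
	if err != nil {
		fmt.Fprintln(os.Stderr, "failed to read stdin:", err)
		os.Exit(1)
	}

	// zip.NewReader requires an io.ReaderAt and the total size.
	zr, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
	if err != nil {
		fmt.Fprintln(os.Stderr, "failed to open zip:", err)
		os.Exit(1)
	}
	for _, f := range zr.File {
		fmt.Printf("%s (%d bytes)\n", f.Name, f.UncompressedSize64)
	}
}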

View File

@@ -4,6 +4,7 @@ import (
"context"
"errors"
"fmt"
"os"
"strings"
"github.com/lestrrat-go/jwx/v3/jwa"
@@ -78,7 +79,7 @@ func keyRotate(ctx context.Context, flags keyRotateFlags, db *gorm.DB, envConfig
}
if !ok {
fmt.Println("Aborted")
return nil
os.Exit(1)
}
}

View File

@@ -1,8 +1,6 @@
package cmds
import (
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
@@ -69,78 +67,14 @@ func TestKeyRotate(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Run("file storage", func(t *testing.T) {
testKeyRotateWithFileStorage(t, tt.flags, tt.wantErr, tt.errMsg)
})
t.Run("database storage", func(t *testing.T) {
testKeyRotateWithDatabaseStorage(t, tt.flags, tt.wantErr, tt.errMsg)
})
testKeyRotateWithDatabaseStorage(t, tt.flags, tt.wantErr, tt.errMsg)
})
}
}
func testKeyRotateWithFileStorage(t *testing.T, flags keyRotateFlags, wantErr bool, errMsg string) {
// Create temporary directory for keys
tempDir := t.TempDir()
keysPath := filepath.Join(tempDir, "keys")
err := os.MkdirAll(keysPath, 0755)
require.NoError(t, err)
// Set up file storage config
envConfig := &common.EnvConfigSchema{
KeysStorage: "file",
KeysPath: keysPath,
}
// Create test database
db := testingutils.NewDatabaseForTest(t)
// Initialize app config service and create instance
appConfigService, err := service.NewAppConfigService(t.Context(), db)
require.NoError(t, err)
instanceID := appConfigService.GetDbConfig().InstanceID.Value
// Check if key exists before rotation
keyProvider, err := jwkutils.GetKeyProvider(db, envConfig, instanceID)
require.NoError(t, err)
// Run the key rotation
err = keyRotate(t.Context(), flags, db, envConfig)
if wantErr {
require.Error(t, err)
if errMsg != "" {
require.ErrorContains(t, err, errMsg)
}
return
}
require.NoError(t, err)
// Verify key was created
key, err := keyProvider.LoadKey()
require.NoError(t, err)
require.NotNil(t, key)
// Verify the algorithm matches what we requested
alg, _ := key.Algorithm()
assert.NotEmpty(t, alg)
if flags.Alg != "" {
expectedAlg := flags.Alg
if expectedAlg == "EdDSA" {
// EdDSA keys should have the EdDSA algorithm
assert.Equal(t, "EdDSA", alg.String())
} else {
assert.Equal(t, expectedAlg, alg.String())
}
}
}
func testKeyRotateWithDatabaseStorage(t *testing.T, flags keyRotateFlags, wantErr bool, errMsg string) {
// Set up database storage config
envConfig := &common.EnvConfigSchema{
KeysStorage: "database",
EncryptionKey: []byte("test-encryption-key-characters-long"),
}

View File

@@ -12,9 +12,10 @@ import (
)
var rootCmd = &cobra.Command{
Use: "pocket-id",
Short: "A simple and easy-to-use OIDC provider that allows users to authenticate with their passkeys to your services.",
Long: "By default, this command starts the pocket-id server.",
Use: "pocket-id",
Short: "A simple and easy-to-use OIDC provider that allows users to authenticate with their passkeys to your services.",
Long: "By default, this command starts the pocket-id server.",
SilenceUsage: true,
Run: func(cmd *cobra.Command, args []string) {
// Start the server
err := bootstrap.Bootstrap(cmd.Context())

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"log/slog"
"net"
"net/url"
"os"
"reflect"
@@ -14,6 +15,7 @@ import (
_ "github.com/joho/godotenv/autoload"
)
type AppEnv string
type DbProvider string
const (
@@ -24,39 +26,52 @@ const (
)
const (
AppEnvProduction AppEnv = "production"
AppEnvDevelopment AppEnv = "development"
AppEnvTest AppEnv = "test"
DbProviderSqlite DbProvider = "sqlite"
DbProviderPostgres DbProvider = "postgres"
MaxMindGeoLiteCityUrl string = "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key=%s&suffix=tar.gz"
defaultSqliteConnString string = "data/pocket-id.db"
defaultFsUploadPath string = "data/uploads"
AppUrl string = "http://localhost:1411"
)
type EnvConfigSchema struct {
AppEnv string `env:"APP_ENV" options:"toLower"`
LogLevel string `env:"LOG_LEVEL" options:"toLower"`
AppURL string `env:"APP_URL" options:"toLower"`
DbProvider DbProvider `env:"DB_PROVIDER" options:"toLower"`
DbConnectionString string `env:"DB_CONNECTION_STRING" options:"file"`
UploadPath string `env:"UPLOAD_PATH"`
KeysPath string `env:"KEYS_PATH"`
KeysStorage string `env:"KEYS_STORAGE"`
EncryptionKey []byte `env:"ENCRYPTION_KEY" options:"file"`
Port string `env:"PORT"`
Host string `env:"HOST" options:"toLower"`
UnixSocket string `env:"UNIX_SOCKET"`
UnixSocketMode string `env:"UNIX_SOCKET_MODE"`
MaxMindLicenseKey string `env:"MAXMIND_LICENSE_KEY" options:"file"`
GeoLiteDBPath string `env:"GEOLITE_DB_PATH"`
GeoLiteDBUrl string `env:"GEOLITE_DB_URL"`
LocalIPv6Ranges string `env:"LOCAL_IPV6_RANGES"`
UiConfigDisabled bool `env:"UI_CONFIG_DISABLED"`
MetricsEnabled bool `env:"METRICS_ENABLED"`
TracingEnabled bool `env:"TRACING_ENABLED"`
LogJSON bool `env:"LOG_JSON"`
TrustProxy bool `env:"TRUST_PROXY"`
AnalyticsDisabled bool `env:"ANALYTICS_DISABLED"`
AllowDowngrade bool `env:"ALLOW_DOWNGRADE"`
InternalAppURL string `env:"INTERNAL_APP_URL"`
AppEnv AppEnv `env:"APP_ENV" options:"toLower"`
LogLevel string `env:"LOG_LEVEL" options:"toLower"`
LogJSON bool `env:"LOG_JSON"`
AppURL string `env:"APP_URL" options:"toLower,trimTrailingSlash"`
DbProvider DbProvider
DbConnectionString string `env:"DB_CONNECTION_STRING" options:"file"`
EncryptionKey []byte `env:"ENCRYPTION_KEY" options:"file"`
Port string `env:"PORT"`
Host string `env:"HOST" options:"toLower"`
UnixSocket string `env:"UNIX_SOCKET"`
UnixSocketMode string `env:"UNIX_SOCKET_MODE"`
LocalIPv6Ranges string `env:"LOCAL_IPV6_RANGES"`
UiConfigDisabled bool `env:"UI_CONFIG_DISABLED"`
MetricsEnabled bool `env:"METRICS_ENABLED"`
TracingEnabled bool `env:"TRACING_ENABLED"`
TrustProxy bool `env:"TRUST_PROXY"`
AnalyticsDisabled bool `env:"ANALYTICS_DISABLED"`
AllowDowngrade bool `env:"ALLOW_DOWNGRADE"`
InternalAppURL string `env:"INTERNAL_APP_URL"`
MaxMindLicenseKey string `env:"MAXMIND_LICENSE_KEY" options:"file"`
GeoLiteDBPath string `env:"GEOLITE_DB_PATH"`
GeoLiteDBUrl string `env:"GEOLITE_DB_URL"`
FileBackend string `env:"FILE_BACKEND" options:"toLower"`
UploadPath string `env:"UPLOAD_PATH"`
S3Bucket string `env:"S3_BUCKET"`
S3Region string `env:"S3_REGION"`
S3Endpoint string `env:"S3_ENDPOINT"`
S3AccessKeyID string `env:"S3_ACCESS_KEY_ID"`
S3SecretAccessKey string `env:"S3_SECRET_ACCESS_KEY"`
S3ForcePathStyle bool `env:"S3_FORCE_PATH_STYLE"`
S3DisableDefaultIntegrityChecks bool `env:"S3_DISABLE_DEFAULT_INTEGRITY_CHECKS"`
}
var EnvConfig = defaultConfig()
@@ -71,30 +86,15 @@ func init() {
func defaultConfig() EnvConfigSchema {
return EnvConfigSchema{
AppEnv: "production",
LogLevel: "info",
DbProvider: "sqlite",
DbConnectionString: "",
UploadPath: "data/uploads",
KeysPath: "data/keys",
KeysStorage: "", // "database" or "file"
EncryptionKey: nil,
AppURL: AppUrl,
Port: "1411",
Host: "0.0.0.0",
UnixSocket: "",
UnixSocketMode: "",
MaxMindLicenseKey: "",
GeoLiteDBPath: "data/GeoLite2-City.mmdb",
GeoLiteDBUrl: MaxMindGeoLiteCityUrl,
LocalIPv6Ranges: "",
UiConfigDisabled: false,
MetricsEnabled: false,
TracingEnabled: false,
TrustProxy: false,
AnalyticsDisabled: false,
AllowDowngrade: false,
InternalAppURL: "",
AppEnv: AppEnvProduction,
LogLevel: "info",
DbProvider: "sqlite",
FileBackend: "fs",
AppURL: AppUrl,
Port: "1411",
Host: "0.0.0.0",
GeoLiteDBPath: "data/GeoLite2-City.mmdb",
GeoLiteDBUrl: MaxMindGeoLiteCityUrl,
}
}
@@ -117,32 +117,28 @@ func parseEnvConfig() error {
return fmt.Errorf("error preparing env config: %w", err)
}
err = validateEnvConfig(&EnvConfig)
if err != nil {
return err
}
return nil
}
// validateEnvConfig checks the EnvConfig for required fields and valid values
func validateEnvConfig(config *EnvConfigSchema) error {
// ValidateEnvConfig checks the EnvConfig for required fields and valid values
func ValidateEnvConfig(config *EnvConfigSchema) error {
if _, err := sloggin.ParseLevel(config.LogLevel); err != nil {
return errors.New("invalid LOG_LEVEL value. Must be 'debug', 'info', 'warn' or 'error'")
}
switch config.DbProvider {
case DbProviderSqlite:
if config.DbConnectionString == "" {
config.DbConnectionString = defaultSqliteConnString
}
case DbProviderPostgres:
if config.DbConnectionString == "" {
return errors.New("missing required env var 'DB_CONNECTION_STRING' for Postgres database")
}
if len(config.EncryptionKey) < 16 {
return errors.New("ENCRYPTION_KEY must be at least 16 bytes long")
}
switch {
case config.DbConnectionString == "":
config.DbProvider = DbProviderSqlite
config.DbConnectionString = defaultSqliteConnString
case strings.HasPrefix(config.DbConnectionString, "postgres://") || strings.HasPrefix(config.DbConnectionString, "postgresql://"):
config.DbProvider = DbProviderPostgres
default:
return errors.New("invalid DB_PROVIDER value. Must be 'sqlite' or 'postgres'")
config.DbProvider = DbProviderSqlite
}
parsedAppUrl, err := url.Parse(config.AppURL)
@@ -166,18 +162,33 @@ func validateEnvConfig(config *EnvConfigSchema) error {
}
}
switch config.KeysStorage {
// KeysStorage defaults to "file" if empty
case "":
config.KeysStorage = "file"
case "database":
if config.EncryptionKey == nil {
return errors.New("ENCRYPTION_KEY must be non-empty when KEYS_STORAGE is database")
switch config.FileBackend {
case "s3", "database":
case "", "fs":
if config.UploadPath == "" {
config.UploadPath = defaultFsUploadPath
}
case "file":
// All good, these are valid values
default:
return fmt.Errorf("invalid value for KEYS_STORAGE: %s", config.KeysStorage)
return errors.New("invalid FILE_BACKEND value. Must be 'fs', 'database', or 's3'")
}
// Validate LOCAL_IPV6_RANGES
ranges := strings.Split(config.LocalIPv6Ranges, ",")
for _, rangeStr := range ranges {
rangeStr = strings.TrimSpace(rangeStr)
if rangeStr == "" {
continue
}
_, ipNet, err := net.ParseCIDR(rangeStr)
if err != nil {
return fmt.Errorf("invalid LOCAL_IPV6_RANGES '%s': %w", rangeStr, err)
}
if ipNet.IP.To4() != nil {
return fmt.Errorf("range '%s' is not a valid IPv6 range", rangeStr)
}
}
return nil
@@ -207,6 +218,10 @@ func prepareEnvConfig(config *EnvConfigSchema) error {
if err != nil {
return err
}
case "trimTrailingSlash":
if field.Kind() == reflect.String {
field.SetString(strings.TrimRight(field.String(), "/"))
}
}
}
}
@@ -255,3 +270,11 @@ func resolveFileBasedEnvVariable(field reflect.Value, fieldType reflect.StructFi
return nil
}
func (a AppEnv) IsProduction() bool {
return a == AppEnvProduction
}
func (a AppEnv) IsTest() bool {
return a == AppEnvTest
}
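Since DB_PROVIDER is no longer read from the environment, the provider is inferred from DB_CONNECTION_STRING. A standalone sketch of that inference, mirroring the switch in ValidateEnvConfig above:

package main

import (
	"fmt"
	"strings"
)

// inferProvider mirrors the logic in ValidateEnvConfig: an empty connection
// string or anything that is not a postgres:// / postgresql:// URL is treated
// as SQLite.
func inferProvider(connString string) string {
	switch {
	case connString == "":
		return "sqlite" // falls back to the default data/pocket-id.db path
	case strings.HasPrefix(connString, "postgres://"),
		strings.HasPrefix(connString, "postgresql://"):
		return "postgres"
	default:
		return "sqlite"
	}
}

func main() {
	for _, cs := range []string{
		"",
		"postgres://user:pass@localhost/db",
		"file:data/pocket-id.db",
	} {
		fmt.Printf("%-40q -> %s\n", cs, inferProvider(cs))
	}
}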

View File

@@ -8,6 +8,20 @@ import (
"github.com/stretchr/testify/require"
)
func parseAndValidateEnvConfig(t *testing.T) error {
t.Helper()
if _, exists := os.LookupEnv("ENCRYPTION_KEY"); !exists {
t.Setenv("ENCRYPTION_KEY", "0123456789abcdef")
}
if err := parseEnvConfig(); err != nil {
return err
}
return ValidateEnvConfig(&EnvConfig)
}
func TestParseEnvConfig(t *testing.T) {
// Store original config to restore later
originalConfig := EnvConfig
@@ -17,11 +31,10 @@ func TestParseEnvConfig(t *testing.T) {
t.Run("should parse valid SQLite config correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "SQLITE") // should be lowercased automatically
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "HTTP://LOCALHOST:3000")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.Equal(t, DbProviderSqlite, EnvConfig.DbProvider)
assert.Equal(t, "http://localhost:3000", EnvConfig.AppURL)
@@ -29,147 +42,76 @@ func TestParseEnvConfig(t *testing.T) {
t.Run("should parse valid Postgres config correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "POSTGRES")
t.Setenv("DB_CONNECTION_STRING", "postgres://user:pass@localhost/db")
t.Setenv("APP_URL", "https://example.com")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.Equal(t, DbProviderPostgres, EnvConfig.DbProvider)
})
t.Run("should fail with invalid DB_PROVIDER", func(t *testing.T) {
t.Run("should fail when ENCRYPTION_KEY is too short", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "invalid")
t.Setenv("DB_CONNECTION_STRING", "test")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("ENCRYPTION_KEY", "short")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "invalid DB_PROVIDER value")
assert.ErrorContains(t, err, "ENCRYPTION_KEY must be at least 16 bytes long")
})
t.Run("should set default SQLite connection string when DB_CONNECTION_STRING is empty", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("APP_URL", "http://localhost:3000")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.Equal(t, defaultSqliteConnString, EnvConfig.DbConnectionString)
})
t.Run("should fail when Postgres DB_CONNECTION_STRING is missing", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "postgres")
t.Setenv("APP_URL", "http://localhost:3000")
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "missing required env var 'DB_CONNECTION_STRING' for Postgres")
})
t.Run("should fail with invalid APP_URL", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "€://not-a-valid-url")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "APP_URL is not a valid URL")
})
t.Run("should fail when APP_URL contains path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000/path")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "APP_URL must not contain a path")
})
t.Run("should fail with invalid INTERNAL_APP_URL", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("INTERNAL_APP_URL", "€://not-a-valid-url")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "INTERNAL_APP_URL is not a valid URL")
})
t.Run("should fail when INTERNAL_APP_URL contains path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("INTERNAL_APP_URL", "http://localhost:3000/path")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "INTERNAL_APP_URL must not contain a path")
})
t.Run("should default KEYS_STORAGE to 'file' when empty", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, "file", EnvConfig.KeysStorage)
})
t.Run("should fail when KEYS_STORAGE is 'database' but no encryption key", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", "database")
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "ENCRYPTION_KEY must be non-empty when KEYS_STORAGE is database")
})
t.Run("should accept valid KEYS_STORAGE values", func(t *testing.T) {
validStorageTypes := []string{"file", "database"}
for _, storage := range validStorageTypes {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", storage)
if storage == "database" {
t.Setenv("ENCRYPTION_KEY", "test-key")
}
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, storage, EnvConfig.KeysStorage)
}
})
t.Run("should fail with invalid KEYS_STORAGE value", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", "invalid")
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "invalid value for KEYS_STORAGE")
})
t.Run("should parse boolean environment variables correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("UI_CONFIG_DISABLED", "true")
@@ -178,7 +120,7 @@ func TestParseEnvConfig(t *testing.T) {
t.Setenv("TRUST_PROXY", "true")
t.Setenv("ANALYTICS_DISABLED", "false")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.True(t, EnvConfig.UiConfigDisabled)
assert.True(t, EnvConfig.MetricsEnabled)
@@ -189,25 +131,47 @@ func TestParseEnvConfig(t *testing.T) {
t.Run("should parse string environment variables correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "postgres")
t.Setenv("DB_CONNECTION_STRING", "postgres://test")
t.Setenv("APP_URL", "https://prod.example.com")
t.Setenv("APP_ENV", "STAGING")
t.Setenv("APP_ENV", "PRODUCTION")
t.Setenv("UPLOAD_PATH", "/custom/uploads")
t.Setenv("KEYS_PATH", "/custom/keys")
t.Setenv("PORT", "8080")
t.Setenv("HOST", "LOCALHOST")
t.Setenv("UNIX_SOCKET", "/tmp/app.sock")
t.Setenv("MAXMIND_LICENSE_KEY", "test-license")
t.Setenv("GEOLITE_DB_PATH", "/custom/geolite.mmdb")
err := parseEnvConfig()
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.Equal(t, "staging", EnvConfig.AppEnv) // lowercased
assert.Equal(t, AppEnvProduction, EnvConfig.AppEnv) // lowercased
assert.Equal(t, "/custom/uploads", EnvConfig.UploadPath)
assert.Equal(t, "8080", EnvConfig.Port)
assert.Equal(t, "localhost", EnvConfig.Host) // lowercased
})
t.Run("should normalize file backend and default upload path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("FILE_BACKEND", "FS")
t.Setenv("UPLOAD_PATH", "")
err := parseAndValidateEnvConfig(t)
require.NoError(t, err)
assert.Equal(t, "fs", EnvConfig.FileBackend)
assert.Equal(t, defaultFsUploadPath, EnvConfig.UploadPath)
})
t.Run("should fail with invalid FILE_BACKEND value", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("FILE_BACKEND", "invalid")
err := parseAndValidateEnvConfig(t)
require.Error(t, err)
assert.ErrorContains(t, err, "invalid FILE_BACKEND value")
})
}
func TestPrepareEnvConfig_FileBasedAndToLower(t *testing.T) {
@@ -241,7 +205,7 @@ func TestPrepareEnvConfig_FileBasedAndToLower(t *testing.T) {
err := prepareEnvConfig(&config)
require.NoError(t, err)
assert.Equal(t, "staging", config.AppEnv)
assert.Equal(t, AppEnv("staging"), config.AppEnv)
assert.Equal(t, "localhost", config.Host)
assert.Equal(t, []byte(encryptionKeyContent), config.EncryptionKey)
assert.Equal(t, dbConnContent, config.DbConnectionString)

View File

@@ -378,3 +378,23 @@ func (e *ClientIdAlreadyExistsError) Error() string {
func (e *ClientIdAlreadyExistsError) HttpStatusCode() int {
return http.StatusBadRequest
}
type UserEmailNotSetError struct{}
func (e *UserEmailNotSetError) Error() string {
return "The user does not have an email address set"
}
func (e *UserEmailNotSetError) HttpStatusCode() int {
return http.StatusBadRequest
}
type ImageNotFoundError struct{}
func (e *ImageNotFoundError) Error() string {
return "Image not found"
}
func (e *ImageNotFoundError) HttpStatusCode() int {
return http.StatusNotFound
}
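A hedged sketch of how errors exposing HttpStatusCode() could be translated into HTTP responses; the actual error middleware is not part of this diff, so the mapping below only illustrates the interface pattern used by the typed errors above.

package main

import (
	"errors"
	"fmt"
	"net/http"
)

type ImageNotFoundError struct{}

func (e *ImageNotFoundError) Error() string       { return "Image not found" }
func (e *ImageNotFoundError) HttpStatusCode() int { return http.StatusNotFound }

// statusForError extracts the HTTP status from any error implementing
// HttpStatusCode(), falling back to 500. This is only a sketch; Pocket ID's
// real error handling may differ.
func statusForError(err error) int {
	var httpErr interface{ HttpStatusCode() int }
	if errors.As(err, &httpErr) {
		return httpErr.HttpStatusCode()
	}
	return http.StatusInternalServerError
}

func main() {
	err := fmt.Errorf("loading profile picture: %w", &ImageNotFoundError{})
	fmt.Println(statusForError(err)) // 404
}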

View File

@@ -45,15 +45,11 @@ func NewApiKeyController(group *gin.RouterGroup, authMiddleware *middleware.Auth
// @Success 200 {object} dto.Paginated[dto.ApiKeyDto]
// @Router /api/api-keys [get]
func (c *ApiKeyController) listApiKeysHandler(ctx *gin.Context) {
listRequestOptions := utils.ParseListRequestOptions(ctx)
userID := ctx.GetString("userID")
var sortedPaginationRequest utils.SortedPaginationRequest
if err := ctx.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = ctx.Error(err)
return
}
apiKeys, pagination, err := c.apiKeyService.ListApiKeys(ctx.Request.Context(), userID, sortedPaginationRequest)
apiKeys, pagination, err := c.apiKeyService.ListApiKeys(ctx.Request.Context(), userID, listRequestOptions)
if err != nil {
_ = ctx.Error(err)
return

View File

@@ -25,10 +25,14 @@ func NewAppImagesController(
group.GET("/application-images/logo", controller.getLogoHandler)
group.GET("/application-images/background", controller.getBackgroundImageHandler)
group.GET("/application-images/favicon", controller.getFaviconHandler)
group.GET("/application-images/default-profile-picture", authMiddleware.Add(), controller.getDefaultProfilePicture)
group.PUT("/application-images/logo", authMiddleware.Add(), controller.updateLogoHandler)
group.PUT("/application-images/background", authMiddleware.Add(), controller.updateBackgroundImageHandler)
group.PUT("/application-images/favicon", authMiddleware.Add(), controller.updateFaviconHandler)
group.PUT("/application-images/default-profile-picture", authMiddleware.Add(), controller.updateDefaultProfilePicture)
group.DELETE("/application-images/default-profile-picture", authMiddleware.Add(), controller.deleteDefaultProfilePicture)
}
type AppImagesController struct {
@@ -78,6 +82,18 @@ func (c *AppImagesController) getFaviconHandler(ctx *gin.Context) {
c.getImage(ctx, "favicon")
}
// getDefaultProfilePicture godoc
// @Summary Get default profile picture image
// @Description Get the default profile picture image for the application
// @Tags Application Images
// @Produce image/png
// @Produce image/jpeg
// @Success 200 {file} binary "Default profile picture image"
// @Router /api/application-images/default-profile-picture [get]
func (c *AppImagesController) getDefaultProfilePicture(ctx *gin.Context) {
c.getImage(ctx, "default-profile-picture")
}
// updateLogoHandler godoc
// @Summary Update logo
// @Description Update the application logo
@@ -100,7 +116,7 @@ func (c *AppImagesController) updateLogoHandler(ctx *gin.Context) {
imageName = "logoDark"
}
if err := c.appImagesService.UpdateImage(file, imageName); err != nil {
if err := c.appImagesService.UpdateImage(ctx.Request.Context(), file, imageName); err != nil {
_ = ctx.Error(err)
return
}
@@ -123,7 +139,7 @@ func (c *AppImagesController) updateBackgroundImageHandler(ctx *gin.Context) {
return
}
if err := c.appImagesService.UpdateImage(file, "background"); err != nil {
if err := c.appImagesService.UpdateImage(ctx.Request.Context(), file, "background"); err != nil {
_ = ctx.Error(err)
return
}
@@ -152,7 +168,7 @@ func (c *AppImagesController) updateFaviconHandler(ctx *gin.Context) {
return
}
if err := c.appImagesService.UpdateImage(file, "favicon"); err != nil {
if err := c.appImagesService.UpdateImage(ctx.Request.Context(), file, "favicon"); err != nil {
_ = ctx.Error(err)
return
}
@@ -161,13 +177,52 @@ func (c *AppImagesController) updateFaviconHandler(ctx *gin.Context) {
}
func (c *AppImagesController) getImage(ctx *gin.Context, name string) {
imagePath, mimeType, err := c.appImagesService.GetImage(name)
reader, size, mimeType, err := c.appImagesService.GetImage(ctx.Request.Context(), name)
if err != nil {
_ = ctx.Error(err)
return
}
defer reader.Close()
ctx.Header("Content-Type", mimeType)
utils.SetCacheControlHeader(ctx, 15*time.Minute, 24*time.Hour)
ctx.DataFromReader(http.StatusOK, size, mimeType, reader, nil)
}
// updateDefaultProfilePicture godoc
// @Summary Update default profile picture image
// @Description Update the default profile picture image
// @Tags Application Images
// @Accept multipart/form-data
// @Param file formData file true "Profile picture image file"
// @Success 204 "No Content"
// @Router /api/application-images/default-profile-picture [put]
func (c *AppImagesController) updateDefaultProfilePicture(ctx *gin.Context) {
file, err := ctx.FormFile("file")
if err != nil {
_ = ctx.Error(err)
return
}
ctx.Header("Content-Type", mimeType)
utils.SetCacheControlHeader(ctx, 15*time.Minute, 24*time.Hour)
ctx.File(imagePath)
if err := c.appImagesService.UpdateImage(ctx.Request.Context(), file, "default-profile-picture"); err != nil {
_ = ctx.Error(err)
return
}
ctx.Status(http.StatusNoContent)
}
// deleteDefaultProfilePicture godoc
// @Summary Delete default profile picture image
// @Description Delete the default profile picture image
// @Tags Application Images
// @Success 204 "No Content"
// @Router /api/application-images/default-profile-picture [delete]
func (c *AppImagesController) deleteDefaultProfilePicture(ctx *gin.Context) {
if err := c.appImagesService.DeleteImage(ctx.Request.Context(), "default-profile-picture"); err != nil {
_ = ctx.Error(err)
return
}
ctx.Status(http.StatusNoContent)
}
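The image handlers now stream from the storage backend with DataFromReader instead of serving a local path via ctx.File. A minimal gin sketch of that pattern, using a hard-coded string as a stand-in for the FileStorage reader and a simplified cache header (utils.SetCacheControlHeader's exact behavior is not shown in this diff):

package main

import (
	"fmt"
	"net/http"
	"strings"
	"time"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()

	// Stand-in for a FileStorage read: in Pocket ID the reader, size and MIME
	// type come from the configured backend (fs, database or S3), not from a
	// hard-coded string as here.
	r.GET("/application-images/favicon", func(c *gin.Context) {
		content := `<svg xmlns="http://www.w3.org/2000/svg"></svg>`
		reader := strings.NewReader(content)

		// Simplified stand-in for utils.SetCacheControlHeader(c, 15*time.Minute, ...).
		c.Header("Cache-Control", fmt.Sprintf("public, max-age=%d", int((15*time.Minute).Seconds())))

		// DataFromReader streams the body with the given Content-Length and
		// Content-Type, which is what replaces ctx.File(imagePath) above.
		c.DataFromReader(http.StatusOK, int64(len(content)), "image/svg+xml", reader, nil)
	})

	_ = r.Run(":8080")
}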

View File

@@ -41,18 +41,12 @@ type AuditLogController struct {
// @Success 200 {object} dto.Paginated[dto.AuditLogDto]
// @Router /api/audit-logs [get]
func (alc *AuditLogController) listAuditLogsForUserHandler(c *gin.Context) {
var sortedPaginationRequest utils.SortedPaginationRequest
err := c.ShouldBindQuery(&sortedPaginationRequest)
if err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
userID := c.GetString("userID")
// Fetch audit logs for the user
logs, pagination, err := alc.auditLogService.ListAuditLogsForUser(c.Request.Context(), userID, sortedPaginationRequest)
logs, pagination, err := alc.auditLogService.ListAuditLogsForUser(c.Request.Context(), userID, listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -86,26 +80,12 @@ func (alc *AuditLogController) listAuditLogsForUserHandler(c *gin.Context) {
// @Param pagination[limit] query int false "Number of items per page" default(20)
// @Param sort[column] query string false "Column to sort by"
// @Param sort[direction] query string false "Sort direction (asc or desc)" default("asc")
// @Param filters[userId] query string false "Filter by user ID"
// @Param filters[event] query string false "Filter by event type"
// @Param filters[clientName] query string false "Filter by client name"
// @Param filters[location] query string false "Filter by location type (external or internal)"
// @Success 200 {object} dto.Paginated[dto.AuditLogDto]
// @Router /api/audit-logs/all [get]
func (alc *AuditLogController) listAllAuditLogsHandler(c *gin.Context) {
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
var filters dto.AuditLogFilterDto
if err := c.ShouldBindQuery(&filters); err != nil {
_ = c.Error(err)
return
}
logs, pagination, err := alc.auditLogService.ListAllAuditLogs(c.Request.Context(), sortedPaginationRequest, filters)
logs, pagination, err := alc.auditLogService.ListAllAuditLogs(c.Request.Context(), listRequestOptions)
if err != nil {
_ = c.Error(err)
return

View File

@@ -40,6 +40,11 @@ func (tc *TestController) resetAndSeedHandler(c *gin.Context) {
return
}
if err := tc.TestService.ResetLock(c.Request.Context()); err != nil {
_ = c.Error(err)
return
}
if err := tc.TestService.ResetApplicationImages(c.Request.Context()); err != nil {
_ = c.Error(err)
return
@@ -69,8 +74,6 @@ func (tc *TestController) resetAndSeedHandler(c *gin.Context) {
}
}
tc.TestService.SetJWTKeys()
c.Status(http.StatusNoContent)
}

View File

@@ -5,6 +5,7 @@ import (
"log/slog"
"net/http"
"net/url"
"strconv"
"strings"
"time"
@@ -357,6 +358,7 @@ func (oc *OidcController) getClientMetaDataHandler(c *gin.Context) {
clientDto := dto.OidcClientMetaDataDto{}
err = dto.MapStruct(client, &clientDto)
if err == nil {
clientDto.HasDarkLogo = client.HasDarkLogo()
c.JSON(http.StatusOK, clientDto)
return
}
@@ -403,13 +405,9 @@ func (oc *OidcController) getClientHandler(c *gin.Context) {
// @Router /api/oidc/clients [get]
func (oc *OidcController) listClientsHandler(c *gin.Context) {
searchTerm := c.Query("search")
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
clients, pagination, err := oc.oidcService.ListClients(c.Request.Context(), searchTerm, sortedPaginationRequest)
clients, pagination, err := oc.oidcService.ListClients(c.Request.Context(), searchTerm, listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -423,6 +421,7 @@ func (oc *OidcController) listClientsHandler(c *gin.Context) {
_ = c.Error(err)
return
}
clientDto.HasDarkLogo = client.HasDarkLogo()
clientDto.AllowedUserGroupsCount, err = oc.oidcService.GetAllowedGroupsCountOfClient(c, client.ID)
if err != nil {
_ = c.Error(err)
@@ -543,19 +542,23 @@ func (oc *OidcController) createClientSecretHandler(c *gin.Context) {
// @Produce image/jpeg
// @Produce image/svg+xml
// @Param id path string true "Client ID"
// @Param light query boolean false "Light mode logo (true) or dark mode logo (false)"
// @Success 200 {file} binary "Logo image"
// @Router /api/oidc/clients/{id}/logo [get]
func (oc *OidcController) getClientLogoHandler(c *gin.Context) {
imagePath, mimeType, err := oc.oidcService.GetClientLogo(c.Request.Context(), c.Param("id"))
lightLogo, _ := strconv.ParseBool(c.DefaultQuery("light", "true"))
reader, size, mimeType, err := oc.oidcService.GetClientLogo(c.Request.Context(), c.Param("id"), lightLogo)
if err != nil {
_ = c.Error(err)
return
}
defer reader.Close()
utils.SetCacheControlHeader(c, 15*time.Minute, 12*time.Hour)
c.Header("Content-Type", mimeType)
c.File(imagePath)
c.DataFromReader(http.StatusOK, size, mimeType, reader, nil)
}
// updateClientLogoHandler godoc
@@ -565,6 +568,7 @@ func (oc *OidcController) getClientLogoHandler(c *gin.Context) {
// @Accept multipart/form-data
// @Param id path string true "Client ID"
// @Param file formData file true "Logo image file (PNG, JPG, or SVG)"
// @Param light query boolean false "Light mode logo (true) or dark mode logo (false)"
// @Success 204 "No Content"
// @Router /api/oidc/clients/{id}/logo [post]
func (oc *OidcController) updateClientLogoHandler(c *gin.Context) {
@@ -574,7 +578,9 @@ func (oc *OidcController) updateClientLogoHandler(c *gin.Context) {
return
}
err = oc.oidcService.UpdateClientLogo(c.Request.Context(), c.Param("id"), file)
lightLogo, _ := strconv.ParseBool(c.DefaultQuery("light", "true"))
err = oc.oidcService.UpdateClientLogo(c.Request.Context(), c.Param("id"), file, lightLogo)
if err != nil {
_ = c.Error(err)
return
@@ -588,10 +594,19 @@ func (oc *OidcController) updateClientLogoHandler(c *gin.Context) {
// @Description Delete the logo for an OIDC client
// @Tags OIDC
// @Param id path string true "Client ID"
// @Param light query boolean false "Light mode logo (true) or dark mode logo (false)"
// @Success 204 "No Content"
// @Router /api/oidc/clients/{id}/logo [delete]
func (oc *OidcController) deleteClientLogoHandler(c *gin.Context) {
err := oc.oidcService.DeleteClientLogo(c.Request.Context(), c.Param("id"))
var err error
lightLogo, _ := strconv.ParseBool(c.DefaultQuery("light", "true"))
if lightLogo {
err = oc.oidcService.DeleteClientLogo(c.Request.Context(), c.Param("id"))
} else {
err = oc.oidcService.DeleteClientDarkLogo(c.Request.Context(), c.Param("id"))
}
if err != nil {
_ = c.Error(err)
return
@@ -628,6 +643,7 @@ func (oc *OidcController) updateAllowedUserGroupsHandler(c *gin.Context) {
_ = c.Error(err)
return
}
oidcClientDto.HasDarkLogo = oidcClient.HasDarkLogo()
c.JSON(http.StatusOK, oidcClientDto)
}
@@ -685,12 +701,9 @@ func (oc *OidcController) listAuthorizedClientsHandler(c *gin.Context) {
}
func (oc *OidcController) listAuthorizedClients(c *gin.Context, userID string) {
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
authorizedClients, pagination, err := oc.oidcService.ListAuthorizedClients(c.Request.Context(), userID, sortedPaginationRequest)
listRequestOptions := utils.ParseListRequestOptions(c)
authorizedClients, pagination, err := oc.oidcService.ListAuthorizedClients(c.Request.Context(), userID, listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -741,15 +754,11 @@ func (oc *OidcController) revokeOwnClientAuthorizationHandler(c *gin.Context) {
// @Success 200 {object} dto.Paginated[dto.AccessibleOidcClientDto]
// @Router /api/oidc/users/me/clients [get]
func (oc *OidcController) listOwnAccessibleClientsHandler(c *gin.Context) {
listRequestOptions := utils.ParseListRequestOptions(c)
userID := c.GetString("userID")
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
clients, pagination, err := oc.oidcService.ListAccessibleOidcClients(c.Request.Context(), userID, sortedPaginationRequest)
clients, pagination, err := oc.oidcService.ListAccessibleOidcClients(c.Request.Context(), userID, listRequestOptions)
if err != nil {
_ = c.Error(err)
return

View File

@@ -104,13 +104,9 @@ func (uc *UserController) getUserGroupsHandler(c *gin.Context) {
// @Router /api/users [get]
func (uc *UserController) listUsersHandler(c *gin.Context) {
searchTerm := c.Query("search")
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
users, pagination, err := uc.userService.ListUsers(c.Request.Context(), searchTerm, sortedPaginationRequest)
users, pagination, err := uc.userService.ListUsers(c.Request.Context(), searchTerm, listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -290,7 +286,7 @@ func (uc *UserController) updateUserProfilePictureHandler(c *gin.Context) {
}
defer file.Close()
if err := uc.userService.UpdateProfilePicture(userID, file); err != nil {
if err := uc.userService.UpdateProfilePicture(c.Request.Context(), userID, file); err != nil {
_ = c.Error(err)
return
}
@@ -321,7 +317,7 @@ func (uc *UserController) updateCurrentUserProfilePictureHandler(c *gin.Context)
}
defer file.Close()
if err := uc.userService.UpdateProfilePicture(userID, file); err != nil {
if err := uc.userService.UpdateProfilePicture(c.Request.Context(), userID, file); err != nil {
_ = c.Error(err)
return
}
@@ -574,13 +570,9 @@ func (uc *UserController) createSignupTokenHandler(c *gin.Context) {
// @Success 200 {object} dto.Paginated[dto.SignupTokenDto]
// @Router /api/signup-tokens [get]
func (uc *UserController) listSignupTokensHandler(c *gin.Context) {
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
tokens, pagination, err := uc.userService.ListSignupTokens(c.Request.Context(), sortedPaginationRequest)
tokens, pagination, err := uc.userService.ListSignupTokens(c.Request.Context(), listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -695,7 +687,7 @@ func (uc *UserController) updateUser(c *gin.Context, updateOwnUser bool) {
func (uc *UserController) resetUserProfilePictureHandler(c *gin.Context) {
userID := c.Param("id")
if err := uc.userService.ResetProfilePicture(userID); err != nil {
if err := uc.userService.ResetProfilePicture(c.Request.Context(), userID); err != nil {
_ = c.Error(err)
return
}
@@ -713,7 +705,7 @@ func (uc *UserController) resetUserProfilePictureHandler(c *gin.Context) {
func (uc *UserController) resetCurrentUserProfilePictureHandler(c *gin.Context) {
userID := c.GetString("userID")
if err := uc.userService.ResetProfilePicture(userID); err != nil {
if err := uc.userService.ResetProfilePicture(c.Request.Context(), userID); err != nil {
_ = c.Error(err)
return
}

View File

@@ -47,16 +47,10 @@ type UserGroupController struct {
// @Success 200 {object} dto.Paginated[dto.UserGroupDtoWithUserCount]
// @Router /api/user-groups [get]
func (ugc *UserGroupController) list(c *gin.Context) {
ctx := c.Request.Context()
searchTerm := c.Query("search")
var sortedPaginationRequest utils.SortedPaginationRequest
if err := c.ShouldBindQuery(&sortedPaginationRequest); err != nil {
_ = c.Error(err)
return
}
listRequestOptions := utils.ParseListRequestOptions(c)
groups, pagination, err := ugc.UserGroupService.List(ctx, searchTerm, sortedPaginationRequest)
groups, pagination, err := ugc.UserGroupService.List(c, searchTerm, listRequestOptions)
if err != nil {
_ = c.Error(err)
return
@@ -70,7 +64,7 @@ func (ugc *UserGroupController) list(c *gin.Context) {
_ = c.Error(err)
return
}
groupDto.UserCount, err = ugc.UserGroupService.GetUserCountOfGroup(ctx, group.ID)
groupDto.UserCount, err = ugc.UserGroupService.GetUserCountOfGroup(c.Request.Context(), group.ID)
if err != nil {
_ = c.Error(err)
return

View File

@@ -57,7 +57,7 @@ func (wc *WebauthnController) verifyRegistrationHandler(c *gin.Context) {
}
userID := c.GetString("userID")
credential, err := wc.webAuthnService.VerifyRegistration(c.Request.Context(), sessionID, userID, c.Request)
credential, err := wc.webAuthnService.VerifyRegistration(c.Request.Context(), sessionID, userID, c.Request, c.ClientIP())
if err != nil {
_ = c.Error(err)
return
@@ -134,8 +134,10 @@ func (wc *WebauthnController) listCredentialsHandler(c *gin.Context) {
func (wc *WebauthnController) deleteCredentialHandler(c *gin.Context) {
userID := c.GetString("userID")
credentialID := c.Param("id")
clientIP := c.ClientIP()
userAgent := c.Request.UserAgent()
err := wc.webAuthnService.DeleteCredential(c.Request.Context(), userID, credentialID)
err := wc.webAuthnService.DeleteCredential(c.Request.Context(), userID, credentialID, clientIP, userAgent)
if err != nil {
_ = c.Error(err)
return

View File

@@ -21,6 +21,7 @@ type AppConfigUpdateDto struct {
SignupDefaultUserGroupIDs string `json:"signupDefaultUserGroupIDs" binding:"omitempty,json"`
SignupDefaultCustomClaims string `json:"signupDefaultCustomClaims" binding:"omitempty,json"`
AccentColor string `json:"accentColor"`
RequireUserEmail string `json:"requireUserEmail" binding:"required"`
SmtpHost string `json:"smtpHost"`
SmtpPort string `json:"smtpPort"`
SmtpFrom string `json:"smtpFrom" binding:"omitempty,email"`
@@ -46,7 +47,7 @@ type AppConfigUpdateDto struct {
LdapAttributeGroupMember string `json:"ldapAttributeGroupMember"`
LdapAttributeGroupUniqueIdentifier string `json:"ldapAttributeGroupUniqueIdentifier"`
LdapAttributeGroupName string `json:"ldapAttributeGroupName"`
LdapAttributeAdminGroup string `json:"ldapAttributeAdminGroup"`
LdapAdminGroupName string `json:"ldapAdminGroupName"`
LdapSoftDeleteUsers string `json:"ldapSoftDeleteUsers"`
EmailOneTimeAccessAsAdminEnabled string `json:"emailOneTimeAccessAsAdminEnabled" binding:"required"`
EmailOneTimeAccessAsUnauthenticatedEnabled string `json:"emailOneTimeAccessAsUnauthenticatedEnabled" binding:"required"`

View File

@@ -17,10 +17,3 @@ type AuditLogDto struct {
Username string `json:"username"`
Data map[string]string `json:"data"`
}
type AuditLogFilterDto struct {
UserID string `form:"filters[userId]"`
Event string `form:"filters[event]"`
ClientName string `form:"filters[clientName]"`
Location string `form:"filters[location]"`
}

View File

@@ -6,6 +6,7 @@ type OidcClientMetaDataDto struct {
ID string `json:"id"`
Name string `json:"name"`
HasLogo bool `json:"hasLogo"`
HasDarkLogo bool `json:"hasDarkLogo"`
LaunchURL *string `json:"launchURL"`
RequiresReauthentication bool `json:"requiresReauthentication"`
}
@@ -38,6 +39,10 @@ type OidcClientUpdateDto struct {
RequiresReauthentication bool `json:"requiresReauthentication"`
Credentials OidcClientCredentialsDto `json:"credentials"`
LaunchURL *string `json:"launchURL" binding:"omitempty,url"`
HasLogo bool `json:"hasLogo"`
HasDarkLogo bool `json:"hasDarkLogo"`
LogoURL *string `json:"logoUrl"`
DarkLogoURL *string `json:"darkLogoUrl"`
}
type OidcClientCreateDto struct {

View File

@@ -10,7 +10,7 @@ import (
type UserDto struct {
ID string `json:"id"`
Username string `json:"username"`
Email string `json:"email" `
Email *string `json:"email" `
FirstName string `json:"firstName"`
LastName *string `json:"lastName"`
DisplayName string `json:"displayName"`
@@ -24,10 +24,10 @@ type UserDto struct {
type UserCreateDto struct {
Username string `json:"username" binding:"required,username,min=2,max=50" unorm:"nfc"`
Email string `json:"email" binding:"required,email" unorm:"nfc"`
Email *string `json:"email" binding:"omitempty,email" unorm:"nfc"`
FirstName string `json:"firstName" binding:"required,min=1,max=50" unorm:"nfc"`
LastName string `json:"lastName" binding:"max=50" unorm:"nfc"`
DisplayName string `json:"displayName" binding:"required,max=100" unorm:"nfc"`
DisplayName string `json:"displayName" binding:"required,min=1,max=100" unorm:"nfc"`
IsAdmin bool `json:"isAdmin"`
Locale *string `json:"locale"`
Disabled bool `json:"disabled"`
@@ -64,9 +64,9 @@ type UserUpdateUserGroupDto struct {
}
type SignUpDto struct {
Username string `json:"username" binding:"required,username,min=2,max=50" unorm:"nfc"`
Email string `json:"email" binding:"required,email" unorm:"nfc"`
FirstName string `json:"firstName" binding:"required,min=1,max=50" unorm:"nfc"`
LastName string `json:"lastName" binding:"max=50" unorm:"nfc"`
Token string `json:"token"`
Username string `json:"username" binding:"required,username,min=2,max=50" unorm:"nfc"`
Email *string `json:"email" binding:"omitempty,email" unorm:"nfc"`
FirstName string `json:"firstName" binding:"required,min=1,max=50" unorm:"nfc"`
LastName string `json:"lastName" binding:"max=50" unorm:"nfc"`
Token string `json:"token"`
}

View File

@@ -3,6 +3,7 @@ package dto
import (
"testing"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"github.com/stretchr/testify/require"
)
@@ -16,7 +17,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
name: "valid input",
input: UserCreateDto{
Username: "testuser",
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "John",
LastName: "Doe",
DisplayName: "John Doe",
@@ -26,7 +27,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
{
name: "missing username",
input: UserCreateDto{
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "John",
LastName: "Doe",
DisplayName: "John Doe",
@@ -36,7 +37,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
{
name: "missing display name",
input: UserCreateDto{
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "John",
LastName: "Doe",
},
@@ -46,7 +47,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
name: "username contains invalid characters",
input: UserCreateDto{
Username: "test/ser",
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "John",
LastName: "Doe",
DisplayName: "John Doe",
@@ -57,7 +58,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
name: "invalid email",
input: UserCreateDto{
Username: "testuser",
Email: "not-an-email",
Email: utils.Ptr("not-an-email"),
FirstName: "John",
LastName: "Doe",
DisplayName: "John Doe",
@@ -68,7 +69,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
name: "first name too short",
input: UserCreateDto{
Username: "testuser",
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "",
LastName: "Doe",
DisplayName: "John Doe",
@@ -79,7 +80,7 @@ func TestUserCreateDto_Validate(t *testing.T) {
name: "last name too long",
input: UserCreateDto{
Username: "testuser",
Email: "test@example.com",
Email: utils.Ptr("test@example.com"),
FirstName: "John",
LastName: "abcdfghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz",
DisplayName: "John Doe",

View File

@@ -67,14 +67,12 @@ func ValidateClientID(clientID string) bool {
// ValidateCallbackURL validates callback URLs with support for wildcards
func ValidateCallbackURL(raw string) bool {
if raw == "*" {
// Don't validate if it contains a wildcard
if strings.Contains(raw, "*") {
return true
}
// Replace all '*' with 'x' to check if the rest is still a valid URI
test := strings.ReplaceAll(raw, "*", "x")
u, err := url.Parse(test)
u, err := url.Parse(raw)
if err != nil {
return false
}

View File
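The hunk above changes wildcard handling in callback URL validation: previously only a bare "*" short-circuited and other wildcards were replaced with 'x' before parsing; now any value containing '*' is accepted without validation and only wildcard-free URLs are parsed. A standalone approximation of the new behavior (the checks after url.Parse are assumed, since the rest of the function falls outside the hunk):

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// validateCallbackURL mirrors the updated logic from the hunk above.
func validateCallbackURL(raw string) bool {
	// Wildcard URLs are no longer pre-processed; they skip validation entirely.
	if strings.Contains(raw, "*") {
		return true
	}
	// The real function goes on to inspect the parsed URL; only the parse step
	// is reproduced here because the remainder of the function is not shown.
	_, err := url.Parse(raw)
	return err == nil
}

func main() {
	for _, cb := range []string{"https://app.example.com/cb", "https://*.example.com/cb", "://bad"} {
		fmt.Printf("%-30s %v\n", cb, validateCallbackURL(cb))
	}
}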

@@ -19,7 +19,7 @@ const heartbeatUrl = "https://analytics.pocket-id.org/heartbeat"
func (s *Scheduler) RegisterAnalyticsJob(ctx context.Context, appConfig *service.AppConfigService, httpClient *http.Client) error {
// Skip if analytics are disabled or not in production environment
if common.EnvConfig.AnalyticsDisabled || common.EnvConfig.AppEnv != "production" {
if common.EnvConfig.AnalyticsDisabled || !common.EnvConfig.AppEnv.IsProduction() {
return nil
}
@@ -39,7 +39,7 @@ type AnalyticsJob struct {
// sendHeartbeat sends a heartbeat to the analytics service
func (j *AnalyticsJob) sendHeartbeat(parentCtx context.Context) error {
// Skip if analytics are disabled or not in production environment
if common.EnvConfig.AnalyticsDisabled || common.EnvConfig.AppEnv != "production" {
if common.EnvConfig.AnalyticsDisabled || !common.EnvConfig.AppEnv.IsProduction() {
return nil
}

View File

@@ -37,7 +37,7 @@ func (j *ApiKeyEmailJobs) checkAndNotifyExpiringApiKeys(ctx context.Context) err
}
for _, key := range apiKeys {
if key.User.Email == "" {
if key.User.Email == nil {
continue
}
err = j.apiKeyService.SendApiKeyExpiringSoonEmail(ctx, key)

View File

@@ -2,29 +2,36 @@ package job
import (
"context"
"errors"
"fmt"
"log/slog"
"os"
"path/filepath"
"path"
"strings"
"time"
"github.com/go-co-op/gocron/v2"
"gorm.io/gorm"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/model"
"github.com/pocket-id/pocket-id/backend/internal/storage"
)
func (s *Scheduler) RegisterFileCleanupJobs(ctx context.Context, db *gorm.DB) error {
jobs := &FileCleanupJobs{db: db}
func (s *Scheduler) RegisterFileCleanupJobs(ctx context.Context, db *gorm.DB, fileStorage storage.FileStorage) error {
jobs := &FileCleanupJobs{db: db, fileStorage: fileStorage}
// Run every 24 hours
return s.registerJob(ctx, "ClearUnusedDefaultProfilePictures", gocron.DurationJob(24*time.Hour), jobs.clearUnusedDefaultProfilePictures, false)
err := s.registerJob(ctx, "ClearUnusedDefaultProfilePictures", gocron.DurationJob(24*time.Hour), jobs.clearUnusedDefaultProfilePictures, false)
// Only necessary for file system storage
if fileStorage.Type() == storage.TypeFileSystem {
err = errors.Join(err, s.registerJob(ctx, "ClearOrphanedTempFiles", gocron.DurationJob(12*time.Hour), jobs.clearOrphanedTempFiles, true))
}
return err
}
type FileCleanupJobs struct {
db *gorm.DB
db *gorm.DB
fileStorage storage.FileStorage
}
// ClearUnusedDefaultProfilePictures deletes default profile pictures that don't match any user's initials
@@ -44,29 +51,24 @@ func (j *FileCleanupJobs) clearUnusedDefaultProfilePictures(ctx context.Context)
initialsInUse[user.Initials()] = struct{}{}
}
defaultPicturesDir := common.EnvConfig.UploadPath + "/profile-pictures/defaults"
if _, err := os.Stat(defaultPicturesDir); os.IsNotExist(err) {
return nil
}
files, err := os.ReadDir(defaultPicturesDir)
defaultPicturesDir := path.Join("profile-pictures", "defaults")
files, err := j.fileStorage.List(ctx, defaultPicturesDir)
if err != nil {
return fmt.Errorf("failed to read default profile pictures directory: %w", err)
return fmt.Errorf("failed to list default profile pictures: %w", err)
}
filesDeleted := 0
for _, file := range files {
if file.IsDir() {
continue // Skip directories
_, filename := path.Split(file.Path)
if filename == "" {
continue
}
filename := file.Name()
initials := strings.TrimSuffix(filename, ".png")
// If these initials aren't used by any user, delete the file
if _, ok := initialsInUse[initials]; !ok {
filePath := filepath.Join(defaultPicturesDir, filename)
if err := os.Remove(filePath); err != nil {
filePath := path.Join(defaultPicturesDir, filename)
if err := j.fileStorage.Delete(ctx, filePath); err != nil {
slog.ErrorContext(ctx, "Failed to delete unused default profile picture", slog.String("path", filePath), slog.Any("error", err))
} else {
filesDeleted++
@@ -77,3 +79,34 @@ func (j *FileCleanupJobs) clearUnusedDefaultProfilePictures(ctx context.Context)
slog.Info("Done deleting unused default profile pictures", slog.Int("count", filesDeleted))
return nil
}
// clearOrphanedTempFiles deletes temporary files that are produced by failed atomic writes
func (j *FileCleanupJobs) clearOrphanedTempFiles(ctx context.Context) error {
const minAge = 10 * time.Minute
var deleted int
err := j.fileStorage.Walk(ctx, "/", func(p storage.ObjectInfo) error {
// Only temp files
if !strings.HasSuffix(p.Path, "-tmp") {
return nil
}
if time.Since(p.ModTime) < minAge {
return nil
}
if err := j.fileStorage.Delete(ctx, p.Path); err != nil {
slog.ErrorContext(ctx, "Failed to delete temp file", slog.String("path", p.Path), slog.Any("error", err))
return nil
}
deleted++
return nil
})
if err != nil {
return fmt.Errorf("failed to scan storage: %w", err)
}
slog.Info("Done cleaning orphaned temp files", slog.Int("count", deleted))
return nil
}

View File
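Several hunks in this comparison replace direct os/filepath calls with a storage.FileStorage abstraction (filesystem, S3, or database backed). The interface declaration itself is not part of this excerpt; the shape below is inferred from the call sites seen here (Type, List, Walk, Open, Save, Delete, DeleteAll, plus the package-level IsNotExist helper) and should be read as an assumption rather than the actual definition:

package storage

import (
	"context"
	"errors"
	"io"
	"io/fs"
	"time"
)

// ObjectInfo describes one stored object, matching the fields accessed in the
// cleanup job above (Path, ModTime) plus a size for completeness.
type ObjectInfo struct {
	Path    string
	Size    int64
	ModTime time.Time
}

// Type identifies the configured backend; the constant values are assumed.
type Type string

const (
	TypeFileSystem Type = "filesystem"
	TypeDatabase   Type = "database"
	TypeS3         Type = "s3"
)

// FileStorage is an inferred sketch of the abstraction used throughout these hunks.
type FileStorage interface {
	Type() Type
	Open(ctx context.Context, path string) (io.ReadCloser, int64, error)
	Save(ctx context.Context, path string, r io.Reader) error
	Delete(ctx context.Context, path string) error
	DeleteAll(ctx context.Context, prefix string) error
	List(ctx context.Context, dir string) ([]ObjectInfo, error)
	Walk(ctx context.Context, root string, fn func(ObjectInfo) error) error
}

// IsNotExist reports whether err indicates a missing object; the filesystem
// backend surfaces fs.ErrNotExist (see the AppImagesService tests below), so
// this simplified version checks only for that.
func IsNotExist(err error) bool {
	return errors.Is(err, fs.ErrNotExist)
}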

@@ -34,7 +34,7 @@ func (m *CspMiddleware) Add() gin.HandlerFunc {
"object-src 'none'; " +
"frame-ancestors 'none'; " +
"form-action 'self'; " +
"img-src 'self' data: blob:; " +
"img-src * blob:;" +
"font-src 'self'; " +
"style-src 'self' 'unsafe-inline'; " +
"script-src 'self' 'nonce-" + nonce + "'"

View File

@@ -29,7 +29,7 @@ func (m *RateLimitMiddleware) Add(limit rate.Limit, burst int) gin.HandlerFunc {
// Skip rate limiting for localhost and the test environment
// If the client IP is localhost, the request comes from the frontend
if ip == "" || ip == "127.0.0.1" || ip == "::1" || common.EnvConfig.AppEnv == "test" {
if ip == "" || ip == "127.0.0.1" || ip == "::1" || common.EnvConfig.AppEnv.IsTest() {
c.Next()
return
}

View File
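Both the analytics job and this middleware stop comparing common.EnvConfig.AppEnv against raw strings and call IsProduction()/IsTest() instead. A plausible shape for that type, shown purely to illustrate the pattern (the constant values are assumed):

package common

// AppEnv is a typed environment name; the methods replace scattered string
// comparisons such as AppEnv != "production".
type AppEnv string

const (
	AppEnvProduction AppEnv = "production"
	AppEnvTest       AppEnv = "test"
)

func (e AppEnv) IsProduction() bool { return e == AppEnvProduction }
func (e AppEnv) IsTest() bool       { return e == AppEnvTest }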

@@ -46,6 +46,7 @@ type AppConfig struct {
// Internal
InstanceID AppConfigVariable `key:"instanceId,internal"` // Internal
// Email
RequireUserEmail AppConfigVariable `key:"requireUserEmail,public"` // Public
SmtpHost AppConfigVariable `key:"smtpHost"`
SmtpPort AppConfigVariable `key:"smtpPort"`
SmtpFrom AppConfigVariable `key:"smtpFrom"`
@@ -76,7 +77,7 @@ type AppConfig struct {
LdapAttributeGroupMember AppConfigVariable `key:"ldapAttributeGroupMember"`
LdapAttributeGroupUniqueIdentifier AppConfigVariable `key:"ldapAttributeGroupUniqueIdentifier"`
LdapAttributeGroupName AppConfigVariable `key:"ldapAttributeGroupName"`
LdapAttributeAdminGroup AppConfigVariable `key:"ldapAttributeAdminGroup"`
LdapAdminGroupName AppConfigVariable `key:"ldapAdminGroupName"`
LdapSoftDeleteUsers AppConfigVariable `key:"ldapSoftDeleteUsers"`
}

View File

@@ -3,13 +3,14 @@ package model
import (
"database/sql/driver"
"encoding/json"
"fmt"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
type AuditLog struct {
Base
Event AuditLogEvent `sortable:"true"`
Event AuditLogEvent `sortable:"true" filterable:"true"`
IpAddress *string `sortable:"true"`
Country string `sortable:"true"`
City string `sortable:"true"`
@@ -17,7 +18,7 @@ type AuditLog struct {
Username string `gorm:"-"`
Data AuditLogData
UserID string
UserID string `filterable:"true"`
User User
}
@@ -33,6 +34,8 @@ const (
AuditLogEventNewClientAuthorization AuditLogEvent = "NEW_CLIENT_AUTHORIZATION"
AuditLogEventDeviceCodeAuthorization AuditLogEvent = "DEVICE_CODE_AUTHORIZATION"
AuditLogEventNewDeviceCodeAuthorization AuditLogEvent = "NEW_DEVICE_CODE_AUTHORIZATION"
AuditLogEventPasskeyAdded AuditLogEvent = "PASSKEY_ADDED"
AuditLogEventPasskeyRemoved AuditLogEvent = "PASSKEY_REMOVED"
)
// Scan and Value methods for GORM to handle the custom type
@@ -47,14 +50,7 @@ func (e AuditLogEvent) Value() (driver.Value, error) {
}
func (d *AuditLogData) Scan(value any) error {
switch v := value.(type) {
case []byte:
return json.Unmarshal(v, d)
case string:
return json.Unmarshal([]byte(v), d)
default:
return fmt.Errorf("unsupported type: %T", value)
}
return utils.UnmarshalJSONFromDatabase(d, value)
}
func (d AuditLogData) Value() (driver.Value, error) {

View File
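The Scan implementations for AuditLogData here and for OidcClientCredentials, UrlList, AuthenticatorTransportList and CredentialParameters below all collapse the same []byte/string type switch into utils.UnmarshalJSONFromDatabase. Judging from the code it replaces, the helper presumably looks roughly like this (a sketch, not the verbatim implementation — it may also handle nil):

package utils

import (
	"encoding/json"
	"fmt"
)

// UnmarshalJSONFromDatabase decodes a JSON value handed to a GORM Scan method,
// accepting both the []byte and string representations that drivers produce.
func UnmarshalJSONFromDatabase(dest any, value any) error {
	switch v := value.(type) {
	case []byte:
		return json.Unmarshal(v, dest)
	case string:
		return json.Unmarshal([]byte(v), dest)
	default:
		return fmt.Errorf("unsupported type: %T", value)
	}
}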

@@ -3,12 +3,10 @@ package model
import (
"database/sql/driver"
"encoding/json"
"fmt"
"strings"
"gorm.io/gorm"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
type UserAuthorizedOidcClient struct {
@@ -54,10 +52,10 @@ type OidcClient struct {
CallbackURLs UrlList
LogoutCallbackURLs UrlList
ImageType *string
HasLogo bool `gorm:"-"`
DarkImageType *string
IsPublic bool
PkceEnabled bool
RequiresReauthentication bool
PkceEnabled bool `sortable:"true" filterable:"true"`
RequiresReauthentication bool `sortable:"true" filterable:"true"`
Credentials OidcClientCredentials
LaunchURL *string
@@ -67,6 +65,14 @@ type OidcClient struct {
UserAuthorizedOidcClients []UserAuthorizedOidcClient `gorm:"foreignKey:ClientID;references:ID"`
}
func (c OidcClient) HasLogo() bool {
return c.ImageType != nil && *c.ImageType != ""
}
func (c OidcClient) HasDarkLogo() bool {
return c.DarkImageType != nil && *c.DarkImageType != ""
}
type OidcRefreshToken struct {
Base
@@ -89,12 +95,6 @@ func (c OidcRefreshToken) Scopes() []string {
return strings.Split(c.Scope, " ")
}
func (c *OidcClient) AfterFind(_ *gorm.DB) (err error) {
// Compute HasLogo field
c.HasLogo = c.ImageType != nil && *c.ImageType != ""
return nil
}
type OidcClientCredentials struct { //nolint:recvcheck
FederatedIdentities []OidcClientFederatedIdentity `json:"federatedIdentities,omitempty"`
}
@@ -121,14 +121,7 @@ func (occ OidcClientCredentials) FederatedIdentityForIssuer(issuer string) (Oidc
}
func (occ *OidcClientCredentials) Scan(value any) error {
switch v := value.(type) {
case []byte:
return json.Unmarshal(v, occ)
case string:
return json.Unmarshal([]byte(v), occ)
default:
return fmt.Errorf("unsupported type: %T", value)
}
return utils.UnmarshalJSONFromDatabase(occ, value)
}
func (occ OidcClientCredentials) Value() (driver.Value, error) {
@@ -138,14 +131,7 @@ func (occ OidcClientCredentials) Value() (driver.Value, error) {
type UrlList []string //nolint:recvcheck
func (cu *UrlList) Scan(value any) error {
switch v := value.(type) {
case []byte:
return json.Unmarshal(v, cu)
case string:
return json.Unmarshal([]byte(v), cu)
default:
return fmt.Errorf("unsupported type: %T", value)
}
return utils.UnmarshalJSONFromDatabase(cu, value)
}
func (cu UrlList) Value() (driver.Value, error) {

View File

@@ -0,0 +1,17 @@
package model
import (
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
)
type Storage struct {
Path string `gorm:"primaryKey"`
Data []byte
Size int64
ModTime datatype.DateTime
CreatedAt datatype.DateTime
}
func (Storage) TableName() string {
return "storage"
}

View File

@@ -11,6 +11,15 @@ import (
// DateTime is a custom type for time.Time that stores dates as a Unix timestamp in SQLite and as a native date in Postgres
type DateTime time.Time //nolint:recvcheck
func DateTimeFromString(str string) (DateTime, error) {
t, err := time.Parse(time.RFC3339Nano, str)
if err != nil {
return DateTime{}, fmt.Errorf("failed to parse date string: %w", err)
}
return DateTime(t), nil
}
func (date *DateTime) Scan(value any) (err error) {
switch v := value.(type) {
case time.Time:

View File

@@ -13,15 +13,15 @@ import (
type User struct {
Base
Username string `sortable:"true"`
Email string `sortable:"true"`
FirstName string `sortable:"true"`
LastName string `sortable:"true"`
DisplayName string `sortable:"true"`
IsAdmin bool `sortable:"true"`
Username string `sortable:"true"`
Email *string `sortable:"true"`
FirstName string `sortable:"true"`
LastName string `sortable:"true"`
DisplayName string `sortable:"true"`
IsAdmin bool `sortable:"true" filterable:"true"`
Locale *string
LdapID *string
Disabled bool `sortable:"true"`
Disabled bool `sortable:"true" filterable:"true"`
CustomClaims []CustomClaim
UserGroups []UserGroup `gorm:"many2many:user_groups_users;"`

View File

@@ -3,11 +3,11 @@ package model
import (
"database/sql/driver"
"encoding/json"
"fmt"
"time"
"github.com/go-webauthn/webauthn/protocol"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
type WebauthnSession struct {
@@ -16,6 +16,7 @@ type WebauthnSession struct {
Challenge string
ExpiresAt datatype.DateTime
UserVerification string
CredentialParams CredentialParameters
}
type WebauthnCredential struct {
@@ -58,16 +59,20 @@ type AuthenticatorTransportList []protocol.AuthenticatorTransport //nolint:recvc
// Scan and Value methods for GORM to handle the custom type
func (atl *AuthenticatorTransportList) Scan(value interface{}) error {
switch v := value.(type) {
case []byte:
return json.Unmarshal(v, atl)
case string:
return json.Unmarshal([]byte(v), atl)
default:
return fmt.Errorf("unsupported type: %T", value)
}
return utils.UnmarshalJSONFromDatabase(atl, value)
}
func (atl AuthenticatorTransportList) Value() (driver.Value, error) {
return json.Marshal(atl)
}
type CredentialParameters []protocol.CredentialParameter //nolint:recvcheck
// Scan and Value methods for GORM to handle the custom type
func (cp *CredentialParameters) Scan(value interface{}) error {
return utils.UnmarshalJSONFromDatabase(cp, value)
}
func (cp CredentialParameters) Value() (driver.Value, error) {
return json.Marshal(cp)
}

View File

@@ -25,14 +25,14 @@ func NewApiKeyService(db *gorm.DB, emailService *EmailService) *ApiKeyService {
return &ApiKeyService{db: db, emailService: emailService}
}
func (s *ApiKeyService) ListApiKeys(ctx context.Context, userID string, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.ApiKey, utils.PaginationResponse, error) {
func (s *ApiKeyService) ListApiKeys(ctx context.Context, userID string, listRequestOptions utils.ListRequestOptions) ([]model.ApiKey, utils.PaginationResponse, error) {
query := s.db.
WithContext(ctx).
Where("user_id = ?", userID).
Model(&model.ApiKey{})
var apiKeys []model.ApiKey
pagination, err := utils.PaginateAndSort(sortedPaginationRequest, query, &apiKeys)
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &apiKeys)
if err != nil {
return nil, utils.PaginationResponse{}, err
}
@@ -144,9 +144,13 @@ func (s *ApiKeyService) SendApiKeyExpiringSoonEmail(ctx context.Context, apiKey
}
}
if user.Email == nil {
return &common.UserEmailNotSetError{}
}
err := SendEmail(ctx, s.emailService, email.Address{
Name: user.FullName(),
Email: user.Email,
Email: *user.Email,
}, ApiKeyExpiringSoonTemplate, &ApiKeyExpiringSoonTemplateData{
ApiKeyName: apiKey.Name,
ExpiresAt: apiKey.ExpiresAt.ToTime(),

View File

@@ -71,6 +71,7 @@ func (s *AppConfigService) getDefaultDbConfig() *model.AppConfig {
// Internal
InstanceID: model.AppConfigVariable{Value: ""},
// Email
RequireUserEmail: model.AppConfigVariable{Value: "true"},
SmtpHost: model.AppConfigVariable{},
SmtpPort: model.AppConfigVariable{},
SmtpFrom: model.AppConfigVariable{},
@@ -101,7 +102,7 @@ func (s *AppConfigService) getDefaultDbConfig() *model.AppConfig {
LdapAttributeGroupMember: model.AppConfigVariable{Value: "member"},
LdapAttributeGroupUniqueIdentifier: model.AppConfigVariable{},
LdapAttributeGroupName: model.AppConfigVariable{},
LdapAttributeAdminGroup: model.AppConfigVariable{},
LdapAdminGroupName: model.AppConfigVariable{},
LdapSoftDeleteUsers: model.AppConfigVariable{Value: "true"},
}
}

View File

@@ -1,42 +1,52 @@
package service
import (
"context"
"fmt"
"io"
"mime/multipart"
"os"
"path/filepath"
"path"
"strings"
"sync"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
type AppImagesService struct {
mu sync.RWMutex
extensions map[string]string
storage storage.FileStorage
}
func NewAppImagesService(extensions map[string]string) *AppImagesService {
return &AppImagesService{extensions: extensions}
func NewAppImagesService(extensions map[string]string, storage storage.FileStorage) *AppImagesService {
return &AppImagesService{extensions: extensions, storage: storage}
}
func (s *AppImagesService) GetImage(name string) (string, string, error) {
func (s *AppImagesService) GetImage(ctx context.Context, name string) (io.ReadCloser, int64, string, error) {
ext, err := s.getExtension(name)
if err != nil {
return "", "", err
return nil, 0, "", err
}
mimeType := utils.GetImageMimeType(ext)
if mimeType == "" {
return "", "", fmt.Errorf("unsupported image type '%s'", ext)
return nil, 0, "", fmt.Errorf("unsupported image type '%s'", ext)
}
imagePath := filepath.Join(common.EnvConfig.UploadPath, "application-images", fmt.Sprintf("%s.%s", name, ext))
return imagePath, mimeType, nil
imagePath := path.Join("application-images", name+"."+ext)
reader, size, err := s.storage.Open(ctx, imagePath)
if err != nil {
if storage.IsNotExist(err) {
return nil, 0, "", &common.ImageNotFoundError{}
}
return nil, 0, "", err
}
return reader, size, mimeType, nil
}
func (s *AppImagesService) UpdateImage(file *multipart.FileHeader, imageName string) error {
func (s *AppImagesService) UpdateImage(ctx context.Context, file *multipart.FileHeader, imageName string) error {
fileType := strings.ToLower(utils.GetFileExtension(file.Filename))
mimeType := utils.GetImageMimeType(fileType)
if mimeType == "" {
@@ -48,18 +58,23 @@ func (s *AppImagesService) UpdateImage(file *multipart.FileHeader, imageName str
currentExt, ok := s.extensions[imageName]
if !ok {
return fmt.Errorf("unknown application image '%s'", imageName)
s.extensions[imageName] = fileType
}
imagePath := filepath.Join(common.EnvConfig.UploadPath, "application-images", fmt.Sprintf("%s.%s", imageName, fileType))
imagePath := path.Join("application-images", imageName+"."+fileType)
fileReader, err := file.Open()
if err != nil {
return err
}
defer fileReader.Close()
if err := utils.SaveFile(file, imagePath); err != nil {
if err := s.storage.Save(ctx, imagePath, fileReader); err != nil {
return err
}
if currentExt != "" && currentExt != fileType {
oldImagePath := filepath.Join(common.EnvConfig.UploadPath, "application-images", fmt.Sprintf("%s.%s", imageName, currentExt))
if err := os.Remove(oldImagePath); err != nil && !os.IsNotExist(err) {
oldImagePath := path.Join("application-images", imageName+"."+currentExt)
if err := s.storage.Delete(ctx, oldImagePath); err != nil {
return err
}
}
@@ -69,13 +84,39 @@ func (s *AppImagesService) UpdateImage(file *multipart.FileHeader, imageName str
return nil
}
func (s *AppImagesService) DeleteImage(ctx context.Context, imageName string) error {
s.mu.Lock()
defer s.mu.Unlock()
ext, ok := s.extensions[imageName]
if !ok || ext == "" {
return &common.ImageNotFoundError{}
}
imagePath := path.Join("application-images", imageName+"."+ext)
if err := s.storage.Delete(ctx, imagePath); err != nil {
return err
}
delete(s.extensions, imageName)
return nil
}
func (s *AppImagesService) IsDefaultProfilePictureSet() bool {
s.mu.RLock()
defer s.mu.RUnlock()
_, ok := s.extensions["default-profile-picture"]
return ok
}
func (s *AppImagesService) getExtension(name string) (string, error) {
s.mu.RLock()
defer s.mu.RUnlock()
ext, ok := s.extensions[name]
if !ok || ext == "" {
return "", fmt.Errorf("unknown application image '%s'", name)
return "", &common.ImageNotFoundError{}
}
return strings.ToLower(ext), nil

View File
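GetImage now returns an io.ReadCloser plus size and MIME type instead of a path on disk, so callers stream straight from the storage backend. A hedged sketch of how a Gin handler might consume the new signature (handler name, routing and error handling are illustrative; assumes the gin and net/http imports):

// getImageHandler streams an application image from the storage backend.
func getImageHandler(svc *AppImagesService) gin.HandlerFunc {
	return func(c *gin.Context) {
		reader, size, mimeType, err := svc.GetImage(c.Request.Context(), c.Param("name"))
		if err != nil {
			_ = c.Error(err)
			return
		}
		defer reader.Close()
		// DataFromReader writes the body with the correct Content-Type and Content-Length.
		c.DataFromReader(http.StatusOK, size, mimeType, reader, nil)
	}
}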

@@ -2,66 +2,92 @@ package service
import (
"bytes"
"context"
"io"
"io/fs"
"mime/multipart"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"path"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/storage"
)
func TestAppImagesService_GetImage(t *testing.T) {
tempDir := t.TempDir()
originalUploadPath := common.EnvConfig.UploadPath
common.EnvConfig.UploadPath = tempDir
t.Cleanup(func() {
common.EnvConfig.UploadPath = originalUploadPath
})
imagesDir := filepath.Join(tempDir, "application-images")
require.NoError(t, os.MkdirAll(imagesDir, 0o755))
filePath := filepath.Join(imagesDir, "background.webp")
require.NoError(t, os.WriteFile(filePath, []byte("data"), fs.FileMode(0o644)))
service := NewAppImagesService(map[string]string{"background": "webp"})
path, mimeType, err := service.GetImage("background")
store, err := storage.NewFilesystemStorage(t.TempDir())
require.NoError(t, err)
require.Equal(t, filePath, path)
require.NoError(t, store.Save(context.Background(), path.Join("application-images", "background.webp"), bytes.NewReader([]byte("data"))))
service := NewAppImagesService(map[string]string{"background": "webp"}, store)
reader, size, mimeType, err := service.GetImage(context.Background(), "background")
require.NoError(t, err)
defer reader.Close()
payload, err := io.ReadAll(reader)
require.NoError(t, err)
require.Equal(t, []byte("data"), payload)
require.Equal(t, int64(len(payload)), size)
require.Equal(t, "image/webp", mimeType)
}
func TestAppImagesService_UpdateImage(t *testing.T) {
tempDir := t.TempDir()
originalUploadPath := common.EnvConfig.UploadPath
common.EnvConfig.UploadPath = tempDir
t.Cleanup(func() {
common.EnvConfig.UploadPath = originalUploadPath
})
store, err := storage.NewFilesystemStorage(t.TempDir())
require.NoError(t, err)
imagesDir := filepath.Join(tempDir, "application-images")
require.NoError(t, os.MkdirAll(imagesDir, 0o755))
require.NoError(t, store.Save(context.Background(), path.Join("application-images", "logoLight.svg"), bytes.NewReader([]byte("old"))))
oldPath := filepath.Join(imagesDir, "logoLight.svg")
require.NoError(t, os.WriteFile(oldPath, []byte("old"), fs.FileMode(0o644)))
service := NewAppImagesService(map[string]string{"logoLight": "svg"})
service := NewAppImagesService(map[string]string{"logoLight": "svg"}, store)
fileHeader := newFileHeader(t, "logoLight.png", []byte("new"))
require.NoError(t, service.UpdateImage(fileHeader, "logoLight"))
require.NoError(t, service.UpdateImage(context.Background(), fileHeader, "logoLight"))
_, err := os.Stat(filepath.Join(imagesDir, "logoLight.png"))
reader, _, err := store.Open(context.Background(), path.Join("application-images", "logoLight.png"))
require.NoError(t, err)
_ = reader.Close()
_, _, err = store.Open(context.Background(), path.Join("application-images", "logoLight.svg"))
require.ErrorIs(t, err, fs.ErrNotExist)
}
func TestAppImagesService_ErrorsAndFlags(t *testing.T) {
store, err := storage.NewFilesystemStorage(t.TempDir())
require.NoError(t, err)
_, err = os.Stat(oldPath)
require.ErrorIs(t, err, os.ErrNotExist)
service := NewAppImagesService(map[string]string{}, store)
t.Run("get missing image returns not found", func(t *testing.T) {
_, _, _, err := service.GetImage(context.Background(), "missing")
require.Error(t, err)
var imageErr *common.ImageNotFoundError
assert.ErrorAs(t, err, &imageErr)
})
t.Run("reject unsupported file types", func(t *testing.T) {
err := service.UpdateImage(context.Background(), newFileHeader(t, "logo.txt", []byte("nope")), "logo")
require.Error(t, err)
var fileTypeErr *common.FileTypeNotSupportedError
assert.ErrorAs(t, err, &fileTypeErr)
})
t.Run("delete and extension tracking", func(t *testing.T) {
require.NoError(t, store.Save(context.Background(), path.Join("application-images", "default-profile-picture.png"), bytes.NewReader([]byte("img"))))
service.extensions["default-profile-picture"] = "png"
require.NoError(t, service.DeleteImage(context.Background(), "default-profile-picture"))
assert.False(t, service.IsDefaultProfilePictureSet())
err := service.DeleteImage(context.Background(), "default-profile-picture")
require.Error(t, err)
var imageErr *common.ImageNotFoundError
assert.ErrorAs(t, err, &imageErr)
})
}
func newFileHeader(t *testing.T, filename string, content []byte) *multipart.FileHeader {

View File

@@ -0,0 +1,296 @@
package service
import (
"context"
"encoding/json"
"errors"
"fmt"
"log/slog"
"os"
"time"
"github.com/google/uuid"
"github.com/pocket-id/pocket-id/backend/internal/model"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
var (
ErrLockUnavailable = errors.New("lock is already held by another process")
ErrLockLost = errors.New("lock ownership lost")
)
const (
ttl = 30 * time.Second
renewInterval = 20 * time.Second
renewRetries = 3
lockKey = "application_lock"
)
type AppLockService struct {
db *gorm.DB
lockID string
processID int64
hostID string
}
func NewAppLockService(db *gorm.DB) *AppLockService {
host, err := os.Hostname()
if err != nil || host == "" {
host = "unknown-host"
}
return &AppLockService{
db: db,
processID: int64(os.Getpid()),
hostID: host,
lockID: uuid.NewString(),
}
}
type lockValue struct {
ProcessID int64 `json:"process_id"`
HostID string `json:"host_id"`
LockID string `json:"lock_id"`
ExpiresAt int64 `json:"expires_at"`
}
func (lv *lockValue) Marshal() (string, error) {
data, err := json.Marshal(lv)
if err != nil {
return "", err
}
return string(data), nil
}
func (lv *lockValue) Unmarshal(raw string) error {
if raw == "" {
return nil
}
return json.Unmarshal([]byte(raw), lv)
}
// Acquire obtains the lock. When force is true, the lock is stolen from any existing owner.
// If the lock is forcefully acquired, it blocks until the previous lock has expired.
func (s *AppLockService) Acquire(ctx context.Context, force bool) (waitUntil time.Time, err error) {
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
var prevLockRaw string
err = tx.
WithContext(ctx).
Model(&model.KV{}).
Where("key = ?", lockKey).
Clauses(clause.Locking{Strength: "UPDATE"}).
Select("value").
Scan(&prevLockRaw).
Error
if err != nil {
return time.Time{}, fmt.Errorf("query existing lock: %w", err)
}
var prevLock lockValue
if prevLockRaw != "" {
if err := prevLock.Unmarshal(prevLockRaw); err != nil {
return time.Time{}, fmt.Errorf("decode existing lock value: %w", err)
}
}
now := time.Now()
nowUnix := now.Unix()
value := lockValue{
ProcessID: s.processID,
HostID: s.hostID,
LockID: s.lockID,
ExpiresAt: now.Add(ttl).Unix(),
}
raw, err := value.Marshal()
if err != nil {
return time.Time{}, fmt.Errorf("encode lock value: %w", err)
}
var query string
switch s.db.Name() {
case "sqlite":
query = `
INSERT INTO kv (key, value)
VALUES (?, ?)
ON CONFLICT(key) DO UPDATE SET
value = excluded.value
WHERE (json_extract(kv.value, '$.expires_at') < ?) OR ?
`
case "postgres":
query = `
INSERT INTO kv (key, value)
VALUES ($1, $2)
ON CONFLICT(key) DO UPDATE SET
value = excluded.value
WHERE ((kv.value::json->>'expires_at')::bigint < $3) OR ($4::boolean IS TRUE)
`
default:
return time.Time{}, fmt.Errorf("unsupported database dialect: %s", s.db.Name())
}
res := tx.WithContext(ctx).Exec(query, lockKey, raw, nowUnix, force)
if res.Error != nil {
return time.Time{}, fmt.Errorf("lock acquisition failed: %w", res.Error)
}
if err := tx.Commit().Error; err != nil {
return time.Time{}, fmt.Errorf("commit lock acquisition: %w", err)
}
// If there is a lock that is not expired and force is false, no rows will be affected
if res.RowsAffected == 0 {
return time.Time{}, ErrLockUnavailable
}
if force && prevLock.ExpiresAt > nowUnix && prevLock.LockID != s.lockID {
waitUntil = time.Unix(prevLock.ExpiresAt, 0)
}
attrs := []any{
slog.Int64("process_id", s.processID),
slog.String("host_id", s.hostID),
}
if wait := time.Until(waitUntil); wait > 0 {
attrs = append(attrs, slog.Duration("wait_before_proceeding", wait))
}
slog.Info("Acquired application lock", attrs...)
return waitUntil, nil
}
// RunRenewal keeps renewing the lock until the context is canceled.
func (s *AppLockService) RunRenewal(ctx context.Context) error {
ticker := time.NewTicker(renewInterval)
defer ticker.Stop()
for {
select {
case <-ctx.Done():
return nil
case <-ticker.C:
if err := s.renew(ctx); err != nil {
return fmt.Errorf("renew lock: %w", err)
}
}
}
}
// Release releases the lock if it is held by this process.
func (s *AppLockService) Release(ctx context.Context) error {
opCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
defer cancel()
var query string
switch s.db.Name() {
case "sqlite":
query = `
DELETE FROM kv
WHERE key = ?
AND json_extract(value, '$.lock_id') = ?
`
case "postgres":
query = `
DELETE FROM kv
WHERE key = $1
AND value::json->>'lock_id' = $2
`
default:
return fmt.Errorf("unsupported database dialect: %s", s.db.Name())
}
res := s.db.WithContext(opCtx).Exec(query, lockKey, s.lockID)
if res.Error != nil {
return fmt.Errorf("release lock failed: %w", res.Error)
}
if res.RowsAffected == 0 {
slog.Warn("Application lock not held by this process, cannot release",
slog.Int64("process_id", s.processID),
slog.String("host_id", s.hostID),
)
}
slog.Info("Released application lock",
slog.Int64("process_id", s.processID),
slog.String("host_id", s.hostID),
)
return nil
}
// renew tries to renew the lock, retrying up to renewRetries times (sleeping 1s between attempts).
func (s *AppLockService) renew(ctx context.Context) error {
var lastErr error
for attempt := 1; attempt <= renewRetries; attempt++ {
now := time.Now()
nowUnix := now.Unix()
expiresAt := now.Add(ttl).Unix()
value := lockValue{
LockID: s.lockID,
ProcessID: s.processID,
HostID: s.hostID,
ExpiresAt: expiresAt,
}
raw, err := value.Marshal()
if err != nil {
return fmt.Errorf("encode lock value: %w", err)
}
var query string
switch s.db.Name() {
case "sqlite":
query = `
UPDATE kv
SET value = ?
WHERE key = ?
AND json_extract(value, '$.lock_id') = ?
AND json_extract(value, '$.expires_at') > ?
`
case "postgres":
query = `
UPDATE kv
SET value = $1
WHERE key = $2
AND value::json->>'lock_id' = $3
AND ((value::json->>'expires_at')::bigint > $4)
`
default:
return fmt.Errorf("unsupported database dialect: %s", s.db.Name())
}
opCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
res := s.db.WithContext(opCtx).Exec(query, raw, lockKey, s.lockID, nowUnix)
cancel()
switch {
case res.Error != nil:
lastErr = fmt.Errorf("lock renewal failed: %w", res.Error)
case res.RowsAffected == 0:
// Must be after checking res.Error
return ErrLockLost
default:
slog.Debug("Renewed application lock",
slog.Int64("process_id", s.processID),
slog.String("host_id", s.hostID),
)
return nil
}
// Wait before next attempt or cancel if context is done
if attempt < renewRetries {
select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(1 * time.Second):
}
}
}
return lastErr
}

View File
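The new AppLockService provides a single-writer guarantee across instances via a JSON lock row in the kv table (the SQLite and Postgres upserts above). A sketch of how a caller would typically wire it up — acquire at startup, renew in the background, release on shutdown. Only Acquire, RunRenewal, Release and the exported errors come from the hunk; the surrounding wiring is assumed:

// runWithAppLock is an illustrative caller for the service above; it lives in
// the same package and reuses its imports (context, errors, fmt, time).
func runWithAppLock(ctx context.Context, lock *AppLockService) error {
	waitUntil, err := lock.Acquire(ctx, false)
	if errors.Is(err, ErrLockUnavailable) {
		return fmt.Errorf("another instance holds the application lock: %w", err)
	} else if err != nil {
		return err
	}
	defer func() { _ = lock.Release(context.Background()) }()

	// With force=false, waitUntil is always the zero time; after a forced
	// acquisition it marks when the previous owner's lease expires.
	if d := time.Until(waitUntil); d > 0 {
		time.Sleep(d)
	}

	// Keep the lock alive for as long as the application runs.
	renewErr := make(chan error, 1)
	go func() { renewErr <- lock.RunRenewal(ctx) }()

	select {
	case <-ctx.Done():
		return nil
	case err := <-renewErr:
		// ErrLockLost means another process stole the lock (e.g. via force=true).
		return err
	}
}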

@@ -0,0 +1,189 @@
package service
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"gorm.io/gorm"
"github.com/pocket-id/pocket-id/backend/internal/model"
testutils "github.com/pocket-id/pocket-id/backend/internal/utils/testing"
)
func newTestAppLockService(t *testing.T, db *gorm.DB) *AppLockService {
t.Helper()
return &AppLockService{
db: db,
processID: 1,
hostID: "test-host",
lockID: "a13c7673-c7ae-49f1-9112-2cd2d0d4b0c1",
}
}
func insertLock(t *testing.T, db *gorm.DB, value lockValue) {
t.Helper()
raw, err := value.Marshal()
require.NoError(t, err)
err = db.Create(&model.KV{Key: lockKey, Value: &raw}).Error
require.NoError(t, err)
}
func readLockValue(t *testing.T, db *gorm.DB) lockValue {
t.Helper()
var row model.KV
err := db.Take(&row, "key = ?", lockKey).Error
require.NoError(t, err)
require.NotNil(t, row.Value)
var value lockValue
err = value.Unmarshal(*row.Value)
require.NoError(t, err)
return value
}
func TestAppLockServiceAcquire(t *testing.T) {
t.Run("creates new lock when none exists", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
_, err := service.Acquire(context.Background(), false)
require.NoError(t, err)
stored := readLockValue(t, db)
require.Equal(t, service.processID, stored.ProcessID)
require.Equal(t, service.hostID, stored.HostID)
require.Greater(t, stored.ExpiresAt, time.Now().Unix())
})
t.Run("returns ErrLockUnavailable when lock held by another process", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
existing := lockValue{
ProcessID: 99,
HostID: "other-host",
ExpiresAt: time.Now().Add(ttl).Unix(),
}
insertLock(t, db, existing)
_, err := service.Acquire(context.Background(), false)
require.ErrorIs(t, err, ErrLockUnavailable)
current := readLockValue(t, db)
require.Equal(t, existing, current)
})
t.Run("force acquisition steals lock", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
insertLock(t, db, lockValue{
ProcessID: 99,
HostID: "other-host",
ExpiresAt: time.Now().Unix(),
})
_, err := service.Acquire(context.Background(), true)
require.NoError(t, err)
stored := readLockValue(t, db)
require.Equal(t, service.processID, stored.ProcessID)
require.Equal(t, service.hostID, stored.HostID)
require.Greater(t, stored.ExpiresAt, time.Now().Unix())
})
}
func TestAppLockServiceRelease(t *testing.T) {
t.Run("removes owned lock", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
_, err := service.Acquire(context.Background(), false)
require.NoError(t, err)
err = service.Release(context.Background())
require.NoError(t, err)
var row model.KV
err = db.Take(&row, "key = ?", lockKey).Error
require.ErrorIs(t, err, gorm.ErrRecordNotFound)
})
t.Run("ignores lock held by another owner", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
existing := lockValue{
ProcessID: 2,
HostID: "other-host",
ExpiresAt: time.Now().Add(ttl).Unix(),
}
insertLock(t, db, existing)
err := service.Release(context.Background())
require.NoError(t, err)
stored := readLockValue(t, db)
require.Equal(t, existing, stored)
})
}
func TestAppLockServiceRenew(t *testing.T) {
t.Run("extends expiration when lock is still owned", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
_, err := service.Acquire(context.Background(), false)
require.NoError(t, err)
before := readLockValue(t, db)
err = service.renew(context.Background())
require.NoError(t, err)
after := readLockValue(t, db)
require.Equal(t, service.processID, after.ProcessID)
require.Equal(t, service.hostID, after.HostID)
require.GreaterOrEqual(t, after.ExpiresAt, before.ExpiresAt)
})
t.Run("returns ErrLockLost when lock is missing", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
err := service.renew(context.Background())
require.ErrorIs(t, err, ErrLockLost)
})
t.Run("returns ErrLockLost when ownership changed", func(t *testing.T) {
db := testutils.NewDatabaseForTest(t)
service := newTestAppLockService(t, db)
_, err := service.Acquire(context.Background(), false)
require.NoError(t, err)
// Simulate a different process taking the lock.
newOwner := lockValue{
ProcessID: 9,
HostID: "stolen-host",
ExpiresAt: time.Now().Add(ttl).Unix(),
}
raw, marshalErr := newOwner.Marshal()
require.NoError(t, marshalErr)
updateErr := db.Model(&model.KV{}).
Where("key = ?", lockKey).
Update("value", raw).Error
require.NoError(t, updateErr)
err = service.renew(context.Background())
require.ErrorIs(t, err, ErrLockLost)
})
}

View File

@@ -6,7 +6,6 @@ import (
"log/slog"
userAgentParser "github.com/mileusna/useragent"
"github.com/pocket-id/pocket-id/backend/internal/dto"
"github.com/pocket-id/pocket-id/backend/internal/model"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"github.com/pocket-id/pocket-id/backend/internal/utils/email"
@@ -35,7 +34,7 @@ func (s *AuditLogService) Create(ctx context.Context, event model.AuditLogEvent,
country, city, err := s.geoliteService.GetLocationByIP(ipAddress)
if err != nil {
// Log the error but don't interrupt the operation
slog.Warn("Failed to get IP location", "error", err)
slog.Warn("Failed to get IP location", slog.String("ip", ipAddress), slog.Any("error", err))
}
auditLog := model.AuditLog{
@@ -111,9 +110,13 @@ func (s *AuditLogService) CreateNewSignInWithEmail(ctx context.Context, ipAddres
return
}
if user.Email == nil {
return
}
innerErr = SendEmail(innerCtx, s.emailService, email.Address{
Name: user.FullName(),
Email: user.Email,
Email: *user.Email,
}, NewLoginTemplate, &NewLoginTemplateData{
IPAddress: ipAddress,
Country: createdAuditLog.Country,
@@ -122,7 +125,7 @@ func (s *AuditLogService) CreateNewSignInWithEmail(ctx context.Context, ipAddres
DateTime: createdAuditLog.CreatedAt.UTC(),
})
if innerErr != nil {
slog.ErrorContext(innerCtx, "Failed to send notification email", slog.Any("error", innerErr), slog.String("address", user.Email))
slog.ErrorContext(innerCtx, "Failed to send notification email", slog.Any("error", innerErr), slog.String("address", *user.Email))
return
}
}()
@@ -132,14 +135,14 @@ func (s *AuditLogService) CreateNewSignInWithEmail(ctx context.Context, ipAddres
}
// ListAuditLogsForUser retrieves all audit logs for a given user ID
func (s *AuditLogService) ListAuditLogsForUser(ctx context.Context, userID string, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.AuditLog, utils.PaginationResponse, error) {
func (s *AuditLogService) ListAuditLogsForUser(ctx context.Context, userID string, listRequestOptions utils.ListRequestOptions) ([]model.AuditLog, utils.PaginationResponse, error) {
var logs []model.AuditLog
query := s.db.
WithContext(ctx).
Model(&model.AuditLog{}).
Where("user_id = ?", userID)
pagination, err := utils.PaginateAndSort(sortedPaginationRequest, query, &logs)
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &logs)
return logs, pagination, err
}
@@ -148,7 +151,7 @@ func (s *AuditLogService) DeviceStringFromUserAgent(userAgent string) string {
return ua.Name + " on " + ua.OS + " " + ua.OSVersion
}
func (s *AuditLogService) ListAllAuditLogs(ctx context.Context, sortedPaginationRequest utils.SortedPaginationRequest, filters dto.AuditLogFilterDto) ([]model.AuditLog, utils.PaginationResponse, error) {
func (s *AuditLogService) ListAllAuditLogs(ctx context.Context, listRequestOptions utils.ListRequestOptions) ([]model.AuditLog, utils.PaginationResponse, error) {
var logs []model.AuditLog
query := s.db.
@@ -156,33 +159,36 @@ func (s *AuditLogService) ListAllAuditLogs(ctx context.Context, sortedPagination
Preload("User").
Model(&model.AuditLog{})
if filters.UserID != "" {
query = query.Where("user_id = ?", filters.UserID)
}
if filters.Event != "" {
query = query.Where("event = ?", filters.Event)
}
if filters.ClientName != "" {
if clientName, ok := listRequestOptions.Filters["clientName"]; ok {
dialect := s.db.Name()
switch dialect {
case "sqlite":
query = query.Where("json_extract(data, '$.clientName') = ?", filters.ClientName)
query = query.Where("json_extract(data, '$.clientName') IN ?", clientName)
case "postgres":
query = query.Where("data->>'clientName' = ?", filters.ClientName)
query = query.Where("data->>'clientName' IN ?", clientName)
default:
return nil, utils.PaginationResponse{}, fmt.Errorf("unsupported database dialect: %s", dialect)
}
}
if filters.Location != "" {
switch filters.Location {
case "external":
query = query.Where("country != 'Internal Network'")
case "internal":
query = query.Where("country = 'Internal Network'")
if locations, ok := listRequestOptions.Filters["location"]; ok {
mapped := make([]string, 0, len(locations))
for _, v := range locations {
if s, ok := v.(string); ok {
switch s {
case "internal":
mapped = append(mapped, "Internal Network")
case "external":
mapped = append(mapped, "External Network")
}
}
}
if len(mapped) > 0 {
query = query.Where("country IN ?", mapped)
}
}
pagination, err := utils.PaginateAndSort(sortedPaginationRequest, query, &logs)
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &logs)
if err != nil {
return nil, pagination, err
}
@@ -195,8 +201,8 @@ func (s *AuditLogService) ListUsernamesWithIds(ctx context.Context) (users map[s
WithContext(ctx).
Joins("User").
Model(&model.AuditLog{}).
Select("DISTINCT \"User\".id, \"User\".username").
Where("\"User\".username IS NOT NULL")
Select(`DISTINCT "User".id, "User".username`).
Where(`"User".username IS NOT NULL`)
type Result struct {
ID string `gorm:"column:id"`
@@ -204,7 +210,8 @@ func (s *AuditLogService) ListUsernamesWithIds(ctx context.Context) (users map[s
}
var results []Result
if err := query.Find(&results).Error; err != nil {
err = query.Find(&results).Error
if err != nil {
return nil, fmt.Errorf("failed to query user IDs: %w", err)
}
@@ -240,7 +247,8 @@ func (s *AuditLogService) ListClientNames(ctx context.Context) (clientNames []st
}
var results []Result
if err := query.Find(&results).Error; err != nil {
err = query.Find(&results).Error
if err != nil {
return nil, fmt.Errorf("failed to query client IDs: %w", err)
}

View File
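Here the separate AuditLogFilterDto plus PaginateAndSort pair gives way to a single PaginateFilterAndSort driven by utils.ListRequestOptions, with the filterable:"true" struct tags (see the model hunks above) marking which columns may be filtered generically. The options type is not part of this excerpt; from the call sites, its filter portion looks roughly like this — an inferred sketch only:

package utils

// ListRequestOptions is inferred from usage: it carries the former
// sorted-pagination parameters plus multi-value filters, each applied with
// SQL IN (values are type-asserted to string where needed).
type ListRequestOptions struct {
	// Pagination and sorting fields are not visible in this excerpt, so they
	// are omitted here.
	Filters map[string][]any
}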

@@ -7,15 +7,12 @@ import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/x509"
"encoding/base64"
"fmt"
"log/slog"
"os"
"path/filepath"
"path"
"time"
"github.com/fxamacker/cbor/v2"
"github.com/go-webauthn/webauthn/protocol"
"github.com/lestrrat-go/jwx/v3/jwa"
"github.com/lestrrat-go/jwx/v3/jwk"
@@ -25,6 +22,7 @@ import (
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/model"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
jwkutils "github.com/pocket-id/pocket-id/backend/internal/utils/jwk"
"github.com/pocket-id/pocket-id/backend/resources"
@@ -35,15 +33,19 @@ type TestService struct {
jwtService *JwtService
appConfigService *AppConfigService
ldapService *LdapService
fileStorage storage.FileStorage
appLockService *AppLockService
externalIdPKey jwk.Key
}
func NewTestService(db *gorm.DB, appConfigService *AppConfigService, jwtService *JwtService, ldapService *LdapService) (*TestService, error) {
func NewTestService(db *gorm.DB, appConfigService *AppConfigService, jwtService *JwtService, ldapService *LdapService, appLockService *AppLockService, fileStorage storage.FileStorage) (*TestService, error) {
s := &TestService{
db: db,
appConfigService: appConfigService,
jwtService: jwtService,
ldapService: ldapService,
appLockService: appLockService,
fileStorage: fileStorage,
}
err := s.initExternalIdP()
if err != nil {
@@ -79,7 +81,7 @@ func (s *TestService) SeedDatabase(baseURL string) error {
ID: "f4b89dc2-62fb-46bf-9f5f-c34f4eafe93e",
},
Username: "tim",
Email: "tim.cook@test.com",
Email: utils.Ptr("tim.cook@test.com"),
FirstName: "Tim",
LastName: "Cook",
DisplayName: "Tim Cook",
@@ -90,7 +92,7 @@ func (s *TestService) SeedDatabase(baseURL string) error {
ID: "1cd19686-f9a6-43f4-a41f-14a0bf5b4036",
},
Username: "craig",
Email: "craig.federighi@test.com",
Email: utils.Ptr("craig.federighi@test.com"),
FirstName: "Craig",
LastName: "Federighi",
DisplayName: "Craig Federighi",
@@ -286,8 +288,8 @@ func (s *TestService) SeedDatabase(baseURL string) error {
// openssl genpkey -algorithm EC -pkeyopt ec_paramgen_curve:P-256 | \
// openssl pkcs8 -topk8 -nocrypt | tee >(openssl pkey -pubout)
publicKeyPasskey1, _ := s.getCborPublicKey("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEwcOo5KV169KR67QEHrcYkeXE3CCxv2BgwnSq4VYTQxyLtdmKxegexa8JdwFKhKXa2BMI9xaN15BoL6wSCRFJhg==")
publicKeyPasskey2, _ := s.getCborPublicKey("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEj4qA0PrZzg8Co1C27nyUbzrp8Ewjr7eOlGI2LfrzmbL5nPhZRAdJ3hEaqrHMSnJBhfMqtQGKwDYpaLIQFAKLhw==")
publicKeyPasskey1, _ := base64.StdEncoding.DecodeString("pQMmIAEhWCDBw6jkpXXr0pHrtAQetxiR5cTcILG/YGDCdKrhVhNDHCJYIIu12YrF6B7Frwl3AUqEpdrYEwj3Fo3XkGgvrBIJEUmGAQI=")
publicKeyPasskey2, _ := base64.StdEncoding.DecodeString("pSJYIPmc+FlEB0neERqqscxKckGF8yq1AYrANiloshAUAouHAQIDJiABIVggj4qA0PrZzg8Co1C27nyUbzrp8Ewjr7eOlGI2LfrzmbI=")
webauthnCredentials := []model.WebauthnCredential{
{
Name: "Passkey 1",
@@ -316,6 +318,10 @@ func (s *TestService) SeedDatabase(baseURL string) error {
Challenge: "challenge",
ExpiresAt: datatype.DateTime(time.Now().Add(1 * time.Hour)),
UserVerification: "preferred",
CredentialParams: model.CredentialParameters{
{Type: "public-key", Algorithm: -7},
{Type: "public-key", Algorithm: -257},
},
}
if err := tx.Create(&webauthnSession).Error; err != nil {
return err
@@ -325,9 +331,10 @@ func (s *TestService) SeedDatabase(baseURL string) error {
Base: model.Base{
ID: "5f1fa856-c164-4295-961e-175a0d22d725",
},
Name: "Test API Key",
Key: "6c34966f57ef2bb7857649aff0e7ab3ad67af93c846342ced3f5a07be8706c20",
UserID: users[0].ID,
Name: "Test API Key",
Key: "6c34966f57ef2bb7857649aff0e7ab3ad67af93c846342ced3f5a07be8706c20",
UserID: users[0].ID,
ExpiresAt: datatype.DateTime(time.Now().Add(30 * 24 * time.Hour)),
}
if err := tx.Create(&apiKey).Error; err != nil {
return err
@@ -377,6 +384,20 @@ func (s *TestService) SeedDatabase(baseURL string) error {
}
}
keyValues := []model.KV{
{
Key: jwkutils.PrivateKeyDBKey,
// {"alg":"RS256","d":"mvMDWSdPPvcum0c0iEHE2gbqtV2NKMmLwrl9E6K7g8lTV95SePLnW_bwyMPV7EGp7PQk3l17I5XRhFjze7GqTnFIOgKzMianPs7jv2ELtBMGK0xOPATgu1iGb70xZ6vcvuEfRyY3dJ0zr4jpUdVuXwKmx9rK4IdZn2dFCKfvSuspqIpz11RhF1ALrqDLkxGVv7ZwNh0_VhJZU9hcjG5l6xc7rQEKpPRkZp0IdjkGS8Z0FskoVaiRIWAbZuiVFB9WCW8k1czC4HQTPLpII01bUQx2ludbm0UlXRgVU9ptUUbU7GAImQqTOW8LfPGklEvcgzlIlR_oqw4P9yBxLi-yMQ","dp":"pvNCSnnhbo8Igw9psPR-DicxFnkXlu_ix4gpy6efTrxA-z1VDFDioJ814vKQNioYDzpyAP1gfMPhRkvG_q0hRZsJah3Sb9dfA-WkhSWY7lURQP4yIBTMU0PF_rEATuS7lRciYk1SOx5fqXZd3m_LP0vpBC4Ujlq6NAq6CIjCnms","dq":"TtUVGCCkPNgfOLmkYXu7dxxUCV5kB01-xAEK2OY0n0pG8vfDophH4_D_ZC7nvJ8J9uDhs_3JStexq1lIvaWtG99RNTChIEDzpdn6GH9yaVcb_eB4uJjrNm64FhF8PGCCwxA-xMCZMaARKwhMB2_IOMkxUbWboL3gnhJ2rDO_QO0","e":"AQAB","kid":"8uHDw3M6rf8","kty":"RSA","n":"yaeEL0VKoPBXIAaWXsUgmu05lAvEIIdJn0FX9lHh4JE5UY9B83C5sCNdhs9iSWzpeP11EVjWp8i3Yv2CF7c7u50BXnVBGtxpZpFC-585UXacoJ0chUmarL9GRFJcM1nPHBTFu68aRrn1rIKNHUkNaaxFo0NFGl_4EDDTO8HwawTjwkPoQlRzeByhlvGPVvwgB3Fn93B8QJ_cZhXKxJvjjrC_8Pk76heC_ntEMru71Ix77BoC3j2TuyiN7m9RNBW8BU5q6lKoIdvIeZfTFLzi37iufyfvMrJTixp9zhNB1NxlLCeOZl2MXegtiGqd2H3cbAyqoOiv9ihUWTfXj7SxJw","p":"_Yylc9e07CKdqNRD2EosMC2mrhrEa9j5oY_l00Qyy4-jmCA59Q9viyqvveRo0U7cRvFA5BWgWN6GGLh1DG3X-QBqVr0dnk3uzbobb55RYUXyPLuBZI2q6w2oasbiDwPdY7KpkVv_H-bpITQlyDvO8hhucA6rUV7F6KTQVz8M3Ms","q":"y5p3hch-7jJ21TkAhp_Vk1fLCAuD4tbErwQs2of9ja8sB4iJOs5Wn6HD3P7Mc8Plye7qaLHvzc8I5g0tPKWvC0DPd_FLPXiWwMVAzee3NUX_oGeJNOQp11y1w_KqdO9qZqHSEPZ3NcFL_SZMFgggxhM1uzRiPzsVN0lnD_6prZU","qi":"2Grt6uXHm61ji3xSdkBWNtUnj19vS1-7rFJp5SoYztVQVThf_W52BAiXKBdYZDRVoItC_VS2NvAOjeJjhYO_xQ_q3hK7MdtuXfEPpLnyXKkmWo3lrJ26wbeF6l05LexCkI7ShsOuSt-dsyaTJTszuKDIA6YOfWvfo3aVZmlWRaI","use":"sig"}
Value: utils.Ptr("7d/5hl7diJ2rnFL14hEAQf9tzpu29aqXQ8jpJ2iqqKUNFZpdOkEpud0CmRv4H3r8yyk2u/Gqqj9klSy58DJkYXGF5PAYgLyoBIb7L3JXWRbxg4cQ3QJCug13l2OTmpAKoVc+rmX8c3j3h1sNqyJ+7Ql5sS0jSeyiYgIsFNCdnK5alBDyvtcpe/QDpklmP4JCeVpvmf2rLGplk3g5UO5ydJ8UiDXxfDmi+gF6NKJvrGnnah8Ar3G/x88z+tTJtp0DIQFwxXwUM2XZqzEVGm8K2r0w5o9/Keh6bBBaiuH2C78ZOaijGV3DovhR+e9J0cYUYGwT42MZMx9fSWQ/lvWGGnf+Uq3MXJfjWSREfhkp8KTQwR9F7+dnVJWswOEk7jPR8I7hCWTMxJyvaFX3wgAXIVmhrgXZQQbYOqTt56IoqUl0xOJku8dA8opg2UcLlmmuOh6+hfkXKsiiS/H/9c1BVIGj1fCOiT6IePh4wKKSTbwJnPD5EKmdJpgTsUpjcDnXQKY4ReO0UpdRdKxwRDDLeQuG6j+ljGxR9GPudCU9Nmci6rFVI6n5LWYkQxBA1O73RpmXRZPDzntDfpXMEonkmSvOoxaCK2Id7CRKMdqvR0kEouwnhk5WSFtsfi3sA0pkXzPFxwZeWM8vFtbffZOZzXaOhxCOfcj1NClZohlZhyc4jvkxmrpY7PSaAzih0AmHI7y0LYFi6fZu/K4EheVa1+KF55nWZ8ARikHMWKAKkyExkTak7xyN884TDmzURRaPlQg4jzQte5WMNjAG/hlHibdMBNvgwiYd49ZxteJ8ABdbiXVRl+2JGbdjl2ubpQZwOn7bJKlqO56bIwsZ+e4+pXsuOGdBahkHrUjtMEmH3DZbGc6CJLbcmdhdpApLQRRcLAazxJhzAwJ47FRYsHsj57LnYNvmcKdIxw8rxCdLUuzz95uw0T3ankEO5J9sjem+HMEuKdwXK1UcuOn2rjR8Sd/BuvQmeso27dFbPXqXYNS90Ml45YyTvcKSiopD181oZR703TFUSpR7dsiqROMr+p/2jN9h6a8WbQ8xpksyclaQByY/M77AssbXnG6wfhRsntNIINCZLbBnjXOyz6ZHIC5K4tSTdcnWaiYPeRPQmnw9UUvHAcNU2yMWsy0eU377yDS0WstTxOdQutTdkczl8kv5Lo26JiEK7mSIuRK19ffF9Zz8FG8+eKv5zdyIPjyQRDYBysUoDv5huKe2eoxJu/MWS2Pql/ZtUGeD6Ozm3mCvh0vQ9ceagBkY6Ocm3du0ziAKP29Ri0mjg4DizVorbLzsh+EQH/s2Pi9MnjUZDlEmuLl2Xfp7/w4j/8u0N0tVR70VDFuGdKpTjFY3vS8EJrPtyMTM51x1D9rb8gIql8aR/rJw4YF+huxg1mv5n6+tGVqg5msbPmF12eJijP4lkmaRwIpLW5pJTtaDkUj7uOeu1mm4k+Dt5nh0/0jPHzrv6bcTCcbV7UjMHDoTXXqEpFAAJ66rHR7zdAJu+YKsnTIZyLmOpcowq7LL8G9qTvV0OSpyQWUIavRSgbDHFqEqRs+JU94jAzkq8nCY5MTd9m5sIv9InfdT3k+pwpsE/FKge8nghFLtbUrafGkzTky8SE2druvVcIvbfXMfLIKRUYjJgnWc0gQzF5J6pzXM7D2r/RG6JDzASqjlbURq6v9bhNerlOVdMujWKEEVcKWIzlbt4RkihRjM8AUqIZQOyicGQ+4yfIjAHw5viuABONYs3OIWULnFqJxdvS9rNKhfxSjIq9cfqyzevq2xrRoMXEonobh6M3bD2Vang8OAeVeD1OXWPERi4pepCYFS9RJ/Xa/UWxptsqSNuGcb3fAzQSmLpXLGdWRoKXvSe7EYgc0bGcLOjSTu5RURKo+EF9i4KT9EJauf6VXw5dTf/CCIJRXE1bWzXhSCFYntohYhX2ldOCDYpi/jFBC6Vtkw0ud3/xq8Nmhd5gUk+SpngByCZH3Pm3H+jvlbMpiqkDkm1v74hDX13Xhrcw2eWyuqKBVoRCCniUvwpYNbGvBfjC6Hcizv0Aybciwj+4nybt5EPoEUm6S6Gs7fG7QpPdvrzpAxX70MlmdkF/gwyuhbEeJhLK+WL7qAsN5CvHPzVbsIf90x+nGTtMJPgpxVr0tJMj+vprXV4WxutfARBiOnqe58MhA857sd+MzKBgKnoLOBRTiC3qc/0/ULwbG2HCCD7nmwzz7M4nUuMvo8rgS7z0BF68OClT8X3JwSXbL5Wg=="),
},
}
for _, kv := range keyValues {
if err := tx.Create(&kv).Error; err != nil {
return err
}
}
return nil
})
@@ -424,8 +445,9 @@ func (s *TestService) ResetDatabase() error {
}
func (s *TestService) ResetApplicationImages(ctx context.Context) error {
if err := os.RemoveAll(common.EnvConfig.UploadPath); err != nil {
slog.ErrorContext(ctx, "Error removing directory", slog.Any("error", err))
err := s.fileStorage.DeleteAll(ctx, "/")
if err != nil {
slog.ErrorContext(ctx, "Error removing uploads", slog.Any("error", err))
return err
}
@@ -435,13 +457,20 @@ func (s *TestService) ResetApplicationImages(ctx context.Context) error {
}
for _, file := range files {
srcFilePath := filepath.Join("images", file.Name())
destFilePath := filepath.Join(common.EnvConfig.UploadPath, "application-images", file.Name())
err := utils.CopyEmbeddedFileToDisk(srcFilePath, destFilePath)
if file.IsDir() {
continue
}
srcFilePath := path.Join("images", file.Name())
srcFile, err := resources.FS.Open(srcFilePath)
if err != nil {
return err
}
err = s.fileStorage.Save(ctx, path.Join("application-images", file.Name()), srcFile)
if err != nil {
srcFile.Close()
return err
}
srcFile.Close()
}
return nil
@@ -454,47 +483,29 @@ func (s *TestService) ResetAppConfig(ctx context.Context) error {
return err
}
// Manually set instance ID
err = s.appConfigService.UpdateAppConfigValues(ctx, "instanceId", "test-instance-id")
if err != nil {
return err
}
// Reload the app config from the database after resetting the values
return s.appConfigService.LoadDbConfig(ctx)
err = s.appConfigService.LoadDbConfig(ctx)
if err != nil {
return err
}
// Reload the JWK
if err := s.jwtService.LoadOrGenerateKey(); err != nil {
return err
}
return nil
}
func (s *TestService) SetJWTKeys() {
const privateKeyString = `{"alg":"RS256","d":"mvMDWSdPPvcum0c0iEHE2gbqtV2NKMmLwrl9E6K7g8lTV95SePLnW_bwyMPV7EGp7PQk3l17I5XRhFjze7GqTnFIOgKzMianPs7jv2ELtBMGK0xOPATgu1iGb70xZ6vcvuEfRyY3dJ0zr4jpUdVuXwKmx9rK4IdZn2dFCKfvSuspqIpz11RhF1ALrqDLkxGVv7ZwNh0_VhJZU9hcjG5l6xc7rQEKpPRkZp0IdjkGS8Z0FskoVaiRIWAbZuiVFB9WCW8k1czC4HQTPLpII01bUQx2ludbm0UlXRgVU9ptUUbU7GAImQqTOW8LfPGklEvcgzlIlR_oqw4P9yBxLi-yMQ","dp":"pvNCSnnhbo8Igw9psPR-DicxFnkXlu_ix4gpy6efTrxA-z1VDFDioJ814vKQNioYDzpyAP1gfMPhRkvG_q0hRZsJah3Sb9dfA-WkhSWY7lURQP4yIBTMU0PF_rEATuS7lRciYk1SOx5fqXZd3m_LP0vpBC4Ujlq6NAq6CIjCnms","dq":"TtUVGCCkPNgfOLmkYXu7dxxUCV5kB01-xAEK2OY0n0pG8vfDophH4_D_ZC7nvJ8J9uDhs_3JStexq1lIvaWtG99RNTChIEDzpdn6GH9yaVcb_eB4uJjrNm64FhF8PGCCwxA-xMCZMaARKwhMB2_IOMkxUbWboL3gnhJ2rDO_QO0","e":"AQAB","kid":"8uHDw3M6rf8","kty":"RSA","n":"yaeEL0VKoPBXIAaWXsUgmu05lAvEIIdJn0FX9lHh4JE5UY9B83C5sCNdhs9iSWzpeP11EVjWp8i3Yv2CF7c7u50BXnVBGtxpZpFC-585UXacoJ0chUmarL9GRFJcM1nPHBTFu68aRrn1rIKNHUkNaaxFo0NFGl_4EDDTO8HwawTjwkPoQlRzeByhlvGPVvwgB3Fn93B8QJ_cZhXKxJvjjrC_8Pk76heC_ntEMru71Ix77BoC3j2TuyiN7m9RNBW8BU5q6lKoIdvIeZfTFLzi37iufyfvMrJTixp9zhNB1NxlLCeOZl2MXegtiGqd2H3cbAyqoOiv9ihUWTfXj7SxJw","p":"_Yylc9e07CKdqNRD2EosMC2mrhrEa9j5oY_l00Qyy4-jmCA59Q9viyqvveRo0U7cRvFA5BWgWN6GGLh1DG3X-QBqVr0dnk3uzbobb55RYUXyPLuBZI2q6w2oasbiDwPdY7KpkVv_H-bpITQlyDvO8hhucA6rUV7F6KTQVz8M3Ms","q":"y5p3hch-7jJ21TkAhp_Vk1fLCAuD4tbErwQs2of9ja8sB4iJOs5Wn6HD3P7Mc8Plye7qaLHvzc8I5g0tPKWvC0DPd_FLPXiWwMVAzee3NUX_oGeJNOQp11y1w_KqdO9qZqHSEPZ3NcFL_SZMFgggxhM1uzRiPzsVN0lnD_6prZU","qi":"2Grt6uXHm61ji3xSdkBWNtUnj19vS1-7rFJp5SoYztVQVThf_W52BAiXKBdYZDRVoItC_VS2NvAOjeJjhYO_xQ_q3hK7MdtuXfEPpLnyXKkmWo3lrJ26wbeF6l05LexCkI7ShsOuSt-dsyaTJTszuKDIA6YOfWvfo3aVZmlWRaI","use":"sig"}`
privateKey, _ := jwk.ParseKey([]byte(privateKeyString))
_ = s.jwtService.SetKey(privateKey)
}
// getCborPublicKey decodes a Base64 encoded public key and returns the CBOR encoded COSE key
func (s *TestService) getCborPublicKey(base64PublicKey string) ([]byte, error) {
decodedKey, err := base64.StdEncoding.DecodeString(base64PublicKey)
if err != nil {
return nil, fmt.Errorf("failed to decode base64 key: %w", err)
}
pubKey, err := x509.ParsePKIXPublicKey(decodedKey)
if err != nil {
return nil, fmt.Errorf("failed to parse public key: %w", err)
}
ecdsaPubKey, ok := pubKey.(*ecdsa.PublicKey)
if !ok {
return nil, fmt.Errorf("not an ECDSA public key")
}
coseKey := map[int]interface{}{
1: 2, // Key type: EC2
3: -7, // Algorithm: ECDSA with SHA-256
-1: 1, // Curve: P-256
-2: ecdsaPubKey.X.Bytes(), // X coordinate
-3: ecdsaPubKey.Y.Bytes(), // Y coordinate
}
cborPublicKey, err := cbor.Marshal(coseKey)
if err != nil {
return nil, fmt.Errorf("failed to marshal COSE key: %w", err)
}
return cborPublicKey, nil
func (s *TestService) ResetLock(ctx context.Context) error {
_, err := s.appLockService.Acquire(ctx, true)
return err
}
// SyncLdap triggers an LDAP synchronization
@@ -521,7 +532,7 @@ func (s *TestService) SetLdapTestConfig(ctx context.Context) error {
"ldapAttributeGroupUniqueIdentifier": "uuid",
"ldapAttributeGroupName": "uid",
"ldapAttributeGroupMember": "member",
"ldapAttributeAdminGroup": "admin_group",
"ldapAdminGroupName": "admin_group",
"ldapSoftDeleteUsers": "true",
"ldapEnabled": "true",
}

View File

@@ -62,9 +62,13 @@ func (srv *EmailService) SendTestEmail(ctx context.Context, recipientUserId stri
return err
}
if user.Email == nil {
return &common.UserEmailNotSetError{}
}
return SendEmail(ctx, srv,
email.Address{
Email: user.Email,
Email: *user.Email,
Name: user.FullName(),
}, TestTemplate, nil)
}
@@ -278,16 +282,18 @@ func prepareBody[V any](srv *EmailService, template email.Template[V], data *ema
var htmlHeader = textproto.MIMEHeader{}
htmlHeader.Add("Content-Type", "text/html; charset=UTF-8")
htmlHeader.Add("Content-Transfer-Encoding", "8bit")
htmlHeader.Add("Content-Transfer-Encoding", "quoted-printable")
htmlPart, err := mpart.CreatePart(htmlHeader)
if err != nil {
return "", "", fmt.Errorf("create html part: %w", err)
}
err = email.GetTemplate(srv.htmlTemplates, template).ExecuteTemplate(htmlPart, "root", data)
htmlQp := quotedprintable.NewWriter(htmlPart)
err = email.GetTemplate(srv.htmlTemplates, template).ExecuteTemplate(htmlQp, "root", data)
if err != nil {
return "", "", fmt.Errorf("execute html template: %w", err)
}
htmlQp.Close()
err = mpart.Close()
if err != nil {
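For context on the encoding switch above: quoted-printable soft-wraps long lines, so the rendered HTML no longer runs into SMTP line-length limits. A minimal standalone sketch of the mime/quotedprintable wrapping (illustrative only, not the service's actual wiring):
package main
import (
	"mime/quotedprintable"
	"os"
)
func main() {
	// Wrap any io.Writer; the encoder soft-wraps output lines at 76 characters.
	qp := quotedprintable.NewWriter(os.Stdout)
	_, _ = qp.Write([]byte("<p>a very long line of rendered HTML...</p>"))
	// Close flushes the final partial line; call it before closing the MIME part.
	_ = qp.Close()
}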

View File

@@ -0,0 +1,217 @@
package service
import (
"archive/zip"
"context"
"encoding/json"
"fmt"
"io"
"path/filepath"
"gorm.io/gorm"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
// ExportService handles exporting Pocket ID data into a ZIP archive.
type ExportService struct {
db *gorm.DB
storage storage.FileStorage
}
func NewExportService(db *gorm.DB, storage storage.FileStorage) *ExportService {
return &ExportService{
db: db,
storage: storage,
}
}
// ExportToZip performs the full export process and writes the ZIP data to the given writer.
func (s *ExportService) ExportToZip(ctx context.Context, w io.Writer) error {
dbData, err := s.extractDatabase()
if err != nil {
return err
}
return s.writeExportZipStream(ctx, w, dbData)
}
// extractDatabase reads all tables into a DatabaseExport struct
func (s *ExportService) extractDatabase() (DatabaseExport, error) {
schema, err := utils.LoadDBSchemaTypes(s.db)
if err != nil {
return DatabaseExport{}, fmt.Errorf("failed to load schema types: %w", err)
}
version, err := s.schemaVersion()
if err != nil {
return DatabaseExport{}, err
}
out := DatabaseExport{
Provider: s.db.Name(),
Version: version,
Tables: map[string][]map[string]any{},
// These tables need to be inserted in a specific order because of foreign key constraints
// Not all tables are listed here, because not all tables are order-dependent
TableOrder: []string{"users", "user_groups", "oidc_clients"},
}
for table := range schema {
if table == "storage" || table == "schema_migrations" {
continue
}
err = s.dumpTable(table, schema[table], &out)
if err != nil {
return DatabaseExport{}, err
}
}
return out, nil
}
func (s *ExportService) schemaVersion() (uint, error) {
var version uint
if err := s.db.Raw("SELECT version FROM schema_migrations").Row().Scan(&version); err != nil {
return 0, fmt.Errorf("failed to query schema version: %w", err)
}
return version, nil
}
// dumpTable selects all rows from a table and appends them to out.Tables
func (s *ExportService) dumpTable(table string, types utils.DBSchemaTableTypes, out *DatabaseExport) error {
rows, err := s.db.Raw("SELECT * FROM " + table).Rows()
if err != nil {
return fmt.Errorf("failed to read table %s: %w", table, err)
}
defer rows.Close()
cols, _ := rows.Columns()
if len(cols) != len(types) {
// Should never happen...
return fmt.Errorf("mismatched columns in table (%d) and schema (%d)", len(cols), len(types))
}
for rows.Next() {
vals := s.getScanValuesForTable(cols, types)
err = rows.Scan(vals...)
if err != nil {
return fmt.Errorf("failed to scan row in table %s: %w", table, err)
}
rowMap := make(map[string]any, len(cols))
for i, col := range cols {
rowMap[col] = vals[i]
}
// Skip the app lock row in the kv table
if table == "kv" {
if keyPtr, ok := rowMap["key"].(*string); ok && keyPtr != nil && *keyPtr == lockKey {
continue
}
}
out.Tables[table] = append(out.Tables[table], rowMap)
}
return rows.Err()
}
func (s *ExportService) getScanValuesForTable(cols []string, types utils.DBSchemaTableTypes) []any {
res := make([]any, len(cols))
for i, col := range cols {
// Store a pointer
// Note: don't create a helper function for this switch, because it would return type "any" and mess everything up
// If the column is nullable, we need a pointer to a pointer!
switch types[col].Name {
case "boolean", "bool":
var x bool
if types[col].Nullable {
res[i] = utils.Ptr(utils.Ptr(x))
} else {
res[i] = utils.Ptr(x)
}
case "blob", "bytea", "jsonb":
// Treat jsonb columns as binary too
var x []byte
if types[col].Nullable {
res[i] = utils.Ptr(utils.Ptr(x))
} else {
res[i] = utils.Ptr(x)
}
case "timestamp", "timestamptz", "timestamp with time zone", "datetime":
var x datatype.DateTime
if types[col].Nullable {
res[i] = utils.Ptr(utils.Ptr(x))
} else {
res[i] = utils.Ptr(x)
}
case "integer", "int", "bigint":
var x int64
if types[col].Nullable {
res[i] = utils.Ptr(utils.Ptr(x))
} else {
res[i] = utils.Ptr(x)
}
default:
// Treat everything else as a string (including the "numeric" type)
var x string
if types[col].Nullable {
res[i] = utils.Ptr(utils.Ptr(x))
} else {
res[i] = utils.Ptr(x)
}
}
}
return res
}
func (s *ExportService) writeExportZipStream(ctx context.Context, w io.Writer, dbData DatabaseExport) error {
zipWriter := zip.NewWriter(w)
// Add database.json
jsonWriter, err := zipWriter.Create("database.json")
if err != nil {
return fmt.Errorf("failed to create database.json in zip: %w", err)
}
jsonEncoder := json.NewEncoder(jsonWriter)
jsonEncoder.SetEscapeHTML(false)
if err := jsonEncoder.Encode(dbData); err != nil {
return fmt.Errorf("failed to encode database.json: %w", err)
}
// Add uploaded files
if err := s.addUploadsToZip(ctx, zipWriter); err != nil {
return err
}
return zipWriter.Close()
}
// addUploadsToZip adds all files from the storage to the ZIP archive under the "uploads/" directory
func (s *ExportService) addUploadsToZip(ctx context.Context, zipWriter *zip.Writer) error {
return s.storage.Walk(ctx, "/", func(p storage.ObjectInfo) error {
zipPath := filepath.Join("uploads", p.Path)
w, err := zipWriter.Create(zipPath)
if err != nil {
return fmt.Errorf("failed to create zip entry for %s: %w", zipPath, err)
}
f, _, err := s.storage.Open(ctx, p.Path)
if err != nil {
return fmt.Errorf("failed to open file %s: %w", zipPath, err)
}
defer f.Close()
if _, err := io.Copy(w, f); err != nil {
return fmt.Errorf("failed to copy file %s into zip: %w", zipPath, err)
}
return nil
})
}
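A hedged usage sketch for the exporter above, streaming the archive straight to a local file. The helper name and output path are illustrative and not part of the diff; it assumes the surrounding package plus an "os" import.
// Sketch (not in the diff): stream a full export to a local file.
func exportToFile(ctx context.Context, db *gorm.DB, fs storage.FileStorage) error {
	f, err := os.Create("pocket-id-export.zip")
	if err != nil {
		return err
	}
	defer f.Close()
	// ExportToZip writes database.json plus the uploads/ tree into the ZIP stream.
	return NewExportService(db, fs).ExportToZip(ctx, f)
}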

View File

@@ -13,35 +13,19 @@ import (
"net/netip"
"os"
"path/filepath"
"strings"
"sync"
"time"
"github.com/oschwald/maxminddb-golang/v2"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"github.com/pocket-id/pocket-id/backend/internal/common"
)
type GeoLiteService struct {
httpClient *http.Client
disableUpdater bool
mutex sync.RWMutex
localIPv6Ranges []*net.IPNet
}
var localhostIPNets = []*net.IPNet{
{IP: net.IPv4(127, 0, 0, 0), Mask: net.CIDRMask(8, 32)}, // 127.0.0.0/8
{IP: net.IPv6loopback, Mask: net.CIDRMask(128, 128)}, // ::1/128
}
var privateLanIPNets = []*net.IPNet{
{IP: net.IPv4(10, 0, 0, 0), Mask: net.CIDRMask(8, 32)}, // 10.0.0.0/8
{IP: net.IPv4(172, 16, 0, 0), Mask: net.CIDRMask(12, 32)}, // 172.16.0.0/12
{IP: net.IPv4(192, 168, 0, 0), Mask: net.CIDRMask(16, 32)}, // 192.168.0.0/16
}
var tailscaleIPNets = []*net.IPNet{
{IP: net.IPv4(100, 64, 0, 0), Mask: net.CIDRMask(10, 32)}, // 100.64.0.0/10
httpClient *http.Client
disableUpdater bool
mutex sync.RWMutex
}
// NewGeoLiteService initializes a new GeoLiteService instance and starts a goroutine to update the GeoLite2 City database.
@@ -56,67 +40,9 @@ func NewGeoLiteService(httpClient *http.Client) *GeoLiteService {
service.disableUpdater = true
}
// Initialize IPv6 local ranges
err := service.initializeIPv6LocalRanges()
if err != nil {
slog.Warn("Failed to initialize IPv6 local ranges", slog.Any("error", err))
}
return service
}
// initializeIPv6LocalRanges parses the LOCAL_IPV6_RANGES environment variable
func (s *GeoLiteService) initializeIPv6LocalRanges() error {
rangesEnv := common.EnvConfig.LocalIPv6Ranges
if rangesEnv == "" {
return nil // No local IPv6 ranges configured
}
ranges := strings.Split(rangesEnv, ",")
localRanges := make([]*net.IPNet, 0, len(ranges))
for _, rangeStr := range ranges {
rangeStr = strings.TrimSpace(rangeStr)
if rangeStr == "" {
continue
}
_, ipNet, err := net.ParseCIDR(rangeStr)
if err != nil {
return fmt.Errorf("invalid IPv6 range '%s': %w", rangeStr, err)
}
// Ensure it's an IPv6 range
if ipNet.IP.To4() != nil {
return fmt.Errorf("range '%s' is not a valid IPv6 range", rangeStr)
}
localRanges = append(localRanges, ipNet)
}
s.localIPv6Ranges = localRanges
if len(localRanges) > 0 {
slog.Info("Initialized IPv6 local ranges", slog.Int("count", len(localRanges)))
}
return nil
}
// isLocalIPv6 checks if the given IPv6 address is within any of the configured local ranges
func (s *GeoLiteService) isLocalIPv6(ip net.IP) bool {
if ip.To4() != nil {
return false // Not an IPv6 address
}
for _, localRange := range s.localIPv6Ranges {
if localRange.Contains(ip) {
return true
}
}
return false
}
func (s *GeoLiteService) DisableUpdater() bool {
return s.disableUpdater
}
@@ -129,26 +55,17 @@ func (s *GeoLiteService) GetLocationByIP(ipAddress string) (country, city string
// Check the IP address against known private IP ranges
if ip := net.ParseIP(ipAddress); ip != nil {
// Check IPv6 local ranges first
if s.isLocalIPv6(ip) {
if utils.IsLocalIPv6(ip) {
return "Internal Network", "LAN", nil
}
// Check existing IPv4 ranges
for _, ipNet := range tailscaleIPNets {
if ipNet.Contains(ip) {
return "Internal Network", "Tailscale", nil
}
if utils.IsTailscaleIP(ip) {
return "Internal Network", "Tailscale", nil
}
for _, ipNet := range privateLanIPNets {
if ipNet.Contains(ip) {
return "Internal Network", "LAN", nil
}
if utils.IsPrivateIP(ip) {
return "Internal Network", "LAN", nil
}
for _, ipNet := range localhostIPNets {
if ipNet.Contains(ip) {
return "Internal Network", "localhost", nil
}
if utils.IsLocalhostIP(ip) {
return "Internal Network", "localhost", nil
}
}
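The utils.IsLocalhostIP / IsPrivateIP / IsTailscaleIP / IsLocalIPv6 helpers referenced above are not part of this diff. A plausible shape for one of them, reusing the CIDR list removed from this file (hypothetical, assuming the net package; the real implementation in internal/utils may differ):
// Hypothetical sketch of one of the internal/utils helpers.
var privateLanIPNets = []*net.IPNet{
	{IP: net.IPv4(10, 0, 0, 0), Mask: net.CIDRMask(8, 32)},     // 10.0.0.0/8
	{IP: net.IPv4(172, 16, 0, 0), Mask: net.CIDRMask(12, 32)},  // 172.16.0.0/12
	{IP: net.IPv4(192, 168, 0, 0), Mask: net.CIDRMask(16, 32)}, // 192.168.0.0/16
}
// IsPrivateIP reports whether ip falls within a private LAN range.
func IsPrivateIP(ip net.IP) bool {
	for _, ipNet := range privateLanIPNets {
		if ipNet.Contains(ip) {
			return true
		}
	}
	return false
}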

View File

@@ -1,220 +0,0 @@
package service
import (
"net"
"net/http"
"testing"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestGeoLiteService_IPv6LocalRanges(t *testing.T) {
tests := []struct {
name string
localRanges string
testIP string
expectedCountry string
expectedCity string
expectError bool
}{
{
name: "IPv6 in local range",
localRanges: "2001:0db8:abcd:000::/56,2001:0db8:abcd:001::/56",
testIP: "2001:0db8:abcd:000::1",
expectedCountry: "Internal Network",
expectedCity: "LAN",
expectError: false,
},
{
name: "IPv6 not in local range",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "2001:0db8:ffff:000::1",
expectError: true,
},
{
name: "Multiple ranges - second range match",
localRanges: "2001:0db8:abcd:000::/56,2001:0db8:abcd:001::/56",
testIP: "2001:0db8:abcd:001::1",
expectedCountry: "Internal Network",
expectedCity: "LAN",
expectError: false,
},
{
name: "Empty local ranges",
localRanges: "",
testIP: "2001:0db8:abcd:000::1",
expectError: true,
},
{
name: "IPv4 private address still works",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "192.168.1.1",
expectedCountry: "Internal Network",
expectedCity: "LAN",
expectError: false,
},
{
name: "IPv6 loopback",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "::1",
expectedCountry: "Internal Network",
expectedCity: "localhost",
expectError: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
originalConfig := common.EnvConfig.LocalIPv6Ranges
common.EnvConfig.LocalIPv6Ranges = tt.localRanges
defer func() {
common.EnvConfig.LocalIPv6Ranges = originalConfig
}()
service := NewGeoLiteService(&http.Client{})
country, city, err := service.GetLocationByIP(tt.testIP)
if tt.expectError {
if err == nil && country != "Internal Network" {
t.Errorf("Expected error or internal network classification for external IP")
}
} else {
require.NoError(t, err)
assert.Equal(t, tt.expectedCountry, country)
assert.Equal(t, tt.expectedCity, city)
}
})
}
}
func TestGeoLiteService_isLocalIPv6(t *testing.T) {
tests := []struct {
name string
localRanges string
testIP string
expected bool
}{
{
name: "Valid IPv6 in range",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "2001:0db8:abcd:000::1",
expected: true,
},
{
name: "Valid IPv6 not in range",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "2001:0db8:ffff:000::1",
expected: false,
},
{
name: "IPv4 address should return false",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "192.168.1.1",
expected: false,
},
{
name: "No ranges configured",
localRanges: "",
testIP: "2001:0db8:abcd:000::1",
expected: false,
},
{
name: "Edge of range",
localRanges: "2001:0db8:abcd:000::/56",
testIP: "2001:0db8:abcd:00ff:ffff:ffff:ffff:ffff",
expected: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
originalConfig := common.EnvConfig.LocalIPv6Ranges
common.EnvConfig.LocalIPv6Ranges = tt.localRanges
defer func() {
common.EnvConfig.LocalIPv6Ranges = originalConfig
}()
service := NewGeoLiteService(&http.Client{})
ip := net.ParseIP(tt.testIP)
if ip == nil {
t.Fatalf("Invalid test IP: %s", tt.testIP)
}
result := service.isLocalIPv6(ip)
assert.Equal(t, tt.expected, result)
})
}
}
func TestGeoLiteService_initializeIPv6LocalRanges(t *testing.T) {
tests := []struct {
name string
envValue string
expectError bool
expectCount int
}{
{
name: "Valid IPv6 ranges",
envValue: "2001:0db8:abcd:000::/56,2001:0db8:abcd:001::/56",
expectError: false,
expectCount: 2,
},
{
name: "Empty environment variable",
envValue: "",
expectError: false,
expectCount: 0,
},
{
name: "Invalid CIDR notation",
envValue: "2001:0db8:abcd:000::/999",
expectError: true,
expectCount: 0,
},
{
name: "IPv4 range in IPv6 env var",
envValue: "192.168.1.0/24",
expectError: true,
expectCount: 0,
},
{
name: "Mixed valid and invalid ranges",
envValue: "2001:0db8:abcd:000::/56,invalid-range",
expectError: true,
expectCount: 0,
},
{
name: "Whitespace handling",
envValue: " 2001:0db8:abcd:000::/56 , 2001:0db8:abcd:001::/56 ",
expectError: false,
expectCount: 2,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
originalConfig := common.EnvConfig.LocalIPv6Ranges
common.EnvConfig.LocalIPv6Ranges = tt.envValue
defer func() {
common.EnvConfig.LocalIPv6Ranges = originalConfig
}()
service := &GeoLiteService{
httpClient: &http.Client{},
}
err := service.initializeIPv6LocalRanges()
if tt.expectError {
require.Error(t, err)
} else {
require.NoError(t, err)
}
assert.Len(t, service.localIPv6Ranges, tt.expectCount)
})
}
}

View File

@@ -0,0 +1,264 @@
package service
import (
"archive/zip"
"bytes"
"context"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"slices"
"strings"
"gorm.io/gorm"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
// ImportService handles importing Pocket ID data from an exported ZIP archive.
type ImportService struct {
db *gorm.DB
storage storage.FileStorage
}
type DatabaseExport struct {
Provider string `json:"provider"`
Version uint `json:"version"`
Tables map[string][]map[string]any `json:"tables"`
TableOrder []string `json:"tableOrder"`
}
func NewImportService(db *gorm.DB, storage storage.FileStorage) *ImportService {
return &ImportService{
db: db,
storage: storage,
}
}
// ImportFromZip performs the full import process from the given ZIP reader.
func (s *ImportService) ImportFromZip(ctx context.Context, r *zip.Reader) error {
dbData, err := processZipDatabaseJson(r.File)
if err != nil {
return err
}
err = s.ImportDatabase(dbData)
if err != nil {
return err
}
err = s.importUploads(ctx, r.File)
if err != nil {
return err
}
return nil
}
// ImportDatabase only imports the database data from the given DatabaseExport struct.
func (s *ImportService) ImportDatabase(dbData DatabaseExport) error {
err := s.resetSchema(dbData.Version, dbData.Provider)
if err != nil {
return err
}
err = s.insertData(dbData)
if err != nil {
return err
}
return nil
}
// processZipDatabaseJson extracts database.json from the ZIP archive
func processZipDatabaseJson(files []*zip.File) (dbData DatabaseExport, err error) {
for _, f := range files {
if f.Name == "database.json" {
return parseDatabaseJsonStream(f)
}
}
return dbData, errors.New("database.json not found in the ZIP file")
}
func parseDatabaseJsonStream(f *zip.File) (dbData DatabaseExport, err error) {
rc, err := f.Open()
if err != nil {
return dbData, fmt.Errorf("failed to open database.json: %w", err)
}
defer rc.Close()
err = json.NewDecoder(rc).Decode(&dbData)
if err != nil {
return dbData, fmt.Errorf("failed to decode database.json: %w", err)
}
return dbData, nil
}
// importUploads imports files from the uploads/ directory in the ZIP archive
func (s *ImportService) importUploads(ctx context.Context, files []*zip.File) error {
const maxFileSize = 50 << 20 // 50 MiB
const uploadsPrefix = "uploads/"
for _, f := range files {
if !strings.HasPrefix(f.Name, uploadsPrefix) {
continue
}
if f.UncompressedSize64 > maxFileSize {
return fmt.Errorf("file %s too large (%d bytes)", f.Name, f.UncompressedSize64)
}
targetPath := strings.TrimPrefix(f.Name, uploadsPrefix)
if strings.HasSuffix(f.Name, "/") || targetPath == "" {
continue // Skip directories
}
err := s.storage.DeleteAll(ctx, targetPath)
if err != nil {
return fmt.Errorf("failed to delete existing file %s: %w", targetPath, err)
}
rc, err := f.Open()
if err != nil {
return err
}
buf, err := io.ReadAll(rc)
rc.Close()
if err != nil {
return fmt.Errorf("read file %s: %w", f.Name, err)
}
err = s.storage.Save(ctx, targetPath, bytes.NewReader(buf))
if err != nil {
return fmt.Errorf("failed to save file %s: %w", targetPath, err)
}
}
return nil
}
// resetSchema drops the existing schema and migrates to the target version
func (s *ImportService) resetSchema(targetVersion uint, exportDbProvider string) error {
sqlDb, err := s.db.DB()
if err != nil {
return fmt.Errorf("failed to get sql.DB: %w", err)
}
m, err := utils.GetEmbeddedMigrateInstance(sqlDb)
if err != nil {
return fmt.Errorf("failed to get migrate instance: %w", err)
}
err = m.Drop()
if err != nil {
return fmt.Errorf("failed to drop existing schema: %w", err)
}
// Needs to be called again to re-create the schema_migrations table
m, err = utils.GetEmbeddedMigrateInstance(sqlDb)
if err != nil {
return fmt.Errorf("failed to get migrate instance: %w", err)
}
err = m.Migrate(targetVersion)
if err != nil {
return fmt.Errorf("migration failed: %w", err)
}
return nil
}
// insertData populates the DB with the imported data
func (s *ImportService) insertData(dbData DatabaseExport) error {
schema, err := utils.LoadDBSchemaTypes(s.db)
if err != nil {
return fmt.Errorf("failed to load schema types: %w", err)
}
return s.db.Transaction(func(tx *gorm.DB) error {
// Iterate through all tables
// Some tables need to be processed in order
tables := make([]string, 0, len(dbData.Tables))
tables = append(tables, dbData.TableOrder...)
for t := range dbData.Tables {
// Skip tables that are already listed in TableOrder (added above)
// Also skip the schema_migrations table

if slices.Contains(dbData.TableOrder, t) || t == "schema_migrations" {
continue
}
tables = append(tables, t)
}
// Insert rows
for _, table := range tables {
for _, row := range dbData.Tables[table] {
err = normalizeRowWithSchema(row, table, schema)
if err != nil {
return fmt.Errorf("failed to normalize row for table '%s': %w", table, err)
}
err = tx.Table(table).Create(row).Error
if err != nil {
return fmt.Errorf("failed inserting into table '%s': %w", table, err)
}
}
}
return nil
})
}
// normalizeRowWithSchema converts row values based on the DB schema
func normalizeRowWithSchema(row map[string]any, table string, schema utils.DBSchemaTypes) error {
if schema[table] == nil {
return fmt.Errorf("schema not found for table '%s'", table)
}
for col, val := range row {
if val == nil {
// If the value is nil, skip the column
continue
}
colType := schema[table][col]
switch colType.Name {
case "timestamp", "timestamptz", "timestamp with time zone", "datetime":
// Dates are stored as strings
str, ok := val.(string)
if !ok {
return fmt.Errorf("value for column '%s/%s' was expected to be a string, but was '%T'", table, col, val)
}
d, err := datatype.DateTimeFromString(str)
if err != nil {
return fmt.Errorf("failed to decode value for column '%s/%s' as timestamp: %w", table, col, err)
}
row[col] = d
case "blob", "bytea", "jsonb":
// Binary and jsonb data are stored in the file as base64-encoded strings
str, ok := val.(string)
if !ok {
return fmt.Errorf("value for column '%s/%s' was expected to be a string, but was '%T'", table, col, val)
}
b, err := base64.StdEncoding.DecodeString(str)
if err != nil {
return fmt.Errorf("failed to decode value for column '%s/%s' from base64: %w", table, col, err)
}
// For jsonb, we additionally cast to json.RawMessage
if colType.Name == "jsonb" {
row[col] = json.RawMessage(b)
} else {
row[col] = b
}
}
}
return nil
}
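As a counterpart to the exporter, a hedged sketch of driving ImportFromZip from an archive on disk. The helper name and filename are illustrative; archive/zip is already imported in this file.
// Sketch (not in the diff): restore a previously exported archive.
func importFromFile(ctx context.Context, db *gorm.DB, fs storage.FileStorage) error {
	zr, err := zip.OpenReader("pocket-id-export.zip")
	if err != nil {
		return err
	}
	defer zr.Close()
	// ImportFromZip drops and re-migrates the schema, inserts all rows,
	// then restores the uploads/ files into the storage backend.
	return NewImportService(db, fs).ImportFromZip(ctx, &zr.Reader)
}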

View File

@@ -18,14 +18,6 @@ import (
)
const (
// PrivateKeyFile is the path in the data/keys folder where the key is stored
// This is a JSON file containing a key encoded as JWK
PrivateKeyFile = "jwt_private_key.json"
// PrivateKeyFileEncrypted is the path in the data/keys folder where the encrypted key is stored
// This is an encrypted JSON file containing a key encoded as JWK
PrivateKeyFileEncrypted = "jwt_private_key.json.enc"
// KeyUsageSigning is the usage for the private keys, for the "use" property
KeyUsageSigning = "sig"
@@ -56,6 +48,7 @@ const (
)
type JwtService struct {
db *gorm.DB
envConfig *common.EnvConfigSchema
privateKey jwk.Key
keyId string
@@ -66,7 +59,6 @@ type JwtService struct {
func NewJwtService(db *gorm.DB, appConfigService *AppConfigService) (*JwtService, error) {
service := &JwtService{}
// Ensure keys are generated or loaded
err := service.init(db, appConfigService, &common.EnvConfig)
if err != nil {
return nil, err
@@ -78,14 +70,15 @@ func NewJwtService(db *gorm.DB, appConfigService *AppConfigService) (*JwtService
func (s *JwtService) init(db *gorm.DB, appConfigService *AppConfigService, envConfig *common.EnvConfigSchema) (err error) {
s.appConfigService = appConfigService
s.envConfig = envConfig
s.db = db
// Ensure keys are generated or loaded
return s.loadOrGenerateKey(db)
return s.LoadOrGenerateKey()
}
func (s *JwtService) loadOrGenerateKey(db *gorm.DB) error {
func (s *JwtService) LoadOrGenerateKey() error {
// Get the key provider
keyProvider, err := jwkutils.GetKeyProvider(db, s.envConfig, s.appConfigService.GetDbConfig().InstanceID.Value)
keyProvider, err := jwkutils.GetKeyProvider(s.db, s.envConfig, s.appConfigService.GetDbConfig().InstanceID.Value)
if err != nil {
return fmt.Errorf("failed to get key provider: %w", err)
}
@@ -93,7 +86,7 @@ func (s *JwtService) loadOrGenerateKey(db *gorm.DB) error {
// Try loading a key
key, err := keyProvider.LoadKey()
if err != nil {
return fmt.Errorf("failed to load key (provider type '%s'): %w", s.envConfig.KeysStorage, err)
return fmt.Errorf("failed to load key: %w", err)
}
// If we have a key, store it in the object and we're done
@@ -114,7 +107,7 @@ func (s *JwtService) loadOrGenerateKey(db *gorm.DB) error {
// Save the newly-generated key
err = keyProvider.SaveKey(s.privateKey)
if err != nil {
return fmt.Errorf("failed to save private key (provider type '%s'): %w", s.envConfig.KeysStorage, err)
return fmt.Errorf("failed to save private key: %w", err)
}
return nil

File diff suppressed because it is too large

View File

@@ -11,12 +11,15 @@ import (
"log/slog"
"net/http"
"net/url"
"path"
"strings"
"time"
"unicode/utf8"
"github.com/go-ldap/ldap/v3"
"github.com/google/uuid"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"golang.org/x/text/unicode/norm"
"gorm.io/gorm"
@@ -31,15 +34,23 @@ type LdapService struct {
appConfigService *AppConfigService
userService *UserService
groupService *UserGroupService
fileStorage storage.FileStorage
}
func NewLdapService(db *gorm.DB, httpClient *http.Client, appConfigService *AppConfigService, userService *UserService, groupService *UserGroupService) *LdapService {
type savePicture struct {
userID string
username string
picture string
}
func NewLdapService(db *gorm.DB, httpClient *http.Client, appConfigService *AppConfigService, userService *UserService, groupService *UserGroupService, fileStorage storage.FileStorage) *LdapService {
return &LdapService{
db: db,
httpClient: httpClient,
appConfigService: appConfigService,
userService: userService,
groupService: groupService,
fileStorage: fileStorage,
}
}
@@ -67,12 +78,6 @@ func (s *LdapService) createClient() (*ldap.Conn, error) {
}
func (s *LdapService) SyncAll(ctx context.Context) error {
// Start a transaction
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
// Setup LDAP connection
client, err := s.createClient()
if err != nil {
@@ -80,7 +85,13 @@ func (s *LdapService) SyncAll(ctx context.Context) error {
}
defer client.Close()
err = s.SyncUsers(ctx, tx, client)
// Start a transaction
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
savePictures, deleteFiles, err := s.SyncUsers(ctx, tx, client)
if err != nil {
return fmt.Errorf("failed to sync users: %w", err)
}
@@ -96,6 +107,25 @@ func (s *LdapService) SyncAll(ctx context.Context) error {
return fmt.Errorf("failed to commit changes to database: %w", err)
}
// Now that we've committed the transaction, we can perform operations on the storage layer
// First, save all new pictures
for _, sp := range savePictures {
err = s.saveProfilePicture(ctx, sp.userID, sp.picture)
if err != nil {
// This is not a fatal error
slog.Warn("Error saving profile picture for LDAP user", slog.String("username", sp.username), slog.Any("error", err))
}
}
// Delete all old files
for _, path := range deleteFiles {
err = s.fileStorage.Delete(ctx, path)
if err != nil {
// This is not a fatal error
slog.Error("Failed to delete file after LDAP sync", slog.String("path", path), slog.Any("error", err))
}
}
return nil
}
@@ -265,7 +295,7 @@ func (s *LdapService) SyncGroups(ctx context.Context, tx *gorm.DB, client *ldap.
}
//nolint:gocognit
func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.Conn) error {
func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.Conn) (savePictures []savePicture, deleteFiles []string, err error) {
dbConfig := s.appConfigService.GetDbConfig()
searchAttrs := []string{
@@ -293,11 +323,12 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
result, err := client.Search(searchReq)
if err != nil {
return fmt.Errorf("failed to query LDAP: %w", err)
return nil, nil, fmt.Errorf("failed to query LDAP: %w", err)
}
// Create a mapping for users that exist
ldapUserIDs := make(map[string]struct{}, len(result.Entries))
savePictures = make([]savePicture, 0, len(result.Entries))
for _, value := range result.Entries {
ldapId := convertLdapIdToString(value.GetAttributeValue(dbConfig.LdapAttributeUserUniqueIdentifier.Value))
@@ -328,19 +359,19 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
Error
if err != nil {
return fmt.Errorf("failed to enable user %s: %w", databaseUser.Username, err)
return nil, nil, fmt.Errorf("failed to enable user %s: %w", databaseUser.Username, err)
}
}
if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
// This could error with ErrRecordNotFound and we want to ignore that here
return fmt.Errorf("failed to query for LDAP user ID '%s': %w", ldapId, err)
return nil, nil, fmt.Errorf("failed to query for LDAP user ID '%s': %w", ldapId, err)
}
// Determine whether the user is an admin by checking membership in the admin group
isAdmin := false
for _, group := range value.GetAttributeValues("memberOf") {
if getDNProperty(dbConfig.LdapAttributeGroupName.Value, group) == dbConfig.LdapAttributeAdminGroup.Value {
if getDNProperty(dbConfig.LdapAttributeGroupName.Value, group) == dbConfig.LdapAdminGroupName.Value {
isAdmin = true
break
}
@@ -348,13 +379,18 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
newUser := dto.UserCreateDto{
Username: value.GetAttributeValue(dbConfig.LdapAttributeUserUsername.Value),
Email: value.GetAttributeValue(dbConfig.LdapAttributeUserEmail.Value),
Email: utils.PtrOrNil(value.GetAttributeValue(dbConfig.LdapAttributeUserEmail.Value)),
FirstName: value.GetAttributeValue(dbConfig.LdapAttributeUserFirstName.Value),
LastName: value.GetAttributeValue(dbConfig.LdapAttributeUserLastName.Value),
DisplayName: value.GetAttributeValue(dbConfig.LdapAttributeUserDisplayName.Value),
IsAdmin: isAdmin,
LdapID: ldapId,
}
if newUser.DisplayName == "" {
newUser.DisplayName = strings.TrimSpace(newUser.FirstName + " " + newUser.LastName)
}
dto.Normalize(newUser)
err = newUser.Validate()
@@ -363,32 +399,35 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
continue
}
userID := databaseUser.ID
if databaseUser.ID == "" {
_, err = s.userService.createUserInternal(ctx, newUser, true, tx)
createdUser, err := s.userService.createUserInternal(ctx, newUser, true, tx)
if errors.Is(err, &common.AlreadyInUseError{}) {
slog.Warn("Skipping creating LDAP user", slog.String("username", newUser.Username), slog.Any("error", err))
continue
} else if err != nil {
return fmt.Errorf("error creating user '%s': %w", newUser.Username, err)
return nil, nil, fmt.Errorf("error creating user '%s': %w", newUser.Username, err)
}
userID = createdUser.ID
} else {
_, err = s.userService.updateUserInternal(ctx, databaseUser.ID, newUser, false, true, tx)
if errors.Is(err, &common.AlreadyInUseError{}) {
slog.Warn("Skipping updating LDAP user", slog.String("username", newUser.Username), slog.Any("error", err))
continue
} else if err != nil {
return fmt.Errorf("error updating user '%s': %w", newUser.Username, err)
return nil, nil, fmt.Errorf("error updating user '%s': %w", newUser.Username, err)
}
}
// Save profile picture
pictureString := value.GetAttributeValue(dbConfig.LdapAttributeUserProfilePicture.Value)
if pictureString != "" {
err = s.saveProfilePicture(ctx, databaseUser.ID, pictureString)
if err != nil {
// This is not a fatal error
slog.Warn("Error saving profile picture for user", slog.String("username", newUser.Username), slog.Any("error", err))
}
// Storage operations must be executed outside of a transaction
savePictures = append(savePictures, savePicture{
userID: userID,
username: newUser.Username,
picture: pictureString,
})
}
}
@@ -400,10 +439,11 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
Select("id, username, ldap_id, disabled").
Error
if err != nil {
return fmt.Errorf("failed to fetch users from database: %w", err)
return nil, nil, fmt.Errorf("failed to fetch users from database: %w", err)
}
// Mark users as disabled or delete users that no longer exist in LDAP
deleteFiles = make([]string, 0, len(ldapUserIDs))
for _, user := range ldapUsersInDb {
// Skip if the user ID exists in the fetched LDAP results
if _, exists := ldapUserIDs[*user.LdapID]; exists {
@@ -411,30 +451,34 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
}
if dbConfig.LdapSoftDeleteUsers.IsTrue() {
err = s.userService.disableUserInternal(ctx, user.ID, tx)
err = s.userService.disableUserInternal(ctx, tx, user.ID)
if err != nil {
return fmt.Errorf("failed to disable user %s: %w", user.Username, err)
return nil, nil, fmt.Errorf("failed to disable user %s: %w", user.Username, err)
}
slog.Info("Disabled user", slog.String("username", user.Username))
} else {
err = s.userService.deleteUserInternal(ctx, user.ID, true, tx)
target := &common.LdapUserUpdateError{}
if errors.As(err, &target) {
return fmt.Errorf("failed to delete user %s: LDAP user must be disabled before deletion", user.Username)
} else if err != nil {
return fmt.Errorf("failed to delete user %s: %w", user.Username, err)
err = s.userService.deleteUserInternal(ctx, tx, user.ID, true)
if err != nil {
target := &common.LdapUserUpdateError{}
if errors.As(err, &target) {
return nil, nil, fmt.Errorf("failed to delete user %s: LDAP user must be disabled before deletion", user.Username)
}
return nil, nil, fmt.Errorf("failed to delete user %s: %w", user.Username, err)
}
slog.Info("Deleted user", slog.String("username", user.Username))
// Storage operations must be executed outside of a transaction
deleteFiles = append(deleteFiles, path.Join("profile-pictures", user.ID+".png"))
}
}
return nil
return savePictures, deleteFiles, nil
}
func (s *LdapService) saveProfilePicture(parentCtx context.Context, userId string, pictureString string) error {
var reader io.Reader
var reader io.ReadSeeker
_, err := url.ParseRequestURI(pictureString)
if err == nil {
@@ -454,7 +498,12 @@ func (s *LdapService) saveProfilePicture(parentCtx context.Context, userId strin
}
defer res.Body.Close()
reader = res.Body
data, err := io.ReadAll(res.Body)
if err != nil {
return fmt.Errorf("failed to read profile picture: %w", err)
}
reader = bytes.NewReader(data)
} else if decodedPhoto, err := base64.StdEncoding.DecodeString(pictureString); err == nil {
// If the photo is a base64 encoded string, decode it
reader = bytes.NewReader(decodedPhoto)
@@ -464,7 +513,7 @@ func (s *LdapService) saveProfilePicture(parentCtx context.Context, userId strin
}
// Update the profile picture
err = s.userService.UpdateProfilePicture(userId, reader)
err = s.userService.UpdateProfilePicture(parentCtx, userId, reader)
if err != nil {
return fmt.Errorf("failed to update profile picture: %w", err)
}
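The reshuffled SyncAll above follows a simple rule: collect storage work while the database transaction is open, and apply it only after a successful commit. A minimal sketch of that pattern (function and parameter names are illustrative, not the service's actual API):
// Sketch of the collect-then-apply pattern used by SyncAll.
func runSyncThenStorage(db *gorm.DB, sync func(tx *gorm.DB) ([]savePicture, []string, error), apply func([]savePicture, []string)) error {
	tx := db.Begin()
	defer tx.Rollback() // harmless once Commit has succeeded
	pictures, deletions, err := sync(tx)
	if err != nil {
		return err
	}
	if err := tx.Commit().Error; err != nil {
		return err
	}
	// The storage backend is not transactional, so it is only touched
	// once the database changes are durable.
	apply(pictures, deletions)
	return nil
}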

View File

@@ -3,15 +3,18 @@ package service
import (
"context"
"crypto/sha256"
"crypto/subtle"
"crypto/tls"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"io"
"log/slog"
"mime/multipart"
"net/http"
"os"
"net/url"
"path"
"regexp"
"slices"
"strings"
@@ -30,6 +33,7 @@ import (
"github.com/pocket-id/pocket-id/backend/internal/dto"
"github.com/pocket-id/pocket-id/backend/internal/model"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)
@@ -54,8 +58,9 @@ type OidcService struct {
customClaimService *CustomClaimService
webAuthnService *WebAuthnService
httpClient *http.Client
jwkCache *jwk.Cache
httpClient *http.Client
jwkCache *jwk.Cache
fileStorage storage.FileStorage
}
func NewOidcService(
@@ -66,6 +71,8 @@ func NewOidcService(
auditLogService *AuditLogService,
customClaimService *CustomClaimService,
webAuthnService *WebAuthnService,
httpClient *http.Client,
fileStorage storage.FileStorage,
) (s *OidcService, err error) {
s = &OidcService{
db: db,
@@ -74,6 +81,8 @@ func NewOidcService(
auditLogService: auditLogService,
customClaimService: customClaimService,
webAuthnService: webAuthnService,
httpClient: httpClient,
fileStorage: fileStorage,
}
// Note: we don't pass the OTel-instrumented HTTP client here, because requests are always made in the background and are not tied to a specific trace
@@ -388,7 +397,7 @@ func (s *OidcService) createTokenFromAuthorizationCode(ctx context.Context, inpu
// If the client is public or PKCE is enabled, the code verifier must match the code challenge
if client.IsPublic || client.PkceEnabled {
if !s.validateCodeVerifier(input.CodeVerifier, *authorizationCodeMetaData.CodeChallenge, *authorizationCodeMetaData.CodeChallengeMethodSha256) {
if !validateCodeVerifier(input.CodeVerifier, *authorizationCodeMetaData.CodeChallenge, *authorizationCodeMetaData.CodeChallengeMethodSha256) {
return CreatedTokens{}, &common.OidcInvalidCodeVerifierError{}
}
}
@@ -469,7 +478,7 @@ func (s *OidcService) createTokenFromRefreshToken(ctx context.Context, input dto
var storedRefreshToken model.OidcRefreshToken
err = tx.
WithContext(ctx).
Preload("User").
Preload("User.UserGroups").
Where(
"token = ? AND expires_at > ? AND user_id = ? AND client_id = ?",
utils.CreateSha256Hash(rt),
@@ -669,24 +678,26 @@ func (s *OidcService) introspectRefreshToken(ctx context.Context, clientID strin
}
func (s *OidcService) GetClient(ctx context.Context, clientID string) (model.OidcClient, error) {
return s.getClientInternal(ctx, clientID, s.db)
return s.getClientInternal(ctx, clientID, s.db, false)
}
func (s *OidcService) getClientInternal(ctx context.Context, clientID string, tx *gorm.DB) (model.OidcClient, error) {
func (s *OidcService) getClientInternal(ctx context.Context, clientID string, tx *gorm.DB, forUpdate bool) (model.OidcClient, error) {
var client model.OidcClient
err := tx.
q := tx.
WithContext(ctx).
Preload("CreatedBy").
Preload("AllowedUserGroups").
First(&client, "id = ?", clientID).
Error
if err != nil {
return model.OidcClient{}, err
Preload("AllowedUserGroups")
if forUpdate {
q = q.Clauses(clause.Locking{Strength: "UPDATE"})
}
q = q.First(&client, "id = ?", clientID)
if q.Error != nil {
return model.OidcClient{}, q.Error
}
return client, nil
}
func (s *OidcService) ListClients(ctx context.Context, name string, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.OidcClient, utils.PaginationResponse, error) {
func (s *OidcService) ListClients(ctx context.Context, name string, listRequestOptions utils.ListRequestOptions) ([]model.OidcClient, utils.PaginationResponse, error) {
var clients []model.OidcClient
query := s.db.
@@ -699,17 +710,17 @@ func (s *OidcService) ListClients(ctx context.Context, name string, sortedPagina
}
// As allowedUserGroupsCount is not a column, we need to manually sort it
if sortedPaginationRequest.Sort.Column == "allowedUserGroupsCount" && utils.IsValidSortDirection(sortedPaginationRequest.Sort.Direction) {
if listRequestOptions.Sort.Column == "allowedUserGroupsCount" && utils.IsValidSortDirection(listRequestOptions.Sort.Direction) {
query = query.Select("oidc_clients.*, COUNT(oidc_clients_allowed_user_groups.oidc_client_id)").
Joins("LEFT JOIN oidc_clients_allowed_user_groups ON oidc_clients.id = oidc_clients_allowed_user_groups.oidc_client_id").
Group("oidc_clients.id").
Order("COUNT(oidc_clients_allowed_user_groups.oidc_client_id) " + sortedPaginationRequest.Sort.Direction)
Order("COUNT(oidc_clients_allowed_user_groups.oidc_client_id) " + listRequestOptions.Sort.Direction)
response, err := utils.Paginate(sortedPaginationRequest.Pagination.Page, sortedPaginationRequest.Pagination.Limit, query, &clients)
response, err := utils.Paginate(listRequestOptions.Pagination.Page, listRequestOptions.Pagination.Limit, query, &clients)
return clients, response, err
}
response, err := utils.PaginateAndSort(sortedPaginationRequest, query, &clients)
response, err := utils.PaginateFilterAndSort(listRequestOptions, query, &clients)
return clients, response, err
}
@@ -733,6 +744,21 @@ func (s *OidcService) CreateClient(ctx context.Context, input dto.OidcClientCrea
return model.OidcClient{}, err
}
// All storage operations must be executed outside of a transaction
if input.LogoURL != nil {
err = s.downloadAndSaveLogoFromURL(ctx, client.ID, *input.LogoURL, true)
if err != nil {
return model.OidcClient{}, fmt.Errorf("failed to download logo: %w", err)
}
}
if input.DarkLogoURL != nil {
err = s.downloadAndSaveLogoFromURL(ctx, client.ID, *input.DarkLogoURL, false)
if err != nil {
return model.OidcClient{}, fmt.Errorf("failed to download dark logo: %w", err)
}
}
return client, nil
}
@@ -743,21 +769,16 @@ func (s *OidcService) UpdateClient(ctx context.Context, clientID string, input d
}()
var client model.OidcClient
err := tx.
WithContext(ctx).
err := tx.WithContext(ctx).
Preload("CreatedBy").
First(&client, "id = ?", clientID).
Error
First(&client, "id = ?", clientID).Error
if err != nil {
return model.OidcClient{}, err
}
updateOIDCClientModelFromDto(&client, &input)
err = tx.
WithContext(ctx).
Save(&client).
Error
err = tx.WithContext(ctx).Save(&client).Error
if err != nil {
return model.OidcClient{}, err
}
@@ -767,6 +788,21 @@ func (s *OidcService) UpdateClient(ctx context.Context, clientID string, input d
return model.OidcClient{}, err
}
// All storage operations must be executed outside of a transaction
if input.LogoURL != nil {
err = s.downloadAndSaveLogoFromURL(ctx, client.ID, *input.LogoURL, true)
if err != nil {
return model.OidcClient{}, fmt.Errorf("failed to download logo: %w", err)
}
}
if input.DarkLogoURL != nil {
err = s.downloadAndSaveLogoFromURL(ctx, client.ID, *input.DarkLogoURL, false)
if err != nil {
return model.OidcClient{}, fmt.Errorf("failed to download dark logo: %w", err)
}
}
return client, nil
}
@@ -799,12 +835,24 @@ func (s *OidcService) DeleteClient(ctx context.Context, clientID string) error {
err := s.db.
WithContext(ctx).
Where("id = ?", clientID).
Clauses(clause.Returning{}).
Delete(&client).
Error
if err != nil {
return err
}
// Delete images if present
// Note that storage operations must be done outside of a transaction
if client.ImageType != nil && *client.ImageType != "" {
old := path.Join("oidc-client-images", client.ID+"."+*client.ImageType)
_ = s.fileStorage.Delete(ctx, old)
}
if client.DarkImageType != nil && *client.DarkImageType != "" {
old := path.Join("oidc-client-images", client.ID+"-dark."+*client.DarkImageType)
_ = s.fileStorage.Delete(ctx, old)
}
return nil
}
@@ -850,69 +898,66 @@ func (s *OidcService) CreateClientSecret(ctx context.Context, clientID string) (
return clientSecret, nil
}
func (s *OidcService) GetClientLogo(ctx context.Context, clientID string) (string, string, error) {
func (s *OidcService) GetClientLogo(ctx context.Context, clientID string, light bool) (io.ReadCloser, int64, string, error) {
var client model.OidcClient
err := s.db.
WithContext(ctx).
First(&client, "id = ?", clientID).
Error
if err != nil {
return "", "", err
return nil, 0, "", err
}
if client.ImageType == nil {
return "", "", errors.New("image not found")
var suffix string
var ext string
switch {
case !light && client.DarkImageType != nil:
// Dark logo if requested and exists
suffix = "-dark"
ext = *client.DarkImageType
case client.ImageType != nil:
// Light logo if requested or no dark logo is available
ext = *client.ImageType
default:
return nil, 0, "", errors.New("image not found")
}
imagePath := common.EnvConfig.UploadPath + "/oidc-client-images/" + client.ID + "." + *client.ImageType
mimeType := utils.GetImageMimeType(*client.ImageType)
mimeType := utils.GetImageMimeType(ext)
if mimeType == "" {
return nil, 0, "", fmt.Errorf("unsupported image type '%s'", ext)
}
key := path.Join("oidc-client-images", client.ID+suffix+"."+ext)
reader, size, err := s.fileStorage.Open(ctx, key)
if err != nil {
return nil, 0, "", err
}
return imagePath, mimeType, nil
return reader, size, mimeType, nil
}
func (s *OidcService) UpdateClientLogo(ctx context.Context, clientID string, file *multipart.FileHeader) error {
func (s *OidcService) UpdateClientLogo(ctx context.Context, clientID string, file *multipart.FileHeader, light bool) error {
fileType := strings.ToLower(utils.GetFileExtension(file.Filename))
if mimeType := utils.GetImageMimeType(fileType); mimeType == "" {
return &common.FileTypeNotSupportedError{}
}
imagePath := common.EnvConfig.UploadPath + "/oidc-client-images/" + clientID + "." + fileType
err := utils.SaveFile(file, imagePath)
var darkSuffix string
if !light {
darkSuffix = "-dark"
}
imagePath := path.Join("oidc-client-images", clientID+darkSuffix+"."+fileType)
reader, err := file.Open()
if err != nil {
return err
}
defer reader.Close()
err = s.fileStorage.Save(ctx, imagePath, reader)
if err != nil {
return err
}
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
var client model.OidcClient
err = tx.
WithContext(ctx).
First(&client, "id = ?", clientID).
Error
if err != nil {
return err
}
if client.ImageType != nil && fileType != *client.ImageType {
oldImagePath := fmt.Sprintf("%s/oidc-client-images/%s.%s", common.EnvConfig.UploadPath, client.ID, *client.ImageType)
if err := os.Remove(oldImagePath); err != nil {
return err
}
}
client.ImageType = &fileType
err = tx.
WithContext(ctx).
Save(&client).
Error
if err != nil {
return err
}
err = tx.Commit().Error
err = s.updateClientLogoType(ctx, clientID, fileType, light)
if err != nil {
return err
}
@@ -921,6 +966,30 @@ func (s *OidcService) UpdateClientLogo(ctx context.Context, clientID string, fil
}
func (s *OidcService) DeleteClientLogo(ctx context.Context, clientID string) error {
return s.deleteClientLogoInternal(ctx, clientID, "", func(client *model.OidcClient) (string, error) {
if client.ImageType == nil {
return "", errors.New("image not found")
}
oldImageType := *client.ImageType
client.ImageType = nil
return oldImageType, nil
})
}
func (s *OidcService) DeleteClientDarkLogo(ctx context.Context, clientID string) error {
return s.deleteClientLogoInternal(ctx, clientID, "-dark", func(client *model.OidcClient) (string, error) {
if client.DarkImageType == nil {
return "", errors.New("image not found")
}
oldImageType := *client.DarkImageType
client.DarkImageType = nil
return oldImageType, nil
})
}
func (s *OidcService) deleteClientLogoInternal(ctx context.Context, clientID string, imagePathSuffix string, setClientImage func(*model.OidcClient) (string, error)) error {
tx := s.db.Begin()
defer func() {
tx.Rollback()
@@ -935,12 +1004,11 @@ func (s *OidcService) DeleteClientLogo(ctx context.Context, clientID string) err
return err
}
if client.ImageType == nil {
return errors.New("image not found")
oldImageType, err := setClientImage(&client)
if err != nil {
return err
}
oldImageType := *client.ImageType
client.ImageType = nil
err = tx.
WithContext(ctx).
Save(&client).
@@ -949,12 +1017,14 @@ func (s *OidcService) DeleteClientLogo(ctx context.Context, clientID string) err
return err
}
imagePath := common.EnvConfig.UploadPath + "/oidc-client-images/" + client.ID + "." + oldImageType
if err := os.Remove(imagePath); err != nil {
err = tx.Commit().Error
if err != nil {
return err
}
err = tx.Commit().Error
// All storage operations must be performed outside of a database transaction
imagePath := path.Join("oidc-client-images", client.ID+imagePathSuffix+"."+oldImageType)
err = s.fileStorage.Delete(ctx, imagePath)
if err != nil {
return err
}
@@ -968,7 +1038,7 @@ func (s *OidcService) UpdateAllowedUserGroups(ctx context.Context, id string, in
tx.Rollback()
}()
client, err = s.getClientInternal(ctx, id, tx)
client, err = s.getClientInternal(ctx, id, tx, true)
if err != nil {
return model.OidcClient{}, err
}
@@ -1090,13 +1160,20 @@ func (s *OidcService) createAuthorizationCode(ctx context.Context, clientID stri
return randomString, nil
}
func (s *OidcService) validateCodeVerifier(codeVerifier, codeChallenge string, codeChallengeMethodSha256 bool) bool {
func validateCodeVerifier(codeVerifier, codeChallenge string, codeChallengeMethodSha256 bool) bool {
if codeVerifier == "" || codeChallenge == "" {
return false
}
if !codeChallengeMethodSha256 {
return codeVerifier == codeChallenge
return subtle.ConstantTimeCompare([]byte(codeVerifier), []byte(codeChallenge)) == 1
}
// Base64 URL decode the challenge
// If it's not valid base64url, fail the operation
codeChallengeBytes, err := base64.RawURLEncoding.DecodeString(codeChallenge)
if err != nil {
return false
}
// Compute SHA-256 hash of the codeVerifier
@@ -1104,10 +1181,7 @@ func (s *OidcService) validateCodeVerifier(codeVerifier, codeChallenge string, c
h.Write([]byte(codeVerifier))
codeVerifierHash := h.Sum(nil)
// Base64 URL encode the verifier hash
encodedVerifierHash := base64.RawURLEncoding.EncodeToString(codeVerifierHash)
return encodedVerifierHash == codeChallenge
return subtle.ConstantTimeCompare(codeVerifierHash, codeChallengeBytes) == 1
}
func (s *OidcService) getCallbackURL(client *model.OidcClient, inputCallbackURL string, tx *gorm.DB, ctx context.Context) (callbackURL string, err error) {
@@ -1331,9 +1405,10 @@ func (s *OidcService) GetDeviceCodeInfo(ctx context.Context, userCode string, us
return &dto.DeviceCodeInfoDto{
Client: dto.OidcClientMetaDataDto{
ID: deviceAuth.Client.ID,
Name: deviceAuth.Client.Name,
HasLogo: deviceAuth.Client.HasLogo,
ID: deviceAuth.Client.ID,
Name: deviceAuth.Client.Name,
HasLogo: deviceAuth.Client.HasLogo(),
HasDarkLogo: deviceAuth.Client.HasDarkLogo(),
},
Scope: deviceAuth.Scope,
AuthorizationRequired: !hasAuthorizedClient,
@@ -1357,7 +1432,7 @@ func (s *OidcService) GetAllowedGroupsCountOfClient(ctx context.Context, id stri
return count, nil
}
func (s *OidcService) ListAuthorizedClients(ctx context.Context, userID string, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.UserAuthorizedOidcClient, utils.PaginationResponse, error) {
func (s *OidcService) ListAuthorizedClients(ctx context.Context, userID string, listRequestOptions utils.ListRequestOptions) ([]model.UserAuthorizedOidcClient, utils.PaginationResponse, error) {
query := s.db.
WithContext(ctx).
@@ -1366,7 +1441,7 @@ func (s *OidcService) ListAuthorizedClients(ctx context.Context, userID string,
Where("user_id = ?", userID)
var authorizedClients []model.UserAuthorizedOidcClient
response, err := utils.PaginateAndSort(sortedPaginationRequest, query, &authorizedClients)
response, err := utils.PaginateFilterAndSort(listRequestOptions, query, &authorizedClients)
return authorizedClients, response, err
}
@@ -1399,7 +1474,7 @@ func (s *OidcService) RevokeAuthorizedClient(ctx context.Context, userID string,
return nil
}
func (s *OidcService) ListAccessibleOidcClients(ctx context.Context, userID string, sortedPaginationRequest utils.SortedPaginationRequest) ([]dto.AccessibleOidcClientDto, utils.PaginationResponse, error) {
func (s *OidcService) ListAccessibleOidcClients(ctx context.Context, userID string, listRequestOptions utils.ListRequestOptions) ([]dto.AccessibleOidcClientDto, utils.PaginationResponse, error) {
tx := s.db.Begin()
defer func() {
tx.Rollback()
@@ -1446,13 +1521,13 @@ func (s *OidcService) ListAccessibleOidcClients(ctx context.Context, userID stri
// Handle custom sorting for lastUsedAt column
var response utils.PaginationResponse
if sortedPaginationRequest.Sort.Column == "lastUsedAt" && utils.IsValidSortDirection(sortedPaginationRequest.Sort.Direction) {
if listRequestOptions.Sort.Column == "lastUsedAt" && utils.IsValidSortDirection(listRequestOptions.Sort.Direction) {
query = query.
Joins("LEFT JOIN user_authorized_oidc_clients ON oidc_clients.id = user_authorized_oidc_clients.client_id AND user_authorized_oidc_clients.user_id = ?", userID).
Order("user_authorized_oidc_clients.last_used_at " + sortedPaginationRequest.Sort.Direction + " NULLS LAST")
Order("user_authorized_oidc_clients.last_used_at " + listRequestOptions.Sort.Direction + " NULLS LAST")
}
response, err = utils.PaginateAndSort(sortedPaginationRequest, query, &clients)
response, err = utils.PaginateFilterAndSort(listRequestOptions, query, &clients)
if err != nil {
return nil, utils.PaginationResponse{}, err
}
@@ -1465,10 +1540,11 @@ func (s *OidcService) ListAccessibleOidcClients(ctx context.Context, userID stri
}
dtos[i] = dto.AccessibleOidcClientDto{
OidcClientMetaDataDto: dto.OidcClientMetaDataDto{
ID: client.ID,
Name: client.Name,
LaunchURL: client.LaunchURL,
HasLogo: client.HasLogo,
ID: client.ID,
Name: client.Name,
LaunchURL: client.LaunchURL,
HasLogo: client.HasLogo(),
HasDarkLogo: client.HasDarkLogo(),
},
LastUsedAt: lastUsedAt,
}
@@ -1745,7 +1821,7 @@ func (s *OidcService) GetClientPreview(ctx context.Context, clientID string, use
tx.Rollback()
}()
client, err := s.getClientInternal(ctx, clientID, tx)
client, err := s.getClientInternal(ctx, clientID, tx, false)
if err != nil {
return nil, err
}
@@ -1889,3 +1965,161 @@ func (s *OidcService) IsClientAccessibleToUser(ctx context.Context, clientID str
return s.IsUserGroupAllowedToAuthorize(user, client), nil
}
var errLogoTooLarge = errors.New("logo is too large")
func httpClientWithCheckRedirect(source *http.Client, checkRedirect func(req *http.Request, via []*http.Request) error) *http.Client {
if source == nil {
source = http.DefaultClient
}
// Create a new client that reuses the source client's transport
client := &http.Client{
Transport: source.Transport,
}
// Assign the CheckRedirect function
client.CheckRedirect = checkRedirect
return client
}
func (s *OidcService) downloadAndSaveLogoFromURL(parentCtx context.Context, clientID string, raw string, light bool) error {
u, err := url.Parse(raw)
if err != nil {
return err
}
ctx, cancel := context.WithTimeout(parentCtx, 15*time.Second)
defer cancel()
// Prevents SSRF by allowing only public IPs
ok, err := utils.IsURLPrivate(ctx, u)
if err != nil {
return err
} else if ok {
return errors.New("private IP addresses are not allowed")
}
// We need to check this on redirects too
client := httpClientWithCheckRedirect(s.httpClient, func(r *http.Request, via []*http.Request) error {
if len(via) >= 10 {
return errors.New("stopped after 10 redirects")
}
ok, err := utils.IsURLPrivate(r.Context(), r.URL)
if err != nil {
return err
} else if ok {
return errors.New("private IP addresses are not allowed")
}
return nil
})
req, err := http.NewRequestWithContext(ctx, http.MethodGet, raw, nil)
if err != nil {
return err
}
req.Header.Set("User-Agent", "pocket-id/oidc-logo-fetcher")
req.Header.Set("Accept", "image/*")
resp, err := client.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("failed to fetch logo: %s", resp.Status)
}
const maxLogoSize int64 = 2 * 1024 * 1024 // 2MB
if resp.ContentLength > maxLogoSize {
return errLogoTooLarge
}
// Prefer extension in path if supported
ext := utils.GetFileExtension(u.Path)
if ext == "" || utils.GetImageMimeType(ext) == "" {
// Otherwise, try to detect from content type
ext = utils.GetImageExtensionFromMimeType(resp.Header.Get("Content-Type"))
}
if ext == "" {
return &common.FileTypeNotSupportedError{}
}
var darkSuffix string
if !light {
darkSuffix = "-dark"
}
imagePath := path.Join("oidc-client-images", clientID+darkSuffix+"."+ext)
err = s.fileStorage.Save(ctx, imagePath, utils.NewLimitReader(resp.Body, maxLogoSize+1))
if errors.Is(err, utils.ErrSizeExceeded) {
return errLogoTooLarge
} else if err != nil {
return err
}
err = s.updateClientLogoType(ctx, clientID, ext, light)
if err != nil {
return err
}
return nil
}
func (s *OidcService) updateClientLogoType(ctx context.Context, clientID string, ext string, light bool) error {
var darkSuffix string
if !light {
darkSuffix = "-dark"
}
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
// Acquire a row-level UPDATE lock, since we will modify this row later in the transaction
var client model.OidcClient
err := tx.
WithContext(ctx).
Clauses(clause.Locking{Strength: "UPDATE"}).
First(&client, "id = ?", clientID).
Error
if err != nil {
return fmt.Errorf("failed to look up client: %w", err)
}
var currentType *string
if light {
currentType = client.ImageType
client.ImageType = &ext
} else {
currentType = client.DarkImageType
client.DarkImageType = &ext
}
err = tx.
WithContext(ctx).
Save(&client).
Error
if err != nil {
return fmt.Errorf("failed to save updated client: %w", err)
}
err = tx.Commit().Error
if err != nil {
return fmt.Errorf("failed to commit transaction: %w", err)
}
// Storage operations must be executed outside of a transaction
if currentType != nil && *currentType != ext {
old := path.Join("oidc-client-images", client.ID+darkSuffix+"."+*currentType)
_ = s.fileStorage.Delete(ctx, old)
}
return nil
}
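For reference on the constant-time PKCE check above: with the S256 method, the client derives code_challenge as the unpadded base64url encoding of SHA-256(code_verifier), which is exactly what validateCodeVerifier recomputes and compares. A minimal derivation sketch (standalone, standard library only):
package main
import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)
func main() {
	codeVerifier := "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"
	// S256: challenge = BASE64URL(SHA256(verifier)), without padding.
	hash := sha256.Sum256([]byte(codeVerifier))
	fmt.Println(base64.RawURLEncoding.EncodeToString(hash[:]))
}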

View File

@@ -5,8 +5,13 @@ import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"encoding/json"
"io"
"net/http"
"strconv"
"strings"
"testing"
"time"
@@ -19,6 +24,7 @@ import (
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/dto"
"github.com/pocket-id/pocket-id/backend/internal/model"
"github.com/pocket-id/pocket-id/backend/internal/storage"
testutils "github.com/pocket-id/pocket-id/backend/internal/utils/testing"
)
@@ -142,6 +148,7 @@ func TestOidcService_verifyClientCredentialsInternal(t *testing.T) {
var err error
// Create a test database
db := testutils.NewDatabaseForTest(t)
common.EnvConfig.EncryptionKey = []byte("0123456789abcdef0123456789abcdef")
// Create two JWKs for testing
privateJWK, jwkSetJSON := generateTestECDSAKey(t)
@@ -510,3 +517,460 @@ func TestOidcService_verifyClientCredentialsInternal(t *testing.T) {
})
})
}
func TestValidateCodeVerifier(t *testing.T) {
require.False(t, validateCodeVerifier("", "", false))
require.False(t, validateCodeVerifier("", "", true))
t.Run("plain", func(t *testing.T) {
require.False(t, validateCodeVerifier("", "challenge", false))
require.False(t, validateCodeVerifier("verifier", "", false))
require.True(t, validateCodeVerifier("plainVerifier", "plainVerifier", false))
require.False(t, validateCodeVerifier("plainVerifier", "otherVerifier", false))
})
t.Run("SHA 256", func(t *testing.T) {
codeVerifier := "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"
hash := sha256.Sum256([]byte(codeVerifier))
codeChallenge := base64.RawURLEncoding.EncodeToString(hash[:])
require.True(t, validateCodeVerifier(codeVerifier, codeChallenge, true))
require.False(t, validateCodeVerifier("wrongVerifier", codeChallenge, true))
require.False(t, validateCodeVerifier(codeVerifier, "!", true))
// Invalid base64
require.False(t, validateCodeVerifier("NOT!VALID", codeChallenge, true))
})
}
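The S256 case above follows RFC 7636: the code challenge is the unpadded base64url encoding of the SHA-256 hash of the verifier, which is exactly what the test recomputes inline. A minimal sketch of the derivation a client would perform; the verifier below is the same RFC 7636 example string used in the test.

package main

import (
    "crypto/sha256"
    "encoding/base64"
    "fmt"
)

// deriveS256Challenge computes the PKCE S256 code challenge for a verifier:
// BASE64URL-ENCODE(SHA256(ASCII(code_verifier))) without padding (RFC 7636).
func deriveS256Challenge(codeVerifier string) string {
    hash := sha256.Sum256([]byte(codeVerifier))
    return base64.RawURLEncoding.EncodeToString(hash[:])
}

func main() {
    // Should print E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM, the challenge
    // given for this verifier in RFC 7636 appendix B.
    fmt.Println(deriveS256Challenge("dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"))
}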
func TestOidcService_updateClientLogoType(t *testing.T) {
// Create a test database
db := testutils.NewDatabaseForTest(t)
// Create database storage
dbStorage, err := storage.NewDatabaseStorage(db)
require.NoError(t, err)
// Init the OidcService
s := &OidcService{
db: db,
fileStorage: dbStorage,
}
// Create a test client
client := model.OidcClient{
Name: "Test Client",
CallbackURLs: model.UrlList{"https://example.com/callback"},
}
err = db.Create(&client).Error
require.NoError(t, err)
// Helper function to check if a file exists in storage
fileExists := func(t *testing.T, path string) bool {
t.Helper()
_, _, err := dbStorage.Open(t.Context(), path)
return err == nil
}
// Helper function to create a dummy file in storage
createDummyFile := func(t *testing.T, path string) {
t.Helper()
err := dbStorage.Save(t.Context(), path, strings.NewReader("dummy content"))
require.NoError(t, err)
}
t.Run("Updates light logo type for client without previous logo", func(t *testing.T) {
// Update the logo type
err := s.updateClientLogoType(t.Context(), client.ID, "png", true)
require.NoError(t, err)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.ImageType)
assert.Equal(t, "png", *updatedClient.ImageType)
})
t.Run("Updates dark logo type for client without previous dark logo", func(t *testing.T) {
// Update the dark logo type
err := s.updateClientLogoType(t.Context(), client.ID, "jpg", false)
require.NoError(t, err)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.DarkImageType)
assert.Equal(t, "jpg", *updatedClient.DarkImageType)
})
t.Run("Updates light logo type and deletes old file when type changes", func(t *testing.T) {
// Create the old PNG file in storage
oldPath := "oidc-client-images/" + client.ID + ".png"
createDummyFile(t, oldPath)
require.True(t, fileExists(t, oldPath), "Old file should exist before update")
// Client currently has a PNG logo, update to WEBP
err := s.updateClientLogoType(t.Context(), client.ID, "webp", true)
require.NoError(t, err)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.ImageType)
assert.Equal(t, "webp", *updatedClient.ImageType)
// Old PNG file should be deleted
assert.False(t, fileExists(t, oldPath), "Old PNG file should have been deleted")
})
t.Run("Updates dark logo type and deletes old file when type changes", func(t *testing.T) {
// Create the old JPG dark file in storage
oldPath := "oidc-client-images/" + client.ID + "-dark.jpg"
createDummyFile(t, oldPath)
require.True(t, fileExists(t, oldPath), "Old dark file should exist before update")
// Client currently has a JPG dark logo, update to WEBP
err := s.updateClientLogoType(t.Context(), client.ID, "webp", false)
require.NoError(t, err)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.DarkImageType)
assert.Equal(t, "webp", *updatedClient.DarkImageType)
// Old JPG dark file should be deleted
assert.False(t, fileExists(t, oldPath), "Old JPG dark file should have been deleted")
})
t.Run("Does not delete file when type remains the same", func(t *testing.T) {
// Create the WEBP file in storage
webpPath := "oidc-client-images/" + client.ID + ".webp"
createDummyFile(t, webpPath)
require.True(t, fileExists(t, webpPath), "WEBP file should exist before update")
// Update to the same type (WEBP)
err := s.updateClientLogoType(t.Context(), client.ID, "webp", true)
require.NoError(t, err)
// Verify the client still has WEBP
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.ImageType)
assert.Equal(t, "webp", *updatedClient.ImageType)
// WEBP file should still exist since type didn't change
assert.True(t, fileExists(t, webpPath), "WEBP file should still exist")
})
t.Run("Returns error for non-existent client", func(t *testing.T) {
err := s.updateClientLogoType(t.Context(), "non-existent-client-id", "png", true)
require.Error(t, err)
require.ErrorContains(t, err, "failed to look up client")
})
}
func TestOidcService_downloadAndSaveLogoFromURL(t *testing.T) {
// Create a test database
db := testutils.NewDatabaseForTest(t)
// Create database storage
dbStorage, err := storage.NewDatabaseStorage(db)
require.NoError(t, err)
// Create a test client
client := model.OidcClient{
Name: "Test Client",
CallbackURLs: model.UrlList{"https://example.com/callback"},
}
err = db.Create(&client).Error
require.NoError(t, err)
// Helper function to check if a file exists in storage
fileExists := func(t *testing.T, path string) bool {
t.Helper()
_, _, err := dbStorage.Open(t.Context(), path)
return err == nil
}
// Helper function to get file content from storage
getFileContent := func(t *testing.T, path string) []byte {
t.Helper()
reader, _, err := dbStorage.Open(t.Context(), path)
require.NoError(t, err)
defer reader.Close()
content, err := io.ReadAll(reader)
require.NoError(t, err)
return content
}
t.Run("Successfully downloads and saves PNG logo from URL", func(t *testing.T) {
// Create mock PNG content
pngContent := []byte("fake-png-content")
// Create a mock HTTP response with headers
//nolint:bodyclose
pngResponse := testutils.NewMockResponse(http.StatusOK, string(pngContent))
pngResponse.Header.Set("Content-Type", "image/png")
// Create a mock HTTP client with responses
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/logo.png": pngResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
// Init the OidcService with mock HTTP client
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
// Download and save the logo
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/logo.png", true)
require.NoError(t, err)
// Verify the file was saved
logoPath := "oidc-client-images/" + client.ID + ".png"
require.True(t, fileExists(t, logoPath), "Logo file should exist in storage")
// Verify the content
savedContent := getFileContent(t, logoPath)
assert.Equal(t, pngContent, savedContent)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.ImageType)
assert.Equal(t, "png", *updatedClient.ImageType)
})
t.Run("Successfully downloads and saves dark logo", func(t *testing.T) {
// Create mock WEBP content
webpContent := []byte("fake-webp-content")
//nolint:bodyclose
webpResponse := testutils.NewMockResponse(http.StatusOK, string(webpContent))
webpResponse.Header.Set("Content-Type", "image/webp")
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/dark-logo.webp": webpResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
// Download and save the dark logo
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/dark-logo.webp", false)
require.NoError(t, err)
// Verify the dark logo file was saved
darkLogoPath := "oidc-client-images/" + client.ID + "-dark.webp"
require.True(t, fileExists(t, darkLogoPath), "Dark logo file should exist in storage")
// Verify the content
savedContent := getFileContent(t, darkLogoPath)
assert.Equal(t, webpContent, savedContent)
// Verify the client was updated
var updatedClient model.OidcClient
err = db.First(&updatedClient, "id = ?", client.ID).Error
require.NoError(t, err)
require.NotNil(t, updatedClient.DarkImageType)
assert.Equal(t, "webp", *updatedClient.DarkImageType)
})
t.Run("Detects extension from URL path", func(t *testing.T) {
svgContent := []byte("<svg></svg>")
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/icon.svg": testutils.NewMockResponse(http.StatusOK, string(svgContent)),
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/icon.svg", true)
require.NoError(t, err)
// Verify SVG file was saved
logoPath := "oidc-client-images/" + client.ID + ".svg"
require.True(t, fileExists(t, logoPath), "SVG logo should exist")
})
t.Run("Detects extension from Content-Type when path has no extension", func(t *testing.T) {
jpgContent := []byte("fake-jpg-content")
//nolint:bodyclose
jpgResponse := testutils.NewMockResponse(http.StatusOK, string(jpgContent))
jpgResponse.Header.Set("Content-Type", "image/jpeg")
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/logo": jpgResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/logo", true)
require.NoError(t, err)
// Verify JPG file was saved (jpeg extension is normalized to jpg)
logoPath := "oidc-client-images/" + client.ID + ".jpg"
require.True(t, fileExists(t, logoPath), "JPG logo should exist")
})
t.Run("Returns error for invalid URL", func(t *testing.T) {
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: &http.Client{},
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "://invalid-url", true)
require.Error(t, err)
})
t.Run("Returns error for non-200 status code", func(t *testing.T) {
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/not-found.png": testutils.NewMockResponse(http.StatusNotFound, "Not Found"),
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/not-found.png", true)
require.Error(t, err)
require.ErrorContains(t, err, "failed to fetch logo")
})
t.Run("Returns error for too large content", func(t *testing.T) {
// Create content larger than 2MB (maxLogoSize)
largeContent := strings.Repeat("x", 2<<20+100) // just over 2MB (maxLogoSize)
//nolint:bodyclose
largeResponse := testutils.NewMockResponse(http.StatusOK, largeContent)
largeResponse.Header.Set("Content-Type", "image/png")
largeResponse.Header.Set("Content-Length", strconv.Itoa(len(largeContent)))
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/large.png": largeResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/large.png", true)
require.Error(t, err)
require.ErrorIs(t, err, errLogoTooLarge)
})
t.Run("Returns error for unsupported file type", func(t *testing.T) {
//nolint:bodyclose
textResponse := testutils.NewMockResponse(http.StatusOK, "text content")
textResponse.Header.Set("Content-Type", "text/plain")
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/file.txt": textResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), client.ID, "https://example.com/file.txt", true)
require.Error(t, err)
var fileTypeErr *common.FileTypeNotSupportedError
require.ErrorAs(t, err, &fileTypeErr)
})
t.Run("Returns error for non-existent client", func(t *testing.T) {
//nolint:bodyclose
pngResponse := testutils.NewMockResponse(http.StatusOK, "content")
pngResponse.Header.Set("Content-Type", "image/png")
mockResponses := map[string]*http.Response{
//nolint:bodyclose
"https://example.com/logo.png": pngResponse,
}
httpClient := &http.Client{
Transport: &testutils.MockRoundTripper{
Responses: mockResponses,
},
}
s := &OidcService{
db: db,
fileStorage: dbStorage,
httpClient: httpClient,
}
err := s.downloadAndSaveLogoFromURL(t.Context(), "non-existent-client-id", "https://example.com/logo.png", true)
require.Error(t, err)
require.ErrorContains(t, err, "failed to look up client")
})
}

View File

@@ -21,7 +21,7 @@ func NewUserGroupService(db *gorm.DB, appConfigService *AppConfigService) *UserG
return &UserGroupService{db: db, appConfigService: appConfigService}
}
func (s *UserGroupService) List(ctx context.Context, name string, sortedPaginationRequest utils.SortedPaginationRequest) (groups []model.UserGroup, response utils.PaginationResponse, err error) {
func (s *UserGroupService) List(ctx context.Context, name string, listRequestOptions utils.ListRequestOptions) (groups []model.UserGroup, response utils.PaginationResponse, err error) {
query := s.db.
WithContext(ctx).
Preload("CustomClaims").
@@ -32,17 +32,14 @@ func (s *UserGroupService) List(ctx context.Context, name string, sortedPaginati
}
// As userCount is not a column, we need to sort by it manually
if sortedPaginationRequest.Sort.Column == "userCount" && utils.IsValidSortDirection(sortedPaginationRequest.Sort.Direction) {
if listRequestOptions.Sort.Column == "userCount" && utils.IsValidSortDirection(listRequestOptions.Sort.Direction) {
query = query.Select("user_groups.*, COUNT(user_groups_users.user_id)").
Joins("LEFT JOIN user_groups_users ON user_groups.id = user_groups_users.user_group_id").
Group("user_groups.id").
Order("COUNT(user_groups_users.user_id) " + sortedPaginationRequest.Sort.Direction)
response, err := utils.Paginate(sortedPaginationRequest.Pagination.Page, sortedPaginationRequest.Pagination.Limit, query, &groups)
return groups, response, err
Order("COUNT(user_groups_users.user_id) " + listRequestOptions.Sort.Direction)
}
response, err = utils.PaginateAndSort(sortedPaginationRequest, query, &groups)
response, err = utils.PaginateFilterAndSort(listRequestOptions, query, &groups)
return groups, response, err
}

View File

@@ -7,20 +7,23 @@ import (
"errors"
"fmt"
"io"
"io/fs"
"log/slog"
"net/url"
"os"
"path"
"strings"
"time"
"github.com/google/uuid"
"go.opentelemetry.io/otel/trace"
"gorm.io/gorm"
"gorm.io/gorm/clause"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/dto"
"github.com/pocket-id/pocket-id/backend/internal/model"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"github.com/pocket-id/pocket-id/backend/internal/storage"
"github.com/pocket-id/pocket-id/backend/internal/utils"
"github.com/pocket-id/pocket-id/backend/internal/utils/email"
profilepicture "github.com/pocket-id/pocket-id/backend/internal/utils/image"
@@ -33,9 +36,11 @@ type UserService struct {
emailService *EmailService
appConfigService *AppConfigService
customClaimService *CustomClaimService
appImagesService *AppImagesService
fileStorage storage.FileStorage
}
func NewUserService(db *gorm.DB, jwtService *JwtService, auditLogService *AuditLogService, emailService *EmailService, appConfigService *AppConfigService, customClaimService *CustomClaimService) *UserService {
func NewUserService(db *gorm.DB, jwtService *JwtService, auditLogService *AuditLogService, emailService *EmailService, appConfigService *AppConfigService, customClaimService *CustomClaimService, appImagesService *AppImagesService, fileStorage storage.FileStorage) *UserService {
return &UserService{
db: db,
jwtService: jwtService,
@@ -43,10 +48,12 @@ func NewUserService(db *gorm.DB, jwtService *JwtService, auditLogService *AuditL
emailService: emailService,
appConfigService: appConfigService,
customClaimService: customClaimService,
appImagesService: appImagesService,
fileStorage: fileStorage,
}
}
func (s *UserService) ListUsers(ctx context.Context, searchTerm string, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.User, utils.PaginationResponse, error) {
func (s *UserService) ListUsers(ctx context.Context, searchTerm string, listRequestOptions utils.ListRequestOptions) ([]model.User, utils.PaginationResponse, error) {
var users []model.User
query := s.db.WithContext(ctx).
Model(&model.User{}).
@@ -60,7 +67,7 @@ func (s *UserService) ListUsers(ctx context.Context, searchTerm string, sortedPa
searchPattern, searchPattern, searchPattern, searchPattern)
}
pagination, err := utils.PaginateAndSort(sortedPaginationRequest, query, &users)
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &users)
return users, pagination, err
}
@@ -87,39 +94,42 @@ func (s *UserService) GetProfilePicture(ctx context.Context, userID string) (io.
return nil, 0, &common.InvalidUUIDError{}
}
// First check for a custom uploaded profile picture (userID.png)
profilePicturePath := common.EnvConfig.UploadPath + "/profile-pictures/" + userID + ".png"
file, err := os.Open(profilePicturePath)
if err == nil {
// Get the file size
fileInfo, err := file.Stat()
if err != nil {
file.Close()
return nil, 0, err
}
return file, fileInfo.Size(), nil
}
// If no custom picture exists, get the user's data for creating initials
user, err := s.GetUser(ctx, userID)
if err != nil {
return nil, 0, err
}
// Check if we have a cached default picture for these initials
defaultProfilePicturesDir := common.EnvConfig.UploadPath + "/profile-pictures/defaults/"
defaultPicturePath := defaultProfilePicturesDir + user.Initials() + ".png"
file, err = os.Open(defaultPicturePath)
profilePicturePath := path.Join("profile-pictures", userID+".png")
// Try custom profile picture
file, size, err := s.fileStorage.Open(ctx, profilePicturePath)
if err == nil {
fileInfo, err := file.Stat()
if err != nil {
file.Close()
return nil, 0, err
}
return file, fileInfo.Size(), nil
return file, size, nil
} else if !errors.Is(err, fs.ErrNotExist) {
return nil, 0, err
}
// If no cached default picture exists, create one and save it for future use
// Try default global profile picture
if s.appImagesService.IsDefaultProfilePictureSet() {
reader, size, _, err := s.appImagesService.GetImage(ctx, "default-profile-picture")
if err == nil {
return reader, size, nil
}
if !errors.Is(err, &common.ImageNotFoundError{}) {
return nil, 0, err
}
}
// Try cached default for initials
defaultPicturePath := path.Join("profile-pictures", "defaults", user.Initials()+".png")
file, size, err = s.fileStorage.Open(ctx, defaultPicturePath)
if err == nil {
return file, size, nil
} else if !errors.Is(err, fs.ErrNotExist) {
return nil, 0, err
}
// Create and return generated default with initials
defaultPicture, err := profilepicture.CreateDefaultProfilePicture(user.Initials())
if err != nil {
return nil, 0, err
@@ -127,20 +137,16 @@ func (s *UserService) GetProfilePicture(ctx context.Context, userID string) (io.
// Save the default picture for future use (in a goroutine to avoid blocking)
defaultPictureBytes := defaultPicture.Bytes()
//nolint:contextcheck
go func() {
// Ensure the directory exists
errInternal := os.MkdirAll(defaultProfilePicturesDir, os.ModePerm)
if errInternal != nil {
slog.Error("Failed to create directory for default profile picture", slog.Any("error", errInternal))
return
}
errInternal = utils.SaveFileStream(bytes.NewReader(defaultPictureBytes), defaultPicturePath)
if errInternal != nil {
slog.Error("Failed to cache default profile picture for initials", slog.String("initials", user.Initials()), slog.Any("error", errInternal))
// Use bytes.NewReader because we need an io.ReadSeeker
rErr := s.fileStorage.Save(context.Background(), defaultPicturePath, bytes.NewReader(defaultPictureBytes))
if rErr != nil {
slog.Error("Failed to cache default profile picture", slog.String("initials", user.Initials()), slog.Any("error", rErr))
}
}()
return io.NopCloser(bytes.NewReader(defaultPictureBytes)), int64(defaultPicture.Len()), nil
return io.NopCloser(bytes.NewReader(defaultPictureBytes)), int64(len(defaultPictureBytes)), nil
}
func (s *UserService) GetUserGroups(ctx context.Context, userID string) ([]model.UserGroup, error) {
@@ -157,7 +163,7 @@ func (s *UserService) GetUserGroups(ctx context.Context, userID string) ([]model
return user.UserGroups, nil
}
func (s *UserService) UpdateProfilePicture(userID string, file io.Reader) error {
func (s *UserService) UpdateProfilePicture(ctx context.Context, userID string, file io.ReadSeeker) error {
// Validate the user ID to prevent directory traversal
err := uuid.Validate(userID)
if err != nil {
@@ -170,15 +176,8 @@ func (s *UserService) UpdateProfilePicture(userID string, file io.Reader) error
return err
}
// Ensure the directory exists
profilePictureDir := common.EnvConfig.UploadPath + "/profile-pictures"
err = os.MkdirAll(profilePictureDir, os.ModePerm)
if err != nil {
return err
}
// Create the profile picture file
err = utils.SaveFileStream(profilePicture, profilePictureDir+"/"+userID+".png")
profilePicturePath := path.Join("profile-pictures", userID+".png")
err = s.fileStorage.Save(ctx, profilePicturePath, profilePicture)
if err != nil {
return err
}
@@ -187,17 +186,30 @@ func (s *UserService) UpdateProfilePicture(userID string, file io.Reader) error
}
func (s *UserService) DeleteUser(ctx context.Context, userID string, allowLdapDelete bool) error {
return s.db.Transaction(func(tx *gorm.DB) error {
return s.deleteUserInternal(ctx, userID, allowLdapDelete, tx)
err := s.db.Transaction(func(tx *gorm.DB) error {
return s.deleteUserInternal(ctx, tx, userID, allowLdapDelete)
})
if err != nil {
return fmt.Errorf("failed to delete user '%s': %w", userID, err)
}
// Storage operations must be executed outside of a transaction
profilePicturePath := path.Join("profile-pictures", userID+".png")
err = s.fileStorage.Delete(ctx, profilePicturePath)
if err != nil && !storage.IsNotExist(err) {
return fmt.Errorf("failed to delete profile picture for user '%s': %w", userID, err)
}
return nil
}
func (s *UserService) deleteUserInternal(ctx context.Context, userID string, allowLdapDelete bool, tx *gorm.DB) error {
func (s *UserService) deleteUserInternal(ctx context.Context, tx *gorm.DB, userID string, allowLdapDelete bool) error {
var user model.User
err := tx.
WithContext(ctx).
Where("id = ?", userID).
Clauses(clause.Locking{Strength: "UPDATE"}).
First(&user).
Error
if err != nil {
@@ -209,13 +221,6 @@ func (s *UserService) deleteUserInternal(ctx context.Context, userID string, all
return &common.LdapUserUpdateError{}
}
// Delete the profile picture
profilePicturePath := common.EnvConfig.UploadPath + "/profile-pictures/" + userID + ".png"
err = os.Remove(profilePicturePath)
if err != nil && !os.IsNotExist(err) {
return err
}
err = tx.WithContext(ctx).Delete(&user).Error
if err != nil {
return fmt.Errorf("failed to delete user: %w", err)
@@ -244,6 +249,10 @@ func (s *UserService) CreateUser(ctx context.Context, input dto.UserCreateDto) (
}
func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCreateDto, isLdapSync bool, tx *gorm.DB) (model.User, error) {
if s.appConfigService.GetDbConfig().RequireUserEmail.IsTrue() && input.Email == nil {
return model.User{}, &common.UserEmailNotSetError{}
}
user := model.User{
FirstName: input.FirstName,
LastName: input.LastName,
@@ -252,6 +261,7 @@ func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCrea
Username: input.Username,
IsAdmin: input.IsAdmin,
Locale: input.Locale,
Disabled: input.Disabled,
}
if input.LdapID != "" {
user.LdapID = &input.LdapID
@@ -288,16 +298,27 @@ func (s *UserService) applySignupDefaults(ctx context.Context, user *model.User,
// Apply default user groups
var groupIDs []string
if v := config.SignupDefaultUserGroupIDs.Value; v != "" && v != "[]" {
if err := json.Unmarshal([]byte(v), &groupIDs); err != nil {
v := config.SignupDefaultUserGroupIDs.Value
if v != "" && v != "[]" {
err := json.Unmarshal([]byte(v), &groupIDs)
if err != nil {
return fmt.Errorf("invalid SignupDefaultUserGroupIDs JSON: %w", err)
}
if len(groupIDs) > 0 {
var groups []model.UserGroup
if err := tx.WithContext(ctx).Where("id IN ?", groupIDs).Find(&groups).Error; err != nil {
err = tx.WithContext(ctx).
Where("id IN ?", groupIDs).
Find(&groups).
Error
if err != nil {
return fmt.Errorf("failed to find default user groups: %w", err)
}
if err := tx.WithContext(ctx).Model(user).Association("UserGroups").Replace(groups); err != nil {
err = tx.WithContext(ctx).
Model(user).
Association("UserGroups").
Replace(groups)
if err != nil {
return fmt.Errorf("failed to associate default user groups: %w", err)
}
}
@@ -305,12 +326,15 @@ func (s *UserService) applySignupDefaults(ctx context.Context, user *model.User,
// Apply default custom claims
var claims []dto.CustomClaimCreateDto
if v := config.SignupDefaultCustomClaims.Value; v != "" && v != "[]" {
if err := json.Unmarshal([]byte(v), &claims); err != nil {
v = config.SignupDefaultCustomClaims.Value
if v != "" && v != "[]" {
err := json.Unmarshal([]byte(v), &claims)
if err != nil {
return fmt.Errorf("invalid SignupDefaultCustomClaims JSON: %w", err)
}
if len(claims) > 0 {
if _, err := s.customClaimService.updateCustomClaimsInternal(ctx, UserID, user.ID, claims, tx); err != nil {
_, err = s.customClaimService.updateCustomClaimsInternal(ctx, UserID, user.ID, claims, tx)
if err != nil {
return fmt.Errorf("failed to apply default custom claims: %w", err)
}
}
@@ -339,10 +363,15 @@ func (s *UserService) UpdateUser(ctx context.Context, userID string, updatedUser
}
func (s *UserService) updateUserInternal(ctx context.Context, userID string, updatedUser dto.UserCreateDto, updateOwnUser bool, isLdapSync bool, tx *gorm.DB) (model.User, error) {
if s.appConfigService.GetDbConfig().RequireUserEmail.IsTrue() && updatedUser.Email == nil {
return model.User{}, &common.UserEmailNotSetError{}
}
var user model.User
err := tx.
WithContext(ctx).
Where("id = ?", userID).
Clauses(clause.Locking{Strength: "UPDATE"}).
First(&user).
Error
if err != nil {
@@ -414,13 +443,11 @@ func (s *UserService) RequestOneTimeAccessEmailAsUnauthenticatedUser(ctx context
var userId string
err := s.db.Model(&model.User{}).Select("id").Where("email = ?", userID).First(&userId).Error
if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
// Do not return error if user not found to prevent email enumeration
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil
} else {
return err
}
return nil
} else if err != nil {
return err
}
return s.requestOneTimeAccessEmailInternal(ctx, userId, redirectPath, 15*time.Minute)
@@ -437,6 +464,10 @@ func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, use
return err
}
if user.Email == nil {
return &common.UserEmailNotSetError{}
}
oneTimeAccessToken, err := s.createOneTimeAccessTokenInternal(ctx, user.ID, ttl, tx)
if err != nil {
return err
@@ -464,7 +495,7 @@ func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, use
errInternal := SendEmail(innerCtx, s.emailService, email.Address{
Name: user.FullName(),
Email: user.Email,
Email: *user.Email,
}, OneTimeAccessTemplate, &OneTimeAccessTemplateData{
Code: oneTimeAccessToken,
LoginLink: link,
@@ -472,7 +503,7 @@ func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, use
ExpirationString: utils.DurationToString(ttl),
})
if errInternal != nil {
slog.ErrorContext(innerCtx, "Failed to send one-time access token email", slog.Any("error", errInternal), slog.String("address", user.Email))
slog.ErrorContext(innerCtx, "Failed to send one-time access token email", slog.Any("error", errInternal), slog.String("address", *user.Email))
return
}
}()
@@ -507,7 +538,9 @@ func (s *UserService) ExchangeOneTimeAccessToken(ctx context.Context, token stri
var oneTimeAccessToken model.OneTimeAccessToken
err := tx.
WithContext(ctx).
Where("token = ? AND expires_at > ?", token, datatype.DateTime(time.Now())).Preload("User").
Where("token = ? AND expires_at > ?", token, datatype.DateTime(time.Now())).
Preload("User").
Clauses(clause.Locking{Strength: "UPDATE"}).
First(&oneTimeAccessToken).
Error
if err != nil {
@@ -660,30 +693,20 @@ func (s *UserService) checkDuplicatedFields(ctx context.Context, user model.User
}
// ResetProfilePicture deletes a user's custom profile picture
func (s *UserService) ResetProfilePicture(userID string) error {
func (s *UserService) ResetProfilePicture(ctx context.Context, userID string) error {
// Validate the user ID to prevent directory traversal
if err := uuid.Validate(userID); err != nil {
return &common.InvalidUUIDError{}
}
// Build path to profile picture
profilePicturePath := common.EnvConfig.UploadPath + "/profile-pictures/" + userID + ".png"
// Check if file exists and delete it
if _, err := os.Stat(profilePicturePath); err == nil {
if err := os.Remove(profilePicturePath); err != nil {
return fmt.Errorf("failed to delete profile picture: %w", err)
}
} else if !os.IsNotExist(err) {
// If any error other than "file not exists"
return fmt.Errorf("failed to check if profile picture exists: %w", err)
profilePicturePath := path.Join("profile-pictures", userID+".png")
if err := s.fileStorage.Delete(ctx, profilePicturePath); err != nil {
return fmt.Errorf("failed to delete profile picture: %w", err)
}
// It's okay if the file doesn't exist; that just means there's no custom picture to delete
return nil
}
func (s *UserService) disableUserInternal(ctx context.Context, userID string, tx *gorm.DB) error {
func (s *UserService) disableUserInternal(ctx context.Context, tx *gorm.DB, userID string) error {
return tx.
WithContext(ctx).
Model(&model.User{}).
@@ -724,6 +747,7 @@ func (s *UserService) SignUp(ctx context.Context, signupData dto.SignUpDto, ipAd
err := tx.
WithContext(ctx).
Where("token = ?", signupData.Token).
Clauses(clause.Locking{Strength: "UPDATE"}).
First(&signupToken).
Error
if err != nil {
@@ -782,11 +806,11 @@ func (s *UserService) SignUp(ctx context.Context, signupData dto.SignUpDto, ipAd
return user, accessToken, nil
}
func (s *UserService) ListSignupTokens(ctx context.Context, sortedPaginationRequest utils.SortedPaginationRequest) ([]model.SignupToken, utils.PaginationResponse, error) {
func (s *UserService) ListSignupTokens(ctx context.Context, listRequestOptions utils.ListRequestOptions) ([]model.SignupToken, utils.PaginationResponse, error) {
var tokens []model.SignupToken
query := s.db.WithContext(ctx).Model(&model.SignupToken{})
pagination, err := utils.PaginateAndSort(sortedPaginationRequest, query, &tokens)
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &tokens)
return tokens, pagination, err
}

View File

@@ -58,7 +58,7 @@ func (s *VersionService) GetLatestVersion(ctx context.Context) (string, error) {
}
if payload.TagName == "" {
return "", fmt.Errorf("GitHub API returned empty tag name")
return "", errors.New("GitHub API returned empty tag name")
}
return strings.TrimPrefix(payload.TagName, "v"), nil

View File

@@ -2,6 +2,8 @@ package service
import (
"context"
"encoding/hex"
"errors"
"fmt"
"net/http"
"time"
@@ -81,6 +83,7 @@ func (s *WebAuthnService) BeginRegistration(ctx context.Context, userID string)
&user,
webauthn.WithResidentKeyRequirement(protocol.ResidentKeyRequirementRequired),
webauthn.WithExclusions(user.WebAuthnCredentialDescriptors()),
webauthn.WithExtensions(map[string]any{"credProps": true}), // Required for Firefox Android to properly save the key in Google password manager
)
if err != nil {
return nil, fmt.Errorf("failed to begin WebAuthn registration: %w", err)
@@ -89,6 +92,7 @@ func (s *WebAuthnService) BeginRegistration(ctx context.Context, userID string)
sessionToStore := &model.WebauthnSession{
ExpiresAt: datatype.DateTime(session.Expires),
Challenge: session.Challenge,
CredentialParams: session.CredParams,
UserVerification: string(session.UserVerification),
}
@@ -112,7 +116,7 @@ func (s *WebAuthnService) BeginRegistration(ctx context.Context, userID string)
}, nil
}
func (s *WebAuthnService) VerifyRegistration(ctx context.Context, sessionID, userID string, r *http.Request) (model.WebauthnCredential, error) {
func (s *WebAuthnService) VerifyRegistration(ctx context.Context, sessionID string, userID string, r *http.Request, ipAddress string) (model.WebauthnCredential, error) {
tx := s.db.Begin()
defer func() {
tx.Rollback()
@@ -130,9 +134,10 @@ func (s *WebAuthnService) VerifyRegistration(ctx context.Context, sessionID, use
}
session := webauthn.SessionData{
Challenge: storedSession.Challenge,
Expires: storedSession.ExpiresAt.ToTime(),
UserID: []byte(userID),
Challenge: storedSession.Challenge,
Expires: storedSession.ExpiresAt.ToTime(),
CredParams: storedSession.CredentialParams,
UserID: []byte(userID),
}
var user model.User
@@ -170,6 +175,9 @@ func (s *WebAuthnService) VerifyRegistration(ctx context.Context, sessionID, use
return model.WebauthnCredential{}, fmt.Errorf("failed to store WebAuthn credential: %w", err)
}
auditLogData := model.AuditLogData{"credentialID": hex.EncodeToString(credential.ID), "passkeyName": passkeyName}
s.auditLogService.Create(ctx, model.AuditLogEventPasskeyAdded, ipAddress, r.UserAgent(), userID, auditLogData, tx)
err = tx.Commit().Error
if err != nil {
return model.WebauthnCredential{}, fmt.Errorf("failed to commit transaction: %w", err)
@@ -285,16 +293,30 @@ func (s *WebAuthnService) ListCredentials(ctx context.Context, userID string) ([
return credentials, nil
}
func (s *WebAuthnService) DeleteCredential(ctx context.Context, userID, credentialID string) error {
err := s.db.
func (s *WebAuthnService) DeleteCredential(ctx context.Context, userID string, credentialID string, ipAddress string, userAgent string) error {
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()
credential := &model.WebauthnCredential{}
err := tx.
WithContext(ctx).
Where("id = ? AND user_id = ?", credentialID, userID).
Delete(&model.WebauthnCredential{}).
Clauses(clause.Returning{}).
Delete(credential, "id = ? AND user_id = ?", credentialID, userID).
Error
if err != nil {
return fmt.Errorf("failed to delete record: %w", err)
}
auditLogData := model.AuditLogData{"credentialID": hex.EncodeToString(credential.CredentialID), "passkeyName": credential.Name}
s.auditLogService.Create(ctx, model.AuditLogEventPasskeyRemoved, ipAddress, userAgent, userID, auditLogData, tx)
err = tx.Commit().Error
if err != nil {
return fmt.Errorf("failed to commit transaction: %w", err)
}
return nil
}
@@ -350,7 +372,7 @@ func (s *WebAuthnService) CreateReauthenticationTokenWithAccessToken(ctx context
userID, ok := token.Subject()
if !ok {
return "", fmt.Errorf("access token does not contain user ID")
return "", errors.New("access token does not contain user ID")
}
// Check if the token was issued less than a minute ago

View File

@@ -0,0 +1,226 @@
package storage
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"time"
"github.com/pocket-id/pocket-id/backend/internal/model"
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
var TypeDatabase = "database"
type databaseStorage struct {
db *gorm.DB
}
// NewDatabaseStorage creates a new database storage provider
func NewDatabaseStorage(db *gorm.DB) (FileStorage, error) {
if db == nil {
return nil, errors.New("database connection is required")
}
return &databaseStorage{db: db}, nil
}
func (s *databaseStorage) Type() string {
return TypeDatabase
}
func (s *databaseStorage) Save(ctx context.Context, relativePath string, data io.Reader) error {
// Normalize the path
relativePath = filepath.ToSlash(filepath.Clean(relativePath))
// Read all data into memory
b, err := io.ReadAll(data)
if err != nil {
return fmt.Errorf("failed to read data: %w", err)
}
now := datatype.DateTime(time.Now())
storage := model.Storage{
Path: relativePath,
Data: b,
Size: int64(len(b)),
ModTime: now,
CreatedAt: now,
}
// Use upsert: insert or update on conflict
result := s.db.
WithContext(ctx).
Clauses(clause.OnConflict{
Columns: []clause.Column{{Name: "path"}},
DoUpdates: clause.AssignmentColumns([]string{"data", "size", "mod_time"}),
}).
Create(&storage)
if result.Error != nil {
return fmt.Errorf("failed to save file to database: %w", result.Error)
}
return nil
}
func (s *databaseStorage) Open(ctx context.Context, relativePath string) (io.ReadCloser, int64, error) {
relativePath = filepath.ToSlash(filepath.Clean(relativePath))
var storage model.Storage
result := s.db.
WithContext(ctx).
Where("path = ?", relativePath).
First(&storage)
if result.Error != nil {
if errors.Is(result.Error, gorm.ErrRecordNotFound) {
return nil, 0, os.ErrNotExist
}
return nil, 0, fmt.Errorf("failed to read file from database: %w", result.Error)
}
reader := io.NopCloser(bytes.NewReader(storage.Data))
return reader, storage.Size, nil
}
func (s *databaseStorage) Delete(ctx context.Context, relativePath string) error {
relativePath = filepath.ToSlash(filepath.Clean(relativePath))
result := s.db.
WithContext(ctx).
Where("path = ?", relativePath).
Delete(&model.Storage{})
if result.Error != nil {
return fmt.Errorf("failed to delete file from database: %w", result.Error)
}
return nil
}
func (s *databaseStorage) DeleteAll(ctx context.Context, prefix string) error {
prefix = filepath.ToSlash(filepath.Clean(prefix))
// If empty prefix, delete all
if isRootPath(prefix) {
result := s.db.
WithContext(ctx).
Where("1 = 1"). // Delete everything
Delete(&model.Storage{})
if result.Error != nil {
return fmt.Errorf("failed to delete all files from database: %w", result.Error)
}
return nil
}
// Ensure prefix ends with / for proper prefix matching
if !strings.HasSuffix(prefix, "/") {
prefix += "/"
}
query := s.db.WithContext(ctx)
query = addPathPrefixClause(s.db.Name(), query, prefix)
result := query.Delete(&model.Storage{})
if result.Error != nil {
return fmt.Errorf("failed to delete files with prefix '%s' from database: %w", prefix, result.Error)
}
return nil
}
func (s *databaseStorage) List(ctx context.Context, prefix string) ([]ObjectInfo, error) {
prefix = filepath.ToSlash(filepath.Clean(prefix))
var storageItems []model.Storage
query := s.db.WithContext(ctx)
if !isRootPath(prefix) {
// Ensure the prefix ends with / for proper prefix matching
if !strings.HasSuffix(prefix, "/") {
prefix += "/"
}
query = addPathPrefixClause(s.db.Name(), query, prefix)
}
result := query.
Select("path", "size", "mod_time").
Find(&storageItems)
if result.Error != nil {
return nil, fmt.Errorf("failed to list files from database: %w", result.Error)
}
objects := make([]ObjectInfo, 0, len(storageItems))
for _, item := range storageItems {
// Filter out directory-like paths (those that contain additional slashes after the prefix)
relativePath := strings.TrimPrefix(item.Path, prefix)
if strings.ContainsRune(relativePath, '/') {
continue
}
objects = append(objects, ObjectInfo{
Path: item.Path,
Size: item.Size,
ModTime: time.Time(item.ModTime),
})
}
return objects, nil
}
func (s *databaseStorage) Walk(ctx context.Context, root string, fn func(ObjectInfo) error) error {
root = filepath.ToSlash(filepath.Clean(root))
var storageItems []model.Storage
query := s.db.WithContext(ctx)
if !isRootPath(root) {
// Ensure the root ends with / for proper prefix matching
if !strings.HasSuffix(root, "/") {
root += "/"
}
query = addPathPrefixClause(s.db.Name(), query, root)
}
result := query.
Select("path", "size", "mod_time").
Find(&storageItems)
if result.Error != nil {
return fmt.Errorf("failed to walk files from database: %w", result.Error)
}
for _, item := range storageItems {
err := fn(ObjectInfo{
Path: item.Path,
Size: item.Size,
ModTime: time.Time(item.ModTime),
})
if err != nil {
return err
}
}
return nil
}
func isRootPath(path string) bool {
return path == "" || path == "/" || path == "."
}
func addPathPrefixClause(dialect string, query *gorm.DB, prefix string) *gorm.DB {
// In SQLite we use "GLOB", which can use the index; Postgres uses a LIKE prefix match
switch dialect {
case "sqlite":
return query.Where("path GLOB ?", prefix+"*")
case "postgres":
return query.Where("path LIKE ?", prefix+"%")
default:
// Indicates a development-time error
panic(fmt.Errorf("unsupported database dialect: %s", dialect))
}
}
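Because GORM assembles the final SQL, the prefix clause each dialect ends up with is easiest to see spelled out. An illustrative sketch of the conditions addPathPrefixClause produces, shown for a hypothetical "profile-pictures/" prefix; GLOB can use an ordinary index on path (as the comment in the helper notes), while Postgres falls back to a LIKE prefix match.

package main

import "fmt"

// prefixClause mirrors what addPathPrefixClause adds to the query for each
// supported dialect; this is an illustration, not the production helper.
func prefixClause(dialect, prefix string) (condition, arg string) {
    switch dialect {
    case "sqlite":
        return "path GLOB ?", prefix + "*"
    case "postgres":
        return "path LIKE ?", prefix + "%"
    default:
        panic("unsupported dialect: " + dialect)
    }
}

func main() {
    cond, arg := prefixClause("sqlite", "profile-pictures/")
    fmt.Println(cond, arg) // path GLOB ? profile-pictures/*
    cond, arg = prefixClause("postgres", "profile-pictures/")
    fmt.Println(cond, arg) // path LIKE ? profile-pictures/%
}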

View File

@@ -0,0 +1,148 @@
package storage
import (
"bytes"
"context"
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
testingutil "github.com/pocket-id/pocket-id/backend/internal/utils/testing"
)
func TestDatabaseStorageOperations(t *testing.T) {
ctx := context.Background()
db := testingutil.NewDatabaseForTest(t)
store, err := NewDatabaseStorage(db)
require.NoError(t, err)
t.Run("type should be database", func(t *testing.T) {
assert.Equal(t, TypeDatabase, store.Type())
})
t.Run("save, open and list files", func(t *testing.T) {
err := store.Save(ctx, "images/logo.png", bytes.NewBufferString("logo-data"))
require.NoError(t, err)
reader, size, err := store.Open(ctx, "images/logo.png")
require.NoError(t, err)
defer reader.Close()
contents, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, []byte("logo-data"), contents)
assert.Equal(t, int64(len(contents)), size)
err = store.Save(ctx, "images/nested/child.txt", bytes.NewBufferString("child"))
require.NoError(t, err)
files, err := store.List(ctx, "images")
require.NoError(t, err)
require.Len(t, files, 1)
assert.Equal(t, "images/logo.png", files[0].Path)
assert.Equal(t, int64(len("logo-data")), files[0].Size)
})
t.Run("save should update existing file", func(t *testing.T) {
err := store.Save(ctx, "test/update.txt", bytes.NewBufferString("original"))
require.NoError(t, err)
err = store.Save(ctx, "test/update.txt", bytes.NewBufferString("updated"))
require.NoError(t, err)
reader, size, err := store.Open(ctx, "test/update.txt")
require.NoError(t, err)
defer reader.Close()
contents, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, []byte("updated"), contents)
assert.Equal(t, int64(len("updated")), size)
})
t.Run("delete files individually", func(t *testing.T) {
err := store.Save(ctx, "images/delete-me.txt", bytes.NewBufferString("temp"))
require.NoError(t, err)
require.NoError(t, store.Delete(ctx, "images/delete-me.txt"))
_, _, err = store.Open(ctx, "images/delete-me.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
})
t.Run("delete missing file should not error", func(t *testing.T) {
require.NoError(t, store.Delete(ctx, "images/missing.txt"))
})
t.Run("delete all files", func(t *testing.T) {
require.NoError(t, store.Save(ctx, "cleanup/a.txt", bytes.NewBufferString("a")))
require.NoError(t, store.Save(ctx, "cleanup/b.txt", bytes.NewBufferString("b")))
require.NoError(t, store.Save(ctx, "cleanup/nested/c.txt", bytes.NewBufferString("c")))
require.NoError(t, store.DeleteAll(ctx, "/"))
_, _, err := store.Open(ctx, "cleanup/a.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
_, _, err = store.Open(ctx, "cleanup/b.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
_, _, err = store.Open(ctx, "cleanup/nested/c.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
})
t.Run("delete all files under a prefix", func(t *testing.T) {
require.NoError(t, store.Save(ctx, "cleanup/a.txt", bytes.NewBufferString("a")))
require.NoError(t, store.Save(ctx, "cleanup/b.txt", bytes.NewBufferString("b")))
require.NoError(t, store.Save(ctx, "cleanup/nested/c.txt", bytes.NewBufferString("c")))
require.NoError(t, store.DeleteAll(ctx, "cleanup"))
_, _, err := store.Open(ctx, "cleanup/a.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
_, _, err = store.Open(ctx, "cleanup/b.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
_, _, err = store.Open(ctx, "cleanup/nested/c.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
})
t.Run("walk files", func(t *testing.T) {
require.NoError(t, store.Save(ctx, "walk/file1.txt", bytes.NewBufferString("1")))
require.NoError(t, store.Save(ctx, "walk/file2.txt", bytes.NewBufferString("2")))
require.NoError(t, store.Save(ctx, "walk/nested/file3.txt", bytes.NewBufferString("3")))
var paths []string
err := store.Walk(ctx, "walk", func(info ObjectInfo) error {
paths = append(paths, info.Path)
return nil
})
require.NoError(t, err)
assert.Len(t, paths, 3)
assert.Contains(t, paths, "walk/file1.txt")
assert.Contains(t, paths, "walk/file2.txt")
assert.Contains(t, paths, "walk/nested/file3.txt")
})
}
func TestNewDatabaseStorage(t *testing.T) {
t.Run("should return error with nil database", func(t *testing.T) {
_, err := NewDatabaseStorage(nil)
require.Error(t, err)
assert.Contains(t, err.Error(), "database connection is required")
})
t.Run("should create storage with valid database", func(t *testing.T) {
db := testingutil.NewDatabaseForTest(t)
store, err := NewDatabaseStorage(db)
require.NoError(t, err)
assert.NotNil(t, store)
})
}

View File

@@ -0,0 +1,193 @@
package storage
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"strings"
"github.com/google/uuid"
)
type filesystemStorage struct {
root *os.Root
absoluteRootPath string
}
func NewFilesystemStorage(rootPath string) (FileStorage, error) {
if err := os.MkdirAll(rootPath, 0700); err != nil {
return nil, fmt.Errorf("failed to create root directory '%s': %w", rootPath, err)
}
root, err := os.OpenRoot(rootPath)
if err != nil {
return nil, fmt.Errorf("failed to open root directory '%s': %w", rootPath, err)
}
absoluteRootPath, err := filepath.Abs(rootPath)
if err != nil {
return nil, fmt.Errorf("failed to get absolute path of root directory '%s': %w", rootPath, err)
}
return &filesystemStorage{root: root, absoluteRootPath: absoluteRootPath}, err
}
func (s *filesystemStorage) Type() string {
return TypeFileSystem
}
func (s *filesystemStorage) Save(_ context.Context, path string, data io.Reader) error {
path = filepath.FromSlash(path)
if err := s.root.MkdirAll(filepath.Dir(path), 0700); err != nil {
return fmt.Errorf("failed to create directories for path '%s': %w", path, err)
}
// Our strategy is to save to a separate file and then rename it to overwrite the original file
tmpName := path + "." + uuid.NewString() + "-tmp"
// Write to the temporary file
tmpFile, err := s.root.Create(tmpName)
if err != nil {
return fmt.Errorf("failed to open file '%s' for writing: %w", tmpName, err)
}
_, err = io.Copy(tmpFile, data)
if err != nil {
tmpFile.Close()
_ = s.root.Remove(tmpName)
return fmt.Errorf("failed to write temporary file: %w", err)
}
if err = tmpFile.Close(); err != nil {
_ = s.root.Remove(tmpName)
return fmt.Errorf("failed to close temporary file: %w", err)
}
// Rename to the final file, which overwrites any existing file
// This is an atomic operation
if err = s.root.Rename(tmpName, path); err != nil {
_ = s.root.Remove(tmpName)
return fmt.Errorf("failed to move temporary file: %w", err)
}
return nil
}
func (s *filesystemStorage) Open(_ context.Context, path string) (io.ReadCloser, int64, error) {
path = filepath.FromSlash(path)
file, err := s.root.Open(path)
if err != nil {
return nil, 0, err
}
info, err := file.Stat()
if err != nil {
file.Close()
return nil, 0, err
}
return file, info.Size(), nil
}
func (s *filesystemStorage) Delete(_ context.Context, path string) error {
path = filepath.FromSlash(path)
err := s.root.Remove(path)
if err != nil && !errors.Is(err, fs.ErrNotExist) {
return err
}
return nil
}
func (s *filesystemStorage) DeleteAll(_ context.Context, path string) error {
path = filepath.FromSlash(path)
// If "/", "." or "" is requested, we delete all contents of the root.
if path == "" || path == "/" || path == "." {
dir, err := s.root.Open(".")
if err != nil {
return fmt.Errorf("failed to open root directory: %w", err)
}
defer dir.Close()
entries, err := dir.ReadDir(-1)
if err != nil {
return fmt.Errorf("failed to list root directory: %w", err)
}
for _, entry := range entries {
if err := s.root.RemoveAll(entry.Name()); err != nil {
return fmt.Errorf("failed to delete '%s': %w", entry.Name(), err)
}
}
return nil
}
return s.root.RemoveAll(path)
}
func (s *filesystemStorage) List(_ context.Context, path string) ([]ObjectInfo, error) {
path = filepath.FromSlash(path)
dir, err := s.root.Open(path)
if err != nil {
return nil, err
}
defer dir.Close()
entries, err := dir.ReadDir(-1)
if err != nil {
return nil, err
}
objects := make([]ObjectInfo, 0, len(entries))
for _, entry := range entries {
if entry.IsDir() {
continue
}
info, err := entry.Info()
if err != nil {
return nil, err
}
objects = append(objects, ObjectInfo{
Path: filepath.Join(path, entry.Name()),
Size: info.Size(),
ModTime: info.ModTime(),
})
}
return objects, nil
}
func (s *filesystemStorage) Walk(_ context.Context, root string, fn func(ObjectInfo) error) error {
root = filepath.FromSlash(root)
fullPath := filepath.Clean(filepath.Join(s.absoluteRootPath, root))
// As we can't use os.Root here, we manually ensure that the fullPath is within the root directory
sep := string(filepath.Separator)
if !strings.HasPrefix(fullPath+sep, s.absoluteRootPath+sep) {
return fmt.Errorf("invalid root path: %s", root)
}
return filepath.WalkDir(fullPath, func(full string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
rel, err := filepath.Rel(s.absoluteRootPath, full)
if err != nil {
return err
}
info, err := d.Info()
if err != nil {
return err
}
return fn(ObjectInfo{
Path: filepath.ToSlash(rel),
Size: info.Size(),
ModTime: info.ModTime(),
})
})
}

View File

@@ -0,0 +1,68 @@
package storage
import (
"bytes"
"context"
"io"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFilesystemStorageOperations(t *testing.T) {
ctx := context.Background()
store, err := NewFilesystemStorage(t.TempDir())
require.NoError(t, err)
t.Run("save, open and list files", func(t *testing.T) {
err := store.Save(ctx, "images/logo.png", bytes.NewBufferString("logo-data"))
require.NoError(t, err)
reader, size, err := store.Open(ctx, "images/logo.png")
require.NoError(t, err)
defer reader.Close()
contents, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, []byte("logo-data"), contents)
assert.Equal(t, int64(len(contents)), size)
err = store.Save(ctx, "images/nested/child.txt", bytes.NewBufferString("child"))
require.NoError(t, err)
files, err := store.List(ctx, "images")
require.NoError(t, err)
require.Len(t, files, 1)
assert.Equal(t, filepath.Join("images", "logo.png"), files[0].Path)
assert.Equal(t, int64(len("logo-data")), files[0].Size)
})
t.Run("delete files individually and idempotently", func(t *testing.T) {
err := store.Save(ctx, "images/delete-me.txt", bytes.NewBufferString("temp"))
require.NoError(t, err)
require.NoError(t, store.Delete(ctx, "images/delete-me.txt"))
_, _, err = store.Open(ctx, "images/delete-me.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
// Deleting a missing object should be a no-op.
require.NoError(t, store.Delete(ctx, "images/missing.txt"))
})
t.Run("delete all files under a prefix", func(t *testing.T) {
require.NoError(t, store.Save(ctx, "images/a.txt", bytes.NewBufferString("a")))
require.NoError(t, store.Save(ctx, "images/b.txt", bytes.NewBufferString("b")))
require.NoError(t, store.DeleteAll(ctx, "images"))
_, _, err := store.Open(ctx, "images/a.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
_, _, err = store.Open(ctx, "images/b.txt")
require.Error(t, err)
assert.True(t, IsNotExist(err))
})
}

View File

@@ -0,0 +1,190 @@
package storage
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"path/filepath"
"strings"
"github.com/aws/aws-sdk-go-v2/aws"
awscfg "github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/credentials"
"github.com/aws/aws-sdk-go-v2/service/s3"
s3types "github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/aws/smithy-go"
)
type S3Config struct {
Bucket string
Region string
Endpoint string
AccessKeyID string
SecretAccessKey string
ForcePathStyle bool
DisableDefaultIntegrityChecks bool
Root string
}
type s3Storage struct {
client *s3.Client
bucket string
prefix string
}
func NewS3Storage(ctx context.Context, cfg S3Config) (FileStorage, error) {
creds := credentials.NewStaticCredentialsProvider(cfg.AccessKeyID, cfg.SecretAccessKey, "")
awsCfg, err := awscfg.LoadDefaultConfig(ctx, awscfg.WithRegion(cfg.Region), awscfg.WithCredentialsProvider(creds))
if err != nil {
return nil, fmt.Errorf("failed to load AWS configuration: %w", err)
}
client := s3.NewFromConfig(awsCfg, func(o *s3.Options) {
if cfg.Endpoint != "" {
o.BaseEndpoint = aws.String(cfg.Endpoint)
}
o.UsePathStyle = cfg.ForcePathStyle
if cfg.DisableDefaultIntegrityChecks {
o.RequestChecksumCalculation = aws.RequestChecksumCalculationWhenRequired
o.ResponseChecksumValidation = aws.ResponseChecksumValidationWhenRequired
}
})
return &s3Storage{
client: client,
bucket: cfg.Bucket,
prefix: strings.Trim(cfg.Root, "/"),
}, nil
}
func (s *s3Storage) Type() string {
return TypeS3
}
func (s *s3Storage) Save(ctx context.Context, path string, data io.Reader) error {
_, err := s.client.PutObject(ctx, &s3.PutObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(s.buildObjectKey(path)),
Body: data,
})
return err
}
func (s *s3Storage) Open(ctx context.Context, path string) (io.ReadCloser, int64, error) {
resp, err := s.client.GetObject(ctx, &s3.GetObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(s.buildObjectKey(path)),
})
if err != nil {
if isS3NotFound(err) {
return nil, 0, fs.ErrNotExist
}
return nil, 0, err
}
return resp.Body, aws.ToInt64(resp.ContentLength), nil
}
func (s *s3Storage) Delete(ctx context.Context, path string) error {
_, err := s.client.DeleteObject(ctx, &s3.DeleteObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(s.buildObjectKey(path)),
})
return err
}
func (s *s3Storage) DeleteAll(ctx context.Context, path string) error {
paginator := s3.NewListObjectsV2Paginator(s.client, &s3.ListObjectsV2Input{
Bucket: aws.String(s.bucket),
Prefix: aws.String(s.buildObjectKey(path)),
})
for paginator.HasMorePages() {
page, err := paginator.NextPage(ctx)
if err != nil {
return err
}
if len(page.Contents) == 0 {
continue
}
objects := make([]s3types.ObjectIdentifier, 0, len(page.Contents))
for _, obj := range page.Contents {
objects = append(objects, s3types.ObjectIdentifier{Key: obj.Key})
}
_, err = s.client.DeleteObjects(ctx, &s3.DeleteObjectsInput{
Bucket: aws.String(s.bucket),
Delete: &s3types.Delete{Objects: objects, Quiet: aws.Bool(true)},
})
if err != nil {
return err
}
}
return nil
}
func (s *s3Storage) List(ctx context.Context, path string) ([]ObjectInfo, error) {
paginator := s3.NewListObjectsV2Paginator(s.client, &s3.ListObjectsV2Input{
Bucket: aws.String(s.bucket),
Prefix: aws.String(s.buildObjectKey(path)),
})
var objects []ObjectInfo
for paginator.HasMorePages() {
page, err := paginator.NextPage(ctx)
if err != nil {
return nil, err
}
for _, obj := range page.Contents {
if obj.Key == nil {
continue
}
objects = append(objects, ObjectInfo{
Path: aws.ToString(obj.Key),
Size: aws.ToInt64(obj.Size),
ModTime: aws.ToTime(obj.LastModified),
})
}
}
return objects, nil
}
func (s *s3Storage) Walk(ctx context.Context, root string, fn func(ObjectInfo) error) error {
objects, err := s.List(ctx, root)
if err != nil {
return err
}
for _, obj := range objects {
if err := fn(obj); err != nil {
return err
}
}
return nil
}
func (s *s3Storage) buildObjectKey(p string) string {
p = filepath.Clean(p)
p = filepath.ToSlash(p)
p = strings.Trim(p, "/")
if p == "" || p == "." {
return s.prefix
}
if s.prefix == "" {
return p
}
return s.prefix + "/" + p
}
func isS3NotFound(err error) bool {
var apiErr smithy.APIError
if errors.As(err, &apiErr) {
if apiErr.ErrorCode() == "NotFound" || apiErr.ErrorCode() == "NoSuchKey" {
return true
}
}
var missingKey *s3types.NoSuchKey
return errors.As(err, &missingKey)
}

View File

@@ -0,0 +1,44 @@
package storage
import (
"errors"
"testing"
s3types "github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/aws/smithy-go"
"github.com/stretchr/testify/assert"
)
func TestS3Helpers(t *testing.T) {
t.Run("buildObjectKey trims and joins prefix", func(t *testing.T) {
tests := []struct {
name string
prefix string
path string
expected string
}{
{name: "no prefix no path", prefix: "", path: "", expected: ""},
{name: "prefix no path", prefix: "root", path: "", expected: "root"},
{name: "prefix with nested path", prefix: "root", path: "foo/bar/baz", expected: "root/foo/bar/baz"},
{name: "trimmed path and prefix", prefix: "root", path: "/foo//bar/", expected: "root/foo/bar"},
{name: "no prefix path only", prefix: "", path: "./images/logo.png", expected: "images/logo.png"},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
s := &s3Storage{
bucket: "bucket",
prefix: tc.prefix,
}
assert.Equal(t, tc.expected, s.buildObjectKey(tc.path))
})
}
})
t.Run("isS3NotFound detects expected errors", func(t *testing.T) {
assert.True(t, isS3NotFound(&smithy.GenericAPIError{Code: "NoSuchKey"}))
assert.True(t, isS3NotFound(&smithy.GenericAPIError{Code: "NotFound"}))
assert.True(t, isS3NotFound(&s3types.NoSuchKey{}))
assert.False(t, isS3NotFound(errors.New("boom")))
})
}

View File

@@ -0,0 +1,33 @@
package storage
import (
"context"
"io"
"os"
"time"
)
var (
TypeFileSystem = "fs"
TypeS3 = "s3"
)
type ObjectInfo struct {
Path string
Size int64
ModTime time.Time
}
type FileStorage interface {
Save(ctx context.Context, relativePath string, data io.Reader) error
Open(ctx context.Context, relativePath string) (io.ReadCloser, int64, error)
Delete(ctx context.Context, relativePath string) error
DeleteAll(ctx context.Context, prefix string) error
List(ctx context.Context, prefix string) ([]ObjectInfo, error)
Walk(ctx context.Context, root string, fn func(ObjectInfo) error) error
Type() string
}
func IsNotExist(err error) bool {
return os.IsNotExist(err)
}
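
Because every backend satisfies the same FileStorage interface, objects can be moved between backends without caring about the concrete type. A hypothetical sketch, not part of this diff:

package storage

import "context"

// copyObject is an illustrative helper, not part of this change: it streams a single
// object from one backend (for example the filesystem) to another (for example S3).
func copyObject(ctx context.Context, src, dst FileStorage, path string) error {
	r, _, err := src.Open(ctx, path)
	if err != nil {
		return err
	}
	defer r.Close()
	return dst.Save(ctx, path, r)
}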

View File

@@ -0,0 +1,130 @@
package utils
import (
"database/sql"
"errors"
"fmt"
"log/slog"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database"
postgresMigrate "github.com/golang-migrate/migrate/v4/database/postgres"
sqliteMigrate "github.com/golang-migrate/migrate/v4/database/sqlite3"
"github.com/golang-migrate/migrate/v4/source/iofs"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/resources"
)
// MigrateDatabase applies database migrations using embedded migration files or fetches them from GitHub if a downgrade is detected.
func MigrateDatabase(sqlDb *sql.DB) error {
m, err := GetEmbeddedMigrateInstance(sqlDb)
if err != nil {
return fmt.Errorf("failed to get migrate instance: %w", err)
}
path := "migrations/" + string(common.EnvConfig.DbProvider)
requiredVersion, err := getRequiredMigrationVersion(path)
if err != nil {
return fmt.Errorf("failed to get last migration version: %w", err)
}
currentVersion, _, _ := m.Version()
if currentVersion > requiredVersion {
slog.Warn("Database version is newer than the application supports, possible downgrade detected", slog.Uint64("db_version", uint64(currentVersion)), slog.Uint64("app_version", uint64(requiredVersion)))
if !common.EnvConfig.AllowDowngrade {
return fmt.Errorf("database version (%d) is newer than application version (%d), downgrades are not allowed (set ALLOW_DOWNGRADE=true to enable)", currentVersion, requiredVersion)
}
slog.Info("Fetching migrations from GitHub to handle possible downgrades")
return migrateDatabaseFromGitHub(sqlDb, requiredVersion)
}
if err := m.Migrate(requiredVersion); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply embedded migrations: %w", err)
}
return nil
}
// GetEmbeddedMigrateInstance creates a migrate.Migrate instance using embedded migration files.
func GetEmbeddedMigrateInstance(sqlDb *sql.DB) (*migrate.Migrate, error) {
path := "migrations/" + string(common.EnvConfig.DbProvider)
source, err := iofs.New(resources.FS, path)
if err != nil {
return nil, fmt.Errorf("failed to create embedded migration source: %w", err)
}
driver, err := newMigrationDriver(sqlDb, common.EnvConfig.DbProvider)
if err != nil {
return nil, fmt.Errorf("failed to create migration driver: %w", err)
}
m, err := migrate.NewWithInstance("iofs", source, "pocket-id", driver)
if err != nil {
return nil, fmt.Errorf("failed to create migration instance: %w", err)
}
return m, nil
}
// newMigrationDriver creates a database.Driver instance based on the given database provider.
func newMigrationDriver(sqlDb *sql.DB, dbProvider common.DbProvider) (driver database.Driver, err error) {
switch dbProvider {
case common.DbProviderSqlite:
driver, err = sqliteMigrate.WithInstance(sqlDb, &sqliteMigrate.Config{
NoTxWrap: true,
})
case common.DbProviderPostgres:
driver, err = postgresMigrate.WithInstance(sqlDb, &postgresMigrate.Config{})
default:
// Should never happen at this point
return nil, fmt.Errorf("unsupported database provider: %s", common.EnvConfig.DbProvider)
}
if err != nil {
return nil, fmt.Errorf("failed to create migration driver: %w", err)
}
return driver, nil
}
// migrateDatabaseFromGitHub applies database migrations fetched from GitHub to handle downgrades.
func migrateDatabaseFromGitHub(sqlDb *sql.DB, version uint) error {
srcURL := "github://pocket-id/pocket-id/backend/resources/migrations/" + string(common.EnvConfig.DbProvider)
driver, err := newMigrationDriver(sqlDb, common.EnvConfig.DbProvider)
if err != nil {
return fmt.Errorf("failed to create migration driver: %w", err)
}
m, err := migrate.NewWithDatabaseInstance(srcURL, "pocket-id", driver)
if err != nil {
return fmt.Errorf("failed to create GitHub migration instance: %w", err)
}
if err := m.Migrate(version); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply GitHub migrations: %w", err)
}
return nil
}
// getRequiredMigrationVersion reads the embedded migration files and returns the highest version number found.
func getRequiredMigrationVersion(path string) (uint, error) {
entries, err := resources.FS.ReadDir(path)
if err != nil {
return 0, fmt.Errorf("failed to read migration directory: %w", err)
}
var maxVersion uint
for _, entry := range entries {
if entry.IsDir() {
continue
}
name := entry.Name()
var version uint
n, err := fmt.Sscanf(name, "%d_", &version)
if err == nil && n == 1 {
if version > maxVersion {
maxVersion = version
}
}
}
return maxVersion, nil
}
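
A hedged sketch of the intended startup flow (the wrapper and its logging are illustrative assumptions, not part of this diff): MigrateDatabase is called once with an already-open *sql.DB, and GetEmbeddedMigrateInstance can be used beforehand to inspect the current schema version.

package utils

import (
	"database/sql"
	"log/slog"
)

// logSchemaVersionAndMigrate is an illustrative helper, not part of this change: it reports
// the current schema version before MigrateDatabase brings the database to the version
// required by the embedded migration files (or fails if a downgrade is not allowed).
func logSchemaVersionAndMigrate(sqlDb *sql.DB) error {
	m, err := GetEmbeddedMigrateInstance(sqlDb)
	if err != nil {
		return err
	}
	version, dirty, err := m.Version()
	if err == nil {
		slog.Info("Current database schema", slog.Uint64("version", uint64(version)), slog.Bool("dirty", dirty))
	}
	return MigrateDatabase(sqlDb)
}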

View File

@@ -0,0 +1,116 @@
package utils
import (
"fmt"
"strings"
"gorm.io/gorm"
)
// DBTableExists checks if a table exists in the database
func DBTableExists(db *gorm.DB, tableName string) (exists bool, err error) {
switch db.Name() {
case "postgres":
query := `SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = ?
)`
err = db.Raw(query, tableName).Scan(&exists).Error
if err != nil {
return false, err
}
case "sqlite":
query := `SELECT COUNT(*) > 0 FROM sqlite_master WHERE type='table' AND name=?`
err = db.Raw(query, tableName).Scan(&exists).Error
if err != nil {
return false, err
}
default:
return false, fmt.Errorf("unsupported database dialect: %s", db.Name())
}
return exists, nil
}
type DBSchemaColumn struct {
Name string
Nullable bool
}
type DBSchemaTableTypes = map[string]DBSchemaColumn
type DBSchemaTypes = map[string]DBSchemaTableTypes
// LoadDBSchemaTypes retrieves the column types for all tables in the DB
// Result is a map of "table --> column --> {name: column type name, nullable: boolean}"
func LoadDBSchemaTypes(db *gorm.DB) (result DBSchemaTypes, err error) {
result = make(DBSchemaTypes)
switch db.Name() {
case "postgres":
var rows []struct {
TableName string
ColumnName string
DataType string
Nullable bool
}
err := db.
Raw(`
SELECT table_name, column_name, data_type, is_nullable = 'YES' AS nullable
FROM information_schema.columns
WHERE table_schema = 'public';
`).
Scan(&rows).
Error
if err != nil {
return nil, err
}
for _, r := range rows {
t := strings.ToLower(r.DataType)
if result[r.TableName] == nil {
result[r.TableName] = make(map[string]DBSchemaColumn)
}
result[r.TableName][r.ColumnName] = DBSchemaColumn{
Name: t,
Nullable: r.Nullable,
}
}
case "sqlite":
var tables []string
err = db.
Raw(`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';`).
Scan(&tables).
Error
if err != nil {
return nil, err
}
for _, table := range tables {
var cols []struct {
Name string
Type string
Notnull bool
}
err := db.
Raw(`PRAGMA table_info("` + table + `");`).
Scan(&cols).
Error
if err != nil {
return nil, err
}
for _, c := range cols {
if result[table] == nil {
result[table] = make(map[string]DBSchemaColumn)
}
result[table][c.Name] = DBSchemaColumn{
Name: strings.ToLower(c.Type),
Nullable: !c.Notnull,
}
}
}
default:
return nil, fmt.Errorf("unsupported database dialect: %s", db.Name())
}
return result, nil
}
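
A minimal sketch of how the two helpers combine (the table name "users" and the helper itself are assumptions for illustration, not part of this diff):

package utils

import (
	"fmt"

	"gorm.io/gorm"
)

// describeUsersTable is an illustrative helper, not part of this change: it verifies a table
// exists and then prints each column's type and nullability from the schema map returned by
// LoadDBSchemaTypes.
func describeUsersTable(db *gorm.DB) error {
	exists, err := DBTableExists(db, "users")
	if err != nil {
		return err
	}
	if !exists {
		return fmt.Errorf("table %q does not exist", "users")
	}
	schema, err := LoadDBSchemaTypes(db)
	if err != nil {
		return err
	}
	for column, info := range schema["users"] {
		fmt.Printf("users.%s: type=%s nullable=%t\n", column, info.Name, info.Nullable)
	}
	return nil
}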

View File

@@ -2,19 +2,15 @@ package utils
import (
"crypto/rand"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"io"
"mime/multipart"
"mime"
"os"
"path/filepath"
"strings"
"syscall"
"github.com/google/uuid"
"github.com/pocket-id/pocket-id/backend/resources"
)
func GetFileExtension(filename string) string {
@@ -57,108 +53,32 @@ func GetImageMimeType(ext string) string {
}
}
func CopyEmbeddedFileToDisk(srcFilePath, destFilePath string) error {
srcFile, err := resources.FS.Open(srcFilePath)
if err != nil {
return fmt.Errorf("failed to open embedded file: %w", err)
func GetImageExtensionFromMimeType(mimeType string) string {
// Normalize and strip parameters like `; charset=utf-8`
mt := strings.TrimSpace(strings.ToLower(mimeType))
if v, _, err := mime.ParseMediaType(mt); err == nil {
mt = v
}
defer srcFile.Close()
err = os.MkdirAll(filepath.Dir(destFilePath), os.ModePerm)
if err != nil {
return fmt.Errorf("failed to create destination directory: %w", err)
switch mt {
case "image/jpeg", "image/jpg":
return "jpg"
case "image/png":
return "png"
case "image/svg+xml":
return "svg"
case "image/x-icon", "image/vnd.microsoft.icon":
return "ico"
case "image/gif":
return "gif"
case "image/webp":
return "webp"
case "image/avif":
return "avif"
case "image/heic", "image/heif":
return "heic"
default:
return ""
}
destFile, err := os.Create(destFilePath)
if err != nil {
return fmt.Errorf("failed to open destination file: %w", err)
}
defer destFile.Close()
_, err = io.Copy(destFile, srcFile)
if err != nil {
return fmt.Errorf("failed to write to destination file: %w", err)
}
return nil
}
func EmbeddedFileSha256(filePath string) ([]byte, error) {
f, err := resources.FS.Open(filePath)
if err != nil {
return nil, fmt.Errorf("failed to open embedded file: %w", err)
}
defer f.Close()
h := sha256.New()
_, err = io.Copy(h, f)
if err != nil {
return nil, fmt.Errorf("failed to read embedded file: %w", err)
}
return h.Sum(nil), nil
}
func SaveFile(file *multipart.FileHeader, dst string) error {
src, err := file.Open()
if err != nil {
return err
}
defer src.Close()
if err = os.MkdirAll(filepath.Dir(dst), 0o750); err != nil {
return err
}
return SaveFileStream(src, dst)
}
// SaveFileStream saves a stream to a file.
func SaveFileStream(r io.Reader, dstFileName string) error {
// Our strategy is to save to a separate file and then rename it to override the original file
tmpFileName := dstFileName + "." + uuid.NewString() + "-tmp"
// Write to the temporary file
tmpFile, err := os.Create(tmpFileName)
if err != nil {
return fmt.Errorf("failed to open file '%s' for writing: %w", tmpFileName, err)
}
n, err := io.Copy(tmpFile, r)
if err != nil {
// Delete the temporary file; we ignore errors here
_ = tmpFile.Close()
_ = os.Remove(tmpFileName)
return fmt.Errorf("failed to write to file '%s': %w", tmpFileName, err)
}
err = tmpFile.Close()
if err != nil {
// Delete the temporary file; we ignore errors here
_ = os.Remove(tmpFileName)
return fmt.Errorf("failed to close stream to file '%s': %w", tmpFileName, err)
}
if n == 0 {
// Delete the temporary file; we ignore errors here
_ = os.Remove(tmpFileName)
return errors.New("no data written")
}
// Rename to the final file, which overrides existing files
// This is an atomic operation
err = os.Rename(tmpFileName, dstFileName)
if err != nil {
// Delete the temporary file; we ignore errors here
_ = os.Remove(tmpFileName)
return fmt.Errorf("failed to rename file '%s': %w", dstFileName, err)
}
return nil
}
// FileExists returns true if a file exists on disk and is a regular file
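
A short sketch of how the new mapping might be driven from an upload path (the helper and its caller are illustrative assumptions, not part of this diff). Because parameters are stripped with mime.ParseMediaType, a header such as "image/png; charset=utf-8" still resolves to "png", and unknown types come back as an empty string so they can be rejected:

package utils

import "fmt"

// pickUploadExtension is an illustrative helper, not part of this change: it derives a file
// extension from an uploaded image's Content-Type header and rejects anything that
// GetImageExtensionFromMimeType does not recognize.
func pickUploadExtension(contentType string) (string, error) {
	ext := GetImageExtensionFromMimeType(contentType)
	if ext == "" {
		return "", fmt.Errorf("unsupported image content type: %q", contentType)
	}
	return ext, nil
}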

View File

@@ -12,32 +12,40 @@ import (
"golang.org/x/image/font"
"golang.org/x/image/font/opentype"
"golang.org/x/image/math/fixed"
"golang.org/x/image/webp"
"github.com/pocket-id/pocket-id/backend/resources"
)
const profilePictureSize = 300
// CreateProfilePicture resizes the profile picture to a square
func CreateProfilePicture(file io.Reader) (io.Reader, error) {
// CreateProfilePicture resizes the profile picture to a square and encodes it as PNG
func CreateProfilePicture(file io.ReadSeeker) (io.ReadSeeker, error) {
// Attempt standard formats first
img, _, err := imageorient.Decode(file)
if err != nil {
return nil, fmt.Errorf("failed to decode image: %w", err)
if _, seekErr := file.Seek(0, io.SeekStart); seekErr != nil {
return nil, fmt.Errorf("failed to seek file: %w", seekErr)
}
// Try WebP
webpImg, webpErr := webp.Decode(file)
if webpErr != nil {
return nil, fmt.Errorf("failed to decode image: %w", err)
}
img = webpImg
}
// Resize to square
img = imaging.Fill(img, profilePictureSize, profilePictureSize, imaging.Center, imaging.Lanczos)
pr, pw := io.Pipe()
go func() {
innerErr := imaging.Encode(pw, img, imaging.PNG)
if innerErr != nil {
_ = pw.CloseWithError(fmt.Errorf("failed to encode image: %w", innerErr))
return
}
pw.Close()
}()
// Encode back to PNG
var buf bytes.Buffer
if err := imaging.Encode(&buf, img, imaging.PNG); err != nil {
return nil, fmt.Errorf("failed to encode image: %w", err)
}
return pr, nil
return bytes.NewReader(buf.Bytes()), nil
}
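
Since the new signature requires an io.ReadSeeker (the stream is rewound before the WebP fallback), callers that only have a byte slice or a file on disk can wrap the data in a bytes.Reader. A hypothetical sketch, not part of this diff:

package utils

import (
	"bytes"
	"io"
	"os"
)

// resizeAvatarFile is an illustrative helper, not part of this change: it loads an image into
// memory so it satisfies the io.ReadSeeker parameter and returns the resized PNG bytes.
func resizeAvatarFile(path string) ([]byte, error) {
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	resized, err := CreateProfilePicture(bytes.NewReader(raw))
	if err != nil {
		return nil, err
	}
	return io.ReadAll(resized)
}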
// CreateDefaultProfilePicture creates a profile picture with the initials

View File

@@ -0,0 +1,107 @@
package utils
import (
"context"
"errors"
"net"
"net/url"
"strings"
"github.com/pocket-id/pocket-id/backend/internal/common"
)
var localIPv6Ranges []*net.IPNet
var localhostIPNets = []*net.IPNet{
{IP: net.IPv4(127, 0, 0, 0), Mask: net.CIDRMask(8, 32)}, // 127.0.0.0/8
{IP: net.IPv6loopback, Mask: net.CIDRMask(128, 128)}, // ::1/128
}
var privateLanIPNets = []*net.IPNet{
{IP: net.IPv4(10, 0, 0, 0), Mask: net.CIDRMask(8, 32)}, // 10.0.0.0/8
{IP: net.IPv4(172, 16, 0, 0), Mask: net.CIDRMask(12, 32)}, // 172.16.0.0/12
{IP: net.IPv4(192, 168, 0, 0), Mask: net.CIDRMask(16, 32)}, // 192.168.0.0/16
}
var tailscaleIPNets = []*net.IPNet{
{IP: net.IPv4(100, 64, 0, 0), Mask: net.CIDRMask(10, 32)}, // 100.64.0.0/10
}
func IsLocalIPv6(ip net.IP) bool {
if ip.To4() != nil {
return false
}
return listContainsIP(localIPv6Ranges, ip)
}
func IsLocalhostIP(ip net.IP) bool {
return listContainsIP(localhostIPNets, ip)
}
func IsPrivateLanIP(ip net.IP) bool {
if ip.To4() == nil {
return false
}
return listContainsIP(privateLanIPNets, ip)
}
func IsTailscaleIP(ip net.IP) bool {
if ip.To4() == nil {
return false
}
return listContainsIP(tailscaleIPNets, ip)
}
func IsPrivateIP(ip net.IP) bool {
return IsLocalhostIP(ip) || IsPrivateLanIP(ip) || IsTailscaleIP(ip) || IsLocalIPv6(ip)
}
func IsURLPrivate(ctx context.Context, u *url.URL) (bool, error) {
var r net.Resolver
ips, err := r.LookupIPAddr(ctx, u.Hostname())
if err != nil || len(ips) == 0 {
return false, errors.New("cannot resolve hostname")
}
// Prevents SSRF by allowing only public IPs
for _, addr := range ips {
if IsPrivateIP(addr.IP) {
return true, nil
}
}
return false, nil
}
func listContainsIP(ipNets []*net.IPNet, ip net.IP) bool {
for _, ipNet := range ipNets {
if ipNet.Contains(ip) {
return true
}
}
return false
}
func loadLocalIPv6Ranges() {
localIPv6Ranges = nil
ranges := strings.Split(common.EnvConfig.LocalIPv6Ranges, ",")
for _, rangeStr := range ranges {
rangeStr = strings.TrimSpace(rangeStr)
if rangeStr == "" {
continue
}
_, ipNet, err := net.ParseCIDR(rangeStr)
if err == nil {
localIPv6Ranges = append(localIPv6Ranges, ipNet)
}
}
}
func init() {
loadLocalIPv6Ranges()
}
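
A sketch of the SSRF guard these helpers enable (the wrapper is an illustrative assumption, not part of this diff): before fetching a user-supplied URL, resolve its hostname and refuse anything that lands on localhost, a private LAN range, the Tailscale range, or a configured local IPv6 range.

package utils

import (
	"context"
	"errors"
	"net/url"
)

// checkOutboundURL is an illustrative guard, not part of this change: it rejects URLs whose
// hostname resolves to a private address, using IsURLPrivate defined above.
func checkOutboundURL(ctx context.Context, rawURL string) error {
	u, err := url.Parse(rawURL)
	if err != nil {
		return err
	}
	private, err := IsURLPrivate(ctx, u)
	if err != nil {
		return err
	}
	if private {
		return errors.New("refusing to fetch a URL that resolves to a private address")
	}
	return nil
}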

View File

@@ -0,0 +1,343 @@
package utils
import (
"context"
"net"
"net/url"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/pocket-id/pocket-id/backend/internal/common"
)
func TestIsLocalhostIP(t *testing.T) {
tests := []struct {
ip string
expected bool
}{
{"127.0.0.1", true},
{"127.255.255.255", true},
{"::1", true},
{"192.168.1.1", false},
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := IsLocalhostIP(ip)
assert.Equal(t, tt.expected, got)
}
}
func TestIsPrivateLanIP(t *testing.T) {
tests := []struct {
ip string
expected bool
}{
{"10.0.0.1", true},
{"172.16.5.4", true},
{"192.168.100.200", true},
{"8.8.8.8", false},
{"::1", false}, // IPv6 should return false
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := IsPrivateLanIP(ip)
assert.Equal(t, tt.expected, got)
}
}
func TestIsTailscaleIP(t *testing.T) {
tests := []struct {
ip string
expected bool
}{
{"100.64.0.1", true},
{"100.127.255.254", true},
{"8.8.8.8", false},
{"::1", false}, // IPv6 should return false
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := IsTailscaleIP(ip)
assert.Equal(t, tt.expected, got)
}
}
func TestIsLocalIPv6(t *testing.T) {
// Save and restore env config
origRanges := common.EnvConfig.LocalIPv6Ranges
defer func() { common.EnvConfig.LocalIPv6Ranges = origRanges }()
common.EnvConfig.LocalIPv6Ranges = "fd00::/8,fc00::/7"
localIPv6Ranges = nil // reset
loadLocalIPv6Ranges()
tests := []struct {
ip string
expected bool
}{
{"fd00::1", true},
{"fc00::abcd", true},
{"::1", false}, // loopback handled separately
{"192.168.1.1", false}, // IPv4 should return false
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := IsLocalIPv6(ip)
assert.Equal(t, tt.expected, got)
}
}
func TestIsPrivateIP(t *testing.T) {
// Save and restore env config
origRanges := common.EnvConfig.LocalIPv6Ranges
t.Cleanup(func() {
common.EnvConfig.LocalIPv6Ranges = origRanges
})
common.EnvConfig.LocalIPv6Ranges = "fd00::/8"
localIPv6Ranges = nil // reset
loadLocalIPv6Ranges()
tests := []struct {
ip string
expected bool
}{
{"127.0.0.1", true}, // localhost
{"192.168.1.1", true}, // private LAN
{"100.64.0.1", true}, // Tailscale
{"fd00::1", true}, // local IPv6
{"8.8.8.8", false}, // public IPv4
{"2001:4860:4860::8888", false}, // public IPv6
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := IsPrivateIP(ip)
assert.Equal(t, tt.expected, got)
}
}
func TestListContainsIP(t *testing.T) {
_, ipNet1, _ := net.ParseCIDR("10.0.0.0/8")
_, ipNet2, _ := net.ParseCIDR("192.168.0.0/16")
list := []*net.IPNet{ipNet1, ipNet2}
tests := []struct {
ip string
expected bool
}{
{"10.1.1.1", true},
{"192.168.5.5", true},
{"172.16.0.1", false},
}
for _, tt := range tests {
ip := net.ParseIP(tt.ip)
got := listContainsIP(list, ip)
assert.Equal(t, tt.expected, got)
}
}
func TestInit_LocalIPv6Ranges(t *testing.T) {
// Save and restore env config
origRanges := common.EnvConfig.LocalIPv6Ranges
t.Cleanup(func() {
common.EnvConfig.LocalIPv6Ranges = origRanges
})
common.EnvConfig.LocalIPv6Ranges = "fd00::/8, invalidCIDR ,fc00::/7"
localIPv6Ranges = nil
loadLocalIPv6Ranges()
assert.Len(t, localIPv6Ranges, 2)
}
func TestIsURLPrivate(t *testing.T) {
ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
defer cancel()
tests := []struct {
name string
urlStr string
expectPriv bool
expectError bool
}{
{
name: "localhost by name",
urlStr: "http://localhost",
expectPriv: true,
expectError: false,
},
{
name: "localhost with port",
urlStr: "http://localhost:8080",
expectPriv: true,
expectError: false,
},
{
name: "127.0.0.1 IP",
urlStr: "http://127.0.0.1",
expectPriv: true,
expectError: false,
},
{
name: "127.0.0.1 with port",
urlStr: "http://127.0.0.1:3000",
expectPriv: true,
expectError: false,
},
{
name: "IPv6 loopback",
urlStr: "http://[::1]",
expectPriv: true,
expectError: false,
},
{
name: "IPv6 loopback with port",
urlStr: "http://[::1]:8080",
expectPriv: true,
expectError: false,
},
{
name: "private IP 10.x.x.x",
urlStr: "http://10.0.0.1",
expectPriv: true,
expectError: false,
},
{
name: "private IP 192.168.x.x",
urlStr: "http://192.168.1.1",
expectPriv: true,
expectError: false,
},
{
name: "private IP 172.16.x.x",
urlStr: "http://172.16.0.1",
expectPriv: true,
expectError: false,
},
{
name: "Tailscale IP",
urlStr: "http://100.64.0.1",
expectPriv: true,
expectError: false,
},
{
name: "public IP - Google DNS",
urlStr: "http://8.8.8.8",
expectPriv: false,
expectError: false,
},
{
name: "public IP - Cloudflare DNS",
urlStr: "http://1.1.1.1",
expectPriv: false,
expectError: false,
},
{
name: "invalid hostname",
urlStr: "http://this-should-not-resolve-ever-123456789.invalid",
expectPriv: false,
expectError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
u, err := url.Parse(tt.urlStr)
require.NoError(t, err, "Failed to parse URL %s", tt.urlStr)
isPriv, err := IsURLPrivate(ctx, u)
if tt.expectError {
require.Error(t, err, "IsURLPrivate(%s) expected error but got none", tt.urlStr)
} else {
require.NoError(t, err, "IsURLPrivate(%s) unexpected error", tt.urlStr)
assert.Equal(t, tt.expectPriv, isPriv, "IsURLPrivate(%s)", tt.urlStr)
}
})
}
}
func TestIsURLPrivate_WithDomainName(t *testing.T) {
// Note: These tests rely on actual DNS resolution
// They test real public domains to ensure they are not flagged as private
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Second)
defer cancel()
tests := []struct {
name string
urlStr string
expectPriv bool
}{
{
name: "Google public domain",
urlStr: "https://www.google.com",
expectPriv: false,
},
{
name: "GitHub public domain",
urlStr: "https://github.com",
expectPriv: false,
},
{
// localhost.localtest.me is a well-known domain that resolves to 127.0.0.1
name: "localhost.localtest.me resolves to 127.0.0.1",
urlStr: "http://localhost.localtest.me",
expectPriv: true,
},
{
// 10.0.0.1.nip.io resolves to 10.0.0.1 (private IP)
name: "nip.io domain resolving to private 10.x IP",
urlStr: "http://10.0.0.1.nip.io",
expectPriv: true,
},
{
// 192.168.1.1.nip.io resolves to 192.168.1.1 (private IP)
name: "nip.io domain resolving to private 192.168.x IP",
urlStr: "http://192.168.1.1.nip.io",
expectPriv: true,
},
{
// 127.0.0.1.nip.io resolves to 127.0.0.1 (localhost)
name: "nip.io domain resolving to localhost",
urlStr: "http://127.0.0.1.nip.io",
expectPriv: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
u, err := url.Parse(tt.urlStr)
require.NoError(t, err, "Failed to parse URL %s", tt.urlStr)
isPriv, err := IsURLPrivate(ctx, u)
if err != nil {
t.Skipf("DNS resolution failed for %s (network issue?): %v", tt.urlStr, err)
return
}
assert.Equal(t, tt.expectPriv, isPriv, "IsURLPrivate(%s)", tt.urlStr)
})
}
}
func TestIsURLPrivate_ContextCancellation(t *testing.T) {
ctx, cancel := context.WithCancel(t.Context())
cancel() // Cancel immediately
u, err := url.Parse("http://example.com")
require.NoError(t, err, "Failed to parse URL")
_, err = IsURLPrivate(ctx, u)
assert.Error(t, err, "IsURLPrivate with cancelled context expected error but got none")
}

View File

@@ -3,6 +3,7 @@ package utils
import (
"encoding/json"
"errors"
"fmt"
"time"
)
@@ -40,3 +41,14 @@ func (d *JSONDuration) UnmarshalJSON(b []byte) error {
return errors.New("invalid duration")
}
}
func UnmarshalJSONFromDatabase(data interface{}, value any) error {
switch v := value.(type) {
case []byte:
return json.Unmarshal(v, data)
case string:
return json.Unmarshal([]byte(v), data)
default:
return fmt.Errorf("unsupported type: %T", value)
}
}
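
UnmarshalJSONFromDatabase exists because drivers may hand back a JSON column as either []byte or string depending on the database. A hypothetical sql.Scanner sketch (the type is illustrative, not part of this diff):

package utils

import (
	"database/sql/driver"
	"encoding/json"
)

// StringSliceJSON is an illustrative type, not part of this change: Scan accepts whatever raw
// value the driver returns and delegates the []byte/string handling to
// UnmarshalJSONFromDatabase, while Value writes the slice back as JSON.
type StringSliceJSON []string

func (s *StringSliceJSON) Scan(value any) error {
	return UnmarshalJSONFromDatabase(s, value)
}

func (s StringSliceJSON) Value() (driver.Value, error) {
	return json.Marshal(s)
}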

View File

@@ -28,22 +28,14 @@ func GetKeyProvider(db *gorm.DB, envConfig *common.EnvConfigSchema, instanceID s
return nil, fmt.Errorf("failed to load encryption key: %w", err)
}
// Get the key provider
switch envConfig.KeysStorage {
case "file", "":
keyProvider = &KeyProviderFile{}
case "database":
keyProvider = &KeyProviderDatabase{}
default:
return nil, fmt.Errorf("invalid key storage '%s'", envConfig.KeysStorage)
}
keyProvider = &KeyProviderDatabase{}
err = keyProvider.Init(KeyProviderOpts{
DB: db,
EnvConfig: envConfig,
Kek: kek,
})
if err != nil {
return nil, fmt.Errorf("failed to init key provider of type '%s': %w", envConfig.KeysStorage, err)
return nil, fmt.Errorf("failed to init key provider: %w", err)
}
return keyProvider, nil

View File

@@ -1,202 +0,0 @@
package jwk
import (
"encoding/base64"
"fmt"
"os"
"path/filepath"
"github.com/lestrrat-go/jwx/v3/jwk"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/utils"
cryptoutils "github.com/pocket-id/pocket-id/backend/internal/utils/crypto"
)
const (
// PrivateKeyFile is the path in the data/keys folder where the key is stored
// This is a JSON file containing a key encoded as JWK
PrivateKeyFile = "jwt_private_key.json"
// PrivateKeyFileEncrypted is the path in the data/keys folder where the encrypted key is stored
// This is a encrypted JSON file containing a key encoded as JWK
PrivateKeyFileEncrypted = "jwt_private_key.json.enc"
)
type KeyProviderFile struct {
envConfig *common.EnvConfigSchema
kek []byte
}
func (f *KeyProviderFile) Init(opts KeyProviderOpts) error {
f.envConfig = opts.EnvConfig
f.kek = opts.Kek
return nil
}
func (f *KeyProviderFile) LoadKey() (jwk.Key, error) {
if len(f.kek) > 0 {
return f.loadEncryptedKey()
}
return f.loadKey()
}
func (f *KeyProviderFile) SaveKey(key jwk.Key) error {
if len(f.kek) > 0 {
return f.saveKeyEncrypted(key)
}
return f.saveKey(key)
}
func (f *KeyProviderFile) loadKey() (jwk.Key, error) {
var key jwk.Key
// First, check if we have a JWK file
// If we do, then we just load that
jwkPath := f.jwkPath()
ok, err := utils.FileExists(jwkPath)
if err != nil {
return nil, fmt.Errorf("failed to check if private key file exists at path '%s': %w", jwkPath, err)
}
if !ok {
// File doesn't exist, no key was loaded
return nil, nil
}
data, err := os.ReadFile(jwkPath)
if err != nil {
return nil, fmt.Errorf("failed to read private key file at path '%s': %w", jwkPath, err)
}
key, err = jwk.ParseKey(data)
if err != nil {
return nil, fmt.Errorf("failed to parse private key file at path '%s': %w", jwkPath, err)
}
return key, nil
}
func (f *KeyProviderFile) loadEncryptedKey() (key jwk.Key, err error) {
// First, check if we have an encrypted JWK file
// If we do, then we just load that
encJwkPath := f.encJwkPath()
ok, err := utils.FileExists(encJwkPath)
if err != nil {
return nil, fmt.Errorf("failed to check if encrypted private key file exists at path '%s': %w", encJwkPath, err)
}
if ok {
encB64, err := os.ReadFile(encJwkPath)
if err != nil {
return nil, fmt.Errorf("failed to read encrypted private key file at path '%s': %w", encJwkPath, err)
}
// Decode from base64
enc := make([]byte, base64.StdEncoding.DecodedLen(len(encB64)))
n, err := base64.StdEncoding.Decode(enc, encB64)
if err != nil {
return nil, fmt.Errorf("failed to read encrypted private key file at path '%s': not a valid base64-encoded file: %w", encJwkPath, err)
}
// Decrypt the data
data, err := cryptoutils.Decrypt(f.kek, enc[:n], nil)
if err != nil {
return nil, fmt.Errorf("failed to decrypt private key file at path '%s': %w", encJwkPath, err)
}
// Parse the key
key, err = jwk.ParseKey(data)
if err != nil {
return nil, fmt.Errorf("failed to parse encrypted private key file at path '%s': %w", encJwkPath, err)
}
return key, nil
}
// Check if we have an un-encrypted JWK file
key, err = f.loadKey()
if err != nil {
return nil, fmt.Errorf("failed to load un-encrypted key file: %w", err)
}
if key == nil {
// No key exists, encrypted or un-encrypted
return nil, nil
}
// If we are here, we have loaded a key that was un-encrypted
// We need to replace the plaintext key with the encrypted one before we return
err = f.saveKeyEncrypted(key)
if err != nil {
return nil, fmt.Errorf("failed to save encrypted key file: %w", err)
}
jwkPath := f.jwkPath()
err = os.Remove(jwkPath)
if err != nil {
return nil, fmt.Errorf("failed to remove un-encrypted key file at path '%s': %w", jwkPath, err)
}
return key, nil
}
func (f *KeyProviderFile) saveKey(key jwk.Key) error {
err := os.MkdirAll(f.envConfig.KeysPath, 0700)
if err != nil {
return fmt.Errorf("failed to create directory '%s' for key file: %w", f.envConfig.KeysPath, err)
}
jwkPath := f.jwkPath()
keyFile, err := os.OpenFile(jwkPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return fmt.Errorf("failed to create key file at path '%s': %w", jwkPath, err)
}
defer keyFile.Close()
// Write the JSON file to disk
err = EncodeJWK(keyFile, key)
if err != nil {
return fmt.Errorf("failed to write key file at path '%s': %w", jwkPath, err)
}
return nil
}
func (f *KeyProviderFile) saveKeyEncrypted(key jwk.Key) error {
err := os.MkdirAll(f.envConfig.KeysPath, 0700)
if err != nil {
return fmt.Errorf("failed to create directory '%s' for encrypted key file: %w", f.envConfig.KeysPath, err)
}
// Encode the key to JSON
data, err := EncodeJWKBytes(key)
if err != nil {
return fmt.Errorf("failed to encode key to JSON: %w", err)
}
// Encrypt the key then encode to Base64
enc, err := cryptoutils.Encrypt(f.kek, data, nil)
if err != nil {
return fmt.Errorf("failed to encrypt key: %w", err)
}
encB64 := make([]byte, base64.StdEncoding.EncodedLen(len(enc)))
base64.StdEncoding.Encode(encB64, enc)
// Write to disk
encJwkPath := f.encJwkPath()
err = os.WriteFile(encJwkPath, encB64, 0600)
if err != nil {
return fmt.Errorf("failed to write encrypted key file at path '%s': %w", encJwkPath, err)
}
return nil
}
func (f *KeyProviderFile) jwkPath() string {
return filepath.Join(f.envConfig.KeysPath, PrivateKeyFile)
}
func (f *KeyProviderFile) encJwkPath() string {
return filepath.Join(f.envConfig.KeysPath, PrivateKeyFileEncrypted)
}
// Compile-time interface check
var _ KeyProvider = (*KeyProviderFile)(nil)

View File

@@ -1,320 +0,0 @@
package jwk
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"encoding/base64"
"os"
"path/filepath"
"testing"
"github.com/lestrrat-go/jwx/v3/jwk"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/utils"
cryptoutils "github.com/pocket-id/pocket-id/backend/internal/utils/crypto"
)
func TestKeyProviderFile_LoadKey(t *testing.T) {
// Generate a test key to use in our tests
pk, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
require.NoError(t, err)
key, err := jwk.Import(pk)
require.NoError(t, err)
t.Run("LoadKey with no existing key", func(t *testing.T) {
tempDir := t.TempDir()
provider := &KeyProviderFile{}
err := provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
})
require.NoError(t, err)
// Load key when none exists
loadedKey, err := provider.LoadKey()
require.NoError(t, err)
assert.Nil(t, loadedKey, "Expected nil key when no key exists")
})
t.Run("LoadKey with no existing key (with kek)", func(t *testing.T) {
tempDir := t.TempDir()
provider := &KeyProviderFile{}
err = provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
Kek: makeKEK(t),
})
require.NoError(t, err)
// Load key when none exists
loadedKey, err := provider.LoadKey()
require.NoError(t, err)
assert.Nil(t, loadedKey, "Expected nil key when no key exists")
})
t.Run("LoadKey with unencrypted key", func(t *testing.T) {
tempDir := t.TempDir()
provider := &KeyProviderFile{}
err := provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
})
require.NoError(t, err)
// Save a key
err = provider.SaveKey(key)
require.NoError(t, err)
// Make sure the key file exists
keyPath := filepath.Join(tempDir, PrivateKeyFile)
exists, err := utils.FileExists(keyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected key file to exist")
// Load the key
loadedKey, err := provider.LoadKey()
require.NoError(t, err)
assert.NotNil(t, loadedKey, "Expected non-nil key when key exists")
// Verify the loaded key is the same as the original
keyBytes, err := EncodeJWKBytes(key)
require.NoError(t, err)
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
require.NoError(t, err)
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key")
})
t.Run("LoadKey with encrypted key", func(t *testing.T) {
tempDir := t.TempDir()
provider := &KeyProviderFile{}
err = provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
Kek: makeKEK(t),
})
require.NoError(t, err)
// Save a key (will be encrypted)
err = provider.SaveKey(key)
require.NoError(t, err)
// Make sure the encrypted key file exists
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
exists, err := utils.FileExists(encKeyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected encrypted key file to exist")
// Make sure the unencrypted key file does not exist
keyPath := filepath.Join(tempDir, PrivateKeyFile)
exists, err = utils.FileExists(keyPath)
require.NoError(t, err)
assert.False(t, exists, "Expected unencrypted key file to not exist")
// Load the key
loadedKey, err := provider.LoadKey()
require.NoError(t, err)
assert.NotNil(t, loadedKey, "Expected non-nil key when encrypted key exists")
// Verify the loaded key is the same as the original
keyBytes, err := EncodeJWKBytes(key)
require.NoError(t, err)
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
require.NoError(t, err)
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key")
})
t.Run("LoadKey replaces unencrypted key with encrypted key when kek is provided", func(t *testing.T) {
tempDir := t.TempDir()
// First, create an unencrypted key
providerNoKek := &KeyProviderFile{}
err := providerNoKek.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
})
require.NoError(t, err)
// Save an unencrypted key
err = providerNoKek.SaveKey(key)
require.NoError(t, err)
// Verify unencrypted key exists
keyPath := filepath.Join(tempDir, PrivateKeyFile)
exists, err := utils.FileExists(keyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected unencrypted key file to exist")
// Now create a provider with a kek
kek := make([]byte, 32)
_, err = rand.Read(kek)
require.NoError(t, err)
providerWithKek := &KeyProviderFile{}
err = providerWithKek.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
Kek: kek,
})
require.NoError(t, err)
// Load the key - this should convert the unencrypted key to encrypted
loadedKey, err := providerWithKek.LoadKey()
require.NoError(t, err)
assert.NotNil(t, loadedKey, "Expected non-nil key when loading and converting key")
// Verify the unencrypted key no longer exists
exists, err = utils.FileExists(keyPath)
require.NoError(t, err)
assert.False(t, exists, "Expected unencrypted key file to be removed")
// Verify the encrypted key file exists
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
exists, err = utils.FileExists(encKeyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected encrypted key file to exist after conversion")
// Verify the key data
keyBytes, err := EncodeJWKBytes(key)
require.NoError(t, err)
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
require.NoError(t, err)
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key after conversion")
})
}
func TestKeyProviderFile_SaveKey(t *testing.T) {
// Generate a test key to use in our tests
pk, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
require.NoError(t, err)
key, err := jwk.Import(pk)
require.NoError(t, err)
t.Run("SaveKey unencrypted", func(t *testing.T) {
tempDir := t.TempDir()
provider := &KeyProviderFile{}
err := provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
})
require.NoError(t, err)
// Save the key
err = provider.SaveKey(key)
require.NoError(t, err)
// Verify the key file exists
keyPath := filepath.Join(tempDir, PrivateKeyFile)
exists, err := utils.FileExists(keyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected key file to exist")
// Verify the content of the key file
data, err := os.ReadFile(keyPath)
require.NoError(t, err)
parsedKey, err := jwk.ParseKey(data)
require.NoError(t, err)
// Compare the saved key with the original
keyBytes, err := EncodeJWKBytes(key)
require.NoError(t, err)
parsedKeyBytes, err := EncodeJWKBytes(parsedKey)
require.NoError(t, err)
assert.Equal(t, keyBytes, parsedKeyBytes, "Expected saved key to match original key")
})
t.Run("SaveKey encrypted", func(t *testing.T) {
tempDir := t.TempDir()
// Generate a 64-byte kek
kek := makeKEK(t)
provider := &KeyProviderFile{}
err = provider.Init(KeyProviderOpts{
EnvConfig: &common.EnvConfigSchema{
KeysPath: tempDir,
},
Kek: kek,
})
require.NoError(t, err)
// Save the key (will be encrypted)
err = provider.SaveKey(key)
require.NoError(t, err)
// Verify the encrypted key file exists
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
exists, err := utils.FileExists(encKeyPath)
require.NoError(t, err)
assert.True(t, exists, "Expected encrypted key file to exist")
// Verify the unencrypted key file doesn't exist
keyPath := filepath.Join(tempDir, PrivateKeyFile)
exists, err = utils.FileExists(keyPath)
require.NoError(t, err)
assert.False(t, exists, "Expected unencrypted key file to not exist")
// Manually decrypt the encrypted key file to verify it contains the correct key
encB64, err := os.ReadFile(encKeyPath)
require.NoError(t, err)
// Decode from base64
enc := make([]byte, base64.StdEncoding.DecodedLen(len(encB64)))
n, err := base64.StdEncoding.Decode(enc, encB64)
require.NoError(t, err)
enc = enc[:n] // Trim any padding
// Decrypt the data
data, err := cryptoutils.Decrypt(kek, enc, nil)
require.NoError(t, err)
// Parse the key
parsedKey, err := jwk.ParseKey(data)
require.NoError(t, err)
// Compare the decrypted key with the original
keyBytes, err := EncodeJWKBytes(key)
require.NoError(t, err)
parsedKeyBytes, err := EncodeJWKBytes(parsedKey)
require.NoError(t, err)
assert.Equal(t, keyBytes, parsedKeyBytes, "Expected decrypted key to match original key")
})
}
func makeKEK(t *testing.T) []byte {
t.Helper()
// Generate a 32-byte kek
kek := make([]byte, 32)
_, err := rand.Read(kek)
require.NoError(t, err)
return kek
}

View File

@@ -0,0 +1,205 @@
package utils
import (
"reflect"
"strings"
"github.com/gin-gonic/gin"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
type PaginationResponse struct {
TotalPages int64 `json:"totalPages"`
TotalItems int64 `json:"totalItems"`
CurrentPage int `json:"currentPage"`
ItemsPerPage int `json:"itemsPerPage"`
}
type ListRequestOptions struct {
Pagination struct {
Page int `form:"pagination[page]"`
Limit int `form:"pagination[limit]"`
} `form:"pagination"`
Sort struct {
Column string `form:"sort[column]"`
Direction string `form:"sort[direction]"`
} `form:"sort"`
Filters map[string][]any
}
type FieldMeta struct {
ColumnName string
IsSortable bool
IsFilterable bool
}
func ParseListRequestOptions(ctx *gin.Context) (listRequestOptions ListRequestOptions) {
if err := ctx.ShouldBindQuery(&listRequestOptions); err != nil {
return listRequestOptions
}
listRequestOptions.Filters = parseNestedFilters(ctx)
return listRequestOptions
}
func PaginateFilterAndSort(params ListRequestOptions, query *gorm.DB, result interface{}) (PaginationResponse, error) {
meta := extractModelMetadata(result)
query = applyFilters(params.Filters, query, meta)
query = applySorting(params.Sort.Column, params.Sort.Direction, query, meta)
return Paginate(params.Pagination.Page, params.Pagination.Limit, query, result)
}
func Paginate(page int, pageSize int, query *gorm.DB, result interface{}) (PaginationResponse, error) {
if page < 1 {
page = 1
}
if pageSize < 1 {
pageSize = 20
} else if pageSize > 100 {
pageSize = 100
}
var totalItems int64
if err := query.Count(&totalItems).Error; err != nil {
return PaginationResponse{}, err
}
totalPages := (totalItems + int64(pageSize) - 1) / int64(pageSize)
if totalItems == 0 {
totalPages = 1
}
if int64(page) > totalPages {
page = int(totalPages)
}
offset := (page - 1) * pageSize
if err := query.Offset(offset).Limit(pageSize).Find(result).Error; err != nil {
return PaginationResponse{}, err
}
return PaginationResponse{
TotalPages: totalPages,
TotalItems: totalItems,
CurrentPage: page,
ItemsPerPage: pageSize,
}, nil
}
func NormalizeSortDirection(direction string) string {
d := strings.ToLower(strings.TrimSpace(direction))
if d != "asc" && d != "desc" {
return "asc"
}
return d
}
func IsValidSortDirection(direction string) bool {
d := strings.ToLower(strings.TrimSpace(direction))
return d == "asc" || d == "desc"
}
// parseNestedFilters handles ?filters[field][0]=val1&filters[field][1]=val2
func parseNestedFilters(ctx *gin.Context) map[string][]any {
result := make(map[string][]any)
query := ctx.Request.URL.Query()
for key, values := range query {
if !strings.HasPrefix(key, "filters[") {
continue
}
// Keys can be "filters[field]" or "filters[field][0]"
raw := strings.TrimPrefix(key, "filters[")
// Take everything up to the first closing bracket
if idx := strings.IndexByte(raw, ']'); idx != -1 {
field := raw[:idx]
for _, v := range values {
result[field] = append(result[field], ConvertStringToType(v))
}
}
}
return result
}
// applyFilters applies filtering to the GORM query based on the provided filters
func applyFilters(filters map[string][]any, query *gorm.DB, meta map[string]FieldMeta) *gorm.DB {
for key, values := range filters {
if key == "" || len(values) == 0 {
continue
}
fieldName := CapitalizeFirstLetter(key)
fieldMeta, ok := meta[fieldName]
if !ok || !fieldMeta.IsFilterable {
continue
}
query = query.Where(fieldMeta.ColumnName+" IN ?", values)
}
return query
}
// applySorting applies sorting to the GORM query based on the provided column and direction
func applySorting(sortColumn string, sortDirection string, query *gorm.DB, meta map[string]FieldMeta) *gorm.DB {
fieldName := CapitalizeFirstLetter(sortColumn)
fieldMeta, ok := meta[fieldName]
if !ok || !fieldMeta.IsSortable {
return query
}
sortDirection = NormalizeSortDirection(sortDirection)
query = query.Clauses(clause.OrderBy{
Columns: []clause.OrderByColumn{
{Column: clause.Column{Name: fieldMeta.ColumnName}, Desc: sortDirection == "desc"},
},
})
return query
}
// extractModelMetadata extracts FieldMeta from the model struct using reflection
func extractModelMetadata(model interface{}) map[string]FieldMeta {
meta := make(map[string]FieldMeta)
// Unwrap pointers and slices to get the element struct type
t := reflect.TypeOf(model)
for t.Kind() == reflect.Ptr || t.Kind() == reflect.Slice {
t = t.Elem()
if t == nil {
return meta
}
}
// recursive parser that merges fields from embedded structs
var parseStruct func(reflect.Type)
parseStruct = func(st reflect.Type) {
for i := 0; i < st.NumField(); i++ {
field := st.Field(i)
ft := field.Type
// If the field is an embedded/anonymous struct, recurse into it
if field.Anonymous && ft.Kind() == reflect.Struct {
parseStruct(ft)
continue
}
// Normal field: record metadata
name := field.Name
meta[name] = FieldMeta{
ColumnName: CamelCaseToSnakeCase(name),
IsSortable: field.Tag.Get("sortable") == "true",
IsFilterable: field.Tag.Get("filterable") == "true",
}
}
}
parseStruct(t)
return meta
}
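
Putting the pieces together, a hypothetical Gin handler (the model, its tags, and the handler are illustrative assumptions, not part of this diff) would bind query parameters such as ?pagination[page]=2&sort[column]=username&filters[username][0]=alice and let PaginateFilterAndSort honor only the fields tagged as sortable or filterable:

package utils

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"gorm.io/gorm"
)

// exampleUser is an illustrative model, not part of this change: the "sortable" and
// "filterable" tags are what extractModelMetadata inspects when deciding which query
// parameters to apply.
type exampleUser struct {
	Id       string `json:"id" filterable:"true"`
	Username string `json:"username" sortable:"true" filterable:"true"`
	Email    string `json:"email" sortable:"true"`
}

// listExampleUsers is an illustrative handler, not part of this change.
func listExampleUsers(db *gorm.DB) gin.HandlerFunc {
	return func(c *gin.Context) {
		options := ParseListRequestOptions(c)
		var users []exampleUser
		pagination, err := PaginateFilterAndSort(options, db.Model(&exampleUser{}), &users)
		if err != nil {
			c.AbortWithStatus(http.StatusInternalServerError)
			return
		}
		c.JSON(http.StatusOK, gin.H{"data": users, "pagination": pagination})
	}
}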

View File

@@ -1,99 +0,0 @@
package utils
import (
"reflect"
"strconv"
"strings"
"gorm.io/gorm"
"gorm.io/gorm/clause"
)
type PaginationResponse struct {
TotalPages int64 `json:"totalPages"`
TotalItems int64 `json:"totalItems"`
CurrentPage int `json:"currentPage"`
ItemsPerPage int `json:"itemsPerPage"`
}
type SortedPaginationRequest struct {
Pagination struct {
Page int `form:"pagination[page]"`
Limit int `form:"pagination[limit]"`
} `form:"pagination"`
Sort struct {
Column string `form:"sort[column]"`
Direction string `form:"sort[direction]"`
} `form:"sort"`
}
func PaginateAndSort(sortedPaginationRequest SortedPaginationRequest, query *gorm.DB, result interface{}) (PaginationResponse, error) {
pagination := sortedPaginationRequest.Pagination
sort := sortedPaginationRequest.Sort
capitalizedSortColumn := CapitalizeFirstLetter(sort.Column)
sortField, sortFieldFound := reflect.TypeOf(result).Elem().Elem().FieldByName(capitalizedSortColumn)
isSortable, _ := strconv.ParseBool(sortField.Tag.Get("sortable"))
sort.Direction = NormalizeSortDirection(sort.Direction)
if sortFieldFound && isSortable {
columnName := CamelCaseToSnakeCase(sort.Column)
query = query.Clauses(clause.OrderBy{
Columns: []clause.OrderByColumn{
{Column: clause.Column{Name: columnName}, Desc: sort.Direction == "desc"},
},
})
}
return Paginate(pagination.Page, pagination.Limit, query, result)
}
func Paginate(page int, pageSize int, query *gorm.DB, result interface{}) (PaginationResponse, error) {
if page < 1 {
page = 1
}
if pageSize < 1 {
pageSize = 20
} else if pageSize > 100 {
pageSize = 100
}
offset := (page - 1) * pageSize
var totalItems int64
if err := query.Count(&totalItems).Error; err != nil {
return PaginationResponse{}, err
}
if err := query.Offset(offset).Limit(pageSize).Find(result).Error; err != nil {
return PaginationResponse{}, err
}
totalPages := (totalItems + int64(pageSize) - 1) / int64(pageSize)
if totalItems == 0 {
totalPages = 1
}
return PaginationResponse{
TotalPages: totalPages,
TotalItems: totalItems,
CurrentPage: page,
ItemsPerPage: pageSize,
}, nil
}
func NormalizeSortDirection(direction string) string {
d := strings.ToLower(strings.TrimSpace(direction))
if d != "asc" && d != "desc" {
return "asc"
}
return d
}
func IsValidSortDirection(direction string) bool {
d := strings.ToLower(strings.TrimSpace(direction))
return d == "asc" || d == "desc"
}

View File

@@ -1,13 +1,16 @@
package utils
// Ptr returns a pointer to the given value.
func Ptr[T any](v T) *T {
return &v
}
func PtrValueOrZero[T any](ptr *T) T {
if ptr == nil {
var zero T
return zero
// PtrOrNil returns a pointer to v if v is not the zero value of its type,
// otherwise it returns nil.
func PtrOrNil[T comparable](v T) *T {
var zero T
if v == zero {
return nil
}
return *ptr
return &v
}
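
A tiny sketch of the difference between the two helpers (the patch struct is an illustrative assumption, not part of this diff): Ptr always yields a pointer, while PtrOrNil maps the zero value to nil so an empty optional field can mean "leave unchanged".

package utils

// exampleUserPatch is an illustrative struct, not part of this change.
type exampleUserPatch struct {
	Locale      *string
	DisplayName *string
}

func buildExamplePatch(locale, displayName string) exampleUserPatch {
	return exampleUserPatch{
		Locale:      Ptr(locale),           // always set, even when locale is ""
		DisplayName: PtrOrNil(displayName), // nil when displayName is "", i.e. leave unchanged
	}
}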

View File

@@ -38,6 +38,7 @@ func (r *ServiceRunner) Run(ctx context.Context) error {
// Ignore context canceled errors here as they generally indicate that the service is stopping
if rErr != nil && !errors.Is(rErr, context.Canceled) {
cancel()
errCh <- rErr
return
}

Some files were not shown because too many files have changed in this diff Show More