Mirror of https://github.com/pocket-id/pocket-id.git (synced 2025-12-29 16:34:43 +03:00)

Compare commits: oidc-scope...update-aag (14 commits)
| SHA1 |
|---|
| ee74e461d0 |
| 1bc9f5f7e7 |
| 461293ba1d |
| 7c5ffbf9a5 |
| f75cef83d5 |
| e358c433f0 |
| 08e4ffeb60 |
| 59ca6b26ac |
| f5da11b99b |
| 3eaf36aae7 |
| 0a6ff6f84b |
| edb32d82b2 |
| 90f555f7c1 |
| 177ada10ba |
.github/workflows/backend-linter.yml (vendored): 2 changes

@@ -2,7 +2,7 @@ name: Run Backend Linter

 on:
   push:
-    branches: [main, breaking/**]
+    branches: [main]
     paths:
       - "backend/**"
   pull_request:
.github/workflows/e2e-tests.yml (vendored): 19 changes

@@ -1,7 +1,7 @@
 name: E2E Tests
 on:
   push:
-    branches: [main, breaking/**]
+    branches: [main]
     paths-ignore:
       - "docs/**"
       - "**.md"

@@ -171,7 +171,7 @@ jobs:
         run: |
           DOCKER_COMPOSE_FILE=docker-compose.yml

-          echo "FILE_BACKEND=${{ matrix.storage }}" > .env
+          export FILE_BACKEND="${{ matrix.storage }}"
           if [ "${{ matrix.db }}" = "postgres" ]; then
             DOCKER_COMPOSE_FILE=docker-compose-postgres.yml
           elif [ "${{ matrix.storage }}" = "s3" ]; then

@@ -179,20 +179,7 @@ jobs:
           fi

           docker compose -f "$DOCKER_COMPOSE_FILE" up -d

-          {
-            LOG_FILE="/tmp/backend.log"
-            while true; do
-              CID=$(docker compose -f "$DOCKER_COMPOSE_FILE" ps -q pocket-id)
-              if [ -n "$CID" ]; then
-                echo "[$(date)] Attaching logs for $CID" >> "$LOG_FILE"
-                docker logs -f --since=0 "$CID" >> "$LOG_FILE" 2>&1
-              else
-                echo "[$(date)] Container not yet running…" >> "$LOG_FILE"
-              fi
-              sleep 1
-            done
-          } &
+          docker compose -f "$DOCKER_COMPOSE_FILE" logs -f pocket-id &> /tmp/backend.log &

       - name: Run Playwright tests
         working-directory: ./tests
.github/workflows/svelte-check.yml (vendored): 2 changes

@@ -2,7 +2,7 @@ name: Svelte Check

 on:
   push:
-    branches: [main, breaking/**]
+    branches: [main]
     paths:
       - "frontend/src/**"
       - ".github/svelte-check-matcher.json"
.github/workflows/unit-tests.yml (vendored): 2 changes

@@ -1,7 +1,7 @@
 name: Unit Tests
 on:
   push:
-    branches: [main, breaking/**]
+    branches: [main]
     paths:
       - "backend/**"
   pull_request:
.gitignore (vendored): 1 change

@@ -15,7 +15,6 @@ node_modules
/backend/bin
pocket-id
/tests/test-results/*.json
.tmp/

# OS
.DS_Store
@@ -4,7 +4,7 @@ Pocket ID is a simple OIDC provider that allows users to authenticate with their

 → Try out the [Demo](https://demo.pocket-id.org)

-<img src="https://github.com/user-attachments/assets/96ac549d-b897-404a-8811-f42b16ea58e2" width="1200"/>
+<img src="https://github.com/user-attachments/assets/1e99ba44-76da-4b47-9b8a-dbe9b7f84512" width="1200"/>

 The goal of Pocket ID is to be a simple and easy-to-use. There are other self-hosted OIDC providers like [Keycloak](https://www.keycloak.org/) or [ORY Hydra](https://www.ory.sh/hydra/) but they are often too complex for simple use cases.
@@ -1,12 +1,9 @@
 package main

 import (
-	"fmt"
-	"os"
 	_ "time/tzdata"

 	"github.com/pocket-id/pocket-id/backend/internal/cmds"
-	"github.com/pocket-id/pocket-id/backend/internal/common"
 )

 // @title Pocket ID API

@@ -14,9 +11,5 @@ import (
 // @description.markdown

 func main() {
-	if err := common.ValidateEnvConfig(&common.EnvConfig); err != nil {
-		fmt.Fprintf(os.Stderr, "config error: %v\n", err)
-		os.Exit(1)
-	}
 	cmds.Execute()
 }
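For orientation, a minimal sketch of the resulting entrypoint after this change, assuming the removed lines are exactly those marked above and that environment-config validation now happens inside the cmds package rather than in main:

package main

import (
	_ "time/tzdata" // embed tzdata so time zone lookups work in minimal containers

	"github.com/pocket-id/pocket-id/backend/internal/cmds"
)

// @title Pocket ID API
// @description.markdown

func main() {
	// Config validation is assumed to run inside cmds.Execute from here on.
	cmds.Execute()
}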
backend/go.mod: 127 changes

@@ -3,54 +3,55 @@ module github.com/pocket-id/pocket-id/backend
go 1.25

require (
	github.com/aws/aws-sdk-go-v2 v1.40.0
	github.com/aws/aws-sdk-go-v2/config v1.32.2
	github.com/aws/aws-sdk-go-v2/credentials v1.19.2
	github.com/aws/aws-sdk-go-v2/service/s3 v1.92.1
	github.com/aws/smithy-go v1.23.2
	github.com/aws/aws-sdk-go-v2 v1.41.0
	github.com/aws/aws-sdk-go-v2/config v1.32.5
	github.com/aws/aws-sdk-go-v2/credentials v1.19.5
	github.com/aws/aws-sdk-go-v2/service/s3 v1.93.2
	github.com/aws/smithy-go v1.24.0
	github.com/caarlos0/env/v11 v11.3.1
	github.com/cenkalti/backoff/v5 v5.0.3
	github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
	github.com/disintegration/imaging v1.6.2
	github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6
	github.com/emersion/go-smtp v0.24.0
	github.com/fxamacker/cbor/v2 v2.9.0
	github.com/gin-contrib/slog v1.2.0
	github.com/gin-gonic/gin v1.11.0
	github.com/glebarez/go-sqlite v1.22.0
	github.com/glebarez/sqlite v1.11.0
	github.com/go-co-op/gocron/v2 v2.18.1
	github.com/go-co-op/gocron/v2 v2.18.2
	github.com/go-ldap/ldap/v3 v3.4.12
	github.com/go-playground/validator/v10 v10.28.0
	github.com/go-webauthn/webauthn v0.15.0
	github.com/golang-migrate/migrate/v4 v4.19.0
	github.com/golang-migrate/migrate/v4 v4.19.1
	github.com/google/uuid v1.6.0
	github.com/hashicorp/go-uuid v1.0.3
	github.com/jinzhu/copier v0.4.0
	github.com/joho/godotenv v1.5.1
	github.com/lestrrat-go/httprc/v3 v3.0.1
	github.com/lestrrat-go/httprc/v3 v3.0.2
	github.com/lestrrat-go/jwx/v3 v3.0.12
	github.com/lmittmann/tint v1.1.2
	github.com/mattn/go-isatty v0.0.20
	github.com/mileusna/useragent v1.3.5
	github.com/orandin/slog-gorm v1.4.0
	github.com/oschwald/maxminddb-golang/v2 v2.1.0
	github.com/spf13/cobra v1.10.1
	github.com/oschwald/maxminddb-golang/v2 v2.1.1
	github.com/spf13/cobra v1.10.2
	github.com/stretchr/testify v1.11.1
	go.opentelemetry.io/contrib/bridges/otelslog v0.13.0
	go.opentelemetry.io/contrib/exporters/autoexport v0.63.0
	go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.63.0
	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
	go.opentelemetry.io/otel v1.38.0
	go.opentelemetry.io/otel/log v0.14.0
	go.opentelemetry.io/otel/metric v1.38.0
	go.opentelemetry.io/otel/sdk v1.38.0
	go.opentelemetry.io/otel/sdk/log v0.14.0
	go.opentelemetry.io/otel/sdk/metric v1.38.0
	go.opentelemetry.io/otel/trace v1.38.0
	golang.org/x/crypto v0.45.0
	golang.org/x/image v0.33.0
	golang.org/x/sync v0.18.0
	golang.org/x/text v0.31.0
	go.opentelemetry.io/contrib/bridges/otelslog v0.14.0
	go.opentelemetry.io/contrib/exporters/autoexport v0.64.0
	go.opentelemetry.io/contrib/instrumentation/github.com/gin-gonic/gin/otelgin v0.64.0
	go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
	go.opentelemetry.io/otel v1.39.0
	go.opentelemetry.io/otel/log v0.15.0
	go.opentelemetry.io/otel/metric v1.39.0
	go.opentelemetry.io/otel/sdk v1.39.0
	go.opentelemetry.io/otel/sdk/log v0.15.0
	go.opentelemetry.io/otel/sdk/metric v1.39.0
	go.opentelemetry.io/otel/trace v1.39.0
	golang.org/x/crypto v0.46.0
	golang.org/x/image v0.34.0
	golang.org/x/sync v0.19.0
	golang.org/x/text v0.32.0
	golang.org/x/time v0.14.0
	gorm.io/driver/postgres v1.6.0
	gorm.io/gorm v1.31.1

@@ -58,32 +59,31 @@ require (

require (
	github.com/Azure/go-ntlmssp v0.1.0 // indirect
	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.3 // indirect
	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
	github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
	github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.4 // indirect
	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.16 // indirect
	github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.16 // indirect
	github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.16 // indirect
	github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
	github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.14 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.5 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.14 // indirect
	github.com/aws/aws-sdk-go-v2/service/signin v1.0.2 // indirect
	github.com/aws/aws-sdk-go-v2/service/sso v1.30.5 // indirect
	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.10 // indirect
	github.com/aws/aws-sdk-go-v2/service/sts v1.41.2 // indirect
	github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.16 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.4 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.16 // indirect
	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.16 // indirect
	github.com/aws/aws-sdk-go-v2/service/signin v1.0.4 // indirect
	github.com/aws/aws-sdk-go-v2/service/sso v1.30.7 // indirect
	github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.12 // indirect
	github.com/aws/aws-sdk-go-v2/service/sts v1.41.5 // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/bytedance/gopkg v0.1.3 // indirect
	github.com/bytedance/sonic v1.14.2 // indirect
	github.com/bytedance/sonic/loader v0.4.0 // indirect
	github.com/cespare/xxhash/v2 v2.3.0 // indirect
	github.com/cloudwego/base64x v0.1.6 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.4.0 // indirect
	github.com/disintegration/gift v1.2.1 // indirect
	github.com/dustin/go-humanize v1.0.1 // indirect
	github.com/felixge/httpsnoop v1.0.4 // indirect
	github.com/fxamacker/cbor/v2 v2.9.0 // indirect
	github.com/gabriel-vasile/mimetype v1.4.11 // indirect
	github.com/gin-contrib/sse v1.1.0 // indirect
	github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect

@@ -94,14 +94,12 @@ require (
	github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
	github.com/go-webauthn/x v0.1.26 // indirect
	github.com/goccy/go-json v0.10.5 // indirect
	github.com/goccy/go-yaml v1.18.0 // indirect
	github.com/goccy/go-yaml v1.19.0 // indirect
	github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
	github.com/google/go-github/v39 v39.2.0 // indirect
	github.com/google/go-querystring v1.1.0 // indirect
	github.com/google/go-tpm v0.9.7 // indirect
	github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
	github.com/hashicorp/errwrap v1.1.0 // indirect
	github.com/hashicorp/go-multierror v1.1.1 // indirect
	github.com/inconshreveable/mousetrap v1.1.0 // indirect
	github.com/jackc/pgpassfile v1.0.0 // indirect
	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect

@@ -117,7 +115,6 @@ require (
	github.com/lestrrat-go/dsig v1.0.0 // indirect
	github.com/lestrrat-go/dsig-secp256k1 v1.0.0 // indirect
	github.com/lestrrat-go/httpcc v1.0.1 // indirect
	github.com/lestrrat-go/option v1.0.1 // indirect
	github.com/lestrrat-go/option/v2 v2.0.0 // indirect
	github.com/lib/pq v1.10.9 // indirect
	github.com/mattn/go-sqlite3 v1.14.32 // indirect

@@ -126,44 +123,44 @@ require (
	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
	github.com/ncruces/go-strftime v1.0.0 // indirect
	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
	github.com/prometheus/client_golang v1.23.2 // indirect
	github.com/prometheus/client_model v0.6.2 // indirect
	github.com/prometheus/common v0.67.4 // indirect
	github.com/prometheus/otlptranslator v1.0.0 // indirect
	github.com/prometheus/procfs v0.19.2 // indirect
	github.com/quic-go/qpack v0.6.0 // indirect
	github.com/quic-go/quic-go v0.57.0 // indirect
	github.com/quic-go/quic-go v0.57.1 // indirect
	github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
	github.com/robfig/cron/v3 v3.0.1 // indirect
	github.com/segmentio/asm v1.2.1 // indirect
	github.com/spf13/pflag v1.0.10 // indirect
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/ugorji/go/codec v1.3.1 // indirect
	github.com/valyala/fastjson v1.6.4 // indirect
	github.com/valyala/fastjson v1.6.5 // indirect
	github.com/x448/float16 v0.8.4 // indirect
	go.opentelemetry.io/auto/sdk v1.2.1 // indirect
	go.opentelemetry.io/contrib/bridges/prometheus v0.63.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/prometheus v0.60.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.38.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
	go.opentelemetry.io/contrib/bridges/prometheus v0.64.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.15.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.15.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/prometheus v0.61.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.15.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
	go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
	go.opentelemetry.io/proto/otlp v1.9.0 // indirect
	go.yaml.in/yaml/v2 v2.4.3 // indirect
	golang.org/x/arch v0.23.0 // indirect
	golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39 // indirect
	golang.org/x/net v0.47.0 // indirect
	golang.org/x/oauth2 v0.33.0 // indirect
	golang.org/x/sys v0.38.0 // indirect
	google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846 // indirect
	golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
	golang.org/x/net v0.48.0 // indirect
	golang.org/x/oauth2 v0.34.0 // indirect
	golang.org/x/sys v0.39.0 // indirect
	google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
	google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
	google.golang.org/grpc v1.77.0 // indirect
	google.golang.org/protobuf v1.36.10 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
backend/go.sum: 263 changes (checksum entries updated to match the dependency changes in backend/go.mod above).
@@ -24,7 +24,8 @@ func initApplicationImages(ctx context.Context, fileStorage storage.FileStorage)
// Previous versions of images
// If these are found, they are deleted
legacyImageHashes := imageHashMap{
"background.jpg": mustDecodeHex("138d510030ed845d1d74de34658acabff562d306476454369a60ab8ade31933f"),
"background.jpg": mustDecodeHex("138d510030ed845d1d74de34658acabff562d306476454369a60ab8ade31933f"),
"background.webp": mustDecodeHex("3fc436a66d6b872b01d96a4e75046c46b5c3e2daccd51e98ecdf98fd445599ab"),
}

sourceFiles, err := resources.FS.ReadDir("images")

@@ -7,7 +7,6 @@ import (
"time"

_ "github.com/golang-migrate/migrate/v4/source/file"
"gorm.io/gorm"

"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/job"
@@ -16,16 +15,6 @@ import (
)

func Bootstrap(ctx context.Context) error {
var shutdownFns []utils.Service
defer func() { //nolint:contextcheck
// Invoke all shutdown functions on exit
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
if err := utils.NewServiceRunner(shutdownFns...).Run(shutdownCtx); err != nil {
slog.Error("Error during graceful shutdown", "error", err)
}
}()

// Initialize the observability stack, including the logger, distributed tracing, and metrics
shutdownFns, httpClient, err := initObservability(ctx, common.EnvConfig.MetricsEnabled, common.EnvConfig.TracingEnabled)
if err != nil {
@@ -33,80 +22,15 @@ func Bootstrap(ctx context.Context) error {
}
slog.InfoContext(ctx, "Pocket ID is starting")

// Connect to the database
db, err := NewDatabase()
if err != nil {
return fmt.Errorf("failed to initialize database: %w", err)
}

fileStorage, err := InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize file storage (backend: %s): %w", common.EnvConfig.FileBackend, err)
}
// Initialize the file storage backend
var fileStorage storage.FileStorage

imageExtensions, err := initApplicationImages(ctx, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize application images: %w", err)
}

// Create all services
svc, err := initServices(ctx, db, httpClient, imageExtensions, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize services: %w", err)
}

waitUntil, err := svc.appLockService.Acquire(ctx, false)
if err != nil {
return fmt.Errorf("failed to acquire application lock: %w", err)
}

select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(time.Until(waitUntil)):
}

shutdownFn := func(shutdownCtx context.Context) error {
sErr := svc.appLockService.Release(shutdownCtx)
if sErr != nil {
return fmt.Errorf("failed to release application lock: %w", sErr)
}
return nil
}
shutdownFns = append(shutdownFns, shutdownFn)

// Init the job scheduler
scheduler, err := job.NewScheduler()
if err != nil {
return fmt.Errorf("failed to create job scheduler: %w", err)
}
err = registerScheduledJobs(ctx, db, svc, httpClient, scheduler)
if err != nil {
return fmt.Errorf("failed to register scheduled jobs: %w", err)
}

// Init the router
router, err := initRouter(db, svc)
if err != nil {
return fmt.Errorf("failed to initialize router: %w", err)
}

// Run all background services
// This call blocks until the context is canceled
services := []utils.Service{svc.appLockService.RunRenewal, router}

if common.EnvConfig.AppEnv != "test" {
services = append(services, scheduler.Run)
}

err = utils.NewServiceRunner(services...).Run(ctx)
if err != nil {
return fmt.Errorf("failed to run services: %w", err)
}

return nil
}

func InitStorage(ctx context.Context, db *gorm.DB) (fileStorage storage.FileStorage, err error) {
switch common.EnvConfig.FileBackend {
case storage.TypeFileSystem:
fileStorage, err = storage.NewFilesystemStorage(common.EnvConfig.UploadPath)
@@ -128,8 +52,53 @@ func InitStorage(ctx context.Context, db *gorm.DB) (fileStorage storage.FileStor
err = fmt.Errorf("unknown file storage backend: %s", common.EnvConfig.FileBackend)
}
if err != nil {
return fileStorage, err
return fmt.Errorf("failed to initialize file storage (backend: %s): %w", common.EnvConfig.FileBackend, err)
}

return fileStorage, nil
imageExtensions, err := initApplicationImages(ctx, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize application images: %w", err)
}

// Create all services
svc, err := initServices(ctx, db, httpClient, imageExtensions, fileStorage)
if err != nil {
return fmt.Errorf("failed to initialize services: %w", err)
}

// Init the job scheduler
scheduler, err := job.NewScheduler()
if err != nil {
return fmt.Errorf("failed to create job scheduler: %w", err)
}
err = registerScheduledJobs(ctx, db, svc, httpClient, scheduler)
if err != nil {
return fmt.Errorf("failed to register scheduled jobs: %w", err)
}

// Init the router
router := initRouter(db, svc)

// Run all background services
// This call blocks until the context is canceled
err = utils.
NewServiceRunner(router, scheduler.Run).
Run(ctx)
if err != nil {
return fmt.Errorf("failed to run services: %w", err)
}

// Invoke all shutdown functions
// We give these a timeout of 5s
// Note: we use a background context because the run context has been canceled already
shutdownCtx, shutdownCancel := context.WithTimeout(context.Background(), 5*time.Second)
defer shutdownCancel()
err = utils.
NewServiceRunner(shutdownFns...).
Run(shutdownCtx) //nolint:contextcheck
if err != nil {
slog.Error("Error shutting down services", slog.Any("error", err))
}

return nil
}

@@ -12,7 +12,12 @@ import (
"time"

"github.com/glebarez/sqlite"
"github.com/golang-migrate/migrate/v4"
"github.com/golang-migrate/migrate/v4/database"
postgresMigrate "github.com/golang-migrate/migrate/v4/database/postgres"
sqliteMigrate "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/github"
"github.com/golang-migrate/migrate/v4/source/iofs"
slogGorm "github.com/orandin/slog-gorm"
"gorm.io/driver/postgres"
"gorm.io/gorm"
@@ -21,10 +26,11 @@ import (
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/utils"
sqliteutil "github.com/pocket-id/pocket-id/backend/internal/utils/sqlite"
"github.com/pocket-id/pocket-id/backend/resources"
)

func NewDatabase() (db *gorm.DB, err error) {
db, err = ConnectDatabase()
db, err = connectDatabase()
if err != nil {
return nil, fmt.Errorf("failed to connect to database: %w", err)
}
@@ -33,15 +39,105 @@ func NewDatabase() (db *gorm.DB, err error) {
return nil, fmt.Errorf("failed to get sql.DB: %w", err)
}

// Choose the correct driver for the database provider
var driver database.Driver
switch common.EnvConfig.DbProvider {
case common.DbProviderSqlite:
driver, err = sqliteMigrate.WithInstance(sqlDb, &sqliteMigrate.Config{
NoTxWrap: true,
})
case common.DbProviderPostgres:
driver, err = postgresMigrate.WithInstance(sqlDb, &postgresMigrate.Config{})
default:
// Should never happen at this point
return nil, fmt.Errorf("unsupported database provider: %s", common.EnvConfig.DbProvider)
}
if err != nil {
return nil, fmt.Errorf("failed to create migration driver: %w", err)
}

// Run migrations
if err := utils.MigrateDatabase(sqlDb); err != nil {
if err := migrateDatabase(driver); err != nil {
return nil, fmt.Errorf("failed to run migrations: %w", err)
}

return db, nil
}

func ConnectDatabase() (db *gorm.DB, err error) {
func migrateDatabase(driver database.Driver) error {
// Embedded migrations via iofs
path := "migrations/" + string(common.EnvConfig.DbProvider)
source, err := iofs.New(resources.FS, path)
if err != nil {
return fmt.Errorf("failed to create embedded migration source: %w", err)
}

m, err := migrate.NewWithInstance("iofs", source, "pocket-id", driver)
if err != nil {
return fmt.Errorf("failed to create migration instance: %w", err)
}

requiredVersion, err := getRequiredMigrationVersion(path)
if err != nil {
return fmt.Errorf("failed to get last migration version: %w", err)
}

currentVersion, _, _ := m.Version()
if currentVersion > requiredVersion {
slog.Warn("Database version is newer than the application supports, possible downgrade detected", slog.Uint64("db_version", uint64(currentVersion)), slog.Uint64("app_version", uint64(requiredVersion)))
if !common.EnvConfig.AllowDowngrade {
return fmt.Errorf("database version (%d) is newer than application version (%d), downgrades are not allowed (set ALLOW_DOWNGRADE=true to enable)", currentVersion, requiredVersion)
}
slog.Info("Fetching migrations from GitHub to handle possible downgrades")
return migrateDatabaseFromGitHub(driver, requiredVersion)
}

if err := m.Migrate(requiredVersion); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply embedded migrations: %w", err)
}
return nil
}

func migrateDatabaseFromGitHub(driver database.Driver, version uint) error {
srcURL := "github://pocket-id/pocket-id/backend/resources/migrations/" + string(common.EnvConfig.DbProvider)

m, err := migrate.NewWithDatabaseInstance(srcURL, "pocket-id", driver)
if err != nil {
return fmt.Errorf("failed to create GitHub migration instance: %w", err)
}

if err := m.Migrate(version); err != nil && !errors.Is(err, migrate.ErrNoChange) {
return fmt.Errorf("failed to apply GitHub migrations: %w", err)
}
return nil
}

// getRequiredMigrationVersion reads the embedded migration files and returns the highest version number found.
func getRequiredMigrationVersion(path string) (uint, error) {
entries, err := resources.FS.ReadDir(path)
if err != nil {
return 0, fmt.Errorf("failed to read migration directory: %w", err)
}

var maxVersion uint
for _, entry := range entries {
if entry.IsDir() {
continue
}
name := entry.Name()
var version uint
n, err := fmt.Sscanf(name, "%d_", &version)
if err == nil && n == 1 {
if version > maxVersion {
maxVersion = version
}
}
}

return maxVersion, nil
}

func connectDatabase() (db *gorm.DB, err error) {
var dialector gorm.Dialector

// Choose the correct database provider

@@ -17,7 +17,7 @@ import (
func init() {
registerTestControllers = []func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services){
func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services) {
testService, err := service.NewTestService(db, svc.appConfigService, svc.jwtService, svc.ldapService, svc.appLockService, svc.fileStorage)
testService, err := service.NewTestService(db, svc.appConfigService, svc.jwtService, svc.ldapService, svc.fileStorage)
if err != nil {
slog.Error("Failed to initialize test service", slog.Any("error", err))
os.Exit(1)

@@ -29,7 +29,16 @@ import (
// This is used to register additional controllers for tests
var registerTestControllers []func(apiGroup *gin.RouterGroup, db *gorm.DB, svc *services)

func initRouter(db *gorm.DB, svc *services) (utils.Service, error) {
func initRouter(db *gorm.DB, svc *services) utils.Service {
runner, err := initRouterInternal(db, svc)
if err != nil {
slog.Error("Failed to init router", "error", err)
os.Exit(1)
}
return runner
}

func initRouterInternal(db *gorm.DB, svc *services) (utils.Service, error) {
// Set the appropriate Gin mode based on the environment
switch common.EnvConfig.AppEnv {
case common.AppEnvProduction:
@@ -189,6 +198,7 @@ func initLogger(r *gin.Engine) {
"GET /api/application-images/logo",
"GET /api/application-images/background",
"GET /api/application-images/favicon",
"GET /api/application-images/email",
"GET /_app",
"GET /fonts",
"GET /healthz",

@@ -27,7 +27,6 @@ type services struct {
apiKeyService *service.ApiKeyService
versionService *service.VersionService
fileStorage storage.FileStorage
appLockService *service.AppLockService
}

// Initializes all services
@@ -41,7 +40,6 @@ func initServices(ctx context.Context, db *gorm.DB, httpClient *http.Client, ima

svc.fileStorage = fileStorage
svc.appImagesService = service.NewAppImagesService(imageExtensions, fileStorage)
svc.appLockService = service.NewAppLockService(db)

svc.emailService, err = service.NewEmailService(db, svc.appConfigService)
if err != nil {

@@ -1,70 +0,0 @@
package cmds

import (
"context"
"fmt"
"io"
"os"

"github.com/pocket-id/pocket-id/backend/internal/bootstrap"
"github.com/pocket-id/pocket-id/backend/internal/service"
"github.com/spf13/cobra"
)

type exportFlags struct {
Path string
}

func init() {
var flags exportFlags

exportCmd := &cobra.Command{
Use: "export",
Short: "Exports all data of Pocket ID into a ZIP file",
RunE: func(cmd *cobra.Command, args []string) error {
return runExport(cmd.Context(), flags)
},
}

exportCmd.Flags().StringVarP(&flags.Path, "path", "p", "pocket-id-export.zip", "Path to the ZIP file to export the data to, or '-' to write to stdout")

rootCmd.AddCommand(exportCmd)
}

// runExport orchestrates the export flow
func runExport(ctx context.Context, flags exportFlags) error {
db, err := bootstrap.NewDatabase()
if err != nil {
return fmt.Errorf("failed to connect to database: %w", err)
}

storage, err := bootstrap.InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize storage: %w", err)
}

exportService := service.NewExportService(db, storage)

var w io.Writer
if flags.Path == "-" {
w = os.Stdout
} else {
file, err := os.Create(flags.Path)
if err != nil {
return fmt.Errorf("failed to create export file: %w", err)
}
defer file.Close()

w = file
}

if err := exportService.ExportToZip(ctx, w); err != nil {
return fmt.Errorf("failed to export data: %w", err)
}

if flags.Path != "-" {
fmt.Printf("Exported data to %s\n", flags.Path)
}

return nil
}
@@ -1,191 +0,0 @@
package cmds

import (
"archive/zip"
"context"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"time"

"github.com/spf13/cobra"
"gorm.io/gorm"

"github.com/pocket-id/pocket-id/backend/internal/bootstrap"
"github.com/pocket-id/pocket-id/backend/internal/common"
"github.com/pocket-id/pocket-id/backend/internal/service"
"github.com/pocket-id/pocket-id/backend/internal/utils"
)

type importFlags struct {
Path string
Yes bool
ForcefullyAcquireLock bool
}

func init() {
var flags importFlags

importCmd := &cobra.Command{
Use: "import",
Short: "Imports all data of Pocket ID from a ZIP file",
RunE: func(cmd *cobra.Command, args []string) error {
return runImport(cmd.Context(), flags)
},
}

importCmd.Flags().StringVarP(&flags.Path, "path", "p", "pocket-id-export.zip", "Path to the ZIP file to import the data from, or '-' to read from stdin")
importCmd.Flags().BoolVarP(&flags.Yes, "yes", "y", false, "Skip confirmation prompts")
importCmd.Flags().BoolVarP(&flags.ForcefullyAcquireLock, "forcefully-acquire-lock", "", false, "Forcefully acquire the application lock by terminating the Pocket ID instance")

rootCmd.AddCommand(importCmd)
}

// runImport handles the high-level orchestration of the import process
func runImport(ctx context.Context, flags importFlags) error {
if !flags.Yes {
ok, err := askForConfirmation()
if err != nil {
return fmt.Errorf("failed to get confirmation: %w", err)
}
if !ok {
fmt.Println("Aborted")
os.Exit(1)
}
}

var (
zipReader *zip.ReadCloser
cleanup func()
err error
)

if flags.Path == "-" {
zipReader, cleanup, err = readZipFromStdin()
defer cleanup()
} else {
zipReader, err = zip.OpenReader(flags.Path)
}
if err != nil {
return fmt.Errorf("failed to open zip: %w", err)
}
defer zipReader.Close()

db, err := bootstrap.ConnectDatabase()
if err != nil {
return err
}

err = acquireImportLock(ctx, db, flags.ForcefullyAcquireLock)
if err != nil {
return err
}

storage, err := bootstrap.InitStorage(ctx, db)
if err != nil {
return fmt.Errorf("failed to initialize storage: %w", err)
}

importService := service.NewImportService(db, storage)
err = importService.ImportFromZip(ctx, &zipReader.Reader)
if err != nil {
return fmt.Errorf("failed to import data from zip: %w", err)
}

fmt.Println("Import completed successfully.")
return nil
}

func acquireImportLock(ctx context.Context, db *gorm.DB, force bool) error {
// Check if the kv table exists, in case we are starting from an empty database
exists, err := utils.DBTableExists(db, "kv")
if err != nil {
return fmt.Errorf("failed to check if kv table exists: %w", err)
}
if !exists {
// This either means the database is empty, or the import is into an old version of PocketID that doesn't support locks
// In either case, there's no lock to acquire
fmt.Println("Could not acquire a lock because the 'kv' table does not exist. This is fine if you're importing into a new database, but make sure that there isn't an instance of Pocket ID currently running and using the same database.")
return nil
}

// Note that we do not call a deferred Release if the data was imported
// This is because we are overriding the contents of the database, so the lock is automatically lost
appLockService := service.NewAppLockService(db)

opCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()

waitUntil, err := appLockService.Acquire(opCtx, force)
if err != nil {
if errors.Is(err, service.ErrLockUnavailable) {
//nolint:staticcheck
return errors.New("Pocket ID must be stopped before importing data; please stop the running instance or run with --forcefully-acquire-lock to terminate the other instance")
}
return fmt.Errorf("failed to acquire application lock: %w", err)
}

select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(time.Until(waitUntil)):
}

return nil
}

func askForConfirmation() (bool, error) {
fmt.Println("WARNING: This feature is experimental and may not work correctly. Please create a backup before proceeding and report any issues you encounter.")
fmt.Println()
fmt.Println("WARNING: Import will erase all existing data at the following locations:")
fmt.Printf("Database: %s\n", absolutePathOrOriginal(common.EnvConfig.DbConnectionString))
fmt.Printf("Uploads Path: %s\n", absolutePathOrOriginal(common.EnvConfig.UploadPath))

ok, err := utils.PromptForConfirmation("Do you want to continue?")
if err != nil {
return false, err
}

return ok, nil
}

// absolutePathOrOriginal returns the absolute path of the given path, or the original if it fails
func absolutePathOrOriginal(path string) string {
abs, err := filepath.Abs(path)
if err != nil {
return path
}
return abs
}

func readZipFromStdin() (*zip.ReadCloser, func(), error) {
tmpFile, err := os.CreateTemp("", "pocket-id-import-*.zip")
if err != nil {
return nil, nil, fmt.Errorf("failed to create temporary file: %w", err)
}

cleanup := func() {
_ = os.Remove(tmpFile.Name())
}

if _, err := io.Copy(tmpFile, os.Stdin); err != nil {
tmpFile.Close()
cleanup()
return nil, nil, fmt.Errorf("failed to read data from stdin: %w", err)
}

if err := tmpFile.Close(); err != nil {
cleanup()
return nil, nil, fmt.Errorf("failed to close temporary file: %w", err)
}

r, err := zip.OpenReader(tmpFile.Name())
if err != nil {
cleanup()
return nil, nil, err
}

return r, cleanup, nil
}
@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"os"
"strings"

"github.com/lestrrat-go/jwx/v3/jwa"
@@ -79,7 +78,7 @@ func keyRotate(ctx context.Context, flags keyRotateFlags, db *gorm.DB, envConfig
}
if !ok {
fmt.Println("Aborted")
os.Exit(1)
return nil
}
}

@@ -1,6 +1,8 @@
package cmds

import (
"os"
"path/filepath"
"testing"

"github.com/stretchr/testify/assert"
@@ -67,14 +69,78 @@ func TestKeyRotate(t *testing.T) {

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
testKeyRotateWithDatabaseStorage(t, tt.flags, tt.wantErr, tt.errMsg)
t.Run("file storage", func(t *testing.T) {
testKeyRotateWithFileStorage(t, tt.flags, tt.wantErr, tt.errMsg)
})

t.Run("database storage", func(t *testing.T) {
testKeyRotateWithDatabaseStorage(t, tt.flags, tt.wantErr, tt.errMsg)
})
})
}
}

func testKeyRotateWithFileStorage(t *testing.T, flags keyRotateFlags, wantErr bool, errMsg string) {
// Create temporary directory for keys
tempDir := t.TempDir()
keysPath := filepath.Join(tempDir, "keys")
err := os.MkdirAll(keysPath, 0755)
require.NoError(t, err)

// Set up file storage config
envConfig := &common.EnvConfigSchema{
KeysStorage: "file",
KeysPath: keysPath,
}

// Create test database
db := testingutils.NewDatabaseForTest(t)

// Initialize app config service and create instance
appConfigService, err := service.NewAppConfigService(t.Context(), db)
require.NoError(t, err)
instanceID := appConfigService.GetDbConfig().InstanceID.Value

// Check if key exists before rotation
keyProvider, err := jwkutils.GetKeyProvider(db, envConfig, instanceID)
require.NoError(t, err)

// Run the key rotation
err = keyRotate(t.Context(), flags, db, envConfig)

if wantErr {
require.Error(t, err)
if errMsg != "" {
require.ErrorContains(t, err, errMsg)
}
return
}

require.NoError(t, err)

// Verify key was created
key, err := keyProvider.LoadKey()
require.NoError(t, err)
require.NotNil(t, key)

// Verify the algorithm matches what we requested
alg, _ := key.Algorithm()
assert.NotEmpty(t, alg)
if flags.Alg != "" {
expectedAlg := flags.Alg
if expectedAlg == "EdDSA" {
// EdDSA keys should have the EdDSA algorithm
assert.Equal(t, "EdDSA", alg.String())
} else {
assert.Equal(t, expectedAlg, alg.String())
}
}
}

func testKeyRotateWithDatabaseStorage(t *testing.T, flags keyRotateFlags, wantErr bool, errMsg string) {
// Set up database storage config
envConfig := &common.EnvConfigSchema{
KeysStorage: "database",
EncryptionKey: []byte("test-encryption-key-characters-long"),
}

@@ -51,7 +51,7 @@ var oneTimeAccessTokenCmd = &cobra.Command{
}

// Create a new access token that expires in 1 hour
oneTimeAccessToken, txErr = service.NewOneTimeAccessToken(user.ID, time.Hour)
oneTimeAccessToken, txErr = service.NewOneTimeAccessToken(user.ID, time.Hour, false)
if txErr != nil {
return fmt.Errorf("failed to generate access token: %w", txErr)
}

@@ -12,10 +12,9 @@ import (
)

var rootCmd = &cobra.Command{
Use: "pocket-id",
Short: "A simple and easy-to-use OIDC provider that allows users to authenticate with their passkeys to your services.",
Long: "By default, this command starts the pocket-id server.",
SilenceUsage: true,
Use: "pocket-id",
Short: "A simple and easy-to-use OIDC provider that allows users to authenticate with their passkeys to your services.",
Long: "By default, this command starts the pocket-id server.",
Run: func(cmd *cobra.Command, args []string) {
// Start the server
err := bootstrap.Bootstrap(cmd.Context())

@@ -38,40 +38,40 @@ const (
)

type EnvConfigSchema struct {
AppEnv AppEnv `env:"APP_ENV" options:"toLower"`
LogLevel string `env:"LOG_LEVEL" options:"toLower"`
LogJSON bool `env:"LOG_JSON"`
AppURL string `env:"APP_URL" options:"toLower,trimTrailingSlash"`
DbProvider DbProvider
DbConnectionString string `env:"DB_CONNECTION_STRING" options:"file"`
EncryptionKey []byte `env:"ENCRYPTION_KEY" options:"file"`
Port string `env:"PORT"`
Host string `env:"HOST" options:"toLower"`
UnixSocket string `env:"UNIX_SOCKET"`
UnixSocketMode string `env:"UNIX_SOCKET_MODE"`
LocalIPv6Ranges string `env:"LOCAL_IPV6_RANGES"`
UiConfigDisabled bool `env:"UI_CONFIG_DISABLED"`
MetricsEnabled bool `env:"METRICS_ENABLED"`
TracingEnabled bool `env:"TRACING_ENABLED"`
TrustProxy bool `env:"TRUST_PROXY"`
AnalyticsDisabled bool `env:"ANALYTICS_DISABLED"`
AllowDowngrade bool `env:"ALLOW_DOWNGRADE"`
InternalAppURL string `env:"INTERNAL_APP_URL"`

MaxMindLicenseKey string `env:"MAXMIND_LICENSE_KEY" options:"file"`
GeoLiteDBPath string `env:"GEOLITE_DB_PATH"`
GeoLiteDBUrl string `env:"GEOLITE_DB_URL"`

FileBackend string `env:"FILE_BACKEND" options:"toLower"`
UploadPath string `env:"UPLOAD_PATH"`

S3Bucket string `env:"S3_BUCKET"`
S3Region string `env:"S3_REGION"`
S3Endpoint string `env:"S3_ENDPOINT"`
S3AccessKeyID string `env:"S3_ACCESS_KEY_ID"`
S3SecretAccessKey string `env:"S3_SECRET_ACCESS_KEY"`
S3ForcePathStyle bool `env:"S3_FORCE_PATH_STYLE"`
S3DisableDefaultIntegrityChecks bool `env:"S3_DISABLE_DEFAULT_INTEGRITY_CHECKS"`
AppEnv AppEnv `env:"APP_ENV" options:"toLower"`
LogLevel string `env:"LOG_LEVEL" options:"toLower"`
AppURL string `env:"APP_URL" options:"toLower,trimTrailingSlash"`
DbProvider DbProvider `env:"DB_PROVIDER" options:"toLower"`
DbConnectionString string `env:"DB_CONNECTION_STRING" options:"file"`
FileBackend string `env:"FILE_BACKEND" options:"toLower"`
UploadPath string `env:"UPLOAD_PATH"`
S3Bucket string `env:"S3_BUCKET"`
S3Region string `env:"S3_REGION"`
S3Endpoint string `env:"S3_ENDPOINT"`
S3AccessKeyID string `env:"S3_ACCESS_KEY_ID"`
S3SecretAccessKey string `env:"S3_SECRET_ACCESS_KEY"`
S3ForcePathStyle bool `env:"S3_FORCE_PATH_STYLE"`
S3DisableDefaultIntegrityChecks bool `env:"S3_DISABLE_DEFAULT_INTEGRITY_CHECKS"`
KeysPath string `env:"KEYS_PATH"`
KeysStorage string `env:"KEYS_STORAGE"`
EncryptionKey []byte `env:"ENCRYPTION_KEY" options:"file"`
Port string `env:"PORT"`
Host string `env:"HOST" options:"toLower"`
UnixSocket string `env:"UNIX_SOCKET"`
UnixSocketMode string `env:"UNIX_SOCKET_MODE"`
MaxMindLicenseKey string `env:"MAXMIND_LICENSE_KEY" options:"file"`
GeoLiteDBPath string `env:"GEOLITE_DB_PATH"`
GeoLiteDBUrl string `env:"GEOLITE_DB_URL"`
LocalIPv6Ranges string `env:"LOCAL_IPV6_RANGES"`
UiConfigDisabled bool `env:"UI_CONFIG_DISABLED"`
MetricsEnabled bool `env:"METRICS_ENABLED"`
TracingEnabled bool `env:"TRACING_ENABLED"`
LogJSON bool `env:"LOG_JSON"`
TrustProxy bool `env:"TRUST_PROXY"`
AuditLogRetentionDays int `env:"AUDIT_LOG_RETENTION_DAYS"`
AnalyticsDisabled bool `env:"ANALYTICS_DISABLED"`
AllowDowngrade bool `env:"ALLOW_DOWNGRADE"`
InternalAppURL string `env:"INTERNAL_APP_URL"`
}

var EnvConfig = defaultConfig()
@@ -86,15 +86,17 @@ func init() {

func defaultConfig() EnvConfigSchema {
return EnvConfigSchema{
AppEnv: AppEnvProduction,
LogLevel: "info",
DbProvider: "sqlite",
FileBackend: "filesystem",
AppURL: AppUrl,
Port: "1411",
Host: "0.0.0.0",
GeoLiteDBPath: "data/GeoLite2-City.mmdb",
GeoLiteDBUrl: MaxMindGeoLiteCityUrl,
AppEnv: AppEnvProduction,
LogLevel: "info",
DbProvider: "sqlite",
FileBackend: "filesystem",
KeysPath: "data/keys",
AuditLogRetentionDays: 90,
AppURL: AppUrl,
Port: "1411",
Host: "0.0.0.0",
GeoLiteDBPath: "data/GeoLite2-City.mmdb",
GeoLiteDBUrl: MaxMindGeoLiteCityUrl,
}
}

@@ -117,28 +119,32 @@ func parseEnvConfig() error {
return fmt.Errorf("error preparing env config: %w", err)
}

err = validateEnvConfig(&EnvConfig)
if err != nil {
return err
}

return nil

}

// ValidateEnvConfig checks the EnvConfig for required fields and valid values
func ValidateEnvConfig(config *EnvConfigSchema) error {
// validateEnvConfig checks the EnvConfig for required fields and valid values
func validateEnvConfig(config *EnvConfigSchema) error {
if _, err := sloggin.ParseLevel(config.LogLevel); err != nil {
return errors.New("invalid LOG_LEVEL value. Must be 'debug', 'info', 'warn' or 'error'")
}

if len(config.EncryptionKey) < 16 {
return errors.New("ENCRYPTION_KEY must be at least 16 bytes long")
}

switch {
case config.DbConnectionString == "":
config.DbProvider = DbProviderSqlite
config.DbConnectionString = defaultSqliteConnString
case strings.HasPrefix(config.DbConnectionString, "postgres://") || strings.HasPrefix(config.DbConnectionString, "postgresql://"):
config.DbProvider = DbProviderPostgres
switch config.DbProvider {
case DbProviderSqlite:
if config.DbConnectionString == "" {
config.DbConnectionString = defaultSqliteConnString
}
case DbProviderPostgres:
if config.DbConnectionString == "" {
return errors.New("missing required env var 'DB_CONNECTION_STRING' for Postgres database")
}
default:
config.DbProvider = DbProviderSqlite
return errors.New("invalid DB_PROVIDER value. Must be 'sqlite' or 'postgres'")
}

parsedAppUrl, err := url.Parse(config.AppURL)
@@ -162,8 +168,27 @@ func ValidateEnvConfig(config *EnvConfigSchema) error {
}
}

switch config.KeysStorage {
// KeysStorage defaults to "file" if empty
case "":
config.KeysStorage = "file"
case "database":
if config.EncryptionKey == nil {
return errors.New("ENCRYPTION_KEY must be non-empty when KEYS_STORAGE is database")
}
case "file":
// All good, these are valid values
default:
return fmt.Errorf("invalid value for KEYS_STORAGE: %s", config.KeysStorage)
}

switch config.FileBackend {
case "s3", "database":
case "s3":
if config.KeysStorage == "file" {
return errors.New("KEYS_STORAGE cannot be 'file' when FILE_BACKEND is 's3'")
}
case "database":
// All good, these are valid values
case "", "filesystem":
if config.UploadPath == "" {
config.UploadPath = defaultFsUploadPath
@@ -191,6 +216,10 @@ func ValidateEnvConfig(config *EnvConfigSchema) error {

}

if config.AuditLogRetentionDays <= 0 {
return errors.New("AUDIT_LOG_RETENTION_DAYS must be greater than 0")
}

return nil

}

@@ -8,20 +8,6 @@ import (
"github.com/stretchr/testify/require"
)

func parseAndValidateEnvConfig(t *testing.T) error {
t.Helper()

if _, exists := os.LookupEnv("ENCRYPTION_KEY"); !exists {
t.Setenv("ENCRYPTION_KEY", "0123456789abcdef")
}

if err := parseEnvConfig(); err != nil {
return err
}

return ValidateEnvConfig(&EnvConfig)
}

func TestParseEnvConfig(t *testing.T) {
// Store original config to restore later
originalConfig := EnvConfig
@@ -31,10 +17,11 @@ func TestParseEnvConfig(t *testing.T) {

t.Run("should parse valid SQLite config correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "SQLITE") // should be lowercased automatically
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "HTTP://LOCALHOST:3000")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, DbProviderSqlite, EnvConfig.DbProvider)
assert.Equal(t, "http://localhost:3000", EnvConfig.AppURL)
@@ -42,76 +29,147 @@ func TestParseEnvConfig(t *testing.T) {

t.Run("should parse valid Postgres config correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "POSTGRES")
t.Setenv("DB_CONNECTION_STRING", "postgres://user:pass@localhost/db")
t.Setenv("APP_URL", "https://example.com")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, DbProviderPostgres, EnvConfig.DbProvider)
})

t.Run("should fail when ENCRYPTION_KEY is too short", func(t *testing.T) {
t.Run("should fail with invalid DB_PROVIDER", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("DB_PROVIDER", "invalid")
t.Setenv("DB_CONNECTION_STRING", "test")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("ENCRYPTION_KEY", "short")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "ENCRYPTION_KEY must be at least 16 bytes long")
assert.ErrorContains(t, err, "invalid DB_PROVIDER value")
})

t.Run("should set default SQLite connection string when DB_CONNECTION_STRING is empty", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("APP_URL", "http://localhost:3000")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, defaultSqliteConnString, EnvConfig.DbConnectionString)
})

t.Run("should fail when Postgres DB_CONNECTION_STRING is missing", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "postgres")
t.Setenv("APP_URL", "http://localhost:3000")

err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "missing required env var 'DB_CONNECTION_STRING' for Postgres")
})

t.Run("should fail with invalid APP_URL", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "€://not-a-valid-url")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "APP_URL is not a valid URL")
})

t.Run("should fail when APP_URL contains path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000/path")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "APP_URL must not contain a path")
})

t.Run("should fail with invalid INTERNAL_APP_URL", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("INTERNAL_APP_URL", "€://not-a-valid-url")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "INTERNAL_APP_URL is not a valid URL")
})

t.Run("should fail when INTERNAL_APP_URL contains path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("INTERNAL_APP_URL", "http://localhost:3000/path")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "INTERNAL_APP_URL must not contain a path")
})

t.Run("should default KEYS_STORAGE to 'file' when empty", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")

err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, "file", EnvConfig.KeysStorage)
})

t.Run("should fail when KEYS_STORAGE is 'database' but no encryption key", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", "database")

err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "ENCRYPTION_KEY must be non-empty when KEYS_STORAGE is database")
})

t.Run("should accept valid KEYS_STORAGE values", func(t *testing.T) {
validStorageTypes := []string{"file", "database"}

for _, storage := range validStorageTypes {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", storage)
if storage == "database" {
t.Setenv("ENCRYPTION_KEY", "test-key")
}

err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, storage, EnvConfig.KeysStorage)
}
})

t.Run("should fail with invalid KEYS_STORAGE value", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("KEYS_STORAGE", "invalid")

err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "invalid value for KEYS_STORAGE")
})

t.Run("should parse boolean environment variables correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("UI_CONFIG_DISABLED", "true")
@@ -120,7 +178,7 @@ func TestParseEnvConfig(t *testing.T) {
t.Setenv("TRUST_PROXY", "true")
t.Setenv("ANALYTICS_DISABLED", "false")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.True(t, EnvConfig.UiConfigDisabled)
assert.True(t, EnvConfig.MetricsEnabled)
@@ -129,19 +187,56 @@ func TestParseEnvConfig(t *testing.T) {
assert.False(t, EnvConfig.AnalyticsDisabled)
})

t.Run("should default audit log retention days to 90", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")

err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, 90, EnvConfig.AuditLogRetentionDays)
})

t.Run("should parse audit log retention days override", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("AUDIT_LOG_RETENTION_DAYS", "365")

err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, 365, EnvConfig.AuditLogRetentionDays)
})

t.Run("should fail when AUDIT_LOG_RETENTION_DAYS is non-positive", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("AUDIT_LOG_RETENTION_DAYS", "0")

err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "AUDIT_LOG_RETENTION_DAYS must be greater than 0")
})

t.Run("should parse string environment variables correctly", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "postgres")
t.Setenv("DB_CONNECTION_STRING", "postgres://test")
t.Setenv("APP_URL", "https://prod.example.com")
t.Setenv("APP_ENV", "PRODUCTION")
t.Setenv("UPLOAD_PATH", "/custom/uploads")
t.Setenv("KEYS_PATH", "/custom/keys")
t.Setenv("PORT", "8080")
t.Setenv("HOST", "LOCALHOST")
t.Setenv("UNIX_SOCKET", "/tmp/app.sock")
t.Setenv("MAXMIND_LICENSE_KEY", "test-license")
t.Setenv("GEOLITE_DB_PATH", "/custom/geolite.mmdb")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, AppEnvProduction, EnvConfig.AppEnv) // lowercased
assert.Equal(t, "/custom/uploads", EnvConfig.UploadPath)
@@ -151,24 +246,38 @@ func TestParseEnvConfig(t *testing.T) {

t.Run("should normalize file backend and default upload path", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("FILE_BACKEND", "FILESYSTEM")
t.Setenv("UPLOAD_PATH", "")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.NoError(t, err)
assert.Equal(t, "filesystem", EnvConfig.FileBackend)
assert.Equal(t, defaultFsUploadPath, EnvConfig.UploadPath)
})

t.Run("should fail when FILE_BACKEND is s3 but keys are stored on filesystem", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("FILE_BACKEND", "s3")

err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "KEYS_STORAGE cannot be 'file' when FILE_BACKEND is 's3'")
})

t.Run("should fail with invalid FILE_BACKEND value", func(t *testing.T) {
EnvConfig = defaultConfig()
t.Setenv("DB_PROVIDER", "sqlite")
t.Setenv("DB_CONNECTION_STRING", "file:test.db")
t.Setenv("APP_URL", "http://localhost:3000")
t.Setenv("FILE_BACKEND", "invalid")

err := parseAndValidateEnvConfig(t)
err := parseEnvConfig()
require.Error(t, err)
assert.ErrorContains(t, err, "invalid FILE_BACKEND value")
})

@@ -38,6 +38,13 @@ type TokenInvalidOrExpiredError struct{}
func (e *TokenInvalidOrExpiredError) Error() string { return "token is invalid or expired" }
func (e *TokenInvalidOrExpiredError) HttpStatusCode() int { return 400 }

type DeviceCodeInvalid struct{}

func (e *DeviceCodeInvalid) Error() string {
return "one time access code must be used on the device it was generated for"
}
func (e *DeviceCodeInvalid) HttpStatusCode() int { return 400 }

type TokenInvalidError struct{}

func (e *TokenInvalidError) Error() string {

@@ -23,11 +23,13 @@ func NewAppImagesController(
}

group.GET("/application-images/logo", controller.getLogoHandler)
group.GET("/application-images/email", controller.getEmailLogoHandler)
group.GET("/application-images/background", controller.getBackgroundImageHandler)
group.GET("/application-images/favicon", controller.getFaviconHandler)
group.GET("/application-images/default-profile-picture", authMiddleware.Add(), controller.getDefaultProfilePicture)

group.PUT("/application-images/logo", authMiddleware.Add(), controller.updateLogoHandler)
group.PUT("/application-images/email", authMiddleware.Add(), controller.updateEmailLogoHandler)
group.PUT("/application-images/background", authMiddleware.Add(), controller.updateBackgroundImageHandler)
group.PUT("/application-images/favicon", authMiddleware.Add(), controller.updateFaviconHandler)
group.PUT("/application-images/default-profile-picture", authMiddleware.Add(), controller.updateDefaultProfilePicture)
@@ -59,6 +61,18 @@ func (c *AppImagesController) getLogoHandler(ctx *gin.Context) {
c.getImage(ctx, imageName)
}

// getEmailLogoHandler godoc
// @Summary Get email logo image
// @Description Get the email logo image for use in emails
// @Tags Application Images
// @Produce image/png
// @Produce image/jpeg
// @Success 200 {file} binary "Email logo image"
// @Router /api/application-images/email [get]
func (c *AppImagesController) getEmailLogoHandler(ctx *gin.Context) {
c.getImage(ctx, "logoEmail")
}

// getBackgroundImageHandler godoc
// @Summary Get background image
// @Description Get the background image for the application
@@ -124,6 +138,37 @@ func (c *AppImagesController) updateLogoHandler(ctx *gin.Context) {
ctx.Status(http.StatusNoContent)
}

// updateEmailLogoHandler godoc
// @Summary Update email logo
// @Description Update the email logo for use in emails
// @Tags Application Images
// @Accept multipart/form-data
// @Param file formData file true "Email logo image file"
// @Success 204 "No Content"
// @Router /api/application-images/email [put]
func (c *AppImagesController) updateEmailLogoHandler(ctx *gin.Context) {
file, err := ctx.FormFile("file")
if err != nil {
_ = ctx.Error(err)
return
}

fileType := utils.GetFileExtension(file.Filename)
mimeType := utils.GetImageMimeType(fileType)

if mimeType != "image/png" && mimeType != "image/jpeg" {
_ = ctx.Error(&common.WrongFileTypeError{ExpectedFileType: ".png or .jpg/jpeg"})
return
}

if err := c.appImagesService.UpdateImage(ctx.Request.Context(), file, "logoEmail"); err != nil {
_ = ctx.Error(err)
return
}

ctx.Status(http.StatusNoContent)
}

// updateBackgroundImageHandler godoc
// @Summary Update background image
// @Description Update the application background image

@@ -40,11 +40,6 @@ func (tc *TestController) resetAndSeedHandler(c *gin.Context) {
return
}

if err := tc.TestService.ResetLock(c.Request.Context()); err != nil {
_ = c.Error(err)
return
}

if err := tc.TestService.ResetApplicationImages(c.Request.Context()); err != nil {
_ = c.Error(err)
return
@@ -74,6 +69,8 @@ func (tc *TestController) resetAndSeedHandler(c *gin.Context) {
}
}

tc.TestService.SetJWTKeys()

c.Status(http.StatusNoContent)
}

@@ -63,12 +63,6 @@ func NewOidcController(group *gin.RouterGroup, authMiddleware *middleware.AuthMi
|
||||
|
||||
group.GET("/oidc/users/me/clients", authMiddleware.WithAdminNotRequired().Add(), oc.listOwnAccessibleClientsHandler)
|
||||
|
||||
// OIDC API (Resource Server) routes
|
||||
group.POST("/oidc/apis", authMiddleware.Add(), oc.createAPIHandler)
|
||||
group.GET("/oidc/apis", authMiddleware.Add(), oc.listAPIsHandler)
|
||||
group.GET("/oidc/apis/:id", authMiddleware.Add(), oc.getAPIHandler)
|
||||
group.POST("/oidc/apis/:id", authMiddleware.Add(), oc.updateAPIHandler)
|
||||
group.DELETE("/oidc/apis/:id", authMiddleware.Add(), oc.deleteAPIHandler)
|
||||
}
|
||||
|
||||
type OidcController struct {
|
||||
@@ -851,151 +845,3 @@ func (oc *OidcController) getClientPreviewHandler(c *gin.Context) {
|
||||
|
||||
c.JSON(http.StatusOK, preview)
|
||||
}
|
||||
|
||||
// createAPIHandler godoc
|
||||
// @Summary Create OIDC API
|
||||
// @Description Create a new OIDC API (resource server)
|
||||
// @Tags OIDC
// @Accept json
// @Produce json
// @Param api body dto.OidcAPICreateDto true "API information"
// @Success 201 {object} dto.OidcAPIDto "Created API"
// @Router /api/oidc/apis [post]
func (oc *OidcController) createAPIHandler(c *gin.Context) {
	var input dto.OidcAPICreateDto
	err := c.ShouldBindJSON(&input)
	if err != nil {
		_ = c.Error(err)
		return
	}

	api, err := oc.oidcService.CreateAPI(c.Request.Context(), input)
	if err != nil {
		_ = c.Error(err)
		return
	}

	var apiDto dto.OidcAPIDto
	err = dto.MapStruct(api, &apiDto)
	if err != nil {
		_ = c.Error(err)
		return
	}

	c.JSON(http.StatusCreated, apiDto)
}

// listAPIsHandler godoc
// @Summary List OIDC APIs
// @Description Get a paginated list of OIDC APIs (resource servers)
// @Tags OIDC
// @Param search query string false "Search term to filter APIs by name"
// @Param pagination[page] query int false "Page number for pagination" default(1)
// @Param pagination[limit] query int false "Number of items per page" default(20)
// @Param sort[column] query string false "Column to sort by"
// @Param sort[direction] query string false "Sort direction (asc or desc)" default("asc")
// @Success 200 {object} dto.Paginated[dto.OidcAPIDto]
// @Router /api/oidc/apis [get]
func (oc *OidcController) listAPIsHandler(c *gin.Context) {
	searchTerm := c.Query("search")
	listRequestOptions := utils.ParseListRequestOptions(c)

	apis, pagination, err := oc.oidcService.ListAPIs(c.Request.Context(), searchTerm, listRequestOptions)
	if err != nil {
		_ = c.Error(err)
		return
	}

	apisDto := make([]dto.OidcAPIDto, len(apis))
	for i, api := range apis {
		var apiDto dto.OidcAPIDto
		err = dto.MapStruct(api, &apiDto)
		if err != nil {
			_ = c.Error(err)
			return
		}
		apisDto[i] = apiDto
	}

	c.JSON(http.StatusOK, dto.Paginated[dto.OidcAPIDto]{
		Data:       apisDto,
		Pagination: pagination,
	})
}

// getAPIHandler godoc
// @Summary Get OIDC API
// @Description Get detailed information about an OIDC API (resource server)
// @Tags OIDC
// @Produce json
// @Param id path string true "API ID"
// @Success 200 {object} dto.OidcAPIDto "API information"
// @Router /api/oidc/apis/{id} [get]
func (oc *OidcController) getAPIHandler(c *gin.Context) {
	apiID := c.Param("id")
	api, err := oc.oidcService.GetAPI(c.Request.Context(), apiID)
	if err != nil {
		_ = c.Error(err)
		return
	}

	var apiDto dto.OidcAPIDto
	err = dto.MapStruct(api, &apiDto)
	if err != nil {
		_ = c.Error(err)
		return
	}

	c.JSON(http.StatusOK, apiDto)
}

// updateAPIHandler godoc
// @Summary Update OIDC API
// @Description Update an existing OIDC API (resource server)
// @Tags OIDC
// @Accept json
// @Produce json
// @Param id path string true "API ID"
// @Param api body dto.OidcAPIUpdateDto true "API information"
// @Success 200 {object} dto.OidcAPIDto "Updated API"
// @Router /api/oidc/apis/{id} [post]
func (oc *OidcController) updateAPIHandler(c *gin.Context) {
	var input dto.OidcAPIUpdateDto
	err := c.ShouldBindJSON(&input)
	if err != nil {
		_ = c.Error(err)
		return
	}

	api, err := oc.oidcService.UpdateAPI(c.Request.Context(), c.Param("id"), input)
	if err != nil {
		_ = c.Error(err)
		return
	}

	var apiDto dto.OidcAPIDto
	err = dto.MapStruct(api, &apiDto)
	if err != nil {
		_ = c.Error(err)
		return
	}

	c.JSON(http.StatusOK, apiDto)
}

// deleteAPIHandler godoc
// @Summary Delete OIDC API
// @Description Delete an OIDC API (resource server) by ID
// @Tags OIDC
// @Param id path string true "API ID"
// @Success 204 "No Content"
// @Router /api/oidc/apis/{id} [delete]
func (oc *OidcController) deleteAPIHandler(c *gin.Context) {
	err := oc.oidcService.DeleteAPI(c.Request.Context(), c.Param("id"))
	if err != nil {
		_ = c.Error(err)
		return
	}

	c.Status(http.StatusNoContent)
}
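
A minimal sketch, not taken from this diff, of how the handlers above could be mounted on a gin route group: only the paths come from the @Router annotations, while the function name, group wiring, and any auth middleware are assumptions that would need to match the real OidcController setup.

// Hypothetical route registration for the OIDC API handlers (paths taken from the
// @Router annotations above; middleware omitted and assumed to be added by the caller).
func registerOidcAPIRoutes(group *gin.RouterGroup, oc *OidcController) {
	apis := group.Group("/oidc/apis")
	apis.POST("", oc.createAPIHandler)     // create
	apis.GET("", oc.listAPIsHandler)       // paginated list
	apis.GET("/:id", oc.getAPIHandler)     // fetch one
	apis.POST("/:id", oc.updateAPIHandler) // update, as documented above
	apis.DELETE("/:id", oc.deleteAPIHandler)
}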
|
||||
|
||||
@@ -72,7 +72,7 @@ type UserController struct {
|
||||
// @Description Retrieve all groups a specific user belongs to
|
||||
// @Tags Users,User Groups
|
||||
// @Param id path string true "User ID"
|
||||
// @Success 200 {array} dto.UserGroupDtoWithUsers
|
||||
// @Success 200 {array} dto.UserGroupDto
|
||||
// @Router /api/users/{id}/groups [get]
|
||||
func (uc *UserController) getUserGroupsHandler(c *gin.Context) {
|
||||
userID := c.Param("id")
|
||||
@@ -82,7 +82,7 @@ func (uc *UserController) getUserGroupsHandler(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var groupsDto []dto.UserGroupDtoWithUsers
|
||||
var groupsDto []dto.UserGroupDto
|
||||
if err := dto.MapStructList(groups, &groupsDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -391,12 +391,13 @@ func (uc *UserController) RequestOneTimeAccessEmailAsUnauthenticatedUserHandler(
|
||||
return
|
||||
}
|
||||
|
||||
err := uc.userService.RequestOneTimeAccessEmailAsUnauthenticatedUser(c.Request.Context(), input.Email, input.RedirectPath)
|
||||
deviceToken, err := uc.userService.RequestOneTimeAccessEmailAsUnauthenticatedUser(c.Request.Context(), input.Email, input.RedirectPath)
|
||||
if err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
}
|
||||
|
||||
cookie.AddDeviceTokenCookie(c, deviceToken)
|
||||
c.Status(http.StatusNoContent)
|
||||
}
|
||||
|
||||
@@ -440,7 +441,8 @@ func (uc *UserController) RequestOneTimeAccessEmailAsAdminHandler(c *gin.Context
|
||||
// @Success 200 {object} dto.UserDto
|
||||
// @Router /api/one-time-access-token/{token} [post]
|
||||
func (uc *UserController) exchangeOneTimeAccessTokenHandler(c *gin.Context) {
|
||||
user, token, err := uc.userService.ExchangeOneTimeAccessToken(c.Request.Context(), c.Param("token"), c.ClientIP(), c.Request.UserAgent())
|
||||
deviceToken, _ := c.Cookie(cookie.DeviceTokenCookieName)
|
||||
user, token, err := uc.userService.ExchangeOneTimeAccessToken(c.Request.Context(), c.Param("token"), deviceToken, c.ClientIP(), c.Request.UserAgent())
|
||||
if err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -543,7 +545,7 @@ func (uc *UserController) createSignupTokenHandler(c *gin.Context) {
|
||||
ttl = defaultSignupTokenDuration
|
||||
}
|
||||
|
||||
signupToken, err := uc.userService.CreateSignupToken(c.Request.Context(), ttl, input.UsageLimit)
|
||||
signupToken, err := uc.userService.CreateSignupToken(c.Request.Context(), ttl, input.UsageLimit, input.UserGroupIDs)
|
||||
if err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
|
||||
@@ -28,6 +28,7 @@ func NewUserGroupController(group *gin.RouterGroup, authMiddleware *middleware.A
|
||||
userGroupsGroup.PUT("/:id", ugc.update)
|
||||
userGroupsGroup.DELETE("/:id", ugc.delete)
|
||||
userGroupsGroup.PUT("/:id/users", ugc.updateUsers)
|
||||
userGroupsGroup.PUT("/:id/allowed-oidc-clients", ugc.updateAllowedOidcClients)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,7 +45,7 @@ type UserGroupController struct {
|
||||
// @Param pagination[limit] query int false "Number of items per page" default(20)
|
||||
// @Param sort[column] query string false "Column to sort by"
|
||||
// @Param sort[direction] query string false "Sort direction (asc or desc)" default("asc")
|
||||
// @Success 200 {object} dto.Paginated[dto.UserGroupDtoWithUserCount]
|
||||
// @Success 200 {object} dto.Paginated[dto.UserGroupMinimalDto]
|
||||
// @Router /api/user-groups [get]
|
||||
func (ugc *UserGroupController) list(c *gin.Context) {
|
||||
searchTerm := c.Query("search")
|
||||
@@ -57,9 +58,9 @@ func (ugc *UserGroupController) list(c *gin.Context) {
|
||||
}
|
||||
|
||||
// Map the user groups to DTOs
|
||||
var groupsDto = make([]dto.UserGroupDtoWithUserCount, len(groups))
|
||||
var groupsDto = make([]dto.UserGroupMinimalDto, len(groups))
|
||||
for i, group := range groups {
|
||||
var groupDto dto.UserGroupDtoWithUserCount
|
||||
var groupDto dto.UserGroupMinimalDto
|
||||
if err := dto.MapStruct(group, &groupDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -72,7 +73,7 @@ func (ugc *UserGroupController) list(c *gin.Context) {
|
||||
groupsDto[i] = groupDto
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, dto.Paginated[dto.UserGroupDtoWithUserCount]{
|
||||
c.JSON(http.StatusOK, dto.Paginated[dto.UserGroupMinimalDto]{
|
||||
Data: groupsDto,
|
||||
Pagination: pagination,
|
||||
})
|
||||
@@ -85,7 +86,7 @@ func (ugc *UserGroupController) list(c *gin.Context) {
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param id path string true "User Group ID"
|
||||
// @Success 200 {object} dto.UserGroupDtoWithUsers
|
||||
// @Success 200 {object} dto.UserGroupDto
|
||||
// @Router /api/user-groups/{id} [get]
|
||||
func (ugc *UserGroupController) get(c *gin.Context) {
|
||||
group, err := ugc.UserGroupService.Get(c.Request.Context(), c.Param("id"))
|
||||
@@ -94,7 +95,7 @@ func (ugc *UserGroupController) get(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var groupDto dto.UserGroupDtoWithUsers
|
||||
var groupDto dto.UserGroupDto
|
||||
if err := dto.MapStruct(group, &groupDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -110,7 +111,7 @@ func (ugc *UserGroupController) get(c *gin.Context) {
|
||||
// @Accept json
|
||||
// @Produce json
|
||||
// @Param userGroup body dto.UserGroupCreateDto true "User group information"
|
||||
// @Success 201 {object} dto.UserGroupDtoWithUsers "Created user group"
|
||||
// @Success 201 {object} dto.UserGroupDto "Created user group"
|
||||
// @Router /api/user-groups [post]
|
||||
func (ugc *UserGroupController) create(c *gin.Context) {
|
||||
var input dto.UserGroupCreateDto
|
||||
@@ -125,7 +126,7 @@ func (ugc *UserGroupController) create(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var groupDto dto.UserGroupDtoWithUsers
|
||||
var groupDto dto.UserGroupDto
|
||||
if err := dto.MapStruct(group, &groupDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -142,7 +143,7 @@ func (ugc *UserGroupController) create(c *gin.Context) {
|
||||
// @Produce json
|
||||
// @Param id path string true "User Group ID"
|
||||
// @Param userGroup body dto.UserGroupCreateDto true "User group information"
|
||||
// @Success 200 {object} dto.UserGroupDtoWithUsers "Updated user group"
|
||||
// @Success 200 {object} dto.UserGroupDto "Updated user group"
|
||||
// @Router /api/user-groups/{id} [put]
|
||||
func (ugc *UserGroupController) update(c *gin.Context) {
|
||||
var input dto.UserGroupCreateDto
|
||||
@@ -157,7 +158,7 @@ func (ugc *UserGroupController) update(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var groupDto dto.UserGroupDtoWithUsers
|
||||
var groupDto dto.UserGroupDto
|
||||
if err := dto.MapStruct(group, &groupDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -192,7 +193,7 @@ func (ugc *UserGroupController) delete(c *gin.Context) {
|
||||
// @Produce json
|
||||
// @Param id path string true "User Group ID"
|
||||
// @Param users body dto.UserGroupUpdateUsersDto true "List of user IDs to assign to this group"
|
||||
// @Success 200 {object} dto.UserGroupDtoWithUsers
|
||||
// @Success 200 {object} dto.UserGroupDto
|
||||
// @Router /api/user-groups/{id}/users [put]
|
||||
func (ugc *UserGroupController) updateUsers(c *gin.Context) {
|
||||
var input dto.UserGroupUpdateUsersDto
|
||||
@@ -207,7 +208,7 @@ func (ugc *UserGroupController) updateUsers(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
var groupDto dto.UserGroupDtoWithUsers
|
||||
var groupDto dto.UserGroupDto
|
||||
if err := dto.MapStruct(group, &groupDto); err != nil {
|
||||
_ = c.Error(err)
|
||||
return
|
||||
@@ -215,3 +216,35 @@ func (ugc *UserGroupController) updateUsers(c *gin.Context) {

	c.JSON(http.StatusOK, groupDto)
}

// updateAllowedOidcClients godoc
// @Summary Update allowed OIDC clients
// @Description Update the OIDC clients allowed for a specific user group
// @Tags OIDC
// @Accept json
// @Produce json
// @Param id path string true "User Group ID"
// @Param groups body dto.UserGroupUpdateAllowedOidcClientsDto true "OIDC client IDs to allow"
// @Success 200 {object} dto.UserGroupDto "Updated user group"
// @Router /api/user-groups/{id}/allowed-oidc-clients [put]
func (ugc *UserGroupController) updateAllowedOidcClients(c *gin.Context) {
	var input dto.UserGroupUpdateAllowedOidcClientsDto
	if err := c.ShouldBindJSON(&input); err != nil {
		_ = c.Error(err)
		return
	}

	userGroup, err := ugc.UserGroupService.UpdateAllowedOidcClient(c.Request.Context(), c.Param("id"), input)
	if err != nil {
		_ = c.Error(err)
		return
	}

	var userGroupDto dto.UserGroupDto
	if err := dto.MapStruct(userGroup, &userGroupDto); err != nil {
		_ = c.Error(err)
		return
	}

	c.JSON(http.StatusOK, userGroupDto)
}
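
A hedged client-side sketch of calling the new endpoint: the route and the oidcClientIds field come from the annotation and DTO in this diff, while the helper name, base URL, HTTP client, and authentication handling are assumptions (imports assumed: bytes, context, encoding/json, fmt, net/http).

// Hypothetical example request against PUT /api/user-groups/{id}/allowed-oidc-clients.
func setAllowedOidcClients(ctx context.Context, baseURL, groupID string, clientIDs []string) error {
	body, err := json.Marshal(map[string][]string{"oidcClientIds": clientIDs})
	if err != nil {
		return err
	}
	url := fmt.Sprintf("%s/api/user-groups/%s/allowed-oidc-clients", baseURL, groupID)
	req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}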
|
||||
|
||||
@@ -47,7 +47,7 @@ type AppConfigUpdateDto struct {
|
||||
LdapAttributeGroupMember string `json:"ldapAttributeGroupMember"`
|
||||
LdapAttributeGroupUniqueIdentifier string `json:"ldapAttributeGroupUniqueIdentifier"`
|
||||
LdapAttributeGroupName string `json:"ldapAttributeGroupName"`
|
||||
LdapAdminGroupName string `json:"ldapAdminGroupName"`
|
||||
LdapAttributeAdminGroup string `json:"ldapAttributeAdminGroup"`
|
||||
LdapSoftDeleteUsers string `json:"ldapSoftDeleteUsers"`
|
||||
EmailOneTimeAccessAsAdminEnabled string `json:"emailOneTimeAccessAsAdminEnabled" binding:"required"`
|
||||
EmailOneTimeAccessAsUnauthenticatedEnabled string `json:"emailOneTimeAccessAsUnauthenticatedEnabled" binding:"required"`
|
||||
|
||||
@@ -18,11 +18,12 @@ type OidcClientDto struct {
|
||||
IsPublic bool `json:"isPublic"`
|
||||
PkceEnabled bool `json:"pkceEnabled"`
|
||||
Credentials OidcClientCredentialsDto `json:"credentials"`
|
||||
IsGroupRestricted bool `json:"isGroupRestricted"`
|
||||
}
|
||||
|
||||
type OidcClientWithAllowedUserGroupsDto struct {
|
||||
OidcClientDto
|
||||
AllowedUserGroups []UserGroupDtoWithUserCount `json:"allowedUserGroups"`
|
||||
AllowedUserGroups []UserGroupMinimalDto `json:"allowedUserGroups"`
|
||||
}
|
||||
|
||||
type OidcClientWithAllowedGroupsCountDto struct {
|
||||
@@ -43,6 +44,7 @@ type OidcClientUpdateDto struct {
|
||||
HasDarkLogo bool `json:"hasDarkLogo"`
|
||||
LogoURL *string `json:"logoUrl"`
|
||||
DarkLogoURL *string `json:"darkLogoUrl"`
|
||||
IsGroupRestricted bool `json:"isGroupRestricted"`
|
||||
}
|
||||
|
||||
type OidcClientCreateDto struct {
|
||||
@@ -178,26 +180,3 @@ type AccessibleOidcClientDto struct {
|
||||
OidcClientMetaDataDto
|
||||
LastUsedAt *datatype.DateTime `json:"lastUsedAt"`
|
||||
}
|
||||
|
||||
type OidcAPIDto struct {
|
||||
ID string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Identifier string `json:"identifier"`
|
||||
Permissions []OidcAPIPermissionDto `json:"permissions"`
|
||||
CreatedAt datatype.DateTime `json:"createdAt"`
|
||||
}
|
||||
|
||||
type OidcAPIPermissionDto struct {
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
}
|
||||
|
||||
type OidcAPICreateDto struct {
|
||||
Name string `json:"name" binding:"required,max=100" unorm:"nfc"`
|
||||
}
|
||||
|
||||
type OidcAPIUpdateDto struct {
|
||||
Name string `json:"name" binding:"required,max=100" unorm:"nfc"`
|
||||
Identifier string `json:"identifier" binding:"omitempty,url,max=255"`
|
||||
Permissions []OidcAPIPermissionDto `json:"permissions" binding:"omitempty,dive"`
|
||||
}
|
||||
|
||||
@@ -6,15 +6,17 @@ import (
)

type SignupTokenCreateDto struct {
-	TTL        utils.JSONDuration `json:"ttl" binding:"required,ttl"`
-	UsageLimit int                `json:"usageLimit" binding:"required,min=1,max=100"`
+	TTL          utils.JSONDuration `json:"ttl" binding:"required,ttl"`
+	UsageLimit   int                `json:"usageLimit" binding:"required,min=1,max=100"`
+	UserGroupIDs []string           `json:"userGroupIds"`
}

type SignupTokenDto struct {
-	ID         string            `json:"id"`
-	Token      string            `json:"token"`
-	ExpiresAt  datatype.DateTime `json:"expiresAt"`
-	UsageLimit int               `json:"usageLimit"`
-	UsageCount int               `json:"usageCount"`
-	CreatedAt  datatype.DateTime `json:"createdAt"`
+	ID         string                `json:"id"`
+	Token      string                `json:"token"`
+	ExpiresAt  datatype.DateTime     `json:"expiresAt"`
+	UsageLimit int                   `json:"usageLimit"`
+	UsageCount int                   `json:"usageCount"`
+	UserGroups []UserGroupMinimalDto `json:"userGroups"`
+	CreatedAt  datatype.DateTime     `json:"createdAt"`
}

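An illustrative request body for creating a signup token with pre-assigned groups, based on the json tags above; the duration string accepted by utils.JSONDuration and the placeholder group ID are assumptions.

// Hypothetical payload for a signup token that pre-assigns user groups.
const exampleSignupTokenBody = `{
	"ttl": "24h",
	"usageLimit": 5,
	"userGroupIds": ["<user-group-id>"]
}`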
@@ -8,30 +8,31 @@ import (
|
||||
)
|
||||
|
||||
type UserDto struct {
|
||||
ID string `json:"id"`
|
||||
Username string `json:"username"`
|
||||
Email *string `json:"email" `
|
||||
FirstName string `json:"firstName"`
|
||||
LastName *string `json:"lastName"`
|
||||
DisplayName string `json:"displayName"`
|
||||
IsAdmin bool `json:"isAdmin"`
|
||||
Locale *string `json:"locale"`
|
||||
CustomClaims []CustomClaimDto `json:"customClaims"`
|
||||
UserGroups []UserGroupDto `json:"userGroups"`
|
||||
LdapID *string `json:"ldapId"`
|
||||
Disabled bool `json:"disabled"`
|
||||
ID string `json:"id"`
|
||||
Username string `json:"username"`
|
||||
Email *string `json:"email" `
|
||||
FirstName string `json:"firstName"`
|
||||
LastName *string `json:"lastName"`
|
||||
DisplayName string `json:"displayName"`
|
||||
IsAdmin bool `json:"isAdmin"`
|
||||
Locale *string `json:"locale"`
|
||||
CustomClaims []CustomClaimDto `json:"customClaims"`
|
||||
UserGroups []UserGroupMinimalDto `json:"userGroups"`
|
||||
LdapID *string `json:"ldapId"`
|
||||
Disabled bool `json:"disabled"`
|
||||
}
|
||||
|
||||
type UserCreateDto struct {
|
||||
Username string `json:"username" binding:"required,username,min=2,max=50" unorm:"nfc"`
|
||||
Email *string `json:"email" binding:"omitempty,email" unorm:"nfc"`
|
||||
FirstName string `json:"firstName" binding:"required,min=1,max=50" unorm:"nfc"`
|
||||
LastName string `json:"lastName" binding:"max=50" unorm:"nfc"`
|
||||
DisplayName string `json:"displayName" binding:"required,min=1,max=100" unorm:"nfc"`
|
||||
IsAdmin bool `json:"isAdmin"`
|
||||
Locale *string `json:"locale"`
|
||||
Disabled bool `json:"disabled"`
|
||||
LdapID string `json:"-"`
|
||||
Username string `json:"username" binding:"required,username,min=2,max=50" unorm:"nfc"`
|
||||
Email *string `json:"email" binding:"omitempty,email" unorm:"nfc"`
|
||||
FirstName string `json:"firstName" binding:"required,min=1,max=50" unorm:"nfc"`
|
||||
LastName string `json:"lastName" binding:"max=50" unorm:"nfc"`
|
||||
DisplayName string `json:"displayName" binding:"required,min=1,max=100" unorm:"nfc"`
|
||||
IsAdmin bool `json:"isAdmin"`
|
||||
Locale *string `json:"locale"`
|
||||
Disabled bool `json:"disabled"`
|
||||
UserGroupIds []string `json:"userGroupIds"`
|
||||
LdapID string `json:"-"`
|
||||
}
|
||||
|
||||
func (u UserCreateDto) Validate() error {
|
||||
|
||||
@@ -8,25 +8,17 @@ import (
|
||||
)
|
||||
|
||||
type UserGroupDto struct {
|
||||
ID string `json:"id"`
|
||||
FriendlyName string `json:"friendlyName"`
|
||||
Name string `json:"name"`
|
||||
CustomClaims []CustomClaimDto `json:"customClaims"`
|
||||
LdapID *string `json:"ldapId"`
|
||||
CreatedAt datatype.DateTime `json:"createdAt"`
|
||||
ID string `json:"id"`
|
||||
FriendlyName string `json:"friendlyName"`
|
||||
Name string `json:"name"`
|
||||
CustomClaims []CustomClaimDto `json:"customClaims"`
|
||||
LdapID *string `json:"ldapId"`
|
||||
CreatedAt datatype.DateTime `json:"createdAt"`
|
||||
Users []UserDto `json:"users"`
|
||||
AllowedOidcClients []OidcClientMetaDataDto `json:"allowedOidcClients"`
|
||||
}
|
||||
|
||||
type UserGroupDtoWithUsers struct {
|
||||
ID string `json:"id"`
|
||||
FriendlyName string `json:"friendlyName"`
|
||||
Name string `json:"name"`
|
||||
CustomClaims []CustomClaimDto `json:"customClaims"`
|
||||
Users []UserDto `json:"users"`
|
||||
LdapID *string `json:"ldapId"`
|
||||
CreatedAt datatype.DateTime `json:"createdAt"`
|
||||
}
|
||||
|
||||
type UserGroupDtoWithUserCount struct {
|
||||
type UserGroupMinimalDto struct {
|
||||
ID string `json:"id"`
|
||||
FriendlyName string `json:"friendlyName"`
|
||||
Name string `json:"name"`
|
||||
@@ -36,6 +28,10 @@ type UserGroupDtoWithUserCount struct {
|
||||
CreatedAt datatype.DateTime `json:"createdAt"`
|
||||
}
|
||||
|
||||
type UserGroupUpdateAllowedOidcClientsDto struct {
|
||||
OidcClientIDs []string `json:"oidcClientIds" binding:"required"`
|
||||
}
|
||||
|
||||
type UserGroupCreateDto struct {
|
||||
FriendlyName string `json:"friendlyName" binding:"required,min=2,max=50" unorm:"nfc"`
|
||||
Name string `json:"name" binding:"required,min=2,max=255" unorm:"nfc"`
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/go-co-op/gocron/v2"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/pocket-id/pocket-id/backend/internal/common"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/model"
|
||||
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
|
||||
)
|
||||
@@ -119,11 +120,13 @@ func (j *DbCleanupJobs) clearReauthenticationTokens(ctx context.Context) error {
	return nil
}

-// ClearAuditLogs deletes audit logs older than 90 days
+// ClearAuditLogs deletes audit logs older than the configured retention window
func (j *DbCleanupJobs) clearAuditLogs(ctx context.Context) error {
+	cutoff := time.Now().AddDate(0, 0, -common.EnvConfig.AuditLogRetentionDays)
+
	st := j.db.
		WithContext(ctx).
-		Delete(&model.AuditLog{}, "created_at < ?", datatype.DateTime(time.Now().AddDate(0, 0, -90)))
+		Delete(&model.AuditLog{}, "created_at < ?", datatype.DateTime(cutoff))
	if st.Error != nil {
		return fmt.Errorf("failed to delete old audit logs: %w", st.Error)
	}
|
||||
|
||||
@@ -77,7 +77,7 @@ type AppConfig struct {
|
||||
LdapAttributeGroupMember AppConfigVariable `key:"ldapAttributeGroupMember"`
|
||||
LdapAttributeGroupUniqueIdentifier AppConfigVariable `key:"ldapAttributeGroupUniqueIdentifier"`
|
||||
LdapAttributeGroupName AppConfigVariable `key:"ldapAttributeGroupName"`
|
||||
LdapAdminGroupName AppConfigVariable `key:"ldapAdminGroupName"`
|
||||
LdapAttributeAdminGroup AppConfigVariable `key:"ldapAttributeAdminGroup"`
|
||||
LdapSoftDeleteUsers AppConfigVariable `key:"ldapSoftDeleteUsers"`
|
||||
}
|
||||
|
||||
|
||||
@@ -58,6 +58,7 @@ type OidcClient struct {
|
||||
RequiresReauthentication bool `sortable:"true" filterable:"true"`
|
||||
Credentials OidcClientCredentials
|
||||
LaunchURL *string
|
||||
IsGroupRestricted bool `sortable:"true" filterable:"true"`
|
||||
|
||||
AllowedUserGroups []UserGroup `gorm:"many2many:oidc_clients_allowed_user_groups;"`
|
||||
CreatedByID *string
|
||||
@@ -151,33 +152,3 @@ type OidcDeviceCode struct {
|
||||
ClientID string
|
||||
Client OidcClient
|
||||
}
|
||||
|
||||
type OidcAPI struct {
|
||||
Base
|
||||
|
||||
Name string `sortable:"true"`
|
||||
Identifier string `sortable:"true"`
|
||||
Data OidcAPIData `gorm:"type:text"`
|
||||
}
|
||||
|
||||
func (a OidcAPI) DefaultIdentifier() string {
|
||||
return "api://" + a.Identifier
|
||||
}
|
||||
|
||||
//nolint:recvcheck
|
||||
type OidcAPIData struct {
|
||||
Permissions []OidcAPIPermission `json:"permissions"`
|
||||
}
|
||||
|
||||
func (p *OidcAPIData) Scan(value any) error {
|
||||
return utils.UnmarshalJSONFromDatabase(p, value)
|
||||
}
|
||||
|
||||
func (p OidcAPIData) Value() (driver.Value, error) {
|
||||
return json.Marshal(p)
|
||||
}
|
||||
|
||||
type OidcAPIPermission struct {
|
||||
Name string `json:"name"`
|
||||
Description string `json:"description"`
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ type SignupToken struct {
|
||||
ExpiresAt datatype.DateTime `json:"expiresAt" sortable:"true"`
|
||||
UsageLimit int `json:"usageLimit" sortable:"true"`
|
||||
UsageCount int `json:"usageCount" sortable:"true"`
|
||||
UserGroups []UserGroup `gorm:"many2many:signup_tokens_user_groups;"`
|
||||
}
|
||||
|
||||
func (st *SignupToken) IsExpired() bool {
|
||||
|
||||
@@ -11,15 +11,6 @@ import (
|
||||
// DateTime custom type for time.Time to store date as unix timestamp for sqlite and as date for postgres
|
||||
type DateTime time.Time //nolint:recvcheck
|
||||
|
||||
func DateTimeFromString(str string) (DateTime, error) {
|
||||
t, err := time.Parse(time.RFC3339Nano, str)
|
||||
if err != nil {
|
||||
return DateTime{}, fmt.Errorf("failed to parse date string: %w", err)
|
||||
}
|
||||
|
||||
return DateTime(t), nil
|
||||
}
|
||||
|
||||
func (date *DateTime) Scan(value any) (err error) {
|
||||
switch v := value.(type) {
|
||||
case time.Time:
|
||||
|
||||
@@ -87,8 +87,9 @@ func (u User) Initials() string {
|
||||
|
||||
type OneTimeAccessToken struct {
|
||||
Base
|
||||
Token string
|
||||
ExpiresAt datatype.DateTime
|
||||
Token string
|
||||
DeviceToken *string
|
||||
ExpiresAt datatype.DateTime
|
||||
|
||||
UserID string
|
||||
User User
|
||||
|
||||
@@ -2,9 +2,10 @@ package model
|
||||
|
||||
type UserGroup struct {
|
||||
Base
|
||||
FriendlyName string `sortable:"true"`
|
||||
Name string `sortable:"true"`
|
||||
LdapID *string
|
||||
Users []User `gorm:"many2many:user_groups_users;"`
|
||||
CustomClaims []CustomClaim
|
||||
FriendlyName string `sortable:"true"`
|
||||
Name string `sortable:"true"`
|
||||
LdapID *string
|
||||
Users []User `gorm:"many2many:user_groups_users;"`
|
||||
CustomClaims []CustomClaim
|
||||
AllowedOidcClients []OidcClient `gorm:"many2many:oidc_clients_allowed_user_groups;"`
|
||||
}
|
||||
|
||||
@@ -102,7 +102,7 @@ func (s *AppConfigService) getDefaultDbConfig() *model.AppConfig {
|
||||
LdapAttributeGroupMember: model.AppConfigVariable{Value: "member"},
|
||||
LdapAttributeGroupUniqueIdentifier: model.AppConfigVariable{},
|
||||
LdapAttributeGroupName: model.AppConfigVariable{},
|
||||
LdapAdminGroupName: model.AppConfigVariable{},
|
||||
LdapAttributeAdminGroup: model.AppConfigVariable{},
|
||||
LdapSoftDeleteUsers: model.AppConfigVariable{Value: "true"},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,296 +0,0 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/model"
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrLockUnavailable = errors.New("lock is already held by another process")
|
||||
ErrLockLost = errors.New("lock ownership lost")
|
||||
)
|
||||
|
||||
const (
|
||||
ttl = 30 * time.Second
|
||||
renewInterval = 20 * time.Second
|
||||
renewRetries = 3
|
||||
lockKey = "application_lock"
|
||||
)
|
||||
|
||||
type AppLockService struct {
|
||||
db *gorm.DB
|
||||
lockID string
|
||||
processID int64
|
||||
hostID string
|
||||
}
|
||||
|
||||
func NewAppLockService(db *gorm.DB) *AppLockService {
|
||||
host, err := os.Hostname()
|
||||
if err != nil || host == "" {
|
||||
host = "unknown-host"
|
||||
}
|
||||
|
||||
return &AppLockService{
|
||||
db: db,
|
||||
processID: int64(os.Getpid()),
|
||||
hostID: host,
|
||||
lockID: uuid.NewString(),
|
||||
}
|
||||
}
|
||||
|
||||
type lockValue struct {
|
||||
ProcessID int64 `json:"process_id"`
|
||||
HostID string `json:"host_id"`
|
||||
LockID string `json:"lock_id"`
|
||||
ExpiresAt int64 `json:"expires_at"`
|
||||
}
|
||||
|
||||
func (lv *lockValue) Marshal() (string, error) {
|
||||
data, err := json.Marshal(lv)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(data), nil
|
||||
}
|
||||
|
||||
func (lv *lockValue) Unmarshal(raw string) error {
|
||||
if raw == "" {
|
||||
return nil
|
||||
}
|
||||
return json.Unmarshal([]byte(raw), lv)
|
||||
}
|
||||
|
||||
// Acquire obtains the lock. When force is true, the lock is stolen from any existing owner.
|
||||
// If the lock is forcefully acquired, it blocks until the previous lock has expired.
|
||||
func (s *AppLockService) Acquire(ctx context.Context, force bool) (waitUntil time.Time, err error) {
|
||||
tx := s.db.Begin()
|
||||
defer func() {
|
||||
tx.Rollback()
|
||||
}()
|
||||
|
||||
var prevLockRaw string
|
||||
err = tx.
|
||||
WithContext(ctx).
|
||||
Model(&model.KV{}).
|
||||
Where("key = ?", lockKey).
|
||||
Clauses(clause.Locking{Strength: "UPDATE"}).
|
||||
Select("value").
|
||||
Scan(&prevLockRaw).
|
||||
Error
|
||||
if err != nil {
|
||||
return time.Time{}, fmt.Errorf("query existing lock: %w", err)
|
||||
}
|
||||
|
||||
var prevLock lockValue
|
||||
if prevLockRaw != "" {
|
||||
if err := prevLock.Unmarshal(prevLockRaw); err != nil {
|
||||
return time.Time{}, fmt.Errorf("decode existing lock value: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
nowUnix := now.Unix()
|
||||
|
||||
value := lockValue{
|
||||
ProcessID: s.processID,
|
||||
HostID: s.hostID,
|
||||
LockID: s.lockID,
|
||||
ExpiresAt: now.Add(ttl).Unix(),
|
||||
}
|
||||
raw, err := value.Marshal()
|
||||
if err != nil {
|
||||
return time.Time{}, fmt.Errorf("encode lock value: %w", err)
|
||||
}
|
||||
|
||||
var query string
|
||||
switch s.db.Name() {
|
||||
case "sqlite":
|
||||
query = `
|
||||
INSERT INTO kv (key, value)
|
||||
VALUES (?, ?)
|
||||
ON CONFLICT(key) DO UPDATE SET
|
||||
value = excluded.value
|
||||
WHERE (json_extract(kv.value, '$.expires_at') < ?) OR ?
|
||||
`
|
||||
case "postgres":
|
||||
query = `
|
||||
INSERT INTO kv (key, value)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT(key) DO UPDATE SET
|
||||
value = excluded.value
|
||||
WHERE ((kv.value::json->>'expires_at')::bigint < $3) OR ($4::boolean IS TRUE)
|
||||
`
|
||||
default:
|
||||
return time.Time{}, fmt.Errorf("unsupported database dialect: %s", s.db.Name())
|
||||
}
|
||||
|
||||
res := tx.WithContext(ctx).Exec(query, lockKey, raw, nowUnix, force)
|
||||
if res.Error != nil {
|
||||
return time.Time{}, fmt.Errorf("lock acquisition failed: %w", res.Error)
|
||||
}
|
||||
|
||||
if err := tx.Commit().Error; err != nil {
|
||||
return time.Time{}, fmt.Errorf("commit lock acquisition: %w", err)
|
||||
}
|
||||
|
||||
// If there is a lock that is not expired and force is false, no rows will be affected
|
||||
if res.RowsAffected == 0 {
|
||||
return time.Time{}, ErrLockUnavailable
|
||||
}
|
||||
|
||||
if force && prevLock.ExpiresAt > nowUnix && prevLock.LockID != s.lockID {
|
||||
waitUntil = time.Unix(prevLock.ExpiresAt, 0)
|
||||
}
|
||||
|
||||
attrs := []any{
|
||||
slog.Int64("process_id", s.processID),
|
||||
slog.String("host_id", s.hostID),
|
||||
}
|
||||
if wait := time.Until(waitUntil); wait > 0 {
|
||||
attrs = append(attrs, slog.Duration("wait_before_proceeding", wait))
|
||||
}
|
||||
slog.Info("Acquired application lock", attrs...)
|
||||
|
||||
return waitUntil, nil
|
||||
}
|
||||
|
||||
// RunRenewal keeps renewing the lock until the context is canceled.
|
||||
func (s *AppLockService) RunRenewal(ctx context.Context) error {
|
||||
ticker := time.NewTicker(renewInterval)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil
|
||||
case <-ticker.C:
|
||||
if err := s.renew(ctx); err != nil {
|
||||
return fmt.Errorf("renew lock: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Release releases the lock if it is held by this process.
|
||||
func (s *AppLockService) Release(ctx context.Context) error {
|
||||
opCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
|
||||
defer cancel()
|
||||
|
||||
var query string
|
||||
switch s.db.Name() {
|
||||
case "sqlite":
|
||||
query = `
|
||||
DELETE FROM kv
|
||||
WHERE key = ?
|
||||
AND json_extract(value, '$.lock_id') = ?
|
||||
`
|
||||
case "postgres":
|
||||
query = `
|
||||
DELETE FROM kv
|
||||
WHERE key = $1
|
||||
AND value::json->>'lock_id' = $2
|
||||
`
|
||||
default:
|
||||
return fmt.Errorf("unsupported database dialect: %s", s.db.Name())
|
||||
}
|
||||
|
||||
res := s.db.WithContext(opCtx).Exec(query, lockKey, s.lockID)
|
||||
if res.Error != nil {
|
||||
return fmt.Errorf("release lock failed: %w", res.Error)
|
||||
}
|
||||
|
||||
if res.RowsAffected == 0 {
|
||||
slog.Warn("Application lock not held by this process, cannot release",
|
||||
slog.Int64("process_id", s.processID),
|
||||
slog.String("host_id", s.hostID),
|
||||
)
|
||||
}
|
||||
|
||||
slog.Info("Released application lock",
|
||||
slog.Int64("process_id", s.processID),
|
||||
slog.String("host_id", s.hostID),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
// renew tries to renew the lock, retrying up to renewRetries times (sleeping 1s between attempts).
|
||||
func (s *AppLockService) renew(ctx context.Context) error {
|
||||
var lastErr error
|
||||
for attempt := 1; attempt <= renewRetries; attempt++ {
|
||||
now := time.Now()
|
||||
nowUnix := now.Unix()
|
||||
expiresAt := now.Add(ttl).Unix()
|
||||
|
||||
value := lockValue{
|
||||
LockID: s.lockID,
|
||||
ProcessID: s.processID,
|
||||
HostID: s.hostID,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
raw, err := value.Marshal()
|
||||
if err != nil {
|
||||
return fmt.Errorf("encode lock value: %w", err)
|
||||
}
|
||||
|
||||
var query string
|
||||
switch s.db.Name() {
|
||||
case "sqlite":
|
||||
query = `
|
||||
UPDATE kv
|
||||
SET value = ?
|
||||
WHERE key = ?
|
||||
AND json_extract(value, '$.lock_id') = ?
|
||||
AND json_extract(value, '$.expires_at') > ?
|
||||
`
|
||||
case "postgres":
|
||||
query = `
|
||||
UPDATE kv
|
||||
SET value = $1
|
||||
WHERE key = $2
|
||||
AND value::json->>'lock_id' = $3
|
||||
AND ((value::json->>'expires_at')::bigint > $4)
|
||||
`
|
||||
default:
|
||||
return fmt.Errorf("unsupported database dialect: %s", s.db.Name())
|
||||
}
|
||||
|
||||
opCtx, cancel := context.WithTimeout(ctx, 3*time.Second)
|
||||
res := s.db.WithContext(opCtx).Exec(query, raw, lockKey, s.lockID, nowUnix)
|
||||
cancel()
|
||||
|
||||
switch {
|
||||
case res.Error != nil:
|
||||
lastErr = fmt.Errorf("lock renewal failed: %w", res.Error)
|
||||
case res.RowsAffected == 0:
|
||||
// Must be after checking res.Error
|
||||
return ErrLockLost
|
||||
default:
|
||||
slog.Debug("Renewed application lock",
|
||||
slog.Int64("process_id", s.processID),
|
||||
slog.String("host_id", s.hostID),
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Wait before next attempt or cancel if context is done
|
||||
if attempt < renewRetries {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-time.After(1 * time.Second):
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return lastErr
|
||||
}
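
A rough lifecycle sketch for the AppLockService removed above, assuming it was wired up during application startup; the bootstrap code around it is an assumption, only the constructor, method names, and error values come from the file itself.

// Hypothetical startup wiring: acquire the lock, keep renewing it in the background,
// and release it on shutdown.
func runWithAppLock(ctx context.Context, db *gorm.DB) error {
	lock := NewAppLockService(db)
	waitUntil, err := lock.Acquire(ctx, false)
	if err != nil {
		// e.g. ErrLockUnavailable when another instance already holds the lock
		return err
	}
	defer lock.Release(context.Background()) //nolint:errcheck

	// Only relevant after a forced acquisition: wait out the previous owner's TTL.
	if wait := time.Until(waitUntil); wait > 0 {
		time.Sleep(wait)
	}

	go func() {
		if renewErr := lock.RunRenewal(ctx); renewErr != nil {
			slog.Error("Application lock lost", slog.Any("error", renewErr))
		}
	}()

	// ... start the rest of the application here ...
	return nil
}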
|
||||
@@ -1,189 +0,0 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/pocket-id/pocket-id/backend/internal/model"
|
||||
testutils "github.com/pocket-id/pocket-id/backend/internal/utils/testing"
|
||||
)
|
||||
|
||||
func newTestAppLockService(t *testing.T, db *gorm.DB) *AppLockService {
|
||||
t.Helper()
|
||||
|
||||
return &AppLockService{
|
||||
db: db,
|
||||
processID: 1,
|
||||
hostID: "test-host",
|
||||
lockID: "a13c7673-c7ae-49f1-9112-2cd2d0d4b0c1",
|
||||
}
|
||||
}
|
||||
|
||||
func insertLock(t *testing.T, db *gorm.DB, value lockValue) {
|
||||
t.Helper()
|
||||
|
||||
raw, err := value.Marshal()
|
||||
require.NoError(t, err)
|
||||
|
||||
err = db.Create(&model.KV{Key: lockKey, Value: &raw}).Error
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func readLockValue(t *testing.T, db *gorm.DB) lockValue {
|
||||
t.Helper()
|
||||
|
||||
var row model.KV
|
||||
err := db.Take(&row, "key = ?", lockKey).Error
|
||||
require.NoError(t, err)
|
||||
|
||||
require.NotNil(t, row.Value)
|
||||
|
||||
var value lockValue
|
||||
err = value.Unmarshal(*row.Value)
|
||||
require.NoError(t, err)
|
||||
|
||||
return value
|
||||
}
|
||||
|
||||
func TestAppLockServiceAcquire(t *testing.T) {
|
||||
t.Run("creates new lock when none exists", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
_, err := service.Acquire(context.Background(), false)
|
||||
require.NoError(t, err)
|
||||
|
||||
stored := readLockValue(t, db)
|
||||
require.Equal(t, service.processID, stored.ProcessID)
|
||||
require.Equal(t, service.hostID, stored.HostID)
|
||||
require.Greater(t, stored.ExpiresAt, time.Now().Unix())
|
||||
})
|
||||
|
||||
t.Run("returns ErrLockUnavailable when lock held by another process", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
existing := lockValue{
|
||||
ProcessID: 99,
|
||||
HostID: "other-host",
|
||||
ExpiresAt: time.Now().Add(ttl).Unix(),
|
||||
}
|
||||
insertLock(t, db, existing)
|
||||
|
||||
_, err := service.Acquire(context.Background(), false)
|
||||
require.ErrorIs(t, err, ErrLockUnavailable)
|
||||
|
||||
current := readLockValue(t, db)
|
||||
require.Equal(t, existing, current)
|
||||
})
|
||||
|
||||
t.Run("force acquisition steals lock", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
insertLock(t, db, lockValue{
|
||||
ProcessID: 99,
|
||||
HostID: "other-host",
|
||||
ExpiresAt: time.Now().Unix(),
|
||||
})
|
||||
|
||||
_, err := service.Acquire(context.Background(), true)
|
||||
require.NoError(t, err)
|
||||
|
||||
stored := readLockValue(t, db)
|
||||
require.Equal(t, service.processID, stored.ProcessID)
|
||||
require.Equal(t, service.hostID, stored.HostID)
|
||||
require.Greater(t, stored.ExpiresAt, time.Now().Unix())
|
||||
})
|
||||
}
|
||||
|
||||
func TestAppLockServiceRelease(t *testing.T) {
|
||||
t.Run("removes owned lock", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
_, err := service.Acquire(context.Background(), false)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = service.Release(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
var row model.KV
|
||||
err = db.Take(&row, "key = ?", lockKey).Error
|
||||
require.ErrorIs(t, err, gorm.ErrRecordNotFound)
|
||||
})
|
||||
|
||||
t.Run("ignores lock held by another owner", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
existing := lockValue{
|
||||
ProcessID: 2,
|
||||
HostID: "other-host",
|
||||
ExpiresAt: time.Now().Add(ttl).Unix(),
|
||||
}
|
||||
insertLock(t, db, existing)
|
||||
|
||||
err := service.Release(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
stored := readLockValue(t, db)
|
||||
require.Equal(t, existing, stored)
|
||||
})
|
||||
}
|
||||
|
||||
func TestAppLockServiceRenew(t *testing.T) {
|
||||
t.Run("extends expiration when lock is still owned", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
_, err := service.Acquire(context.Background(), false)
|
||||
require.NoError(t, err)
|
||||
|
||||
before := readLockValue(t, db)
|
||||
|
||||
err = service.renew(context.Background())
|
||||
require.NoError(t, err)
|
||||
|
||||
after := readLockValue(t, db)
|
||||
require.Equal(t, service.processID, after.ProcessID)
|
||||
require.Equal(t, service.hostID, after.HostID)
|
||||
require.GreaterOrEqual(t, after.ExpiresAt, before.ExpiresAt)
|
||||
})
|
||||
|
||||
t.Run("returns ErrLockLost when lock is missing", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
err := service.renew(context.Background())
|
||||
require.ErrorIs(t, err, ErrLockLost)
|
||||
})
|
||||
|
||||
t.Run("returns ErrLockLost when ownership changed", func(t *testing.T) {
|
||||
db := testutils.NewDatabaseForTest(t)
|
||||
service := newTestAppLockService(t, db)
|
||||
|
||||
_, err := service.Acquire(context.Background(), false)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Simulate a different process taking the lock.
|
||||
newOwner := lockValue{
|
||||
ProcessID: 9,
|
||||
HostID: "stolen-host",
|
||||
ExpiresAt: time.Now().Add(ttl).Unix(),
|
||||
}
|
||||
raw, marshalErr := newOwner.Marshal()
|
||||
require.NoError(t, marshalErr)
|
||||
updateErr := db.Model(&model.KV{}).
|
||||
Where("key = ?", lockKey).
|
||||
Update("value", raw).Error
|
||||
require.NoError(t, updateErr)
|
||||
|
||||
err = service.renew(context.Background())
|
||||
require.ErrorIs(t, err, ErrLockLost)
|
||||
})
|
||||
}
|
||||
@@ -7,12 +7,14 @@ import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/x509"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/fxamacker/cbor/v2"
|
||||
"github.com/go-webauthn/webauthn/protocol"
|
||||
"github.com/lestrrat-go/jwx/v3/jwa"
|
||||
"github.com/lestrrat-go/jwx/v3/jwk"
|
||||
@@ -34,17 +36,15 @@ type TestService struct {
|
||||
appConfigService *AppConfigService
|
||||
ldapService *LdapService
|
||||
fileStorage storage.FileStorage
|
||||
appLockService *AppLockService
|
||||
externalIdPKey jwk.Key
|
||||
}
|
||||
|
||||
func NewTestService(db *gorm.DB, appConfigService *AppConfigService, jwtService *JwtService, ldapService *LdapService, appLockService *AppLockService, fileStorage storage.FileStorage) (*TestService, error) {
|
||||
func NewTestService(db *gorm.DB, appConfigService *AppConfigService, jwtService *JwtService, ldapService *LdapService, fileStorage storage.FileStorage) (*TestService, error) {
|
||||
s := &TestService{
|
||||
db: db,
|
||||
appConfigService: appConfigService,
|
||||
jwtService: jwtService,
|
||||
ldapService: ldapService,
|
||||
appLockService: appLockService,
|
||||
fileStorage: fileStorage,
|
||||
}
|
||||
err := s.initExternalIdP()
|
||||
@@ -169,10 +169,11 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
Base: model.Base{
|
||||
ID: "606c7782-f2b1-49e5-8ea9-26eb1b06d018",
|
||||
},
|
||||
Name: "Immich",
|
||||
Secret: "$2a$10$Ak.FP8riD1ssy2AGGbG.gOpnp/rBpymd74j0nxNMtW0GG1Lb4gzxe", // PYjrE9u4v9GVqXKi52eur0eb2Ci4kc0x
|
||||
CallbackURLs: model.UrlList{"http://immich/auth/callback"},
|
||||
CreatedByID: utils.Ptr(users[1].ID),
|
||||
Name: "Immich",
|
||||
Secret: "$2a$10$Ak.FP8riD1ssy2AGGbG.gOpnp/rBpymd74j0nxNMtW0GG1Lb4gzxe", // PYjrE9u4v9GVqXKi52eur0eb2Ci4kc0x
|
||||
CallbackURLs: model.UrlList{"http://immich/auth/callback"},
|
||||
CreatedByID: utils.Ptr(users[1].ID),
|
||||
IsGroupRestricted: true,
|
||||
AllowedUserGroups: []model.UserGroup{
|
||||
userGroups[1],
|
||||
},
|
||||
@@ -185,6 +186,7 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
Secret: "$2a$10$xcRReBsvkI1XI6FG8xu/pOgzeF00bH5Wy4d/NThwcdi3ZBpVq/B9a", // n4VfQeXlTzA6yKpWbR9uJcMdSx2qH0Lo
|
||||
CallbackURLs: model.UrlList{"http://tailscale/auth/callback"},
|
||||
LogoutCallbackURLs: model.UrlList{"http://tailscale/auth/logout/callback"},
|
||||
IsGroupRestricted: true,
|
||||
CreatedByID: utils.Ptr(users[0].ID),
|
||||
},
|
||||
{
|
||||
@@ -288,8 +290,8 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
// openssl genpkey -algorithm EC -pkeyopt ec_paramgen_curve:P-256 | \
|
||||
// openssl pkcs8 -topk8 -nocrypt | tee >(openssl pkey -pubout)
|
||||
|
||||
publicKeyPasskey1, _ := base64.StdEncoding.DecodeString("pQMmIAEhWCDBw6jkpXXr0pHrtAQetxiR5cTcILG/YGDCdKrhVhNDHCJYIIu12YrF6B7Frwl3AUqEpdrYEwj3Fo3XkGgvrBIJEUmGAQI=")
|
||||
publicKeyPasskey2, _ := base64.StdEncoding.DecodeString("pSJYIPmc+FlEB0neERqqscxKckGF8yq1AYrANiloshAUAouHAQIDJiABIVggj4qA0PrZzg8Co1C27nyUbzrp8Ewjr7eOlGI2LfrzmbI=")
|
||||
publicKeyPasskey1, _ := s.getCborPublicKey("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEwcOo5KV169KR67QEHrcYkeXE3CCxv2BgwnSq4VYTQxyLtdmKxegexa8JdwFKhKXa2BMI9xaN15BoL6wSCRFJhg==")
|
||||
publicKeyPasskey2, _ := s.getCborPublicKey("MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEj4qA0PrZzg8Co1C27nyUbzrp8Ewjr7eOlGI2LfrzmbL5nPhZRAdJ3hEaqrHMSnJBhfMqtQGKwDYpaLIQFAKLhw==")
|
||||
webauthnCredentials := []model.WebauthnCredential{
|
||||
{
|
||||
Name: "Passkey 1",
|
||||
@@ -318,10 +320,6 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
Challenge: "challenge",
|
||||
ExpiresAt: datatype.DateTime(time.Now().Add(1 * time.Hour)),
|
||||
UserVerification: "preferred",
|
||||
CredentialParams: model.CredentialParameters{
|
||||
{Type: "public-key", Algorithm: -7},
|
||||
{Type: "public-key", Algorithm: -257},
|
||||
},
|
||||
}
|
||||
if err := tx.Create(&webauthnSession).Error; err != nil {
|
||||
return err
|
||||
@@ -331,10 +329,9 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
Base: model.Base{
|
||||
ID: "5f1fa856-c164-4295-961e-175a0d22d725",
|
||||
},
|
||||
Name: "Test API Key",
|
||||
Key: "6c34966f57ef2bb7857649aff0e7ab3ad67af93c846342ced3f5a07be8706c20",
|
||||
UserID: users[0].ID,
|
||||
ExpiresAt: datatype.DateTime(time.Now().Add(30 * 24 * time.Hour)),
|
||||
Name: "Test API Key",
|
||||
Key: "6c34966f57ef2bb7857649aff0e7ab3ad67af93c846342ced3f5a07be8706c20",
|
||||
UserID: users[0].ID,
|
||||
}
|
||||
if err := tx.Create(&apiKey).Error; err != nil {
|
||||
return err
|
||||
@@ -349,6 +346,9 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
ExpiresAt: datatype.DateTime(time.Now().Add(24 * time.Hour)),
|
||||
UsageLimit: 1,
|
||||
UsageCount: 0,
|
||||
UserGroups: []model.UserGroup{
|
||||
userGroups[0],
|
||||
},
|
||||
},
|
||||
{
|
||||
Base: model.Base{
|
||||
@@ -384,20 +384,6 @@ func (s *TestService) SeedDatabase(baseURL string) error {
|
||||
}
|
||||
}
|
||||
|
||||
keyValues := []model.KV{
|
||||
{
|
||||
Key: jwkutils.PrivateKeyDBKey,
|
||||
// {"alg":"RS256","d":"mvMDWSdPPvcum0c0iEHE2gbqtV2NKMmLwrl9E6K7g8lTV95SePLnW_bwyMPV7EGp7PQk3l17I5XRhFjze7GqTnFIOgKzMianPs7jv2ELtBMGK0xOPATgu1iGb70xZ6vcvuEfRyY3dJ0zr4jpUdVuXwKmx9rK4IdZn2dFCKfvSuspqIpz11RhF1ALrqDLkxGVv7ZwNh0_VhJZU9hcjG5l6xc7rQEKpPRkZp0IdjkGS8Z0FskoVaiRIWAbZuiVFB9WCW8k1czC4HQTPLpII01bUQx2ludbm0UlXRgVU9ptUUbU7GAImQqTOW8LfPGklEvcgzlIlR_oqw4P9yBxLi-yMQ","dp":"pvNCSnnhbo8Igw9psPR-DicxFnkXlu_ix4gpy6efTrxA-z1VDFDioJ814vKQNioYDzpyAP1gfMPhRkvG_q0hRZsJah3Sb9dfA-WkhSWY7lURQP4yIBTMU0PF_rEATuS7lRciYk1SOx5fqXZd3m_LP0vpBC4Ujlq6NAq6CIjCnms","dq":"TtUVGCCkPNgfOLmkYXu7dxxUCV5kB01-xAEK2OY0n0pG8vfDophH4_D_ZC7nvJ8J9uDhs_3JStexq1lIvaWtG99RNTChIEDzpdn6GH9yaVcb_eB4uJjrNm64FhF8PGCCwxA-xMCZMaARKwhMB2_IOMkxUbWboL3gnhJ2rDO_QO0","e":"AQAB","kid":"8uHDw3M6rf8","kty":"RSA","n":"yaeEL0VKoPBXIAaWXsUgmu05lAvEIIdJn0FX9lHh4JE5UY9B83C5sCNdhs9iSWzpeP11EVjWp8i3Yv2CF7c7u50BXnVBGtxpZpFC-585UXacoJ0chUmarL9GRFJcM1nPHBTFu68aRrn1rIKNHUkNaaxFo0NFGl_4EDDTO8HwawTjwkPoQlRzeByhlvGPVvwgB3Fn93B8QJ_cZhXKxJvjjrC_8Pk76heC_ntEMru71Ix77BoC3j2TuyiN7m9RNBW8BU5q6lKoIdvIeZfTFLzi37iufyfvMrJTixp9zhNB1NxlLCeOZl2MXegtiGqd2H3cbAyqoOiv9ihUWTfXj7SxJw","p":"_Yylc9e07CKdqNRD2EosMC2mrhrEa9j5oY_l00Qyy4-jmCA59Q9viyqvveRo0U7cRvFA5BWgWN6GGLh1DG3X-QBqVr0dnk3uzbobb55RYUXyPLuBZI2q6w2oasbiDwPdY7KpkVv_H-bpITQlyDvO8hhucA6rUV7F6KTQVz8M3Ms","q":"y5p3hch-7jJ21TkAhp_Vk1fLCAuD4tbErwQs2of9ja8sB4iJOs5Wn6HD3P7Mc8Plye7qaLHvzc8I5g0tPKWvC0DPd_FLPXiWwMVAzee3NUX_oGeJNOQp11y1w_KqdO9qZqHSEPZ3NcFL_SZMFgggxhM1uzRiPzsVN0lnD_6prZU","qi":"2Grt6uXHm61ji3xSdkBWNtUnj19vS1-7rFJp5SoYztVQVThf_W52BAiXKBdYZDRVoItC_VS2NvAOjeJjhYO_xQ_q3hK7MdtuXfEPpLnyXKkmWo3lrJ26wbeF6l05LexCkI7ShsOuSt-dsyaTJTszuKDIA6YOfWvfo3aVZmlWRaI","use":"sig"}
|
||||
Value: utils.Ptr("7d/5hl7diJ2rnFL14hEAQf9tzpu29aqXQ8jpJ2iqqKUNFZpdOkEpud0CmRv4H3r8yyk2u/Gqqj9klSy58DJkYXGF5PAYgLyoBIb7L3JXWRbxg4cQ3QJCug13l2OTmpAKoVc+rmX8c3j3h1sNqyJ+7Ql5sS0jSeyiYgIsFNCdnK5alBDyvtcpe/QDpklmP4JCeVpvmf2rLGplk3g5UO5ydJ8UiDXxfDmi+gF6NKJvrGnnah8Ar3G/x88z+tTJtp0DIQFwxXwUM2XZqzEVGm8K2r0w5o9/Keh6bBBaiuH2C78ZOaijGV3DovhR+e9J0cYUYGwT42MZMx9fSWQ/lvWGGnf+Uq3MXJfjWSREfhkp8KTQwR9F7+dnVJWswOEk7jPR8I7hCWTMxJyvaFX3wgAXIVmhrgXZQQbYOqTt56IoqUl0xOJku8dA8opg2UcLlmmuOh6+hfkXKsiiS/H/9c1BVIGj1fCOiT6IePh4wKKSTbwJnPD5EKmdJpgTsUpjcDnXQKY4ReO0UpdRdKxwRDDLeQuG6j+ljGxR9GPudCU9Nmci6rFVI6n5LWYkQxBA1O73RpmXRZPDzntDfpXMEonkmSvOoxaCK2Id7CRKMdqvR0kEouwnhk5WSFtsfi3sA0pkXzPFxwZeWM8vFtbffZOZzXaOhxCOfcj1NClZohlZhyc4jvkxmrpY7PSaAzih0AmHI7y0LYFi6fZu/K4EheVa1+KF55nWZ8ARikHMWKAKkyExkTak7xyN884TDmzURRaPlQg4jzQte5WMNjAG/hlHibdMBNvgwiYd49ZxteJ8ABdbiXVRl+2JGbdjl2ubpQZwOn7bJKlqO56bIwsZ+e4+pXsuOGdBahkHrUjtMEmH3DZbGc6CJLbcmdhdpApLQRRcLAazxJhzAwJ47FRYsHsj57LnYNvmcKdIxw8rxCdLUuzz95uw0T3ankEO5J9sjem+HMEuKdwXK1UcuOn2rjR8Sd/BuvQmeso27dFbPXqXYNS90Ml45YyTvcKSiopD181oZR703TFUSpR7dsiqROMr+p/2jN9h6a8WbQ8xpksyclaQByY/M77AssbXnG6wfhRsntNIINCZLbBnjXOyz6ZHIC5K4tSTdcnWaiYPeRPQmnw9UUvHAcNU2yMWsy0eU377yDS0WstTxOdQutTdkczl8kv5Lo26JiEK7mSIuRK19ffF9Zz8FG8+eKv5zdyIPjyQRDYBysUoDv5huKe2eoxJu/MWS2Pql/ZtUGeD6Ozm3mCvh0vQ9ceagBkY6Ocm3du0ziAKP29Ri0mjg4DizVorbLzsh+EQH/s2Pi9MnjUZDlEmuLl2Xfp7/w4j/8u0N0tVR70VDFuGdKpTjFY3vS8EJrPtyMTM51x1D9rb8gIql8aR/rJw4YF+huxg1mv5n6+tGVqg5msbPmF12eJijP4lkmaRwIpLW5pJTtaDkUj7uOeu1mm4k+Dt5nh0/0jPHzrv6bcTCcbV7UjMHDoTXXqEpFAAJ66rHR7zdAJu+YKsnTIZyLmOpcowq7LL8G9qTvV0OSpyQWUIavRSgbDHFqEqRs+JU94jAzkq8nCY5MTd9m5sIv9InfdT3k+pwpsE/FKge8nghFLtbUrafGkzTky8SE2druvVcIvbfXMfLIKRUYjJgnWc0gQzF5J6pzXM7D2r/RG6JDzASqjlbURq6v9bhNerlOVdMujWKEEVcKWIzlbt4RkihRjM8AUqIZQOyicGQ+4yfIjAHw5viuABONYs3OIWULnFqJxdvS9rNKhfxSjIq9cfqyzevq2xrRoMXEonobh6M3bD2Vang8OAeVeD1OXWPERi4pepCYFS9RJ/Xa/UWxptsqSNuGcb3fAzQSmLpXLGdWRoKXvSe7EYgc0bGcLOjSTu5RURKo+EF9i4KT9EJauf6VXw5dTf/CCIJRXE1bWzXhSCFYntohYhX2ldOCDYpi/jFBC6Vtkw0ud3/xq8Nmhd5gUk+SpngByCZH3Pm3H+jvlbMpiqkDkm1v74hDX13Xhrcw2eWyuqKBVoRCCniUvwpYNbGvBfjC6Hcizv0Aybciwj+4nybt5EPoEUm6S6Gs7fG7QpPdvrzpAxX70MlmdkF/gwyuhbEeJhLK+WL7qAsN5CvHPzVbsIf90x+nGTtMJPgpxVr0tJMj+vprXV4WxutfARBiOnqe58MhA857sd+MzKBgKnoLOBRTiC3qc/0/ULwbG2HCCD7nmwzz7M4nUuMvo8rgS7z0BF68OClT8X3JwSXbL5Wg=="),
|
||||
},
|
||||
}
|
||||
|
||||
for _, kv := range keyValues {
|
||||
if err := tx.Create(&kv).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
|
||||
@@ -483,29 +469,47 @@ func (s *TestService) ResetAppConfig(ctx context.Context) error {
|
||||
return err
|
||||
}
|
||||
|
||||
// Manually set instance ID
|
||||
err = s.appConfigService.UpdateAppConfigValues(ctx, "instanceId", "test-instance-id")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload the app config from the database after resetting the values
|
||||
err = s.appConfigService.LoadDbConfig(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Reload the JWK
|
||||
if err := s.jwtService.LoadOrGenerateKey(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
return s.appConfigService.LoadDbConfig(ctx)
|
||||
}
|
||||
|
||||
func (s *TestService) ResetLock(ctx context.Context) error {
|
||||
_, err := s.appLockService.Acquire(ctx, true)
|
||||
return err
|
||||
func (s *TestService) SetJWTKeys() {
|
||||
const privateKeyString = `{"alg":"RS256","d":"mvMDWSdPPvcum0c0iEHE2gbqtV2NKMmLwrl9E6K7g8lTV95SePLnW_bwyMPV7EGp7PQk3l17I5XRhFjze7GqTnFIOgKzMianPs7jv2ELtBMGK0xOPATgu1iGb70xZ6vcvuEfRyY3dJ0zr4jpUdVuXwKmx9rK4IdZn2dFCKfvSuspqIpz11RhF1ALrqDLkxGVv7ZwNh0_VhJZU9hcjG5l6xc7rQEKpPRkZp0IdjkGS8Z0FskoVaiRIWAbZuiVFB9WCW8k1czC4HQTPLpII01bUQx2ludbm0UlXRgVU9ptUUbU7GAImQqTOW8LfPGklEvcgzlIlR_oqw4P9yBxLi-yMQ","dp":"pvNCSnnhbo8Igw9psPR-DicxFnkXlu_ix4gpy6efTrxA-z1VDFDioJ814vKQNioYDzpyAP1gfMPhRkvG_q0hRZsJah3Sb9dfA-WkhSWY7lURQP4yIBTMU0PF_rEATuS7lRciYk1SOx5fqXZd3m_LP0vpBC4Ujlq6NAq6CIjCnms","dq":"TtUVGCCkPNgfOLmkYXu7dxxUCV5kB01-xAEK2OY0n0pG8vfDophH4_D_ZC7nvJ8J9uDhs_3JStexq1lIvaWtG99RNTChIEDzpdn6GH9yaVcb_eB4uJjrNm64FhF8PGCCwxA-xMCZMaARKwhMB2_IOMkxUbWboL3gnhJ2rDO_QO0","e":"AQAB","kid":"8uHDw3M6rf8","kty":"RSA","n":"yaeEL0VKoPBXIAaWXsUgmu05lAvEIIdJn0FX9lHh4JE5UY9B83C5sCNdhs9iSWzpeP11EVjWp8i3Yv2CF7c7u50BXnVBGtxpZpFC-585UXacoJ0chUmarL9GRFJcM1nPHBTFu68aRrn1rIKNHUkNaaxFo0NFGl_4EDDTO8HwawTjwkPoQlRzeByhlvGPVvwgB3Fn93B8QJ_cZhXKxJvjjrC_8Pk76heC_ntEMru71Ix77BoC3j2TuyiN7m9RNBW8BU5q6lKoIdvIeZfTFLzi37iufyfvMrJTixp9zhNB1NxlLCeOZl2MXegtiGqd2H3cbAyqoOiv9ihUWTfXj7SxJw","p":"_Yylc9e07CKdqNRD2EosMC2mrhrEa9j5oY_l00Qyy4-jmCA59Q9viyqvveRo0U7cRvFA5BWgWN6GGLh1DG3X-QBqVr0dnk3uzbobb55RYUXyPLuBZI2q6w2oasbiDwPdY7KpkVv_H-bpITQlyDvO8hhucA6rUV7F6KTQVz8M3Ms","q":"y5p3hch-7jJ21TkAhp_Vk1fLCAuD4tbErwQs2of9ja8sB4iJOs5Wn6HD3P7Mc8Plye7qaLHvzc8I5g0tPKWvC0DPd_FLPXiWwMVAzee3NUX_oGeJNOQp11y1w_KqdO9qZqHSEPZ3NcFL_SZMFgggxhM1uzRiPzsVN0lnD_6prZU","qi":"2Grt6uXHm61ji3xSdkBWNtUnj19vS1-7rFJp5SoYztVQVThf_W52BAiXKBdYZDRVoItC_VS2NvAOjeJjhYO_xQ_q3hK7MdtuXfEPpLnyXKkmWo3lrJ26wbeF6l05LexCkI7ShsOuSt-dsyaTJTszuKDIA6YOfWvfo3aVZmlWRaI","use":"sig"}`
|
||||
|
||||
privateKey, _ := jwk.ParseKey([]byte(privateKeyString))
|
||||
_ = s.jwtService.SetKey(privateKey)
|
||||
}
|
||||
|
||||
// getCborPublicKey decodes a Base64 encoded public key and returns the CBOR encoded COSE key
func (s *TestService) getCborPublicKey(base64PublicKey string) ([]byte, error) {
	decodedKey, err := base64.StdEncoding.DecodeString(base64PublicKey)
	if err != nil {
		return nil, fmt.Errorf("failed to decode base64 key: %w", err)
	}
	pubKey, err := x509.ParsePKIXPublicKey(decodedKey)
	if err != nil {
		return nil, fmt.Errorf("failed to parse public key: %w", err)
	}

	ecdsaPubKey, ok := pubKey.(*ecdsa.PublicKey)
	if !ok {
		return nil, fmt.Errorf("not an ECDSA public key")
	}

	coseKey := map[int]interface{}{
		1:  2,                     // Key type: EC2
		3:  -7,                    // Algorithm: ECDSA with SHA-256
		-1: 1,                     // Curve: P-256
		-2: ecdsaPubKey.X.Bytes(), // X coordinate
		-3: ecdsaPubKey.Y.Bytes(), // Y coordinate
	}

	cborPublicKey, err := cbor.Marshal(coseKey)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal COSE key: %w", err)
	}

	return cborPublicKey, nil
}
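
A small sanity-check sketch, not part of this diff, that round-trips the CBOR produced above and logs the COSE fields; the integer labels follow the COSE_Key layout used in getCborPublicKey, the helper name and logging are assumptions.

// Decode the COSE key again to confirm the EC2 layout (1=kty, 3=alg, -1=crv).
func inspectCoseKey(cborPublicKey []byte) error {
	var decoded map[int]any
	if err := cbor.Unmarshal(cborPublicKey, &decoded); err != nil {
		return fmt.Errorf("failed to unmarshal COSE key: %w", err)
	}
	slog.Info("Decoded COSE key", slog.Any("kty", decoded[1]), slog.Any("alg", decoded[3]), slog.Any("crv", decoded[-1]))
	return nil
}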
|
||||
|
||||
// SyncLdap triggers an LDAP synchronization
|
||||
@@ -532,7 +536,7 @@ func (s *TestService) SetLdapTestConfig(ctx context.Context) error {
|
||||
"ldapAttributeGroupUniqueIdentifier": "uuid",
|
||||
"ldapAttributeGroupName": "uid",
|
||||
"ldapAttributeGroupMember": "member",
|
||||
"ldapAdminGroupName": "admin_group",
|
||||
"ldapAttributeAdminGroup": "admin_group",
|
||||
"ldapSoftDeleteUsers": "true",
|
||||
"ldapEnabled": "true",
|
||||
}
|
||||
|
||||
@@ -78,7 +78,7 @@ func SendEmail[V any](ctx context.Context, srv *EmailService, toEmail email.Addr
|
||||
|
||||
data := &email.TemplateData[V]{
|
||||
AppName: dbConfig.AppName.Value,
|
||||
LogoURL: common.EnvConfig.AppURL + "/api/application-images/logo",
|
||||
LogoURL: common.EnvConfig.AppURL + "/api/application-images/email",
|
||||
Data: tData,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"path/filepath"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/storage"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/utils"
|
||||
)
|
||||
|
||||
// ExportService handles exporting Pocket ID data into a ZIP archive.
|
||||
type ExportService struct {
|
||||
db *gorm.DB
|
||||
storage storage.FileStorage
|
||||
}
|
||||
|
||||
func NewExportService(db *gorm.DB, storage storage.FileStorage) *ExportService {
|
||||
return &ExportService{
|
||||
db: db,
|
||||
storage: storage,
|
||||
}
|
||||
}
|
||||
|
||||
// ExportToZip performs the full export process and writes the ZIP data to the given writer.
|
||||
func (s *ExportService) ExportToZip(ctx context.Context, w io.Writer) error {
|
||||
dbData, err := s.extractDatabase()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return s.writeExportZipStream(ctx, w, dbData)
|
||||
}
|
||||
|
||||
// extractDatabase reads all tables into a DatabaseExport struct
|
||||
func (s *ExportService) extractDatabase() (DatabaseExport, error) {
|
||||
schema, err := utils.LoadDBSchemaTypes(s.db)
|
||||
if err != nil {
|
||||
return DatabaseExport{}, fmt.Errorf("failed to load schema types: %w", err)
|
||||
}
|
||||
|
||||
version, err := s.schemaVersion()
|
||||
if err != nil {
|
||||
return DatabaseExport{}, err
|
||||
}
|
||||
|
||||
out := DatabaseExport{
|
||||
Provider: s.db.Name(),
|
||||
Version: version,
|
||||
Tables: map[string][]map[string]any{},
|
||||
// These tables need to be inserted in a specific order because of foreign key constraints
|
||||
// Not all tables are listed here, because not all tables are order-dependent
|
||||
TableOrder: []string{"users", "user_groups", "oidc_clients"},
|
||||
}
|
||||
|
||||
for table := range schema {
|
||||
if table == "storage" || table == "schema_migrations" {
|
||||
continue
|
||||
}
|
||||
err = s.dumpTable(table, schema[table], &out)
|
||||
if err != nil {
|
||||
return DatabaseExport{}, err
|
||||
}
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (s *ExportService) schemaVersion() (uint, error) {
|
||||
var version uint
|
||||
if err := s.db.Raw("SELECT version FROM schema_migrations").Row().Scan(&version); err != nil {
|
||||
return 0, fmt.Errorf("failed to query schema version: %w", err)
|
||||
}
|
||||
return version, nil
|
||||
}
|
||||
|
||||
// dumpTable selects all rows from a table and appends them to out.Tables
|
||||
func (s *ExportService) dumpTable(table string, types utils.DBSchemaTableTypes, out *DatabaseExport) error {
|
||||
rows, err := s.db.Raw("SELECT * FROM " + table).Rows()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read table %s: %w", table, err)
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
cols, _ := rows.Columns()
|
||||
if len(cols) != len(types) {
|
||||
// Should never happen...
|
||||
return fmt.Errorf("mismatched columns in table (%d) and schema (%d)", len(cols), len(types))
|
||||
}
|
||||
|
||||
for rows.Next() {
|
||||
vals := s.getScanValuesForTable(cols, types)
|
||||
err = rows.Scan(vals...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to scan row in table %s: %w", table, err)
|
||||
}
|
||||
|
||||
rowMap := make(map[string]any, len(cols))
|
||||
for i, col := range cols {
|
||||
rowMap[col] = vals[i]
|
||||
}
|
||||
|
||||
// Skip the app lock row in the kv table
|
||||
if table == "kv" {
|
||||
if keyPtr, ok := rowMap["key"].(*string); ok && keyPtr != nil && *keyPtr == lockKey {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
out.Tables[table] = append(out.Tables[table], rowMap)
|
||||
}
|
||||
|
||||
return rows.Err()
|
||||
}
|
||||
|
||||
func (s *ExportService) getScanValuesForTable(cols []string, types utils.DBSchemaTableTypes) []any {
	res := make([]any, len(cols))
	for i, col := range cols {
		// Store a pointer
		// Note: don't create a helper function for this switch, because it would return type "any" and mess everything up
		// If the column is nullable, we need a pointer to a pointer!
		switch types[col].Name {
		case "boolean", "bool":
			var x bool
			if types[col].Nullable {
				res[i] = utils.Ptr(utils.Ptr(x))
			} else {
				res[i] = utils.Ptr(x)
			}
		case "blob", "bytea", "jsonb":
			// Treat jsonb columns as binary too
			var x []byte
			if types[col].Nullable {
				res[i] = utils.Ptr(utils.Ptr(x))
			} else {
				res[i] = utils.Ptr(x)
			}
		case "timestamp", "timestamptz", "timestamp with time zone", "datetime":
			var x datatype.DateTime
			if types[col].Nullable {
				res[i] = utils.Ptr(utils.Ptr(x))
			} else {
				res[i] = utils.Ptr(x)
			}
		case "integer", "int", "bigint":
			var x int64
			if types[col].Nullable {
				res[i] = utils.Ptr(utils.Ptr(x))
			} else {
				res[i] = utils.Ptr(x)
			}
		default:
			// Treat everything else as a string (including the "numeric" type)
			var x string
			if types[col].Nullable {
				res[i] = utils.Ptr(utils.Ptr(x))
			} else {
				res[i] = utils.Ptr(x)
			}
		}
	}

	return res
}
|
||||
|
||||
func (s *ExportService) writeExportZipStream(ctx context.Context, w io.Writer, dbData DatabaseExport) error {
|
||||
zipWriter := zip.NewWriter(w)
|
||||
|
||||
// Add database.json
|
||||
jsonWriter, err := zipWriter.Create("database.json")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create database.json in zip: %w", err)
|
||||
}
|
||||
|
||||
jsonEncoder := json.NewEncoder(jsonWriter)
|
||||
jsonEncoder.SetEscapeHTML(false)
|
||||
|
||||
if err := jsonEncoder.Encode(dbData); err != nil {
|
||||
return fmt.Errorf("failed to encode database.json: %w", err)
|
||||
}
|
||||
|
||||
// Add uploaded files
|
||||
if err := s.addUploadsToZip(ctx, zipWriter); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return zipWriter.Close()
|
||||
}
|
||||
|
||||
// addUploadsToZip adds all files from the storage to the ZIP archive under the "uploads/" directory
|
||||
func (s *ExportService) addUploadsToZip(ctx context.Context, zipWriter *zip.Writer) error {
|
||||
return s.storage.Walk(ctx, "/", func(p storage.ObjectInfo) error {
|
||||
zipPath := filepath.Join("uploads", p.Path)
|
||||
|
||||
w, err := zipWriter.Create(zipPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create zip entry for %s: %w", zipPath, err)
|
||||
}
|
||||
|
||||
f, _, err := s.storage.Open(ctx, p.Path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open file %s: %w", zipPath, err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if _, err := io.Copy(w, f); err != nil {
|
||||
return fmt.Errorf("failed to copy file %s into zip: %w", zipPath, err)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
@@ -1,264 +0,0 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
datatype "github.com/pocket-id/pocket-id/backend/internal/model/types"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/storage"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/utils"
|
||||
)
|
||||
|
||||
// ImportService handles importing Pocket ID data from an exported ZIP archive.
|
||||
type ImportService struct {
|
||||
db *gorm.DB
|
||||
storage storage.FileStorage
|
||||
}
|
||||
|
||||
type DatabaseExport struct {
|
||||
Provider string `json:"provider"`
|
||||
Version uint `json:"version"`
|
||||
Tables map[string][]map[string]any `json:"tables"`
|
||||
TableOrder []string `json:"tableOrder"`
|
||||
}
|
||||
|
||||
func NewImportService(db *gorm.DB, storage storage.FileStorage) *ImportService {
|
||||
return &ImportService{
|
||||
db: db,
|
||||
storage: storage,
|
||||
}
|
||||
}
|
||||
|
||||
// ImportFromZip performs the full import process from the given ZIP reader.
|
||||
func (s *ImportService) ImportFromZip(ctx context.Context, r *zip.Reader) error {
|
||||
dbData, err := processZipDatabaseJson(r.File)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = s.ImportDatabase(dbData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = s.importUploads(ctx, r.File)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// ImportDatabase only imports the database data from the given DatabaseExport struct.
|
||||
func (s *ImportService) ImportDatabase(dbData DatabaseExport) error {
|
||||
err := s.resetSchema(dbData.Version, dbData.Provider)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = s.insertData(dbData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// processZipDatabaseJson extracts database.json from the ZIP archive
|
||||
func processZipDatabaseJson(files []*zip.File) (dbData DatabaseExport, err error) {
|
||||
for _, f := range files {
|
||||
if f.Name == "database.json" {
|
||||
return parseDatabaseJsonStream(f)
|
||||
}
|
||||
}
|
||||
return dbData, errors.New("database.json not found in the ZIP file")
|
||||
}
|
||||
|
||||
func parseDatabaseJsonStream(f *zip.File) (dbData DatabaseExport, err error) {
|
||||
rc, err := f.Open()
|
||||
if err != nil {
|
||||
return dbData, fmt.Errorf("failed to open database.json: %w", err)
|
||||
}
|
||||
defer rc.Close()
|
||||
|
||||
err = json.NewDecoder(rc).Decode(&dbData)
|
||||
if err != nil {
|
||||
return dbData, fmt.Errorf("failed to decode database.json: %w", err)
|
||||
}
|
||||
|
||||
return dbData, nil
|
||||
}
|
||||
|
||||
// importUploads imports files from the uploads/ directory in the ZIP archive
|
||||
func (s *ImportService) importUploads(ctx context.Context, files []*zip.File) error {
|
||||
const maxFileSize = 50 << 20 // 50 MiB
|
||||
const uploadsPrefix = "uploads/"
|
||||
|
||||
for _, f := range files {
|
||||
if !strings.HasPrefix(f.Name, uploadsPrefix) {
|
||||
continue
|
||||
}
|
||||
|
||||
if f.UncompressedSize64 > maxFileSize {
|
||||
return fmt.Errorf("file %s too large (%d bytes)", f.Name, f.UncompressedSize64)
|
||||
}
|
||||
|
||||
targetPath := strings.TrimPrefix(f.Name, uploadsPrefix)
|
||||
if strings.HasSuffix(f.Name, "/") || targetPath == "" {
|
||||
continue // Skip directories
|
||||
}
|
||||
|
||||
err := s.storage.DeleteAll(ctx, targetPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to delete existing file %s: %w", targetPath, err)
|
||||
}
|
||||
|
||||
rc, err := f.Open()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
buf, err := io.ReadAll(rc)
|
||||
rc.Close()
|
||||
if err != nil {
|
||||
return fmt.Errorf("read file %s: %w", f.Name, err)
|
||||
}
|
||||
|
||||
err = s.storage.Save(ctx, targetPath, bytes.NewReader(buf))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to save file %s: %w", targetPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// resetSchema drops the existing schema and migrates to the target version
|
||||
func (s *ImportService) resetSchema(targetVersion uint, exportDbProvider string) error {
|
||||
sqlDb, err := s.db.DB()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get sql.DB: %w", err)
|
||||
}
|
||||
|
||||
m, err := utils.GetEmbeddedMigrateInstance(sqlDb)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get migrate instance: %w", err)
|
||||
}
|
||||
|
||||
err = m.Drop()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to drop existing schema: %w", err)
|
||||
}
|
||||
|
||||
// Needs to be called again to re-create the schema_migrations table
|
||||
m, err = utils.GetEmbeddedMigrateInstance(sqlDb)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get migrate instance: %w", err)
|
||||
}
|
||||
|
||||
err = m.Migrate(targetVersion)
|
||||
if err != nil {
|
||||
return fmt.Errorf("migration failed: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// insertData populates the DB with the imported data
|
||||
func (s *ImportService) insertData(dbData DatabaseExport) error {
|
||||
schema, err := utils.LoadDBSchemaTypes(s.db)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load schema types: %w", err)
|
||||
}
|
||||
|
||||
return s.db.Transaction(func(tx *gorm.DB) error {
|
||||
// Iterate through all tables
|
||||
// Some tables need to be processed in order
|
||||
tables := make([]string, 0, len(dbData.Tables))
|
||||
tables = append(tables, dbData.TableOrder...)
|
||||
|
||||
for t := range dbData.Tables {
|
||||
// Skip tables already present where the order matters
|
||||
// Also skip the schema_migrations table
|
||||
if slices.Contains(dbData.TableOrder, t) || t == "schema_migrations" {
|
||||
continue
|
||||
}
|
||||
tables = append(tables, t)
|
||||
}
|
||||
|
||||
// Insert rows
|
||||
for _, table := range tables {
|
||||
for _, row := range dbData.Tables[table] {
|
||||
err = normalizeRowWithSchema(row, table, schema)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to normalize row for table '%s': %w", table, err)
|
||||
}
|
||||
err = tx.Table(table).Create(row).Error
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed inserting into table '%s': %w", table, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
// normalizeRowWithSchema converts row values based on the DB schema
|
||||
func normalizeRowWithSchema(row map[string]any, table string, schema utils.DBSchemaTypes) error {
|
||||
if schema[table] == nil {
|
||||
return fmt.Errorf("schema not found for table '%s'", table)
|
||||
}
|
||||
|
||||
for col, val := range row {
|
||||
if val == nil {
|
||||
// If the value is nil, skip the column
|
||||
continue
|
||||
}
|
||||
|
||||
colType := schema[table][col]
|
||||
|
||||
switch colType.Name {
|
||||
case "timestamp", "timestamptz", "timestamp with time zone", "datetime":
|
||||
// Dates are stored as strings
|
||||
str, ok := val.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("value for column '%s/%s' was expected to be a string, but was '%T'", table, col, val)
|
||||
}
|
||||
d, err := datatype.DateTimeFromString(str)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to decode value for column '%s/%s' as timestamp: %w", table, col, err)
|
||||
}
|
||||
row[col] = d
|
||||
|
||||
case "blob", "bytea", "jsonb":
|
||||
// Binary data and jsonb data is stored in the file as base64-encoded string
|
||||
str, ok := val.(string)
|
||||
if !ok {
|
||||
return fmt.Errorf("value for column '%s/%s' was expected to be a string, but was '%T'", table, col, val)
|
||||
}
|
||||
b, err := base64.StdEncoding.DecodeString(str)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to decode value for column '%s/%s' from base64: %w", table, col, err)
|
||||
}
|
||||
|
||||
// For jsonb, we additionally cast to json.RawMessage
|
||||
if colType.Name == "jsonb" {
|
||||
row[col] = json.RawMessage(b)
|
||||
} else {
|
||||
row[col] = b
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -18,6 +18,14 @@ import (
)

const (
// PrivateKeyFile is the path in the data/keys folder where the key is stored
// This is a JSON file containing a key encoded as JWK
PrivateKeyFile = "jwt_private_key.json"

// PrivateKeyFileEncrypted is the path in the data/keys folder where the encrypted key is stored
// This is a encrypted JSON file containing a key encoded as JWK
PrivateKeyFileEncrypted = "jwt_private_key.json.enc"

// KeyUsageSigning is the usage for the private keys, for the "use" property
KeyUsageSigning = "sig"

@@ -48,7 +56,6 @@ const (
)

type JwtService struct {
db *gorm.DB
envConfig *common.EnvConfigSchema
privateKey jwk.Key
keyId string
@@ -59,6 +66,7 @@ type JwtService struct {
func NewJwtService(db *gorm.DB, appConfigService *AppConfigService) (*JwtService, error) {
service := &JwtService{}

// Ensure keys are generated or loaded
err := service.init(db, appConfigService, &common.EnvConfig)
if err != nil {
return nil, err
@@ -70,15 +78,14 @@ func NewJwtService(db *gorm.DB, appConfigService *AppConfigService) (*JwtService
func (s *JwtService) init(db *gorm.DB, appConfigService *AppConfigService, envConfig *common.EnvConfigSchema) (err error) {
s.appConfigService = appConfigService
s.envConfig = envConfig
s.db = db

// Ensure keys are generated or loaded
return s.LoadOrGenerateKey()
return s.loadOrGenerateKey(db)
}

func (s *JwtService) LoadOrGenerateKey() error {
func (s *JwtService) loadOrGenerateKey(db *gorm.DB) error {
// Get the key provider
keyProvider, err := jwkutils.GetKeyProvider(s.db, s.envConfig, s.appConfigService.GetDbConfig().InstanceID.Value)
keyProvider, err := jwkutils.GetKeyProvider(db, s.envConfig, s.appConfigService.GetDbConfig().InstanceID.Value)
if err != nil {
return fmt.Errorf("failed to get key provider: %w", err)
}
@@ -86,7 +93,7 @@ func (s *JwtService) LoadOrGenerateKey() error {
// Try loading a key
key, err := keyProvider.LoadKey()
if err != nil {
return fmt.Errorf("failed to load key: %w", err)
return fmt.Errorf("failed to load key (provider type '%s'): %w", s.envConfig.KeysStorage, err)
}

// If we have a key, store it in the object and we're done
@@ -107,7 +114,7 @@ func (s *JwtService) LoadOrGenerateKey() error {
// Save the newly-generated key
err = keyProvider.SaveKey(s.privateKey)
if err != nil {
return fmt.Errorf("failed to save private key: %w", err)
return fmt.Errorf("failed to save private key (provider type '%s'): %w", s.envConfig.KeysStorage, err)
}

return nil
File diff suppressed because it is too large
@@ -371,7 +371,7 @@ func (s *LdapService) SyncUsers(ctx context.Context, tx *gorm.DB, client *ldap.C
// Check if user is admin by checking if they are in the admin group
isAdmin := false
for _, group := range value.GetAttributeValues("memberOf") {
if getDNProperty(dbConfig.LdapAttributeGroupName.Value, group) == dbConfig.LdapAdminGroupName.Value {
if getDNProperty(dbConfig.LdapAttributeGroupName.Value, group) == dbConfig.LdapAttributeAdminGroup.Value {
isAdmin = true
break
}

@@ -226,7 +226,7 @@ func (s *OidcService) hasAuthorizedClientInternal(ctx context.Context, clientID,

// IsUserGroupAllowedToAuthorize checks if the user group of the user is allowed to authorize the client
func (s *OidcService) IsUserGroupAllowedToAuthorize(user model.User, client model.OidcClient) bool {
if len(client.AllowedUserGroups) == 0 {
if !client.IsGroupRestricted {
return true
}
@@ -778,6 +778,14 @@ func (s *OidcService) UpdateClient(ctx context.Context, clientID string, input d

updateOIDCClientModelFromDto(&client, &input)

if !input.IsGroupRestricted {
// Clear allowed user groups if the restriction is removed
err = tx.Model(&client).Association("AllowedUserGroups").Clear()
if err != nil {
return model.OidcClient{}, err
}
}

err = tx.WithContext(ctx).Save(&client).Error
if err != nil {
return model.OidcClient{}, err
@@ -816,6 +824,7 @@ func updateOIDCClientModelFromDto(client *model.OidcClient, input *dto.OidcClien
client.PkceEnabled = input.IsPublic || input.PkceEnabled
client.RequiresReauthentication = input.RequiresReauthentication
client.LaunchURL = input.LaunchURL
client.IsGroupRestricted = input.IsGroupRestricted

// Credentials
client.Credentials.FederatedIdentities = make([]model.OidcClientFederatedIdentity, len(input.Credentials.FederatedIdentities))
@@ -1433,6 +1442,7 @@ func (s *OidcService) GetAllowedGroupsCountOfClient(ctx context.Context, id stri
}

func (s *OidcService) ListAuthorizedClients(ctx context.Context, userID string, listRequestOptions utils.ListRequestOptions) ([]model.UserAuthorizedOidcClient, utils.PaginationResponse, error) {

query := s.db.
WithContext(ctx).
Model(&model.UserAuthorizedOidcClient{}).
@@ -2122,106 +2132,3 @@ func (s *OidcService) updateClientLogoType(ctx context.Context, clientID string,

return nil
}

func (s *OidcService) CreateAPI(ctx context.Context, input dto.OidcAPICreateDto) (model.OidcAPI, error) {
api := model.OidcAPI{
Name: input.Name,
}

err := s.db.
WithContext(ctx).
Create(&api).
Error
if err != nil {
return model.OidcAPI{}, fmt.Errorf("failed to create API in database: %w", err)
}

return api, nil
}

func (s *OidcService) GetAPI(ctx context.Context, id string) (model.OidcAPI, error) {
var api model.OidcAPI
err := s.db.
WithContext(ctx).
First(&api, "id = ?", id).
Error
if err != nil {
return model.OidcAPI{}, fmt.Errorf("failed to get API from database: %w", err)
}

return api, nil
}

func (s *OidcService) ListAPIs(ctx context.Context, searchTerm string, listRequestOptions utils.ListRequestOptions) ([]model.OidcAPI, utils.PaginationResponse, error) {
var apis []model.OidcAPI

query := s.db.
WithContext(ctx).
Model(&model.OidcAPI{})

if searchTerm != "" {
query = query.Where("id = ? OR name = ? OR identifier = ?", searchTerm, searchTerm, searchTerm)
}

response, err := utils.PaginateFilterAndSort(listRequestOptions, query, &apis)
return apis, response, err
}

func (s *OidcService) UpdateAPI(ctx context.Context, id string, input dto.OidcAPIUpdateDto) (model.OidcAPI, error) {
tx := s.db.Begin()
defer func() {
tx.Rollback()
}()

var api model.OidcAPI
err := tx.
WithContext(ctx).
First(&api, "id = ?", id).
Error
if err != nil {
return model.OidcAPI{}, fmt.Errorf("failed to get API from database: %w", err)
}

api.Name = input.Name
api.Identifier = input.Identifier

// Convert permissions from DTO to model
api.Data.Permissions = make([]model.OidcAPIPermission, len(input.Permissions))
for i, p := range input.Permissions {
api.Data.Permissions[i] = model.OidcAPIPermission{
Name: p.Name,
Description: p.Description,
}
}

err = tx.
WithContext(ctx).
Save(&api).
Error
if err != nil {
return model.OidcAPI{}, fmt.Errorf("failed to save API in database: %w", err)
}

err = tx.Commit().Error
if err != nil {
return model.OidcAPI{}, fmt.Errorf("failed to commit transaction: %w", err)
}

return api, nil
}

func (s *OidcService) DeleteAPI(ctx context.Context, id string) error {
result := s.db.
WithContext(ctx).
Delete(&model.OidcAPI{}, "id = ?", id)

if result.Error != nil {
return result.Error
}

if result.RowsAffected == 0 {
return gorm.ErrRecordNotFound
}

return nil
}
@@ -148,7 +148,6 @@ func TestOidcService_verifyClientCredentialsInternal(t *testing.T) {
var err error
// Create a test database
db := testutils.NewDatabaseForTest(t)
common.EnvConfig.EncryptionKey = []byte("0123456789abcdef0123456789abcdef")

// Create two JWKs for testing
privateJWK, jwkSetJSON := generateTestECDSAKey(t)
@@ -53,6 +53,7 @@ func (s *UserGroupService) getInternal(ctx context.Context, id string, tx *gorm.
|
||||
Where("id = ?", id).
|
||||
Preload("CustomClaims").
|
||||
Preload("Users").
|
||||
Preload("AllowedOidcClients").
|
||||
First(&group).
|
||||
Error
|
||||
return group, err
|
||||
@@ -248,3 +249,54 @@ func (s *UserGroupService) GetUserCountOfGroup(ctx context.Context, id string) (
|
||||
Count()
|
||||
return count, nil
|
||||
}
|
||||
|
||||
func (s *UserGroupService) UpdateAllowedOidcClient(ctx context.Context, id string, input dto.UserGroupUpdateAllowedOidcClientsDto) (group model.UserGroup, err error) {
|
||||
tx := s.db.Begin()
|
||||
defer func() {
|
||||
tx.Rollback()
|
||||
}()
|
||||
|
||||
group, err = s.getInternal(ctx, id, tx)
|
||||
if err != nil {
|
||||
return model.UserGroup{}, err
|
||||
}
|
||||
|
||||
// Fetch the clients based on the client IDs
|
||||
var clients []model.OidcClient
|
||||
if len(input.OidcClientIDs) > 0 {
|
||||
err = tx.
|
||||
WithContext(ctx).
|
||||
Where("id IN (?)", input.OidcClientIDs).
|
||||
Find(&clients).
|
||||
Error
|
||||
if err != nil {
|
||||
return model.UserGroup{}, err
|
||||
}
|
||||
}
|
||||
|
||||
// Replace the current clients with the new set of clients
|
||||
err = tx.
|
||||
WithContext(ctx).
|
||||
Model(&group).
|
||||
Association("AllowedOidcClients").
|
||||
Replace(clients)
|
||||
if err != nil {
|
||||
return model.UserGroup{}, err
|
||||
}
|
||||
|
||||
// Save the updated group
|
||||
err = tx.
|
||||
WithContext(ctx).
|
||||
Save(&group).
|
||||
Error
|
||||
if err != nil {
|
||||
return model.UserGroup{}, err
|
||||
}
|
||||
|
||||
err = tx.Commit().Error
|
||||
if err != nil {
|
||||
return model.UserGroup{}, err
|
||||
}
|
||||
|
||||
return group, nil
|
||||
}
|
||||
|
||||
@@ -253,6 +253,18 @@ func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCrea
|
||||
return model.User{}, &common.UserEmailNotSetError{}
|
||||
}
|
||||
|
||||
var userGroups []model.UserGroup
|
||||
if len(input.UserGroupIds) > 0 {
|
||||
err := tx.
|
||||
WithContext(ctx).
|
||||
Where("id IN ?", input.UserGroupIds).
|
||||
Find(&userGroups).
|
||||
Error
|
||||
if err != nil {
|
||||
return model.User{}, err
|
||||
}
|
||||
}
|
||||
|
||||
user := model.User{
|
||||
FirstName: input.FirstName,
|
||||
LastName: input.LastName,
|
||||
@@ -262,6 +274,7 @@ func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCrea
|
||||
IsAdmin: input.IsAdmin,
|
||||
Locale: input.Locale,
|
||||
Disabled: input.Disabled,
|
||||
UserGroups: userGroups,
|
||||
}
|
||||
if input.LdapID != "" {
|
||||
user.LdapID = &input.LdapID
|
||||
@@ -285,7 +298,13 @@ func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCrea
|
||||
|
||||
// Apply default groups and claims for new non-LDAP users
|
||||
if !isLdapSync {
|
||||
if err := s.applySignupDefaults(ctx, &user, tx); err != nil {
|
||||
if len(input.UserGroupIds) == 0 {
|
||||
if err := s.applyDefaultGroups(ctx, &user, tx); err != nil {
|
||||
return model.User{}, err
|
||||
}
|
||||
}
|
||||
|
||||
if err := s.applyDefaultCustomClaims(ctx, &user, tx); err != nil {
|
||||
return model.User{}, err
|
||||
}
|
||||
}
|
||||
@@ -293,10 +312,9 @@ func (s *UserService) createUserInternal(ctx context.Context, input dto.UserCrea
|
||||
return user, nil
|
||||
}
|
||||
|
||||
func (s *UserService) applySignupDefaults(ctx context.Context, user *model.User, tx *gorm.DB) error {
|
||||
func (s *UserService) applyDefaultGroups(ctx context.Context, user *model.User, tx *gorm.DB) error {
|
||||
config := s.appConfigService.GetDbConfig()
|
||||
|
||||
// Apply default user groups
|
||||
var groupIDs []string
|
||||
v := config.SignupDefaultUserGroupIDs.Value
|
||||
if v != "" && v != "[]" {
|
||||
@@ -323,10 +341,14 @@ func (s *UserService) applySignupDefaults(ctx context.Context, user *model.User,
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *UserService) applyDefaultCustomClaims(ctx context.Context, user *model.User, tx *gorm.DB) error {
|
||||
config := s.appConfigService.GetDbConfig()
|
||||
|
||||
// Apply default custom claims
|
||||
var claims []dto.CustomClaimCreateDto
|
||||
v = config.SignupDefaultCustomClaims.Value
|
||||
v := config.SignupDefaultCustomClaims.Value
|
||||
if v != "" && v != "[]" {
|
||||
err := json.Unmarshal([]byte(v), &claims)
|
||||
if err != nil {
|
||||
@@ -432,28 +454,36 @@ func (s *UserService) RequestOneTimeAccessEmailAsAdmin(ctx context.Context, user
|
||||
return &common.OneTimeAccessDisabledError{}
|
||||
}
|
||||
|
||||
return s.requestOneTimeAccessEmailInternal(ctx, userID, "", ttl)
|
||||
_, err := s.requestOneTimeAccessEmailInternal(ctx, userID, "", ttl, true)
|
||||
return err
|
||||
}
|
||||
|
||||
func (s *UserService) RequestOneTimeAccessEmailAsUnauthenticatedUser(ctx context.Context, userID, redirectPath string) error {
|
||||
func (s *UserService) RequestOneTimeAccessEmailAsUnauthenticatedUser(ctx context.Context, userID, redirectPath string) (string, error) {
|
||||
isDisabled := !s.appConfigService.GetDbConfig().EmailOneTimeAccessAsUnauthenticatedEnabled.IsTrue()
|
||||
if isDisabled {
|
||||
return &common.OneTimeAccessDisabledError{}
|
||||
return "", &common.OneTimeAccessDisabledError{}
|
||||
}
|
||||
|
||||
var userId string
|
||||
err := s.db.Model(&model.User{}).Select("id").Where("email = ?", userID).First(&userId).Error
|
||||
if errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
// Do not return error if user not found to prevent email enumeration
|
||||
return nil
|
||||
return "", nil
|
||||
} else if err != nil {
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
|
||||
return s.requestOneTimeAccessEmailInternal(ctx, userId, redirectPath, 15*time.Minute)
|
||||
deviceToken, err := s.requestOneTimeAccessEmailInternal(ctx, userId, redirectPath, 15*time.Minute, true)
|
||||
if err != nil {
|
||||
return "", err
|
||||
} else if deviceToken == nil {
|
||||
return "", errors.New("device token expected but not returned")
|
||||
}
|
||||
|
||||
return *deviceToken, nil
|
||||
}
|
||||
|
||||
func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, userID, redirectPath string, ttl time.Duration) error {
|
||||
func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, userID, redirectPath string, ttl time.Duration, withDeviceToken bool) (*string, error) {
|
||||
tx := s.db.Begin()
|
||||
defer func() {
|
||||
tx.Rollback()
|
||||
@@ -461,21 +491,20 @@ func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, use
|
||||
|
||||
user, err := s.GetUser(ctx, userID)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if user.Email == nil {
|
||||
return &common.UserEmailNotSetError{}
|
||||
return nil, &common.UserEmailNotSetError{}
|
||||
}
|
||||
|
||||
oneTimeAccessToken, err := s.createOneTimeAccessTokenInternal(ctx, user.ID, ttl, tx)
|
||||
oneTimeAccessToken, deviceToken, err := s.createOneTimeAccessTokenInternal(ctx, user.ID, ttl, withDeviceToken, tx)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = tx.Commit().Error
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// We use a background context here as this is running in a goroutine
|
||||
@@ -508,28 +537,29 @@ func (s *UserService) requestOneTimeAccessEmailInternal(ctx context.Context, use
|
||||
}
|
||||
}()
|
||||
|
||||
return nil
|
||||
return deviceToken, nil
|
||||
}
|
||||
|
||||
func (s *UserService) CreateOneTimeAccessToken(ctx context.Context, userID string, ttl time.Duration) (string, error) {
|
||||
return s.createOneTimeAccessTokenInternal(ctx, userID, ttl, s.db)
|
||||
func (s *UserService) CreateOneTimeAccessToken(ctx context.Context, userID string, ttl time.Duration) (token string, err error) {
|
||||
token, _, err = s.createOneTimeAccessTokenInternal(ctx, userID, ttl, false, s.db)
|
||||
return token, err
|
||||
}
|
||||
|
||||
func (s *UserService) createOneTimeAccessTokenInternal(ctx context.Context, userID string, ttl time.Duration, tx *gorm.DB) (string, error) {
|
||||
oneTimeAccessToken, err := NewOneTimeAccessToken(userID, ttl)
|
||||
func (s *UserService) createOneTimeAccessTokenInternal(ctx context.Context, userID string, ttl time.Duration, withDeviceToken bool, tx *gorm.DB) (token string, deviceToken *string, err error) {
|
||||
oneTimeAccessToken, err := NewOneTimeAccessToken(userID, ttl, withDeviceToken)
|
||||
if err != nil {
|
||||
return "", err
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
err = tx.WithContext(ctx).Create(oneTimeAccessToken).Error
|
||||
if err != nil {
|
||||
return "", err
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
return oneTimeAccessToken.Token, nil
|
||||
return oneTimeAccessToken.Token, oneTimeAccessToken.DeviceToken, nil
|
||||
}
|
||||
|
||||
func (s *UserService) ExchangeOneTimeAccessToken(ctx context.Context, token string, ipAddress, userAgent string) (model.User, string, error) {
|
||||
func (s *UserService) ExchangeOneTimeAccessToken(ctx context.Context, token, deviceToken, ipAddress, userAgent string) (model.User, string, error) {
|
||||
tx := s.db.Begin()
|
||||
defer func() {
|
||||
tx.Rollback()
|
||||
@@ -549,6 +579,10 @@ func (s *UserService) ExchangeOneTimeAccessToken(ctx context.Context, token stri
|
||||
}
|
||||
return model.User{}, "", err
|
||||
}
|
||||
if oneTimeAccessToken.DeviceToken != nil && deviceToken != *oneTimeAccessToken.DeviceToken {
|
||||
return model.User{}, "", &common.DeviceCodeInvalid{}
|
||||
}
|
||||
|
||||
accessToken, err := s.jwtService.GenerateAccessToken(oneTimeAccessToken.User)
|
||||
if err != nil {
|
||||
return model.User{}, "", err
|
||||
@@ -715,12 +749,22 @@ func (s *UserService) disableUserInternal(ctx context.Context, tx *gorm.DB, user
|
||||
Error
|
||||
}
|
||||
|
||||
func (s *UserService) CreateSignupToken(ctx context.Context, ttl time.Duration, usageLimit int) (model.SignupToken, error) {
|
||||
func (s *UserService) CreateSignupToken(ctx context.Context, ttl time.Duration, usageLimit int, userGroupIDs []string) (model.SignupToken, error) {
|
||||
signupToken, err := NewSignupToken(ttl, usageLimit)
|
||||
if err != nil {
|
||||
return model.SignupToken{}, err
|
||||
}
|
||||
|
||||
var userGroups []model.UserGroup
|
||||
err = s.db.WithContext(ctx).
|
||||
Where("id IN ?", userGroupIDs).
|
||||
Find(&userGroups).
|
||||
Error
|
||||
if err != nil {
|
||||
return model.SignupToken{}, err
|
||||
}
|
||||
signupToken.UserGroups = userGroups
|
||||
|
||||
err = s.db.WithContext(ctx).Create(signupToken).Error
|
||||
if err != nil {
|
||||
return model.SignupToken{}, err
|
||||
@@ -743,9 +787,11 @@ func (s *UserService) SignUp(ctx context.Context, signupData dto.SignUpDto, ipAd
|
||||
}
|
||||
|
||||
var signupToken model.SignupToken
|
||||
var userGroupIDs []string
|
||||
if tokenProvided {
|
||||
err := tx.
|
||||
WithContext(ctx).
|
||||
Preload("UserGroups").
|
||||
Where("token = ?", signupData.Token).
|
||||
Clauses(clause.Locking{Strength: "UPDATE"}).
|
||||
First(&signupToken).
|
||||
@@ -760,14 +806,19 @@ func (s *UserService) SignUp(ctx context.Context, signupData dto.SignUpDto, ipAd
|
||||
if !signupToken.IsValid() {
|
||||
return model.User{}, "", &common.TokenInvalidOrExpiredError{}
|
||||
}
|
||||
|
||||
for _, group := range signupToken.UserGroups {
|
||||
userGroupIDs = append(userGroupIDs, group.ID)
|
||||
}
|
||||
}
|
||||
|
||||
userToCreate := dto.UserCreateDto{
|
||||
Username: signupData.Username,
|
||||
Email: signupData.Email,
|
||||
FirstName: signupData.FirstName,
|
||||
LastName: signupData.LastName,
|
||||
DisplayName: strings.TrimSpace(signupData.FirstName + " " + signupData.LastName),
|
||||
Username: signupData.Username,
|
||||
Email: signupData.Email,
|
||||
FirstName: signupData.FirstName,
|
||||
LastName: signupData.LastName,
|
||||
DisplayName: strings.TrimSpace(signupData.FirstName + " " + signupData.LastName),
|
||||
UserGroupIds: userGroupIDs,
|
||||
}
|
||||
|
||||
user, err := s.createUserInternal(ctx, userToCreate, false, tx)
|
||||
@@ -808,7 +859,7 @@ func (s *UserService) SignUp(ctx context.Context, signupData dto.SignUpDto, ipAd
|
||||
|
||||
func (s *UserService) ListSignupTokens(ctx context.Context, listRequestOptions utils.ListRequestOptions) ([]model.SignupToken, utils.PaginationResponse, error) {
|
||||
var tokens []model.SignupToken
|
||||
query := s.db.WithContext(ctx).Model(&model.SignupToken{})
|
||||
query := s.db.WithContext(ctx).Preload("UserGroups").Model(&model.SignupToken{})
|
||||
|
||||
pagination, err := utils.PaginateFilterAndSort(listRequestOptions, query, &tokens)
|
||||
return tokens, pagination, err
|
||||
@@ -818,23 +869,33 @@ func (s *UserService) DeleteSignupToken(ctx context.Context, tokenID string) err
|
||||
return s.db.WithContext(ctx).Delete(&model.SignupToken{}, "id = ?", tokenID).Error
|
||||
}
|
||||
|
||||
func NewOneTimeAccessToken(userID string, ttl time.Duration) (*model.OneTimeAccessToken, error) {
|
||||
func NewOneTimeAccessToken(userID string, ttl time.Duration, withDeviceToken bool) (*model.OneTimeAccessToken, error) {
|
||||
// If expires at is less than 15 minutes, use a 6-character token instead of 16
|
||||
tokenLength := 16
|
||||
if ttl <= 15*time.Minute {
|
||||
tokenLength = 6
|
||||
}
|
||||
|
||||
randomString, err := utils.GenerateRandomAlphanumericString(tokenLength)
|
||||
token, err := utils.GenerateRandomAlphanumericString(tokenLength)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var deviceToken *string
|
||||
if withDeviceToken {
|
||||
dt, err := utils.GenerateRandomAlphanumericString(16)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
deviceToken = &dt
|
||||
}
|
||||
|
||||
now := time.Now().Round(time.Second)
|
||||
o := &model.OneTimeAccessToken{
|
||||
UserID: userID,
|
||||
ExpiresAt: datatype.DateTime(now.Add(ttl)),
|
||||
Token: randomString,
|
||||
UserID: userID,
|
||||
ExpiresAt: datatype.DateTime(now.Add(ttl)),
|
||||
Token: token,
|
||||
DeviceToken: deviceToken,
|
||||
}
|
||||
|
||||
return o, nil
|
||||
|
||||
@@ -1,6 +1,8 @@
package cookie

import (
"time"

"github.com/gin-gonic/gin"
)

@@ -11,3 +13,7 @@ func AddAccessTokenCookie(c *gin.Context, maxAgeInSeconds int, token string) {
func AddSessionIdCookie(c *gin.Context, maxAgeInSeconds int, sessionID string) {
c.SetCookie(SessionIdCookieName, sessionID, maxAgeInSeconds, "/", "", true, true)
}

func AddDeviceTokenCookie(c *gin.Context, deviceToken string) {
c.SetCookie(DeviceTokenCookieName, deviceToken, int(15*time.Minute.Seconds()), "/api/one-time-access-token", "", true, true)
}

@@ -8,10 +8,12 @@ import (

var AccessTokenCookieName = "__Host-access_token"
var SessionIdCookieName = "__Host-session"
var DeviceTokenCookieName = "__Host-device_token" //nolint:gosec

func init() {
if strings.HasPrefix(common.EnvConfig.AppURL, "http://") {
AccessTokenCookieName = "access_token"
SessionIdCookieName = "session"
DeviceTokenCookieName = "device_token"
}
}
@@ -1,130 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
||||
"github.com/golang-migrate/migrate/v4"
|
||||
"github.com/golang-migrate/migrate/v4/database"
|
||||
postgresMigrate "github.com/golang-migrate/migrate/v4/database/postgres"
|
||||
sqliteMigrate "github.com/golang-migrate/migrate/v4/database/sqlite3"
|
||||
"github.com/golang-migrate/migrate/v4/source/iofs"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/common"
|
||||
"github.com/pocket-id/pocket-id/backend/resources"
|
||||
)
|
||||
|
||||
// MigrateDatabase applies database migrations using embedded migration files or fetches them from GitHub if a downgrade is detected.
|
||||
func MigrateDatabase(sqlDb *sql.DB) error {
|
||||
m, err := GetEmbeddedMigrateInstance(sqlDb)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get migrate instance: %w", err)
|
||||
}
|
||||
|
||||
path := "migrations/" + string(common.EnvConfig.DbProvider)
|
||||
requiredVersion, err := getRequiredMigrationVersion(path)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get last migration version: %w", err)
|
||||
}
|
||||
|
||||
currentVersion, _, _ := m.Version()
|
||||
if currentVersion > requiredVersion {
|
||||
slog.Warn("Database version is newer than the application supports, possible downgrade detected", slog.Uint64("db_version", uint64(currentVersion)), slog.Uint64("app_version", uint64(requiredVersion)))
|
||||
if !common.EnvConfig.AllowDowngrade {
|
||||
return fmt.Errorf("database version (%d) is newer than application version (%d), downgrades are not allowed (set ALLOW_DOWNGRADE=true to enable)", currentVersion, requiredVersion)
|
||||
}
|
||||
slog.Info("Fetching migrations from GitHub to handle possible downgrades")
|
||||
return migrateDatabaseFromGitHub(sqlDb, requiredVersion)
|
||||
}
|
||||
|
||||
if err := m.Migrate(requiredVersion); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||
return fmt.Errorf("failed to apply embedded migrations: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetEmbeddedMigrateInstance creates a migrate.Migrate instance using embedded migration files.
|
||||
func GetEmbeddedMigrateInstance(sqlDb *sql.DB) (*migrate.Migrate, error) {
|
||||
path := "migrations/" + string(common.EnvConfig.DbProvider)
|
||||
source, err := iofs.New(resources.FS, path)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create embedded migration source: %w", err)
|
||||
}
|
||||
|
||||
driver, err := newMigrationDriver(sqlDb, common.EnvConfig.DbProvider)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create migration driver: %w", err)
|
||||
}
|
||||
|
||||
m, err := migrate.NewWithInstance("iofs", source, "pocket-id", driver)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create migration instance: %w", err)
|
||||
}
|
||||
return m, nil
|
||||
}
|
||||
|
||||
// newMigrationDriver creates a database.Driver instance based on the given database provider.
|
||||
func newMigrationDriver(sqlDb *sql.DB, dbProvider common.DbProvider) (driver database.Driver, err error) {
|
||||
switch dbProvider {
|
||||
case common.DbProviderSqlite:
|
||||
driver, err = sqliteMigrate.WithInstance(sqlDb, &sqliteMigrate.Config{
|
||||
NoTxWrap: true,
|
||||
})
|
||||
case common.DbProviderPostgres:
|
||||
driver, err = postgresMigrate.WithInstance(sqlDb, &postgresMigrate.Config{})
|
||||
default:
|
||||
// Should never happen at this point
|
||||
return nil, fmt.Errorf("unsupported database provider: %s", common.EnvConfig.DbProvider)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create migration driver: %w", err)
|
||||
}
|
||||
|
||||
return driver, nil
|
||||
}
|
||||
|
||||
// migrateDatabaseFromGitHub applies database migrations fetched from GitHub to handle downgrades.
|
||||
func migrateDatabaseFromGitHub(sqlDb *sql.DB, version uint) error {
|
||||
srcURL := "github://pocket-id/pocket-id/backend/resources/migrations/" + string(common.EnvConfig.DbProvider)
|
||||
|
||||
driver, err := newMigrationDriver(sqlDb, common.EnvConfig.DbProvider)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create migration driver: %w", err)
|
||||
}
|
||||
|
||||
m, err := migrate.NewWithDatabaseInstance(srcURL, "pocket-id", driver)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create GitHub migration instance: %w", err)
|
||||
}
|
||||
|
||||
if err := m.Migrate(version); err != nil && !errors.Is(err, migrate.ErrNoChange) {
|
||||
return fmt.Errorf("failed to apply GitHub migrations: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// getRequiredMigrationVersion reads the embedded migration files and returns the highest version number found.
|
||||
func getRequiredMigrationVersion(path string) (uint, error) {
|
||||
entries, err := resources.FS.ReadDir(path)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("failed to read migration directory: %w", err)
|
||||
}
|
||||
|
||||
var maxVersion uint
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
continue
|
||||
}
|
||||
name := entry.Name()
|
||||
var version uint
|
||||
n, err := fmt.Sscanf(name, "%d_", &version)
|
||||
if err == nil && n == 1 {
|
||||
if version > maxVersion {
|
||||
maxVersion = version
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return maxVersion, nil
|
||||
}
|
||||
@@ -1,116 +0,0 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// DBTableExists checks if a table exists in the database
|
||||
func DBTableExists(db *gorm.DB, tableName string) (exists bool, err error) {
|
||||
switch db.Name() {
|
||||
case "postgres":
|
||||
query := `SELECT EXISTS (
|
||||
SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'public'
|
||||
AND table_name = ?
|
||||
)`
|
||||
err = db.Raw(query, tableName).Scan(&exists).Error
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
case "sqlite":
|
||||
query := `SELECT COUNT(*) > 0 FROM sqlite_master WHERE type='table' AND name=?`
|
||||
err = db.Raw(query, tableName).Scan(&exists).Error
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
default:
|
||||
return false, fmt.Errorf("unsupported database dialect: %s", db.Name())
|
||||
}
|
||||
|
||||
return exists, nil
|
||||
}
|
||||
|
||||
type DBSchemaColumn struct {
|
||||
Name string
|
||||
Nullable bool
|
||||
}
|
||||
type DBSchemaTableTypes = map[string]DBSchemaColumn
|
||||
type DBSchemaTypes = map[string]DBSchemaTableTypes
|
||||
|
||||
// LoadDBSchemaTypes retrieves the column types for all tables in the DB
|
||||
// Result is a map of "table --> column --> {name: column type name, nullable: boolean}"
|
||||
func LoadDBSchemaTypes(db *gorm.DB) (result DBSchemaTypes, err error) {
|
||||
result = make(DBSchemaTypes)
|
||||
|
||||
switch db.Name() {
|
||||
case "postgres":
|
||||
var rows []struct {
|
||||
TableName string
|
||||
ColumnName string
|
||||
DataType string
|
||||
Nullable bool
|
||||
}
|
||||
err := db.
|
||||
Raw(`
|
||||
SELECT table_name, column_name, data_type, is_nullable = 'YES' AS nullable
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = 'public';
|
||||
`).
|
||||
Scan(&rows).
|
||||
Error
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, r := range rows {
|
||||
t := strings.ToLower(r.DataType)
|
||||
if result[r.TableName] == nil {
|
||||
result[r.TableName] = make(map[string]DBSchemaColumn)
|
||||
}
|
||||
result[r.TableName][r.ColumnName] = DBSchemaColumn{
|
||||
Name: strings.ToLower(t),
|
||||
Nullable: r.Nullable,
|
||||
}
|
||||
}
|
||||
|
||||
case "sqlite":
|
||||
var tables []string
|
||||
err = db.
|
||||
Raw(`SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';`).
|
||||
Scan(&tables).
|
||||
Error
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, table := range tables {
|
||||
var cols []struct {
|
||||
Name string
|
||||
Type string
|
||||
Notnull bool
|
||||
}
|
||||
err := db.
|
||||
Raw(`PRAGMA table_info("` + table + `");`).
|
||||
Scan(&cols).
|
||||
Error
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, c := range cols {
|
||||
if result[table] == nil {
|
||||
result[table] = make(map[string]DBSchemaColumn)
|
||||
}
|
||||
result[table][c.Name] = DBSchemaColumn{
|
||||
Name: strings.ToLower(c.Type),
|
||||
Nullable: !c.Notnull,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported database dialect: %s", db.Name())
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
@@ -28,14 +28,22 @@ func GetKeyProvider(db *gorm.DB, envConfig *common.EnvConfigSchema, instanceID s
return nil, fmt.Errorf("failed to load encryption key: %w", err)
}

keyProvider = &KeyProviderDatabase{}
// Get the key provider
switch envConfig.KeysStorage {
case "file", "":
keyProvider = &KeyProviderFile{}
case "database":
keyProvider = &KeyProviderDatabase{}
default:
return nil, fmt.Errorf("invalid key storage '%s'", envConfig.KeysStorage)
}
err = keyProvider.Init(KeyProviderOpts{
DB: db,
EnvConfig: envConfig,
Kek: kek,
})
if err != nil {
return nil, fmt.Errorf("failed to init key provider: %w", err)
return nil, fmt.Errorf("failed to init key provider of type '%s': %w", envConfig.KeysStorage, err)
}

return keyProvider, nil
202
backend/internal/utils/jwk/key_provider_file.go
Normal file
@@ -0,0 +1,202 @@
|
||||
package jwk
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/lestrrat-go/jwx/v3/jwk"
|
||||
|
||||
"github.com/pocket-id/pocket-id/backend/internal/common"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/utils"
|
||||
cryptoutils "github.com/pocket-id/pocket-id/backend/internal/utils/crypto"
|
||||
)
|
||||
|
||||
const (
|
||||
// PrivateKeyFile is the path in the data/keys folder where the key is stored
|
||||
// This is a JSON file containing a key encoded as JWK
|
||||
PrivateKeyFile = "jwt_private_key.json"
|
||||
|
||||
// PrivateKeyFileEncrypted is the path in the data/keys folder where the encrypted key is stored
|
||||
// This is a encrypted JSON file containing a key encoded as JWK
|
||||
PrivateKeyFileEncrypted = "jwt_private_key.json.enc"
|
||||
)
|
||||
|
||||
type KeyProviderFile struct {
|
||||
envConfig *common.EnvConfigSchema
|
||||
kek []byte
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) Init(opts KeyProviderOpts) error {
|
||||
f.envConfig = opts.EnvConfig
|
||||
f.kek = opts.Kek
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) LoadKey() (jwk.Key, error) {
|
||||
if len(f.kek) > 0 {
|
||||
return f.loadEncryptedKey()
|
||||
}
|
||||
return f.loadKey()
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) SaveKey(key jwk.Key) error {
|
||||
if len(f.kek) > 0 {
|
||||
return f.saveKeyEncrypted(key)
|
||||
}
|
||||
return f.saveKey(key)
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) loadKey() (jwk.Key, error) {
|
||||
var key jwk.Key
|
||||
|
||||
// First, check if we have a JWK file
|
||||
// If we do, then we just load that
|
||||
jwkPath := f.jwkPath()
|
||||
ok, err := utils.FileExists(jwkPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to check if private key file exists at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
if !ok {
|
||||
// File doesn't exist, no key was loaded
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(jwkPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read private key file at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
|
||||
key, err = jwk.ParseKey(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse private key file at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) loadEncryptedKey() (key jwk.Key, err error) {
|
||||
// First, check if we have an encrypted JWK file
|
||||
// If we do, then we just load that
|
||||
encJwkPath := f.encJwkPath()
|
||||
ok, err := utils.FileExists(encJwkPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to check if encrypted private key file exists at path '%s': %w", encJwkPath, err)
|
||||
}
|
||||
if ok {
|
||||
encB64, err := os.ReadFile(encJwkPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read encrypted private key file at path '%s': %w", encJwkPath, err)
|
||||
}
|
||||
|
||||
// Decode from base64
|
||||
enc := make([]byte, base64.StdEncoding.DecodedLen(len(encB64)))
|
||||
n, err := base64.StdEncoding.Decode(enc, encB64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to read encrypted private key file at path '%s': not a valid base64-encoded file: %w", encJwkPath, err)
|
||||
}
|
||||
|
||||
// Decrypt the data
|
||||
data, err := cryptoutils.Decrypt(f.kek, enc[:n], nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to decrypt private key file at path '%s': %w", encJwkPath, err)
|
||||
}
|
||||
|
||||
// Parse the key
|
||||
key, err = jwk.ParseKey(data)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse encrypted private key file at path '%s': %w", encJwkPath, err)
|
||||
}
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
// Check if we have an un-encrypted JWK file
|
||||
key, err = f.loadKey()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to load un-encrypted key file: %w", err)
|
||||
}
|
||||
if key == nil {
|
||||
// No key exists, encrypted or un-encrypted
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// If we are here, we have loaded a key that was un-encrypted
|
||||
// We need to replace the plaintext key with the encrypted one before we return
|
||||
err = f.saveKeyEncrypted(key)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to save encrypted key file: %w", err)
|
||||
}
|
||||
jwkPath := f.jwkPath()
|
||||
err = os.Remove(jwkPath)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to remove un-encrypted key file at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) saveKey(key jwk.Key) error {
|
||||
err := os.MkdirAll(f.envConfig.KeysPath, 0700)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create directory '%s' for key file: %w", f.envConfig.KeysPath, err)
|
||||
}
|
||||
|
||||
jwkPath := f.jwkPath()
|
||||
keyFile, err := os.OpenFile(jwkPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create key file at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
defer keyFile.Close()
|
||||
|
||||
// Write the JSON file to disk
|
||||
err = EncodeJWK(keyFile, key)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write key file at path '%s': %w", jwkPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) saveKeyEncrypted(key jwk.Key) error {
|
||||
err := os.MkdirAll(f.envConfig.KeysPath, 0700)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create directory '%s' for encrypted key file: %w", f.envConfig.KeysPath, err)
|
||||
}
|
||||
|
||||
// Encode the key to JSON
|
||||
data, err := EncodeJWKBytes(key)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to encode key to JSON: %w", err)
|
||||
}
|
||||
|
||||
// Encrypt the key then encode to Base64
|
||||
enc, err := cryptoutils.Encrypt(f.kek, data, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to encrypt key: %w", err)
|
||||
}
|
||||
encB64 := make([]byte, base64.StdEncoding.EncodedLen(len(enc)))
|
||||
base64.StdEncoding.Encode(encB64, enc)
|
||||
|
||||
// Write to disk
|
||||
encJwkPath := f.encJwkPath()
|
||||
err = os.WriteFile(encJwkPath, encB64, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write encrypted key file at path '%s': %w", encJwkPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) jwkPath() string {
|
||||
return filepath.Join(f.envConfig.KeysPath, PrivateKeyFile)
|
||||
}
|
||||
|
||||
func (f *KeyProviderFile) encJwkPath() string {
|
||||
return filepath.Join(f.envConfig.KeysPath, PrivateKeyFileEncrypted)
|
||||
}
|
||||
|
||||
// Compile-time interface check
|
||||
var _ KeyProvider = (*KeyProviderFile)(nil)
|
||||
320
backend/internal/utils/jwk/key_provider_file_test.go
Normal file
@@ -0,0 +1,320 @@
|
||||
package jwk
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/lestrrat-go/jwx/v3/jwk"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/pocket-id/pocket-id/backend/internal/common"
|
||||
"github.com/pocket-id/pocket-id/backend/internal/utils"
|
||||
cryptoutils "github.com/pocket-id/pocket-id/backend/internal/utils/crypto"
|
||||
)
|
||||
|
||||
func TestKeyProviderFile_LoadKey(t *testing.T) {
|
||||
// Generate a test key to use in our tests
|
||||
pk, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||
require.NoError(t, err)
|
||||
|
||||
key, err := jwk.Import(pk)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Run("LoadKey with no existing key", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err := provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Load key when none exists
|
||||
loadedKey, err := provider.LoadKey()
|
||||
require.NoError(t, err)
|
||||
assert.Nil(t, loadedKey, "Expected nil key when no key exists")
|
||||
})
|
||||
|
||||
t.Run("LoadKey with no existing key (with kek)", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err = provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
Kek: makeKEK(t),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Load key when none exists
|
||||
loadedKey, err := provider.LoadKey()
|
||||
require.NoError(t, err)
|
||||
assert.Nil(t, loadedKey, "Expected nil key when no key exists")
|
||||
})
|
||||
|
||||
t.Run("LoadKey with unencrypted key", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err := provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save a key
|
||||
err = provider.SaveKey(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Make sure the key file exists
|
||||
keyPath := filepath.Join(tempDir, PrivateKeyFile)
|
||||
exists, err := utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected key file to exist")
|
||||
|
||||
// Load the key
|
||||
loadedKey, err := provider.LoadKey()
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, loadedKey, "Expected non-nil key when key exists")
|
||||
|
||||
// Verify the loaded key is the same as the original
|
||||
keyBytes, err := EncodeJWKBytes(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key")
|
||||
})
|
||||
|
||||
t.Run("LoadKey with encrypted key", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err = provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
Kek: makeKEK(t),
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save a key (will be encrypted)
|
||||
err = provider.SaveKey(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Make sure the encrypted key file exists
|
||||
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
|
||||
exists, err := utils.FileExists(encKeyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected encrypted key file to exist")
|
||||
|
||||
// Make sure the unencrypted key file does not exist
|
||||
keyPath := filepath.Join(tempDir, PrivateKeyFile)
|
||||
exists, err = utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, exists, "Expected unencrypted key file to not exist")
|
||||
|
||||
// Load the key
|
||||
loadedKey, err := provider.LoadKey()
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, loadedKey, "Expected non-nil key when encrypted key exists")
|
||||
|
||||
// Verify the loaded key is the same as the original
|
||||
keyBytes, err := EncodeJWKBytes(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key")
|
||||
})
|
||||
|
||||
t.Run("LoadKey replaces unencrypted key with encrypted key when kek is provided", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
// First, create an unencrypted key
|
||||
providerNoKek := &KeyProviderFile{}
|
||||
err := providerNoKek.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save an unencrypted key
|
||||
err = providerNoKek.SaveKey(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify unencrypted key exists
|
||||
keyPath := filepath.Join(tempDir, PrivateKeyFile)
|
||||
exists, err := utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected unencrypted key file to exist")
|
||||
|
||||
// Now create a provider with a kek
|
||||
kek := make([]byte, 32)
|
||||
_, err = rand.Read(kek)
|
||||
require.NoError(t, err)
|
||||
|
||||
providerWithKek := &KeyProviderFile{}
|
||||
err = providerWithKek.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
Kek: kek,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Load the key - this should convert the unencrypted key to encrypted
|
||||
loadedKey, err := providerWithKek.LoadKey()
|
||||
require.NoError(t, err)
|
||||
assert.NotNil(t, loadedKey, "Expected non-nil key when loading and converting key")
|
||||
|
||||
// Verify the unencrypted key no longer exists
|
||||
exists, err = utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, exists, "Expected unencrypted key file to be removed")
|
||||
|
||||
// Verify the encrypted key file exists
|
||||
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
|
||||
exists, err = utils.FileExists(encKeyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected encrypted key file to exist after conversion")
|
||||
|
||||
// Verify the key data
|
||||
keyBytes, err := EncodeJWKBytes(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
loadedKeyBytes, err := EncodeJWKBytes(loadedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, keyBytes, loadedKeyBytes, "Expected loaded key to match original key after conversion")
|
||||
})
|
||||
}
|
||||
|
||||
func TestKeyProviderFile_SaveKey(t *testing.T) {
|
||||
// Generate a test key to use in our tests
|
||||
pk, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
|
||||
require.NoError(t, err)
|
||||
|
||||
key, err := jwk.Import(pk)
|
||||
require.NoError(t, err)
|
||||
|
||||
t.Run("SaveKey unencrypted", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err := provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save the key
|
||||
err = provider.SaveKey(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the key file exists
|
||||
keyPath := filepath.Join(tempDir, PrivateKeyFile)
|
||||
exists, err := utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected key file to exist")
|
||||
|
||||
// Verify the content of the key file
|
||||
data, err := os.ReadFile(keyPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
parsedKey, err := jwk.ParseKey(data)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Compare the saved key with the original
|
||||
keyBytes, err := EncodeJWKBytes(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
parsedKeyBytes, err := EncodeJWKBytes(parsedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, keyBytes, parsedKeyBytes, "Expected saved key to match original key")
|
||||
})
|
||||
|
||||
t.Run("SaveKey encrypted", func(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
|
||||
// Generate a 64-byte kek
|
||||
kek := makeKEK(t)
|
||||
|
||||
provider := &KeyProviderFile{}
|
||||
err = provider.Init(KeyProviderOpts{
|
||||
EnvConfig: &common.EnvConfigSchema{
|
||||
KeysPath: tempDir,
|
||||
},
|
||||
Kek: kek,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Save the key (will be encrypted)
|
||||
err = provider.SaveKey(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the encrypted key file exists
|
||||
encKeyPath := filepath.Join(tempDir, PrivateKeyFileEncrypted)
|
||||
exists, err := utils.FileExists(encKeyPath)
|
||||
require.NoError(t, err)
|
||||
assert.True(t, exists, "Expected encrypted key file to exist")
|
||||
|
||||
// Verify the unencrypted key file doesn't exist
|
||||
keyPath := filepath.Join(tempDir, PrivateKeyFile)
|
||||
exists, err = utils.FileExists(keyPath)
|
||||
require.NoError(t, err)
|
||||
assert.False(t, exists, "Expected unencrypted key file to not exist")
|
||||
|
||||
// Manually decrypt the encrypted key file to verify it contains the correct key
|
||||
encB64, err := os.ReadFile(encKeyPath)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Decode from base64
|
||||
enc := make([]byte, base64.StdEncoding.DecodedLen(len(encB64)))
|
||||
n, err := base64.StdEncoding.Decode(enc, encB64)
|
||||
require.NoError(t, err)
|
||||
enc = enc[:n] // Trim any padding
|
||||
|
||||
// Decrypt the data
|
||||
data, err := cryptoutils.Decrypt(kek, enc, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Parse the key
|
||||
parsedKey, err := jwk.ParseKey(data)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Compare the decrypted key with the original
|
||||
keyBytes, err := EncodeJWKBytes(key)
|
||||
require.NoError(t, err)
|
||||
|
||||
parsedKeyBytes, err := EncodeJWKBytes(parsedKey)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, keyBytes, parsedKeyBytes, "Expected decrypted key to match original key")
|
||||
})
|
||||
}
|
||||
|
||||
func makeKEK(t *testing.T) []byte {
|
||||
t.Helper()
|
||||
|
||||
// Generate a 32-byte kek
|
||||
kek := make([]byte, 32)
|
||||
_, err := rand.Read(kek)
|
||||
require.NoError(t, err)
|
||||
return kek
|
||||
}
|
||||
@@ -38,7 +38,6 @@ func (r *ServiceRunner) Run(ctx context.Context) error {
|
||||
|
||||
// Ignore context canceled errors here as they generally indicate that the service is stopping
|
||||
if rErr != nil && !errors.Is(rErr, context.Canceled) {
|
||||
cancel()
|
||||
errCh <- rErr
|
||||
return
|
||||
}
|
||||
|
||||
@@ -61,26 +61,6 @@ func TestServiceRunner_Run(t *testing.T) {
|
||||
require.ErrorIs(t, err, expectedErr)
|
||||
})
|
||||
|
||||
t.Run("service error cancels others", func(t *testing.T) {
|
||||
expectedErr := errors.New("boom")
|
||||
errorService := func(ctx context.Context) error {
|
||||
return expectedErr
|
||||
}
|
||||
waitingService := func(ctx context.Context) error {
|
||||
<-ctx.Done()
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
runner := NewServiceRunner(errorService, waitingService)
|
||||
|
||||
ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
|
||||
defer cancel()
|
||||
|
||||
err := runner.Run(ctx)
|
||||
require.Error(t, err)
|
||||
require.ErrorIs(t, err, expectedErr)
|
||||
})
|
||||
|
||||
t.Run("context canceled", func(t *testing.T) {
|
||||
// Create a service that waits until context is canceled
|
||||
waitingService := func(ctx context.Context) error {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
|
||||
../../../tests/database.json
|
||||
@@ -1,72 +0,0 @@
|
||||
package resources
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// This test is meant to enforce that for every new migration added, a file with the same migration number exists for all supported databases
|
||||
// This is necessary to ensure import/export works correctly
|
||||
// Note: if a migration is not needed for a database, ensure there's a file with an empty (no-op) migration (e.g. even just a comment)
|
||||
func TestMigrationsMatchingVersions(t *testing.T) {
|
||||
// We can ignore migrations with version below 20251115000000
|
||||
const ignoreBefore = 20251115000000
|
||||
|
||||
// Scan postgres migrations
|
||||
postgresMigrations := scanMigrations(t, FS, "migrations/postgres", ignoreBefore)
|
||||
|
||||
// Scan sqlite migrations
|
||||
sqliteMigrations := scanMigrations(t, FS, "migrations/sqlite", ignoreBefore)
|
||||
|
||||
// Sort both lists for consistent comparison
|
||||
slices.Sort(postgresMigrations)
|
||||
slices.Sort(sqliteMigrations)
|
||||
|
||||
// Compare the lists
|
||||
assert.Equal(t, postgresMigrations, sqliteMigrations, "Migration versions must match between Postgres and SQLite")
|
||||
}
|
||||
|
||||
// scanMigrations scans a directory for migration files and returns a list of versions
|
||||
func scanMigrations(t *testing.T, fs embed.FS, dir string, ignoreBefore int64) []int64 {
|
||||
t.Helper()
|
||||
|
||||
entries, err := fs.ReadDir(dir)
|
||||
require.NoErrorf(t, err, "Failed to read directory '%s'", dir)
|
||||
|
||||
// Divide by 2 because of up and down files
|
||||
versions := make([]int64, 0, len(entries)/2)
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
filename := entry.Name()
|
||||
|
||||
// Only consider .up.sql files
|
||||
if !strings.HasSuffix(filename, ".up.sql") {
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract version from filename (format: <version>_<anything>.up.sql)
|
||||
versionString, _, ok := strings.Cut(filename, "_")
|
||||
require.Truef(t, ok, "Migration file has unexpected format: %s", filename)
|
||||
|
||||
version, err := strconv.ParseInt(versionString, 10, 64)
|
||||
require.NoErrorf(t, err, "Failed to parse version from filename '%s'", filename)
|
||||
|
||||
// Exclude migrations with version below ignoreBefore
|
||||
if version < ignoreBefore {
|
||||
continue
|
||||
}
|
||||
|
||||
versions = append(versions, version)
|
||||
}
|
||||
|
||||
return versions
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 291 KiB After Width: | Height: | Size: 221 KiB |
BIN
backend/resources/images/logoEmail.png
Normal file
BIN
backend/resources/images/logoEmail.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 566 B |
@@ -1 +0,0 @@
|
||||
-- No-op in Postgres
|
||||
@@ -1 +0,0 @@
|
||||
-- No-op in Postgres
|
||||
@@ -1 +0,0 @@
|
||||
DROP TABLE oidc_apis;
|
||||
@@ -1,11 +0,0 @@
|
||||
CREATE TABLE oidc_apis
|
||||
(
|
||||
id UUID PRIMARY KEY NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
identifier TEXT NOT NULL,
|
||||
data JSONB NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_oidc_apis_identifier_key ON oidc_apis(identifier);
|
||||
CREATE INDEX idx_oidc_apis_name_key ON oidc_apis(name);
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE one_time_access_tokens DROP COLUMN device_token;
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE one_time_access_tokens ADD COLUMN device_token VARCHAR(16);
|
||||
@@ -0,0 +1 @@
|
||||
DROP TABLE signup_tokens_user_groups;
|
||||
@@ -0,0 +1,8 @@
|
||||
CREATE TABLE signup_tokens_user_groups
|
||||
(
|
||||
signup_token_id UUID NOT NULL,
|
||||
user_group_id UUID NOT NULL,
|
||||
PRIMARY KEY (signup_token_id, user_group_id),
|
||||
FOREIGN KEY (signup_token_id) REFERENCES signup_tokens (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_group_id) REFERENCES user_groups (id) ON DELETE CASCADE
|
||||
);
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE oidc_clients DROP COLUMN is_group_restricted;
|
||||
@@ -0,0 +1,10 @@
|
||||
ALTER TABLE oidc_clients
|
||||
ADD COLUMN is_group_restricted boolean NOT NULL DEFAULT false;
|
||||
|
||||
UPDATE oidc_clients oc
|
||||
SET is_group_restricted =
|
||||
EXISTS (
|
||||
SELECT 1
|
||||
FROM oidc_clients_allowed_user_groups a
|
||||
WHERE a.oidc_client_id = oc.id
|
||||
);
|
||||
@@ -1,133 +0,0 @@
|
||||
PRAGMA foreign_keys = OFF;
|
||||
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE users_old
|
||||
(
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
created_at DATETIME,
|
||||
username TEXT COLLATE NOCASE NOT NULL UNIQUE,
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
first_name TEXT,
|
||||
last_name TEXT NOT NULL,
|
||||
display_name TEXT NOT NULL,
|
||||
is_admin NUMERIC DEFAULT 0 NOT NULL,
|
||||
ldap_id TEXT,
|
||||
locale TEXT,
|
||||
disabled NUMERIC DEFAULT 0 NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO users_old (
|
||||
id,
|
||||
created_at,
|
||||
username,
|
||||
email,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
is_admin,
|
||||
ldap_id,
|
||||
locale,
|
||||
disabled
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
username,
|
||||
email,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
CASE WHEN is_admin THEN 1 ELSE 0 END,
|
||||
ldap_id,
|
||||
locale,
|
||||
CASE WHEN disabled THEN 1 ELSE 0 END
|
||||
FROM users;
|
||||
|
||||
DROP TABLE users;
|
||||
|
||||
ALTER TABLE users_old RENAME TO users;
|
||||
|
||||
CREATE UNIQUE INDEX users_ldap_id ON users (ldap_id);
|
||||
|
||||
|
||||
|
||||
CREATE TABLE webauthn_credentials_old
|
||||
(
|
||||
id TEXT PRIMARY KEY,
|
||||
created_at DATETIME NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
credential_id TEXT NOT NULL UNIQUE,
|
||||
public_key BLOB NOT NULL,
|
||||
attestation_type TEXT NOT NULL,
|
||||
transport BLOB NOT NULL,
|
||||
user_id TEXT REFERENCES users ON DELETE CASCADE,
|
||||
backup_eligible NUMERIC DEFAULT 0 NOT NULL,
|
||||
backup_state NUMERIC DEFAULT 0 NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO webauthn_credentials_old (
|
||||
id,
|
||||
created_at,
|
||||
name,
|
||||
credential_id,
|
||||
public_key,
|
||||
attestation_type,
|
||||
transport,
|
||||
user_id,
|
||||
backup_eligible,
|
||||
backup_state
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
name,
|
||||
credential_id,
|
||||
public_key,
|
||||
attestation_type,
|
||||
transport,
|
||||
user_id,
|
||||
CASE WHEN backup_eligible THEN 1 ELSE 0 END,
|
||||
CASE WHEN backup_state THEN 1 ELSE 0 END
|
||||
FROM webauthn_credentials;
|
||||
|
||||
DROP TABLE webauthn_credentials;
|
||||
|
||||
ALTER TABLE webauthn_credentials_old RENAME TO webauthn_credentials;
|
||||
|
||||
|
||||
|
||||
CREATE TABLE webauthn_sessions_old
|
||||
(
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
created_at DATETIME,
|
||||
challenge TEXT NOT NULL UNIQUE,
|
||||
expires_at DATETIME NOT NULL,
|
||||
user_verification TEXT NOT NULL,
|
||||
credential_params TEXT DEFAULT '[]' NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO webauthn_sessions_old (
|
||||
id,
|
||||
created_at,
|
||||
challenge,
|
||||
expires_at,
|
||||
user_verification,
|
||||
credential_params
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
challenge,
|
||||
expires_at,
|
||||
user_verification,
|
||||
credential_params
|
||||
FROM webauthn_sessions;
|
||||
|
||||
DROP TABLE webauthn_sessions;
|
||||
|
||||
ALTER TABLE webauthn_sessions_old RENAME TO webauthn_sessions;
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
@@ -1,144 +0,0 @@
|
||||
PRAGMA foreign_keys = OFF;
|
||||
|
||||
BEGIN;
|
||||
|
||||
-- 1. Create a new table with BOOLEAN columns
|
||||
CREATE TABLE users_new
|
||||
(
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
created_at DATETIME,
|
||||
username TEXT COLLATE NOCASE NOT NULL UNIQUE,
|
||||
email TEXT NOT NULL UNIQUE,
|
||||
first_name TEXT,
|
||||
last_name TEXT NOT NULL,
|
||||
display_name TEXT NOT NULL,
|
||||
is_admin BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
ldap_id TEXT,
|
||||
locale TEXT,
|
||||
disabled BOOLEAN DEFAULT FALSE NOT NULL
|
||||
);
|
||||
|
||||
-- 2. Copy all existing data, converting numeric bools to real booleans
|
||||
INSERT INTO users_new (
|
||||
id,
|
||||
created_at,
|
||||
username,
|
||||
email,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
is_admin,
|
||||
ldap_id,
|
||||
locale,
|
||||
disabled
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
username,
|
||||
email,
|
||||
first_name,
|
||||
last_name,
|
||||
display_name,
|
||||
CASE WHEN is_admin != 0 THEN TRUE ELSE FALSE END,
|
||||
ldap_id,
|
||||
locale,
|
||||
CASE WHEN disabled != 0 THEN TRUE ELSE FALSE END
|
||||
FROM users;
|
||||
|
||||
-- 3. Drop old table
|
||||
DROP TABLE users;
|
||||
|
||||
-- 4. Rename new table to original name
|
||||
ALTER TABLE users_new RENAME TO users;
|
||||
|
||||
-- 5. Recreate index
|
||||
CREATE UNIQUE INDEX users_ldap_id ON users (ldap_id);
|
||||
|
||||
-- 6. Create temporary table with changed credential_id type to BLOB
|
||||
CREATE TABLE webauthn_credentials_dg_tmp
|
||||
(
|
||||
id TEXT PRIMARY KEY,
|
||||
created_at DATETIME NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
credential_id BLOB NOT NULL UNIQUE,
|
||||
public_key BLOB NOT NULL,
|
||||
attestation_type TEXT NOT NULL,
|
||||
transport BLOB NOT NULL,
|
||||
user_id TEXT REFERENCES users ON DELETE CASCADE,
|
||||
backup_eligible BOOLEAN DEFAULT FALSE NOT NULL,
|
||||
backup_state BOOLEAN DEFAULT FALSE NOT NULL
|
||||
);
|
||||
|
||||
-- 7. Copy existing data into the temporary table
|
||||
INSERT INTO webauthn_credentials_dg_tmp (
|
||||
id,
|
||||
created_at,
|
||||
name,
|
||||
credential_id,
|
||||
public_key,
|
||||
attestation_type,
|
||||
transport,
|
||||
user_id,
|
||||
backup_eligible,
|
||||
backup_state
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
name,
|
||||
credential_id,
|
||||
public_key,
|
||||
attestation_type,
|
||||
transport,
|
||||
user_id,
|
||||
backup_eligible,
|
||||
backup_state
|
||||
FROM webauthn_credentials;
|
||||
|
||||
-- 8. Drop old table
|
||||
DROP TABLE webauthn_credentials;
|
||||
|
||||
-- 9. Rename temporary table to original name
|
||||
ALTER TABLE webauthn_credentials_dg_tmp
|
||||
RENAME TO webauthn_credentials;
|
||||
|
||||
-- 10. Create temporary table with credential_params type changed to BLOB
|
||||
CREATE TABLE webauthn_sessions_dg_tmp
|
||||
(
|
||||
id TEXT NOT NULL PRIMARY KEY,
|
||||
created_at DATETIME,
|
||||
challenge TEXT NOT NULL UNIQUE,
|
||||
expires_at DATETIME NOT NULL,
|
||||
user_verification TEXT NOT NULL,
|
||||
credential_params BLOB DEFAULT '[]' NOT NULL
|
||||
);
|
||||
|
||||
-- 11. Copy existing data into the temporary sessions table
|
||||
INSERT INTO webauthn_sessions_dg_tmp (
|
||||
id,
|
||||
created_at,
|
||||
challenge,
|
||||
expires_at,
|
||||
user_verification,
|
||||
credential_params
|
||||
)
|
||||
SELECT
|
||||
id,
|
||||
created_at,
|
||||
challenge,
|
||||
expires_at,
|
||||
user_verification,
|
||||
credential_params
|
||||
FROM webauthn_sessions;
|
||||
|
||||
-- 12. Drop old table
|
||||
DROP TABLE webauthn_sessions;
|
||||
|
||||
-- 13. Rename temporary sessions table to original name
|
||||
ALTER TABLE webauthn_sessions_dg_tmp
|
||||
RENAME TO webauthn_sessions;
|
||||
|
||||
COMMIT;
|
||||
|
||||
PRAGMA foreign_keys = ON;
|
||||
@@ -1 +0,0 @@
|
||||
DROP TABLE oidc_apis;
|
||||
@@ -1,11 +0,0 @@
|
||||
CREATE TABLE oidc_apis
|
||||
(
|
||||
id UUID PRIMARY KEY NOT NULL,
|
||||
created_at DATETIME NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
identifier TEXT NOT NULL,
|
||||
data TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX idx_oidc_apis_identifier_key ON oidc_apis(identifier);
|
||||
CREATE INDEX idx_oidc_apis_name_key ON oidc_apis(name);
|
||||
@@ -0,0 +1,7 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE one_time_access_tokens DROP COLUMN device_token;
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,7 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE one_time_access_tokens ADD COLUMN device_token TEXT;
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,7 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN;
|
||||
|
||||
DROP TABLE signup_tokens_user_groups;
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,14 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN;
|
||||
|
||||
CREATE TABLE signup_tokens_user_groups
|
||||
(
|
||||
signup_token_id TEXT NOT NULL,
|
||||
user_group_id TEXT NOT NULL,
|
||||
PRIMARY KEY (signup_token_id, user_group_id),
|
||||
FOREIGN KEY (signup_token_id) REFERENCES signup_tokens (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_group_id) REFERENCES user_groups (id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,7 @@
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE oidc_clients DROP COLUMN is_group_restricted;
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys=ON;
|
||||
@@ -0,0 +1,13 @@
|
||||
PRAGMA foreign_keys= OFF;
|
||||
BEGIN;
|
||||
|
||||
ALTER TABLE oidc_clients
|
||||
ADD COLUMN is_group_restricted BOOLEAN NOT NULL DEFAULT 0;
|
||||
|
||||
UPDATE oidc_clients
|
||||
SET is_group_restricted = (SELECT CASE WHEN COUNT(*) > 0 THEN 1 ELSE 0 END
|
||||
FROM oidc_clients_allowed_user_groups
|
||||
WHERE oidc_clients_allowed_user_groups.oidc_client_id = oidc_clients.id);
|
||||
|
||||
COMMIT;
|
||||
PRAGMA foreign_keys= ON;
|
||||
@@ -10,16 +10,16 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@react-email/components": "1.0.1",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "^19.2.1",
|
||||
"react-dom": "^19.2.1",
|
||||
"tailwind-merge": "^3.4.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@react-email/preview-server": "5.0.5",
|
||||
"@types/node": "^24.10.1",
|
||||
"@react-email/preview-server": "5.0.7",
|
||||
"@types/node": "^24.10.2",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"react-email": "5.0.5",
|
||||
"tsx": "^4.20.6"
|
||||
"react-email": "5.0.7",
|
||||
"tsx": "^4.21.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Nastavení",
|
||||
"update_pocket_id": "Aktualizovat Pocket ID",
|
||||
"powered_by": "Poháněno pomocí",
|
||||
"see_your_account_activities_from_the_last_3_months": "Podívejte se na aktivity Vašeho účtu za poslední 3 měsíce.",
|
||||
"see_your_recent_account_activities": "Zobrazte aktivity svého účtu v rámci nastavené doby uchovávání.",
|
||||
"time": "Čas",
|
||||
"event": "Událost",
|
||||
"approximate_location": "Přibližná poloha",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Jste si jisti, že chcete vytvořit nový client secret? Dosavadní bude zneplatněn.",
|
||||
"generate": "Generovat",
|
||||
"new_client_secret_created_successfully": "Nový client secret byl úspěšně vytvořen",
|
||||
"allowed_user_groups_updated_successfully": "Povolené skupiny uživatelů byly úspěšně aktualizovány",
|
||||
"oidc_client_name": "OIDC Klient {name}",
|
||||
"client_id": "ID klienta",
|
||||
"client_secret": "Tajný klíč",
|
||||
"show_more_details": "Zobrazit další podrobnosti",
|
||||
"allowed_user_groups": "Povolené skupiny uživatelů",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Přidejte do tohoto klienta uživatelské skupiny, abyste omezili přístup pouze pro uživatele v těchto skupinách. Pokud nejsou vybrány žádné skupiny uživatelů, všichni uživatelé budou mít přístup k tomuto klientovi.",
|
||||
"allowed_user_groups_description": "Vyberte skupiny uživatelů, jejichž členové se mohou přihlásit k tomuto klientovi.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Nejsou použita žádná omezení uživatelských skupin. K tomuto klientovi se může přihlásit kterýkoli uživatel.",
|
||||
"unrestrict": "Bez omezení",
|
||||
"restrict": "Omezit",
|
||||
"user_groups_restriction_updated_successfully": "Omezení uživatelských skupin bylo úspěšně aktualizováno",
|
||||
"allowed_user_groups_updated_successfully": "Povolené skupiny uživatelů byly úspěšně aktualizovány",
|
||||
"favicon": "Favicona",
|
||||
"light_mode_logo": "Logo světlého režimu",
|
||||
"dark_mode_logo": "Logo tmavého režimu",
|
||||
"email_logo": "E-mailové logo",
|
||||
"background_image": "Obrázek na pozadí",
|
||||
"language": "Jazyk",
|
||||
"reset_profile_picture_question": "Resetovat profilový obrázek?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Všichni klienti",
|
||||
"all_locations": "Všechna místa",
|
||||
"global_audit_log": "Globální protokol auditu",
|
||||
"see_all_account_activities_from_the_last_3_months": "Zobrazit veškerou aktivitu uživatele za poslední 3 měsíce.",
|
||||
"see_all_recent_account_activities": "Zobrazit aktivity účtu všech uživatelů během nastavené doby uchovávání.",
|
||||
"token_sign_in": "Přihlášení tokenem",
|
||||
"client_authorization": "Autorizace klienta",
|
||||
"new_client_authorization": "Nová autorizace klienta",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Výchozí profilový obrázek",
|
||||
"light": "Světlo",
|
||||
"dark": "Tmavá",
|
||||
"system": "Systém"
|
||||
"system": "Systém",
|
||||
"signup_token_user_groups_description": "Automaticky přiřaďte tyto skupiny uživatelům, kteří se zaregistrují pomocí tohoto tokenu.",
|
||||
"allowed_oidc_clients": "Povolené klienty OIDC",
|
||||
"allowed_oidc_clients_description": "Vyberte klienty OIDC, ke kterým se mohou členové této skupiny uživatelů přihlašovat.",
|
||||
"unrestrict_oidc_client": "Bez omezení {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Opravdu chcete zrušit omezení klienta OIDC <b>{clientName}</b>? Tím se odstraní všechna přiřazení skupin pro tohoto klienta a přihlásit se bude moci kterýkoli uživatel.",
|
||||
"allowed_oidc_clients_updated_successfully": "Povolené klienty OIDC úspěšně aktualizovány"
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Indstillinger",
|
||||
"update_pocket_id": "Opdater Pocket ID",
|
||||
"powered_by": "Drevet af",
|
||||
"see_your_account_activities_from_the_last_3_months": "Se dine kontoaktiviteter fra de sidste 3 måneder.",
|
||||
"see_your_recent_account_activities": "Se dine kontoaktiviteter inden for den konfigurerede opbevaringsperiode.",
|
||||
"time": "Tid",
|
||||
"event": "Hændelse",
|
||||
"approximate_location": "Omtrentlig placering",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Vil du oprette en ny client secret? Den gamle bliver ugyldig og kan ikke længere bruges.",
|
||||
"generate": "Generér",
|
||||
"new_client_secret_created_successfully": "Ny client secret blev oprettet",
|
||||
"allowed_user_groups_updated_successfully": "Tilladte brugergrupper blev opdateret",
|
||||
"oidc_client_name": "OIDC-klient {name}",
|
||||
"client_id": "Client ID",
|
||||
"client_secret": "Client secret",
|
||||
"show_more_details": "Vis flere detaljer",
|
||||
"allowed_user_groups": "Tilladte brugergrupper",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Tilføj brugergrupper til denne klient for at begrænse adgangen til brugere i disse grupper. Hvis ingen brugergrupper er valgt, vil alle brugere have adgang til klienten.",
|
||||
"allowed_user_groups_description": "Vælg de brugergrupper, hvis medlemmer har tilladelse til at logge på denne klient.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Der gælder ingen begrænsninger for brugergrupper. Alle brugere kan logge på denne klient.",
|
||||
"unrestrict": "Ubegrænset",
|
||||
"restrict": "Begræns",
|
||||
"user_groups_restriction_updated_successfully": "Begrænsning af brugergrupper opdateret med succes",
|
||||
"allowed_user_groups_updated_successfully": "Tilladte brugergrupper blev opdateret",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "Logo til lys tilstand",
|
||||
"dark_mode_logo": "Logo til mørk tilstand",
|
||||
"email_logo": "E-mail-logo",
|
||||
"background_image": "Baggrundsbillede",
|
||||
"language": "Sprog",
|
||||
"reset_profile_picture_question": "Nulstil profilbillede?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Alle klienter",
|
||||
"all_locations": "Alle lokationer",
|
||||
"global_audit_log": "Global aktivitetslog",
|
||||
"see_all_account_activities_from_the_last_3_months": "Se al brugeraktivitet for de seneste 3 måneder.",
|
||||
"see_all_recent_account_activities": "Se alle brugeres kontoaktiviteter i den angivne opbevaringsperiode.",
|
||||
"token_sign_in": "Token-login",
|
||||
"client_authorization": "Godkendelse af klient",
|
||||
"new_client_authorization": "Ny klientgodkendelse",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Standardprofilbillede",
|
||||
"light": "Lys",
|
||||
"dark": "Mørk",
|
||||
"system": "System"
|
||||
"system": "System",
|
||||
"signup_token_user_groups_description": "Tildel automatisk disse grupper til brugere, der tilmelder sig ved hjælp af denne token.",
|
||||
"allowed_oidc_clients": "Tilladte OIDC-klienter",
|
||||
"allowed_oidc_clients_description": "Vælg de OIDC-klienter, som medlemmer af denne brugergruppe har tilladelse til at logge på.",
|
||||
"unrestrict_oidc_client": "Ubegrænset {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Er du sikker på, at du vil ophæve begrænsningen for OIDC-klienten <b>{clientName}</b>? Dette vil fjerne alle gruppetildelinger for denne klient, og alle brugere vil kunne logge ind.",
|
||||
"allowed_oidc_clients_updated_successfully": "Tilladte OIDC-klienter opdateret med succes"
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
"profile_picture": "Profilbild",
|
||||
"profile_picture_is_managed_by_ldap_server": "Das Profilbild wird vom LDAP-Server verwaltet und kann hier nicht geändert werden.",
|
||||
"click_profile_picture_to_upload_custom": "Klicke auf das Profilbild, um ein benutzerdefiniertes Bild aus deinen Dateien hochzuladen.",
|
||||
"image_should_be_in_format": "Das Bild sollte im PNG-, JPEG- oder WEBP-Format vorliegen.",
|
||||
"image_should_be_in_format": "Das Bild sollte im PNG-, JPEG- oder WEBP-Format sein.",
|
||||
"items_per_page": "Einträge pro Seite",
|
||||
"no_items_found": "Keine Einträge gefunden",
|
||||
"select_items": "Elemente auswählen...",
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Einstellungen",
|
||||
"update_pocket_id": "Pocket ID aktualisieren",
|
||||
"powered_by": "Powered by",
|
||||
"see_your_account_activities_from_the_last_3_months": "Sieh dir deine Kontoaktivitäten der letzten drei Monate an.",
|
||||
"see_your_recent_account_activities": "Schau dir deine Kontoaktivitäten innerhalb der eingestellten Aufbewahrungsfrist an.",
|
||||
"time": "Zeit",
|
||||
"event": "Ereignis",
|
||||
"approximate_location": "Ungefährer Standort",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Bist du sicher, dass du ein neues Client-Geheimnis erstellen möchtest? Das alte Client-Geheimnis wird dadurch ungültig.",
|
||||
"generate": "Generieren",
|
||||
"new_client_secret_created_successfully": "Neues Client-Geheimnis erfolgreich erstellt",
|
||||
"allowed_user_groups_updated_successfully": "Erlaubte Benutzergruppen erfolgreich aktualisiert",
|
||||
"oidc_client_name": "OIDC-Client {name}",
|
||||
"client_id": "Client-ID",
|
||||
"client_secret": "Client-Geheimnis",
|
||||
"show_more_details": "Mehr Details anzeigen",
|
||||
"allowed_user_groups": "Erlaubte Benutzergruppen",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Füge diesem Client Benutzergruppen hinzu, um den Zugriff auf Benutzer in diesen Gruppen zu beschränken. Wenn keine Benutzergruppen ausgewählt sind, werden alle Benutzer Zugriff auf diesen Client haben.",
|
||||
"allowed_user_groups_description": "Wähle die Benutzergruppen aus, deren Mitglieder sich bei diesem Client anmelden dürfen.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Es gibt keine Einschränkungen für Benutzergruppen. Jeder Benutzer kann sich bei diesem Client anmelden.",
|
||||
"unrestrict": "Uneingeschränkt",
|
||||
"restrict": "Einschränken",
|
||||
"user_groups_restriction_updated_successfully": "Benutzergruppenbeschränkung erfolgreich aktualisiert",
|
||||
"allowed_user_groups_updated_successfully": "Erlaubte Benutzergruppen erfolgreich aktualisiert",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "Hell-Modus Logo",
|
||||
"dark_mode_logo": "Dunkel-Modus Logo",
|
||||
"email_logo": "E-Mail-Logo",
|
||||
"background_image": "Hintergrundbild",
|
||||
"language": "Sprache",
|
||||
"reset_profile_picture_question": "Profilbild zurücksetzen?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Alle Clients",
|
||||
"all_locations": "Alle Orte",
|
||||
"global_audit_log": "Globaler Aktivitäts-Log",
|
||||
"see_all_account_activities_from_the_last_3_months": "Sieh dir alle Benutzeraktivitäten der letzten 3 Monate an.",
|
||||
"see_all_recent_account_activities": "Schau dir die Kontoaktivitäten von allen Nutzern während des festgelegten Aufbewahrungszeitraums an.",
|
||||
"token_sign_in": "Token-Anmeldung",
|
||||
"client_authorization": "Client-Autorisierung",
|
||||
"new_client_authorization": "Neue Client-Autorisierung",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Standard-Profilbild",
|
||||
"light": "Hell",
|
||||
"dark": "Dunkel",
|
||||
"system": "System"
|
||||
"system": "System",
|
||||
"signup_token_user_groups_description": "Weise diese Gruppen automatisch den Leuten zu, die sich mit diesem Token anmelden.",
|
||||
"allowed_oidc_clients": "Zugelassene OIDC-Clients",
|
||||
"allowed_oidc_clients_description": "Wähle die OIDC-Clients aus, bei denen sich Mitglieder dieser Benutzergruppe anmelden dürfen.",
|
||||
"unrestrict_oidc_client": "Uneingeschränkt {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Bist du sicher, dass du die Beschränkung für den OIDC-Client aufheben willst? <b>{clientName}</b>? Dadurch werden alle Gruppenzuweisungen für diesen Client entfernt und jeder Benutzer kann sich anmelden.",
|
||||
"allowed_oidc_clients_updated_successfully": "Zugelassene OIDC-Clients erfolgreich aktualisiert"
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Settings",
|
||||
"update_pocket_id": "Update Pocket ID",
|
||||
"powered_by": "Powered by",
|
||||
"see_your_account_activities_from_the_last_3_months": "See your account activities from the last 3 months.",
|
||||
"see_your_recent_account_activities": "See your account activities within the configured retention period.",
|
||||
"time": "Time",
|
||||
"event": "Event",
|
||||
"approximate_location": "Approximate Location",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Are you sure you want to create a new client secret? The old one will be invalidated.",
|
||||
"generate": "Generate",
|
||||
"new_client_secret_created_successfully": "New client secret created successfully",
|
||||
"allowed_user_groups_updated_successfully": "Allowed user groups updated successfully",
|
||||
"oidc_client_name": "OIDC Client {name}",
|
||||
"client_id": "Client ID",
|
||||
"client_secret": "Client secret",
|
||||
"show_more_details": "Show more details",
|
||||
"allowed_user_groups": "Allowed User Groups",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Add user groups to this client to restrict access to users in these groups. If no user groups are selected, all users will have access to this client.",
|
||||
"allowed_user_groups_description": "Select the user groups whose members are allowed to sign in to this client.",
|
||||
"allowed_user_groups_status_unrestricted_description": "No user group restrictions are applied. Any user can sign in to this client.",
|
||||
"unrestrict": "Unrestrict",
|
||||
"restrict": "Restrict",
|
||||
"user_groups_restriction_updated_successfully": "User groups restriction updated successfully",
|
||||
"allowed_user_groups_updated_successfully": "Allowed user groups updated successfully",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "Light Mode Logo",
|
||||
"dark_mode_logo": "Dark Mode Logo",
|
||||
"email_logo": "Email Logo",
|
||||
"background_image": "Background Image",
|
||||
"language": "Language",
|
||||
"reset_profile_picture_question": "Reset profile picture?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "All Clients",
|
||||
"all_locations": "All Locations",
|
||||
"global_audit_log": "Global Audit Log",
|
||||
"see_all_account_activities_from_the_last_3_months": "See all user activity for the last 3 months.",
|
||||
"see_all_recent_account_activities": "View the account activities of all users during the set retention period.",
|
||||
"token_sign_in": "Token Sign In",
|
||||
"client_authorization": "Client Authorization",
|
||||
"new_client_authorization": "New Client Authorization",
|
||||
@@ -469,5 +474,14 @@
|
||||
"default_profile_picture": "Default Profile Picture",
|
||||
"light": "Light",
|
||||
"dark": "Dark",
|
||||
"system": "System"
|
||||
"system": "System",
|
||||
"signup_token_user_groups_description": "Automatically assign these groups to users who sign up using this token.",
|
||||
"allowed_oidc_clients": "Allowed OIDC Clients",
|
||||
"allowed_oidc_clients_description": "Select the OIDC clients that members of this user group are allowed to sign in to.",
|
||||
"unrestrict_oidc_client": "Unrestrict {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Are you sure you want to unrestrict the OIDC client <b>{clientName}</b>? This will remove all group assignments for this client and any user will be able to sign in.",
|
||||
"allowed_oidc_clients_updated_successfully": "Allowed OIDC clients updated successfully",
|
||||
"yes": "Yes",
|
||||
"no": "No",
|
||||
"restricted": "Restricted"
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Configuración",
|
||||
"update_pocket_id": "Actualizar Pocket ID",
|
||||
"powered_by": "Producido por Pocket ID",
|
||||
"see_your_account_activities_from_the_last_3_months": "Vea las actividad de su cuenta de los últimos 3 meses.",
|
||||
"see_your_recent_account_activities": "Consulta las actividades de tu cuenta dentro del período de retención configurado.",
|
||||
"time": "Tiempo",
|
||||
"event": "Evento",
|
||||
"approximate_location": "Ubicación aproximada",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "¿Estás seguro de que deseas crear un nuevo secreto de cliente? El antiguo quedará invalidado.",
|
||||
"generate": "Generar",
|
||||
"new_client_secret_created_successfully": "Se ha creado correctamente un nuevo secreto de cliente.",
|
||||
"allowed_user_groups_updated_successfully": "Grupos de usuarios permitidos actualizados correctamente",
|
||||
"oidc_client_name": "Cliente OIDC {name}",
|
||||
"client_id": "ID de cliente",
|
||||
"client_secret": "Secreto del cliente",
|
||||
"show_more_details": "Mostrar más detalles",
|
||||
"allowed_user_groups": "Grupos de usuarios permitidos",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Añade grupos de usuarios a este cliente para restringir el acceso a los usuarios de estos grupos. Si no se selecciona ningún grupo de usuarios, todos los usuarios tendrán acceso a este cliente.",
|
||||
"allowed_user_groups_description": "Selecciona los grupos de usuarios cuyos miembros pueden iniciar sesión en este cliente.",
|
||||
"allowed_user_groups_status_unrestricted_description": "No se aplican restricciones de grupo de usuarios. Cualquier usuario puede iniciar sesión en este cliente.",
|
||||
"unrestrict": "Sin restricciones",
|
||||
"restrict": "Restringir",
|
||||
"user_groups_restriction_updated_successfully": "Restricción de grupos de usuarios actualizada correctamente",
|
||||
"allowed_user_groups_updated_successfully": "Grupos de usuarios permitidos actualizados correctamente",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "Logo del modo Claro",
|
||||
"dark_mode_logo": "Logotipo en modo oscuro",
|
||||
"email_logo": "Logotipo de correo electrónico",
|
||||
"background_image": "Imagen de fondo",
|
||||
"language": "Idioma",
|
||||
"reset_profile_picture_question": "¿Restablecer foto de perfil?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Todos los clientes",
|
||||
"all_locations": "Todas las ubicaciones",
|
||||
"global_audit_log": "Registro de auditoría global",
|
||||
"see_all_account_activities_from_the_last_3_months": "Ver toda la actividad de los usuarios durante los últimos 3 meses.",
|
||||
"see_all_recent_account_activities": "Ver las actividades de la cuenta de todos los usuarios durante el periodo de retención establecido.",
|
||||
"token_sign_in": "Inicio de sesión con token",
|
||||
"client_authorization": "Autorización del cliente",
|
||||
"new_client_authorization": "Autorización de nuevo cliente",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Imagen de perfil predeterminada",
|
||||
"light": "Luz",
|
||||
"dark": "Oscuro",
|
||||
"system": "Sistema"
|
||||
"system": "Sistema",
|
||||
"signup_token_user_groups_description": "Asignad automáticamente estos grupos a los usuarios que se registren utilizando este token.",
|
||||
"allowed_oidc_clients": "Clientes OIDC permitidos",
|
||||
"allowed_oidc_clients_description": "Selecciona los clientes OIDC en los que los miembros de este grupo de usuarios pueden iniciar sesión.",
|
||||
"unrestrict_oidc_client": "Sin restricciones {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "¿Estás seguro de que deseas eliminar las restricciones del cliente OIDC <b>{clientName}</b>? Esto eliminará todas las asignaciones de grupo para este cliente y cualquier usuario podrá iniciar sesión.",
|
||||
"allowed_oidc_clients_updated_successfully": "Clientes OIDC permitidos actualizados correctamente"
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Asetukset",
|
||||
"update_pocket_id": "Päivitä Pocket ID",
|
||||
"powered_by": "Voimanlähteenä",
|
||||
"see_your_account_activities_from_the_last_3_months": "Katso tilisi tapahtumat viimeisen 3 kuukauden ajalta.",
|
||||
"see_your_recent_account_activities": "Tarkastele tilisi tapahtumia määritetyn säilytysajan puitteissa.",
|
||||
"time": "Aika",
|
||||
"event": "Tapahtuma",
|
||||
"approximate_location": "Arvioitu sijainti",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Haluatko varmasti luoda uuden asiakassalaisuuden? Vanha salaisuus mitätöidään.",
|
||||
"generate": "Luo",
|
||||
"new_client_secret_created_successfully": "Uusi asiakassalaisuus luotu onnistuneesti",
|
||||
"allowed_user_groups_updated_successfully": "Sallitut käyttäjäryhmät päivitetty onnistuneesti",
|
||||
"oidc_client_name": "OIDC-asiakas {name}",
|
||||
"client_id": "Asiakas ID",
|
||||
"client_secret": "Asiakkaan salaisuus",
|
||||
"show_more_details": "Näytä lisätietoja",
|
||||
"allowed_user_groups": "Sallitut käyttäjäryhmät",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Lisää käyttäjäryhmiä tähän asiakkaaseen rajoittaaksesi pääsyn näiden ryhmien käyttäjille. Jos käyttäjäryhmiä ei ole valittu, kaikki käyttäjät pääsevät käyttämään tätä asiakasta.",
|
||||
"allowed_user_groups_description": "Valitse käyttäjäryhmät, joiden jäsenet saavat kirjautua tähän asiakkaaseen.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Käyttäjäryhmärajoituksia ei sovelleta. Kuka tahansa käyttäjä voi kirjautua tähän asiakkaaseen.",
|
||||
"unrestrict": "Rajoittamaton",
|
||||
"restrict": "Rajoita",
|
||||
"user_groups_restriction_updated_successfully": "Käyttäjäryhmien rajoitukset päivitetty onnistuneesti",
|
||||
"allowed_user_groups_updated_successfully": "Sallitut käyttäjäryhmät päivitetty onnistuneesti",
|
||||
"favicon": "Sivustokuvake",
|
||||
"light_mode_logo": "Vaalean tilan logo",
|
||||
"dark_mode_logo": "Tumman tilan logo",
|
||||
"email_logo": "Sähköpostin logo",
|
||||
"background_image": "Taustakuva",
|
||||
"language": "Kieli",
|
||||
"reset_profile_picture_question": "Palautetaanko profiilikuva?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Kaikki asiakkaat",
|
||||
"all_locations": "Kaikki sijainnit",
|
||||
"global_audit_log": "Globaali tarkastusloki",
|
||||
"see_all_account_activities_from_the_last_3_months": "Katso kaikkien käyttäjien toiminnot viimeisen 3 kuukauden ajalta.",
|
||||
"see_all_recent_account_activities": "Tarkastele kaikkien käyttäjien tilitoimintoja asetetun säilytysajan kuluessa.",
|
||||
"token_sign_in": "Tunnuksella kirjautuminen",
|
||||
"client_authorization": "Asiakkaan valtuutus",
|
||||
"new_client_authorization": "Uuden asiakkaan valtuutus",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Oletusprofiilikuva",
|
||||
"light": "Vaalea",
|
||||
"dark": "Tumma",
|
||||
"system": "Järjestelmä"
|
||||
"system": "Järjestelmä",
|
||||
"signup_token_user_groups_description": "Määritä nämä ryhmät automaattisesti käyttäjille, jotka rekisteröityvät tällä tunnuksella.",
|
||||
"allowed_oidc_clients": "Sallitut OIDC-asiakkaat",
|
||||
"allowed_oidc_clients_description": "Valitse OIDC-asiakkaat, joihin tämän käyttäjäryhmän jäsenet voivat kirjautua.",
|
||||
"unrestrict_oidc_client": "Rajoittamaton {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Haluatko varmasti poistaa OIDC-asiakkaan rajoitukset <b>{clientName}</b>? Tämä poistaa kaikki tämän asiakkaan ryhmämääritykset, ja kuka tahansa käyttäjä voi kirjautua sisään.",
|
||||
"allowed_oidc_clients_updated_successfully": "Sallitut OIDC-asiakkaat päivitetty onnistuneesti"
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@
|
||||
"error_occurred_with_authenticator": "Une erreur est survenue pendant l'authentification",
|
||||
"authenticator_does_not_support_discoverable_credentials": "L'authentificateur ne prend pas en charge les identifiants découvrables",
|
||||
"authenticator_does_not_support_resident_keys": "L'authentificateur ne prend pas en charge les clés résidentes",
|
||||
"passkey_was_previously_registered": "Cette clé d'accès à déjà été enregistré",
|
||||
"passkey_was_previously_registered": "Cette clé d'accès a déjà été enregistrée",
|
||||
"authenticator_does_not_support_any_of_the_requested_algorithms": "L'authentificateur ne supporte aucun des algorithmes requis",
|
||||
"authenticator_timed_out": "L'authentification a expiré",
|
||||
"critical_error_occurred_contact_administrator": "Une erreur critique s'est produite. Veuillez contacter votre administrateur.",
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Paramètres",
|
||||
"update_pocket_id": "Mettre à jour Pocket ID",
|
||||
"powered_by": "Propulsé par",
|
||||
"see_your_account_activities_from_the_last_3_months": "Consultez les activités de votre compte au cours des 3 derniers mois.",
|
||||
"see_your_recent_account_activities": "Regarde ce qui se passe sur ton compte pendant la période de conservation que tu as choisie.",
|
||||
"time": "Date et heure",
|
||||
"event": "Événement",
|
||||
"approximate_location": "Lieu approximatif",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Êtes-vous sûr de vouloir créer un nouveau secret client ? L'ancien secret sera invalidé.",
|
||||
"generate": "Générer",
|
||||
"new_client_secret_created_successfully": "Nouveau secret client créé avec succès",
|
||||
"allowed_user_groups_updated_successfully": "Groupes d'utilisateurs autorisés mis à jour avec succès",
|
||||
"oidc_client_name": "Client OIDC {name}",
|
||||
"client_id": "ID du client",
|
||||
"client_secret": "Secret client",
|
||||
"show_more_details": "Afficher plus de détails",
|
||||
"allowed_user_groups": "Groupes d'utilisateurs autorisés",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Ajouter des groupes d'utilisateurs à ce client permet de restreindre l'accès aux utilisateurs de ces groupes. Si aucun groupe d'utilisateurs n'est sélectionné, tous les utilisateurs auront accès à ce client.",
|
||||
"allowed_user_groups_description": "Choisis les groupes d'utilisateurs dont les membres peuvent se connecter à ce client.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Aucune restriction de groupe d'utilisateurs n'est appliquée. N'importe quel utilisateur peut se connecter à ce client.",
|
||||
"unrestrict": "Débloquer",
|
||||
"restrict": "Limiter",
|
||||
"user_groups_restriction_updated_successfully": "La restriction des groupes d'utilisateurs a été mise à jour sans problème.",
|
||||
"allowed_user_groups_updated_successfully": "Groupes d'utilisateurs autorisés mis à jour avec succès",
|
||||
"favicon": "Icône du site",
|
||||
"light_mode_logo": "Logo du Mode Clair",
|
||||
"dark_mode_logo": "Logo du Mode Sombre",
|
||||
"email_logo": "Logo e-mail",
|
||||
"background_image": "Image d'arrière-plan",
|
||||
"language": "Langue",
|
||||
"reset_profile_picture_question": "Réinitialiser la photo de profil ?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Tous les clients",
|
||||
"all_locations": "Tous les emplacements",
|
||||
"global_audit_log": "Journal d'audit global",
|
||||
"see_all_account_activities_from_the_last_3_months": "Voir toutes les activités des utilisateurs des 3 derniers mois.",
|
||||
"see_all_recent_account_activities": "Regarde ce que tous les utilisateurs ont fait sur leur compte pendant la période de conservation choisie.",
|
||||
"token_sign_in": "Connexion par jeton",
|
||||
"client_authorization": "Autorisation client",
|
||||
"new_client_authorization": "Nouvelle autorisation client",
|
||||
@@ -467,7 +472,13 @@
|
||||
"reauthentication": "Réauthentification",
|
||||
"clear_filters": "Effacer les filtres",
|
||||
"default_profile_picture": "Photo de profil par défaut",
|
||||
"light": "Lumière",
|
||||
"light": "Clair",
|
||||
"dark": "Sombre",
|
||||
"system": "Système"
|
||||
"system": "Système",
|
||||
"signup_token_user_groups_description": "Attribuez automatiquement ces groupes aux utilisateurs qui s'inscrivent en utilisant ce jeton.",
|
||||
"allowed_oidc_clients": "Clients OIDC autorisés",
|
||||
"allowed_oidc_clients_description": "Choisissez les clients OIDC auxquels les membres de ce groupe d'utilisateurs peuvent se connecter.",
|
||||
"unrestrict_oidc_client": "Débloquer {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Tu es sûr de vouloir supprimer les restrictions pour le client OIDC <b>{clientName}</b>? Ça va supprimer toutes les affectations de groupe pour ce client et n'importe quel utilisateur pourra se connecter.",
|
||||
"allowed_oidc_clients_updated_successfully": "Les clients OIDC autorisés ont été mis à jour sans problème."
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "Impostazioni",
|
||||
"update_pocket_id": "Aggiorna Pocket ID",
|
||||
"powered_by": "Powered by",
|
||||
"see_your_account_activities_from_the_last_3_months": "Visualizza le attività del tuo account degli ultimi 3 mesi.",
|
||||
"see_your_recent_account_activities": "Guarda cosa succede nel tuo account durante il periodo di conservazione che hai impostato.",
|
||||
"time": "Ora",
|
||||
"event": "Evento",
|
||||
"approximate_location": "Posizione approssimativa",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "Sei sicuro di voler creare un nuovo client secret? Quello vecchio sarà invalidato.",
|
||||
"generate": "Genera",
|
||||
"new_client_secret_created_successfully": "Nuovo client secret creato con successo",
|
||||
"allowed_user_groups_updated_successfully": "Gruppi utente consentiti aggiornati con successo",
|
||||
"oidc_client_name": "Client OIDC {name}",
|
||||
"client_id": "ID client",
|
||||
"client_secret": "Client secret",
|
||||
"show_more_details": "Mostra più dettagli",
|
||||
"allowed_user_groups": "Gruppi utente consentiti",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Aggiungi gruppi utente a questo client per limitare l'accesso agli utenti in questi gruppi. Se non viene selezionato alcun gruppo utente, tutti gli utenti avranno accesso a questo client.",
|
||||
"allowed_user_groups_description": "Scegli i gruppi di utenti che possono accedere a questo client.",
|
||||
"allowed_user_groups_status_unrestricted_description": "Non ci sono restrizioni per i gruppi di utenti. Chiunque può accedere a questo client.",
|
||||
"unrestrict": "Sbloccare",
|
||||
"restrict": "Limitare",
|
||||
"user_groups_restriction_updated_successfully": "Restrizione dei gruppi di utenti aggiornata con successo",
|
||||
"allowed_user_groups_updated_successfully": "Gruppi utente consentiti aggiornati con successo",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "Logo modalità chiara",
|
||||
"dark_mode_logo": "Logo modalità scura",
|
||||
"email_logo": "Logo e-mail",
|
||||
"background_image": "Immagine di sfondo",
|
||||
"language": "Lingua",
|
||||
"reset_profile_picture_question": "Reimpostare l'immagine del profilo?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "Tutti i client",
|
||||
"all_locations": "Tutte le posizioni",
|
||||
"global_audit_log": "Registro attività globale",
|
||||
"see_all_account_activities_from_the_last_3_months": "Visualizza tutte le attività degli utenti degli ultimi 3 mesi.",
|
||||
"see_all_recent_account_activities": "Guarda cosa hanno fatto tutti gli utenti nei loro account durante il periodo di conservazione che hai scelto.",
|
||||
"token_sign_in": "Accesso con token",
|
||||
"client_authorization": "Autorizzazione client",
|
||||
"new_client_authorization": "Nuova autorizzazione client",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "Immagine del profilo predefinita",
|
||||
"light": "Luce",
|
||||
"dark": "Buio",
|
||||
"system": "Sistema"
|
||||
"system": "Sistema",
|
||||
"signup_token_user_groups_description": "Assegna automaticamente questi gruppi agli utenti che si registrano usando questo token.",
|
||||
"allowed_oidc_clients": "Client OIDC consentiti",
|
||||
"allowed_oidc_clients_description": "Scegli i client OIDC a cui i membri di questo gruppo di utenti possono accedere.",
|
||||
"unrestrict_oidc_client": "Sbloccare {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "Sei sicuro di voler rimuovere le restrizioni dal client OIDC <b>{clientName}</b>? Così facendo, cancellerai tutte le assegnazioni di gruppo per questo client e chiunque potrà accedere.",
|
||||
"allowed_oidc_clients_updated_successfully": "I client OIDC autorizzati sono stati aggiornati senza problemi."
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "設定",
|
||||
"update_pocket_id": "Pocket ID を更新",
|
||||
"powered_by": "Powered by",
|
||||
"see_your_account_activities_from_the_last_3_months": "過去3ヶ月間のアカウントアクティビティを確認する。",
|
||||
"see_your_recent_account_activities": "設定された保持期間内のアカウント活動を確認できます。",
|
||||
"time": "時間",
|
||||
"event": "イベント",
|
||||
"approximate_location": "おおよその場所",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "新しいクライアントシークレットを作成してもよろしいですか?古いシークレットは無効化されます。",
|
||||
"generate": "Generate",
|
||||
"new_client_secret_created_successfully": "新しいクライアントシークレットが正常に作成されました",
|
||||
"allowed_user_groups_updated_successfully": "許可されたユーザーグループが正常に更新されました",
|
||||
"oidc_client_name": "OIDC クライアント {name}",
|
||||
"client_id": "Client ID",
|
||||
"client_secret": "クライアントシークレット",
|
||||
"show_more_details": "詳細を表示",
|
||||
"allowed_user_groups": "許可されたユーザーグループ",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "Add user groups to this client to restrict access to users in these groups. If no user groups are selected, all users will have access to this client.",
|
||||
"allowed_user_groups_description": "このクライアントにサインインを許可するユーザーグループを選択してください。",
|
||||
"allowed_user_groups_status_unrestricted_description": "ユーザーグループの制限は適用されません。すべてのユーザーがこのクライアントにサインインできます。",
|
||||
"unrestrict": "制限なし",
|
||||
"restrict": "制限する",
|
||||
"user_groups_restriction_updated_successfully": "ユーザーグループの制限が正常に更新されました",
|
||||
"allowed_user_groups_updated_successfully": "許可されたユーザーグループが正常に更新されました",
|
||||
"favicon": "Favicon",
|
||||
"light_mode_logo": "ライトモードのロゴ",
|
||||
"dark_mode_logo": "ダークモードのロゴ",
|
||||
"email_logo": "メール ロゴ",
|
||||
"background_image": "背景画像",
|
||||
"language": "言語",
|
||||
"reset_profile_picture_question": "プロフィール画像をリセットしますか?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "すべてのクライアント",
|
||||
"all_locations": "すべての場所",
|
||||
"global_audit_log": "グローバル監査ログ",
|
||||
"see_all_account_activities_from_the_last_3_months": "過去3ヶ月間のすべてのユーザーアクティビティを表示します。",
|
||||
"see_all_recent_account_activities": "設定された保持期間中の全ユーザーのアカウント活動を閲覧する。",
|
||||
"token_sign_in": "トークンサインイン",
|
||||
"client_authorization": "Client Authorization",
|
||||
"new_client_authorization": "New Client Authorization",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "デフォルトのプロフィール画像",
|
||||
"light": "光",
|
||||
"dark": "闇",
|
||||
"system": "システム"
|
||||
"system": "システム",
|
||||
"signup_token_user_groups_description": "このトークンを使用して登録するユーザーに、これらのグループを自動的に割り当てます。",
|
||||
"allowed_oidc_clients": "許可されたOIDCクライアント",
|
||||
"allowed_oidc_clients_description": "このユーザーグループのメンバーがサインインを許可されているOIDCクライアントを選択してください。",
|
||||
"unrestrict_oidc_client": "制限なし {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "OIDCクライアントの制限を解除してもよろしいですか? <b>{clientName}</b>?これにより、このクライアントに対するすべてのグループ割り当てが解除され、どのユーザーでもサインインできるようになります。",
|
||||
"allowed_oidc_clients_updated_successfully": "許可されたOIDCクライアントの更新が正常に完了しました"
|
||||
}
|
||||
|
||||
@@ -95,7 +95,7 @@
|
||||
"settings": "설정",
|
||||
"update_pocket_id": "Pocket ID 업데이트",
|
||||
"powered_by": "제공:",
|
||||
"see_your_account_activities_from_the_last_3_months": "지난 3개월 동안의 계정 활동을 확인하세요.",
|
||||
"see_your_recent_account_activities": "설정된 보존 기간 내의 계정 활동을 확인하세요.",
|
||||
"time": "시간",
|
||||
"event": "이벤트",
|
||||
"approximate_location": "대략적인 위치",
|
||||
@@ -301,16 +301,21 @@
|
||||
"are_you_sure_you_want_to_create_a_new_client_secret": "새로운 클라이언트 시크릿 생성하시겠습니까? 기존 클라이언트 시크릿은 무효화됩니다.",
|
||||
"generate": "생성",
|
||||
"new_client_secret_created_successfully": "새로운 클라이언트 시크릿이 성공적으로 생성되었습니다",
|
||||
"allowed_user_groups_updated_successfully": "허용된 사용자 그룹이 성공적으로 변경되었습니다",
|
||||
"oidc_client_name": "OIDC 클라이언트 {name}",
|
||||
"client_id": "클라이언트 ID",
|
||||
"client_secret": "클라이언트 시크릿",
|
||||
"show_more_details": "상세 정보 보기",
|
||||
"allowed_user_groups": "허용된 사용자 그룹",
|
||||
"add_user_groups_to_this_client_to_restrict_access_to_users_in_these_groups": "이 클라이언트에 사용자 그룹을 추가하여 해당 그룹의 사용자의 접근를 제한합니다. 사용자 그룹을 선택하지 않으면 모든 사용자가 이 클라이언트에 접근할 수 있습니다.",
|
||||
"allowed_user_groups_description": "이 클라이언트에 로그인할 수 있는 사용자 그룹을 선택하십시오.",
|
||||
"allowed_user_groups_status_unrestricted_description": "사용자 그룹 제한이 적용되지 않습니다. 모든 사용자가 이 클라이언트에 로그인할 수 있습니다.",
|
||||
"unrestrict": "제한 해제",
|
||||
"restrict": "제한하다",
|
||||
"user_groups_restriction_updated_successfully": "사용자 그룹 제한이 성공적으로 업데이트되었습니다.",
|
||||
"allowed_user_groups_updated_successfully": "허용된 사용자 그룹이 성공적으로 변경되었습니다",
|
||||
"favicon": "파비콘",
|
||||
"light_mode_logo": "라이트 모드 로고",
|
||||
"dark_mode_logo": "다크 모드 로고",
|
||||
"email_logo": "이메일 로고",
|
||||
"background_image": "배경 이미지",
|
||||
"language": "언어",
|
||||
"reset_profile_picture_question": "프로필 사진을 재설정하시겠습니까?",
|
||||
@@ -327,7 +332,7 @@
|
||||
"all_clients": "모든 클라이언트",
|
||||
"all_locations": "모든 위치",
|
||||
"global_audit_log": "전체 활동 기록",
|
||||
"see_all_account_activities_from_the_last_3_months": "지난 3개월 동안의 모든 사용자 활동을 확인하세요.",
|
||||
"see_all_recent_account_activities": "설정된 보존 기간 동안 모든 사용자의 계정 활동을 확인하십시오.",
|
||||
"token_sign_in": "토큰 로그인",
|
||||
"client_authorization": "클라이언트 승인",
|
||||
"new_client_authorization": "새로운 클라이언트 승인",
|
||||
@@ -469,5 +474,11 @@
|
||||
"default_profile_picture": "기본 프로필 사진",
|
||||
"light": "라이트",
|
||||
"dark": "다크",
|
||||
"system": "시스템"
|
||||
"system": "시스템",
|
||||
"signup_token_user_groups_description": "이 토큰을 사용하여 가입하는 사용자에게 자동으로 이 그룹들을 할당합니다.",
|
||||
"allowed_oidc_clients": "허용된 OIDC 클라이언트",
|
||||
"allowed_oidc_clients_description": "이 사용자 그룹의 구성원이 로그인할 수 있는 OIDC 클라이언트를 선택하십시오.",
|
||||
"unrestrict_oidc_client": "제한 해제 {clientName}",
|
||||
"confirm_unrestrict_oidc_client_description": "OIDC 클라이언트의 제한을 해제하시겠습니까? <b>{clientName}</b>? 이 작업은 해당 클라이언트의 모든 그룹 할당을 제거하며, 모든 사용자가 로그인할 수 있게 됩니다.",
|
||||
"allowed_oidc_clients_updated_successfully": "허용된 OIDC 클라이언트 업데이트 성공"
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user