Update dependencies

This commit is contained in:
Ingo Oppermann 2025-12-03 15:23:05 +01:00
parent 0431b6f8c4
commit 6cfb470d98
No known key found for this signature in database
GPG Key ID: 2AB32426E9DD229E
1041 changed files with 74395 additions and 83914 deletions

102
go.mod
View File

@@ -1,23 +1,23 @@
module github.com/datarhei/core/v16
go 1.23.0
go 1.24.0
toolchain go1.24.2
require (
github.com/99designs/gqlgen v0.17.76
github.com/99designs/gqlgen v0.17.84
github.com/Masterminds/semver/v3 v3.4.0
github.com/adhocore/gronx v1.19.6
github.com/andybalholm/brotli v1.2.0
github.com/atrox/haikunatorgo/v2 v2.0.1
github.com/caddyserver/certmagic v0.23.0
github.com/caddyserver/certmagic v0.25.0
github.com/datarhei/gosrt v0.9.0
github.com/datarhei/joy4 v0.0.0-20250818192923-6dc77ee81363
github.com/dolthub/swiss v0.2.1
github.com/fujiwara/shapeio v1.0.0
github.com/go-playground/validator/v10 v10.27.0
github.com/go-playground/validator/v10 v10.28.0
github.com/gobwas/glob v0.2.3
github.com/golang-jwt/jwt/v5 v5.2.2
github.com/golang-jwt/jwt/v5 v5.3.0
github.com/google/gops v0.3.28
github.com/google/uuid v1.6.0
github.com/hashicorp/go-hclog v1.6.3
@@ -25,28 +25,28 @@ require (
github.com/hashicorp/raft-boltdb/v2 v2.3.1
github.com/invopop/jsonschema v0.4.0
github.com/joho/godotenv v1.5.1
github.com/klauspost/compress v1.18.0
github.com/klauspost/cpuid/v2 v2.2.11
github.com/klauspost/compress v1.18.2
github.com/klauspost/cpuid/v2 v2.3.0
github.com/labstack/echo/v4 v4.13.4
github.com/lestrrat-go/strftime v1.1.0
github.com/lestrrat-go/strftime v1.1.1
github.com/lithammer/shortuuid/v4 v4.2.0
github.com/mattn/go-isatty v0.0.20
github.com/minio/minio-go/v7 v7.0.94
github.com/prometheus/client_golang v1.22.0
github.com/minio/minio-go/v7 v7.0.97
github.com/prometheus/client_golang v1.23.2
github.com/puzpuzpuz/xsync/v3 v3.5.1
github.com/sergi/go-diff v1.3.1
github.com/shirou/gopsutil/v3 v3.24.5
github.com/stretchr/testify v1.10.0
github.com/stretchr/testify v1.11.1
github.com/swaggo/echo-swagger v1.4.1
github.com/swaggo/swag v1.16.4
github.com/tklauser/go-sysconf v0.3.15
github.com/vektah/gqlparser/v2 v2.5.30
github.com/swaggo/swag v1.16.6
github.com/tklauser/go-sysconf v0.3.16
github.com/vektah/gqlparser/v2 v2.5.31
github.com/xeipuuv/gojsonschema v1.2.0
go.etcd.io/bbolt v1.4.2
go.etcd.io/bbolt v1.4.3
go.uber.org/automaxprocs v1.6.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.39.0
golang.org/x/mod v0.25.0
go.uber.org/zap v1.27.1
golang.org/x/crypto v0.45.0
golang.org/x/mod v0.30.0
)
require (
@@ -58,72 +58,76 @@ require (
github.com/boltdb/bolt v1.3.1 // indirect
github.com/caddyserver/zerossl v0.1.3 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dolthub/maphash v0.1.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
github.com/gabriel-vasile/mimetype v1.4.11 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-ini/ini v1.67.0 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/jsonpointer v0.21.1 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.1 // indirect
github.com/go-openapi/jsonpointer v0.22.3 // indirect
github.com/go-openapi/jsonreference v0.21.3 // indirect
github.com/go-openapi/spec v0.22.1 // indirect
github.com/go-openapi/swag/conv v0.25.4 // indirect
github.com/go-openapi/swag/jsonname v0.25.4 // indirect
github.com/go-openapi/swag/jsonutils v0.25.4 // indirect
github.com/go-openapi/swag/loading v0.25.4 // indirect
github.com/go-openapi/swag/stringutils v0.25.4 // indirect
github.com/go-openapi/swag/typeutils v0.25.4 // indirect
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-viper/mapstructure/v2 v2.3.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/go-viper/mapstructure/v2 v2.4.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-metrics v0.5.4 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.3 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.5 // indirect
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/iancoleman/orderedmap v0.2.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/klauspost/crc32 v1.3.0 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/libdns/libdns v1.1.0 // indirect
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
github.com/libdns/libdns v1.1.1 // indirect
github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mholt/acmez/v3 v3.1.2 // indirect
github.com/miekg/dns v1.1.66 // indirect
github.com/minio/crc64nvme v1.0.2 // indirect
github.com/mholt/acmez/v3 v3.1.4 // indirect
github.com/miekg/dns v1.1.68 // indirect
github.com/minio/crc64nvme v1.1.1 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.65.0 // indirect
github.com/prometheus/procfs v0.17.0 // indirect
github.com/prometheus/common v0.67.4 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/rs/xid v1.6.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/shoenig/go-m1cpu v0.1.7 // indirect
github.com/sosodev/duration v1.3.1 // indirect
github.com/swaggo/files/v2 v2.0.2 // indirect
github.com/tinylib/msgp v1.3.0 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/urfave/cli/v2 v2.27.7 // indirect
github.com/tinylib/msgp v1.5.0 // indirect
github.com/tklauser/numcpus v0.11.0 // indirect
github.com/urfave/cli/v3 v3.6.1 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasttemplate v1.2.2 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zeebo/blake3 v0.2.4 // indirect
go.uber.org/multierr v1.11.0 // indirect
go.uber.org/zap/exp v0.3.0 // indirect
golang.org/x/net v0.41.0 // indirect
golang.org/x/sync v0.15.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/text v0.26.0 // indirect
golang.org/x/time v0.12.0 // indirect
golang.org/x/tools v0.34.0 // indirect
google.golang.org/protobuf v1.36.6 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/sync v0.18.0 // indirect
golang.org/x/sys v0.38.0 // indirect
golang.org/x/text v0.31.0 // indirect
golang.org/x/time v0.14.0 // indirect
golang.org/x/tools v0.39.0 // indirect
google.golang.org/protobuf v1.36.10 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

221
go.sum
View File

@@ -1,13 +1,13 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/99designs/gqlgen v0.17.76 h1:YsJBcfACWmXWU2t1yCjoGdOmqcTfOFpjbLAE443fmYI=
github.com/99designs/gqlgen v0.17.76/go.mod h1:miiU+PkAnTIDKMQ1BseUOIVeQHoiwYDZGCswoxl7xec=
github.com/99designs/gqlgen v0.17.84 h1:iVMdiStgUVx/BFkMb0J5GAXlqfqtQ7bqMCYK6v52kQ0=
github.com/99designs/gqlgen v0.17.84/go.mod h1:qjoUqzTeiejdo+bwUg8unqSpeYG42XrcrQboGIezmFA=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
github.com/PuerkitoBio/goquery v1.11.0 h1:jZ7pwMQXIITcUXNH83LLk+txlaEy6NVOfTuP43xxfqw=
github.com/PuerkitoBio/goquery v1.11.0/go.mod h1:wQHgxUOU3JGuj3oD/QFfxUdlzW6xPHfqyHre6VMY4DQ=
github.com/adhocore/gronx v1.19.6 h1:5KNVcoR9ACgL9HhEqCm5QXsab/gI4QDIybTAWcXDKDc=
github.com/adhocore/gronx v1.19.6/go.mod h1:7oUY1WAU8rEJWmAxXR2DN0JaO4gi9khSgKjiRypqteg=
github.com/agnivade/levenshtein v1.2.1 h1:EHBY3UOn1gwdy/VbFwgo4cxecRznFk7fKWN1KOX7eoM=
@@ -37,8 +37,8 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/caddyserver/certmagic v0.23.0 h1:CfpZ/50jMfG4+1J/u2LV6piJq4HOfO6ppOnOf7DkFEU=
github.com/caddyserver/certmagic v0.23.0/go.mod h1:9mEZIWqqWoI+Gf+4Trh04MOVPD0tGSxtqsxg87hAIH4=
github.com/caddyserver/certmagic v0.25.0 h1:VMleO/XA48gEWes5l+Fh6tRWo9bHkhwAEhx63i+F5ic=
github.com/caddyserver/certmagic v0.25.0/go.mod h1:m9yB7Mud24OQbPHOiipAoyKPn9pKHhpSJxXR1jydBxA=
github.com/caddyserver/zerossl v0.1.3 h1:onS+pxp3M8HnHpN5MMbOMyNjmTheJyWRaZYwn+YTAyA=
github.com/caddyserver/zerossl v0.1.3/go.mod h1:CxA0acn7oEGO6//4rtrRjYgEoa4MFw/XofZnrYwGqG4=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
@@ -46,12 +46,8 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo=
github.com/cpuguy83/go-md2man/v2 v2.0.7/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/datarhei/gosrt v0.9.0 h1:FW8A+F8tBiv7eIa57EBHjtTJKFX+OjvLogF/tFXoOiA=
github.com/datarhei/gosrt v0.9.0/go.mod h1:rqTRK8sDZdN2YBgp1EEICSV4297mQk0oglwvpXhaWdk=
github.com/datarhei/joy4 v0.0.0-20250818192407-7b9c68697ad4 h1:coBWFnjU8DZpG4RHCCz2fQcdfuEVUfyeL7RHVXVflnM=
github.com/datarhei/joy4 v0.0.0-20250818192407-7b9c68697ad4/go.mod h1:Jcw/6jZDQQmPx8A7INEkXmuEF7E9jjBbSTfVSLwmiQw=
github.com/datarhei/joy4 v0.0.0-20250818192923-6dc77ee81363 h1:tHC7HzHIgs4I+YTCs0IkIJjP1lmygtSBNr73DiNrY3I=
github.com/datarhei/joy4 v0.0.0-20250818192923-6dc77ee81363/go.mod h1:Jcw/6jZDQQmPx8A7INEkXmuEF7E9jjBbSTfVSLwmiQw=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -71,8 +67,8 @@ github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/fujiwara/shapeio v1.0.0 h1:xG5D9oNqCSUUbryZ/jQV3cqe1v2suEjwPIcEg1gKM8M=
github.com/fujiwara/shapeio v1.0.0/go.mod h1:LmEmu6L/8jetyj1oewewFb7bZCNRwE7wLCUNzDLaLVA=
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok=
github.com/gabriel-vasile/mimetype v1.4.11 h1:AQvxbp830wPhHTqc1u7nzoLT+ZFxGY7emj5DR5DYFik=
github.com/gabriel-vasile/mimetype v1.4.11/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
@@ -86,32 +82,51 @@ github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
github.com/go-openapi/jsonpointer v0.21.1 h1:whnzv/pNXtK2FbX/W9yJfRmE2gsmkfahjMKB0fZvcic=
github.com/go-openapi/jsonpointer v0.21.1/go.mod h1:50I1STOfbY1ycR8jGz8DaMeLCdXiI6aDteEdRNNzpdk=
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
github.com/go-openapi/swag v0.23.1 h1:lpsStH0n2ittzTnbaSloVZLuB5+fvSY/+hnagBjSNZU=
github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0=
github.com/go-openapi/jsonpointer v0.22.3 h1:dKMwfV4fmt6Ah90zloTbUKWMD+0he+12XYAsPotrkn8=
github.com/go-openapi/jsonpointer v0.22.3/go.mod h1:0lBbqeRsQ5lIanv3LHZBrmRGHLHcQoOXQnf88fHlGWo=
github.com/go-openapi/jsonreference v0.21.3 h1:96Dn+MRPa0nYAR8DR1E03SblB5FJvh7W6krPI0Z7qMc=
github.com/go-openapi/jsonreference v0.21.3/go.mod h1:RqkUP0MrLf37HqxZxrIAtTWW4ZJIK1VzduhXYBEeGc4=
github.com/go-openapi/spec v0.22.1 h1:beZMa5AVQzRspNjvhe5aG1/XyBSMeX1eEOs7dMoXh/k=
github.com/go-openapi/spec v0.22.1/go.mod h1:c7aeIQT175dVowfp7FeCvXXnjN/MrpaONStibD2WtDA=
github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
github.com/go-openapi/swag/conv v0.25.4 h1:/Dd7p0LZXczgUcC/Ikm1+YqVzkEeCc9LnOWjfkpkfe4=
github.com/go-openapi/swag/conv v0.25.4/go.mod h1:3LXfie/lwoAv0NHoEuY1hjoFAYkvlqI/Bn5EQDD3PPU=
github.com/go-openapi/swag/jsonname v0.25.4 h1:bZH0+MsS03MbnwBXYhuTttMOqk+5KcQ9869Vye1bNHI=
github.com/go-openapi/swag/jsonname v0.25.4/go.mod h1:GPVEk9CWVhNvWhZgrnvRA6utbAltopbKwDu8mXNUMag=
github.com/go-openapi/swag/jsonutils v0.25.4 h1:VSchfbGhD4UTf4vCdR2F4TLBdLwHyUDTd1/q4i+jGZA=
github.com/go-openapi/swag/jsonutils v0.25.4/go.mod h1:7OYGXpvVFPn4PpaSdPHJBtF0iGnbEaTk8AvBkoWnaAY=
github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.4 h1:IACsSvBhiNJwlDix7wq39SS2Fh7lUOCJRmx/4SN4sVo=
github.com/go-openapi/swag/jsonutils/fixtures_test v0.25.4/go.mod h1:Mt0Ost9l3cUzVv4OEZG+WSeoHwjWLnarzMePNDAOBiM=
github.com/go-openapi/swag/loading v0.25.4 h1:jN4MvLj0X6yhCDduRsxDDw1aHe+ZWoLjW+9ZQWIKn2s=
github.com/go-openapi/swag/loading v0.25.4/go.mod h1:rpUM1ZiyEP9+mNLIQUdMiD7dCETXvkkC30z53i+ftTE=
github.com/go-openapi/swag/stringutils v0.25.4 h1:O6dU1Rd8bej4HPA3/CLPciNBBDwZj9HiEpdVsb8B5A8=
github.com/go-openapi/swag/stringutils v0.25.4/go.mod h1:GTsRvhJW5xM5gkgiFe0fV3PUlFm0dr8vki6/VSRaZK0=
github.com/go-openapi/swag/typeutils v0.25.4 h1:1/fbZOUN472NTc39zpa+YGHn3jzHWhv42wAJSN91wRw=
github.com/go-openapi/swag/typeutils v0.25.4/go.mod h1:Ou7g//Wx8tTLS9vG0UmzfCsjZjKhpjxayRKTHXf2pTE=
github.com/go-openapi/swag/yamlutils v0.25.4 h1:6jdaeSItEUb7ioS9lFoCZ65Cne1/RZtPBZ9A56h92Sw=
github.com/go-openapi/swag/yamlutils v0.25.4/go.mod h1:MNzq1ulQu+yd8Kl7wPOut/YHAAU/H6hL91fF+E2RFwc=
github.com/go-openapi/testify/enable/yaml/v2 v2.0.2 h1:0+Y41Pz1NkbTHz8NngxTuAXxEodtNSI1WG1c/m5Akw4=
github.com/go-openapi/testify/enable/yaml/v2 v2.0.2/go.mod h1:kme83333GCtJQHXQ8UKX3IBZu6z8T5Dvy5+CW3NLUUg=
github.com/go-openapi/testify/v2 v2.0.2 h1:X999g3jeLcoY8qctY/c/Z8iBHTbwLz7R2WXd6Ub6wls=
github.com/go-openapi/testify/v2 v2.0.2/go.mod h1:HCPmvFFnheKK2BuwSA0TbbdxJ3I16pjwMkYkP4Ywn54=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs=
github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/goccy/go-yaml v1.18.0 h1:8W7wMFS12Pcas7KU+VVkaiCng+kG8QiFeFwzFb+rwuw=
github.com/goccy/go-yaml v1.18.0/go.mod h1:XBurs7gK8ATbW4ZPGKgcbrY1Br56PdM69F7LkFRi1kA=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-jwt/jwt/v5 v5.2.2 h1:Rl4B7itRWVtYIHFrSNd7vhTiz9UpLdi6gZhZ3wEeDy8=
github.com/golang-jwt/jwt/v5 v5.2.2/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.3.0 h1:pv4AsKCKKZuqlgs5sUmn4x8UlGa0kEVt/puTpKx9vvo=
github.com/golang-jwt/jwt/v5 v5.3.0/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@@ -146,8 +161,8 @@ github.com/hashicorp/go-metrics v0.5.4 h1:8mmPiIJkTPPEbAiV97IxdAGNdRdaWwVap1BU6e
github.com/hashicorp/go-metrics v0.5.4/go.mod h1:CG5yz4NZ/AI/aQt9Ucm/vdBnbh7fvmv4lxZ350i+QQI=
github.com/hashicorp/go-msgpack v0.5.5 h1:i9R9JSrqIz0QVLz3sz+i3YJdT7TTSLcfLLzJi9aZTuI=
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-msgpack/v2 v2.1.3 h1:cB1w4Zrk0O3jQBTcFMKqYQWRFfsSQ/TYKNyUUVyCP2c=
github.com/hashicorp/go-msgpack/v2 v2.1.3/go.mod h1:SjlwKKFnwBXvxD/I1bEcfJIBbEJ+MCUn39TxymNR5ZU=
github.com/hashicorp/go-msgpack/v2 v2.1.5 h1:Ue879bPnutj/hXfmUk6s/jtIK90XxgiUIcXRl656T44=
github.com/hashicorp/go-msgpack/v2 v2.1.5/go.mod h1:bjCsRXpZ7NsJdk45PoCQnzRGDaK8TKm5ZnDI/9y3J4M=
github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs=
github.com/hashicorp/go-uuid v1.0.0 h1:RS8zrF7PhGwyNPOtxSClXXj9HA8feRnJzgnI1RJCSnM=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
@@ -169,8 +184,6 @@ github.com/invopop/jsonschema v0.4.0 h1:Yuy/unfgCnfV5Wl7H0HgFufp/rlurqPOOuacqyBy
github.com/invopop/jsonschema v0.4.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
@@ -178,11 +191,13 @@ github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4OxRzU=
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=
github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
@@ -203,16 +218,14 @@ github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc h1:RKf14vYWi2ttpEmkA4aQ3j4u9dStX2t4M8UM6qqNsG8=
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc/go.mod h1:kopuH9ugFRkIXf3YoqHKyrJ9YfUFsckUU9S7B+XP+is=
github.com/lestrrat-go/strftime v1.1.0 h1:gMESpZy44/4pXLO/m+sL0yBd1W6LjgjrrD4a68Gapyg=
github.com/lestrrat-go/strftime v1.1.0/go.mod h1:uzeIB52CeUJenCo1syghlugshMysrqUT51HlxphXVeI=
github.com/libdns/libdns v1.1.0 h1:9ze/tWvt7Df6sbhOJRB8jT33GHEHpEQXdtkE3hPthbU=
github.com/libdns/libdns v1.1.0/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
github.com/lestrrat-go/strftime v1.1.1 h1:zgf8QCsgj27GlKBy3SU9/8MMgegZ8UCzlCyHYrUF0QU=
github.com/lestrrat-go/strftime v1.1.1/go.mod h1:YDrzHJAODYQ+xxvrn5SG01uFIQAeDTzpxNVppCz7Nmw=
github.com/libdns/libdns v1.1.1 h1:wPrHrXILoSHKWJKGd0EiAVmiJbFShguILTg9leS/P/U=
github.com/libdns/libdns v1.1.1/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
github.com/lithammer/shortuuid/v4 v4.2.0 h1:LMFOzVB3996a7b8aBuEXxqOBflbfPQAiVzkIcHO0h8c=
github.com/lithammer/shortuuid/v4 v4.2.0/go.mod h1:D5noHZ2oFw/YaKCfGy0YxyE7M0wMbezmMjPdhyEFe6Y=
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 h1:PpXWgLPs+Fqr325bN2FD2ISlRRztXibcX6e8f5FR5Dc=
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg=
github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4=
github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU=
github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3 h1:PwQumkgq4/acIiZhtifTV5OUqqiP82UAl0h87xj/l9k=
github.com/lufia/plan9stats v0.0.0-20251013123823-9fd1530e3ec3/go.mod h1:autxFIvghDt3jPTLoqZ9OZ7s9qTGNAWmYCjVFWPX/zg=
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
@@ -222,16 +235,16 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mholt/acmez/v3 v3.1.2 h1:auob8J/0FhmdClQicvJvuDavgd5ezwLBfKuYmynhYzc=
github.com/mholt/acmez/v3 v3.1.2/go.mod h1:L1wOU06KKvq7tswuMDwKdcHeKpFFgkppZy/y0DFxagQ=
github.com/miekg/dns v1.1.66 h1:FeZXOS3VCVsKnEAd+wBkjMC3D2K+ww66Cq3VnCINuJE=
github.com/miekg/dns v1.1.66/go.mod h1:jGFzBsSNbJw6z1HYut1RKBKHA9PBdxeHrZG8J+gC2WE=
github.com/minio/crc64nvme v1.0.2 h1:6uO1UxGAD+kwqWWp7mBFsi5gAse66C4NXO8cmcVculg=
github.com/minio/crc64nvme v1.0.2/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
github.com/mholt/acmez/v3 v3.1.4 h1:DyzZe/RnAzT3rpZj/2Ii5xZpiEvvYk3cQEN/RmqxwFQ=
github.com/mholt/acmez/v3 v3.1.4/go.mod h1:L1wOU06KKvq7tswuMDwKdcHeKpFFgkppZy/y0DFxagQ=
github.com/miekg/dns v1.1.68 h1:jsSRkNozw7G/mnmXULynzMNIsgY2dHC8LO6U6Ij2JEA=
github.com/miekg/dns v1.1.68/go.mod h1:fujopn7TB3Pu3JM69XaawiU0wqjpL9/8xGop5UrTPps=
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.94 h1:1ZoksIKPyaSt64AVOyaQvhDOgVC3MfZsWM6mZXRUGtM=
github.com/minio/minio-go/v7 v7.0.94/go.mod h1:71t2CqDt3ThzESgZUlU1rBN54mksGGlkLcFgguDnnAc=
github.com/minio/minio-go/v7 v7.0.97 h1:lqhREPyfgHTB/ciX8k2r8k0D93WaFqxbJX36UZq5occ=
github.com/minio/minio-go/v7 v7.0.97/go.mod h1:re5VXuo0pwEtoNLsNuSr0RrLfT/MBtohwdaSmPPSRSk=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
@@ -259,8 +272,8 @@ github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5Fsn
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q=
github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@@ -270,31 +283,29 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE=
github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
github.com/prometheus/common v0.67.4 h1:yR3NqWO1/UyO1w2PhUvXlGQs/PtFmoveVO0KZ4+Lvsc=
github.com/prometheus/common v0.67.4/go.mod h1:gP0fq6YjjNCLssJCQp0yk4M8W6ikLURwkdd/YKtTbyI=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0=
github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw=
github.com/prometheus/procfs v0.19.2 h1:zUMhqEW66Ex7OXIiDkll3tl9a1ZdilUOd/F6ZXw4Vws=
github.com/prometheus/procfs v0.19.2/go.mod h1:M0aotyiemPhBCM0z5w87kL22CxfcH05ZpYlu+b4J7mw=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI=
github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk=
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
github.com/shoenig/go-m1cpu v0.1.7 h1:C76Yd0ObKR82W4vhfjZiCp0HxcSZ8Nqd84v+HZ0qyI0=
github.com/shoenig/go-m1cpu v0.1.7/go.mod h1:KkDOw6m3ZJQAPHbrzkZki4hnx+pDRR1Lo+ldA56wD5w=
github.com/shoenig/test v1.7.0 h1:eWcHtTXa6QLnBvm0jgEabMRN/uJ4DMV3M8xUGgRkZmk=
github.com/shoenig/test v1.7.0/go.mod h1:UxJ6u/x2v/TNs/LoLxBNJRV9DiwBBKYxXSyczsBHFoI=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
@ -307,29 +318,29 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/swaggo/echo-swagger v1.4.1 h1:Yf0uPaJWp1uRtDloZALyLnvdBeoEL5Kc7DtnjzO/TUk=
github.com/swaggo/echo-swagger v1.4.1/go.mod h1:C8bSi+9yH2FLZsnhqMZLIZddpUxZdBYuNHbtaS1Hljc=
github.com/swaggo/files/v2 v2.0.2 h1:Bq4tgS/yxLB/3nwOMcul5oLEUKa877Ykgz3CJMVbQKU=
github.com/swaggo/files/v2 v2.0.2/go.mod h1:TVqetIzZsO9OhHX1Am9sRf9LdrFZqoK49N37KON/jr0=
github.com/swaggo/swag v1.16.4 h1:clWJtd9LStiG3VeijiCfOVODP6VpHtKdQy9ELFG3s1A=
github.com/swaggo/swag v1.16.4/go.mod h1:VBsHJRsDvfYvqoiMKnsdwhNV9LEMHgEDZcyVYX0sxPg=
github.com/tinylib/msgp v1.3.0 h1:ULuf7GPooDaIlbyvgAxBV/FI7ynli6LZ1/nVUNu+0ww=
github.com/tinylib/msgp v1.3.0/go.mod h1:ykjzy2wzgrlvpDCRc4LA8UXy6D8bzMSuAF3WD57Gok0=
github.com/tklauser/go-sysconf v0.3.15 h1:VE89k0criAymJ/Os65CSn1IXaol+1wrsFHEB8Ol49K4=
github.com/tklauser/go-sysconf v0.3.15/go.mod h1:Dmjwr6tYFIseJw7a3dRLJfsHAMXZ3nEnL/aZY+0IuI4=
github.com/tklauser/numcpus v0.10.0 h1:18njr6LDBk1zuna922MgdjQuJFjrdppsZG60sHGfjso=
github.com/tklauser/numcpus v0.10.0/go.mod h1:BiTKazU708GQTYF4mB+cmlpT2Is1gLk7XVuEeem8LsQ=
github.com/swaggo/swag v1.16.6 h1:qBNcx53ZaX+M5dxVyTrgQ0PJ/ACK+NzhwcbieTt+9yI=
github.com/swaggo/swag v1.16.6/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4Xesg=
github.com/tinylib/msgp v1.5.0 h1:GWnqAE54wmnlFazjq2+vgr736Akg58iiHImh+kPY2pc=
github.com/tinylib/msgp v1.5.0/go.mod h1:cvjFkb4RiC8qSBOPMGPSzSAx47nAsfhLVTCZZNuHv5o=
github.com/tklauser/go-sysconf v0.3.16 h1:frioLaCQSsF5Cy1jgRBrzr6t502KIIwQ0MArYICU0nA=
github.com/tklauser/go-sysconf v0.3.16/go.mod h1:/qNL9xxDhc7tx3HSRsLWNnuzbVfh3e7gh/BmM179nYI=
github.com/tklauser/numcpus v0.11.0 h1:nSTwhKH5e1dMNsCdVBukSZrURJRoHbSEQjdEbY+9RXw=
github.com/tklauser/numcpus v0.11.0/go.mod h1:z+LwcLq54uWZTX0u/bGobaV34u6V7KNlTZejzM6/3MQ=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/urfave/cli/v2 v2.27.7 h1:bH59vdhbjLv3LAvIu6gd0usJHgoTTPhCFib8qqOwXYU=
github.com/urfave/cli/v2 v2.27.7/go.mod h1:CyNAG/xg+iAOg0N4MPGZqVmv2rCoP267496AOXUZjA4=
github.com/urfave/cli/v3 v3.6.1 h1:j8Qq8NyUawj/7rTYdBGrxcH7A/j7/G8Q5LhWEW4G3Mo=
github.com/urfave/cli/v3 v3.6.1/go.mod h1:ysVLtOEmg2tOy6PknnYVhDoouyC/6N42TMeoMzskhso=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.5.30 h1:EqLwGAFLIzt1wpx1IPpY67DwUujF1OfzgEyDsLrN6kE=
github.com/vektah/gqlparser/v2 v2.5.30/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo=
github.com/vektah/gqlparser/v2 v2.5.31 h1:YhWGA1mfTjID7qJhd1+Vxhpk5HTgydrGU9IgkWBTJ7k=
github.com/vektah/gqlparser/v2 v2.5.31/go.mod h1:c1I28gSOVNzlfc4WuDlqU7voQnsqI6OG2amkBAFmgts=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
@ -337,8 +348,6 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
@ -349,40 +358,44 @@ github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI=
github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE=
github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
go.etcd.io/bbolt v1.4.2 h1:IrUHp260R8c+zYx/Tm8QZr04CX+qWS5PGfPdevhdm1I=
go.etcd.io/bbolt v1.4.2/go.mod h1:Is8rSHO/b4f3XigBC0lL0+4FwAQv3HXEEIgFMuKHceM=
go.etcd.io/bbolt v1.4.3 h1:dEadXpI6G79deX5prL3QRNP6JB8UxVkqo4UPnHaNXJo=
go.etcd.io/bbolt v1.4.3/go.mod h1:tKQlpPaYCVFctUIgFKFnAlvbmB3tpy1vkTnDWohtc0E=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
go.uber.org/zap v1.27.1 h1:08RqriUEv8+ArZRYSTXy1LeBScaMpVSTBhCeaZYfMYc=
go.uber.org/zap v1.27.1/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
go.uber.org/zap/exp v0.3.0 h1:6JYzdifzYkGmTdRR59oYH+Ng7k49H9qVpWwNSsGJj3U=
go.uber.org/zap/exp v0.3.0/go.mod h1:5I384qq7XGxYyByIhHm6jg5CHkGY0nsTfbDLgDDlgJQ=
go.yaml.in/yaml/v2 v2.4.3 h1:6gvOSjQoTB3vt1l+CU+tSyi/HOjfOjRLJ4YwYZGwRO0=
go.yaml.in/yaml/v2 v2.4.3/go.mod h1:zSxWcmIDjOzPXpjlTTbAsKokqkDNAVtZO0WOMiT90s8=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w=
golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww=
golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/mod v0.30.0 h1:fDEXFVZ/fmCKProc/yAXXUijritrDzahmwwefnjoPFk=
golang.org/x/mod v0.30.0/go.mod h1:lAsf5O2EvJeSFMiBxXDki7sCgAxEUcZHXoXMKT4GJKc=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY=
golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I=
golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -404,18 +417,18 @@ golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM=
golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
golang.org/x/tools v0.39.0 h1:ik4ho21kwuQln40uelmciQPp9SipgNDdrafrYA4TmQQ=
golang.org/x/tools v0.39.0/go.mod h1:JnefbkDPyD8UU2kI5fuf8ZX4/yUeh9W877ZeBONxUqQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
@ -425,8 +438,8 @@ google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miE
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
google.golang.org/protobuf v1.36.10 h1:AYd7cD/uASjIL6Q9LiTjz8JLcrh/88q5UObnmY3aOOE=
google.golang.org/protobuf v1.36.10/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

View File

@ -1,3 +1,4 @@
# top-most EditorConfig file
root = true
[*]
@ -11,8 +12,15 @@ indent_size = 4
[*.{go,gotpl}]
indent_style = tab
[*.yml]
indent_size = 2
# Ignore yaml https://learn.microsoft.com/en-us/visualstudio/code-quality/use-roslyn-analyzers?view=vs-2022
[*.{yaml,yml,yml.j2,yaml.j2}]
generated_code = true
# charset = unset
# end_of_line = unset
# insert_final_newline = unset
# trim_trailing_whitespace = unset
# indent_style = unset
# indent_size = unset
# These often end up with go code inside, so lets keep tabs
[*.{html,md}]

View File

@ -1,13 +1,50 @@
# All settings can be found here https://github.com/golangci/golangci-lint/blob/HEAD/.golangci.reference.yml
version: "2"
run:
concurrency: 8
modules-download-mode: readonly
issues-exit-code: 1
tests: true
allow-parallel-runners: false
issues:
# Maximum count of issues with the same text.
# Set to 0 to disable.
# Default: 3
max-issues-per-linter: 0
max-same-issues: 0
new: false
formatters:
exclusions:
paths:
- codegen/testserver/followschema/resolver.go
- codegen/testserver/singlefile/resolver.go
- codegen/testserver/usefunctionsyntaxforexecutioncontext/resolver.go
- generated
enable:
- golines
- gofumpt
- gci
settings:
gci:
sections:
- standard
- default
- prefix(github.com/99designs/gqlgen)
golines:
# Target maximum line length.
# Default: 100
max-len: 100
linters:
default: none
enable:
- asasalint
- asciicheck
- bidichk
- bodyclose
- copyloopvar
- dupl
- dupword
- durationcheck
- errcheck
- gocritic
- govet
@ -17,17 +54,28 @@ linters:
- nolintlint
- perfsprint
- prealloc
- reassign
- revive
- staticcheck
- testableexamples
- testifylint
- unconvert
- unparam
- unused
- usestdlibvars
- usetesting
- wastedassign
settings:
errcheck:
exclude-functions:
- (io.Writer).Write
- (http.ResponseWriter).Write
- (*bytes.Buffer).WriteByte
- (*strings.Builder).WriteByte
- (*strings.Builder).WriteString
- io.Copy
- io.WriteString
- fmt.Fprintln
gocritic:
enabled-checks:
- emptyStringTest
@ -136,14 +184,4 @@ linters:
- third_party$
- builtin$
- examples$
formatters:
enable:
- gofmt
- goimports
exclusions:
generated: lax
paths:
- bin
- third_party$
- builtin$
- examples$
- generated$

View File

@ -22,18 +22,16 @@ cd example
go mod init example
```
2. Add `github.com/99designs/gqlgen` to your [project's tools.go](https://go.dev/wiki/Modules#how-can-i-track-tool-dependencies-for-a-module)
2. Add `github.com/99designs/gqlgen` to your project, as a [tool dependency](https://go.dev/doc/modules/managing-dependencies#tools)
```shell
printf '//go:build tools\npackage tools\nimport (_ "github.com/99designs/gqlgen"\n _ "github.com/99designs/gqlgen/graphql/introspection")' | gofmt > tools.go
go mod tidy
go get -tool github.com/99designs/gqlgen
```
3. Initialise gqlgen config and generate models
```shell
go run github.com/99designs/gqlgen init
go mod tidy
go tool gqlgen init
```
4. Start the graphql server

View File

@ -16,8 +16,12 @@ import (
)
var (
urlRegex = regexp.MustCompile(`(?s)@link.*\(.*url:\s*?"(.*?)"[^)]+\)`) // regex to grab the url of a link directive, should it exist
versionRegex = regexp.MustCompile(`v(\d+).(\d+)$`) // regex to grab the version number from a url
urlRegex = regexp.MustCompile(
`(?s)@link.*\(.*url:\s*?"(.*?)"[^)]+\)`,
) // regex to grab the url of a link directive, should it exist
versionRegex = regexp.MustCompile(
`v(\d+).(\d+)$`,
) // regex to grab the version number from a url
)
func Generate(cfg *config.Config, option ...Option) error {
@ -33,7 +37,8 @@ func Generate(cfg *config.Config, option ...Option) error {
plugins = append(plugins, resolvergen.New())
if cfg.Federation.IsDefined() {
if cfg.Federation.Version == 0 { // default to using the user's choice of version, but if unset, try to sort out which federation version to use
// check the sources, and if one is marked as federation v2, we mark the entirety to be generated using that format
// check the sources, and if one is marked as federation v2, we mark the entirety to be
// generated using that format
for _, v := range cfg.Sources {
cfg.Federation.Version = 1
urlString := urlRegex.FindStringSubmatch(v.Input)
@ -63,6 +68,7 @@ func Generate(cfg *config.Config, option ...Option) error {
}
for _, p := range plugins {
//nolint:staticcheck // for backwards compatibility only
if inj, ok := p.(plugin.EarlySourceInjector); ok {
if s := inj.InjectSourceEarly(); s != nil {
cfg.Sources = append(cfg.Sources, s)
@ -101,6 +107,11 @@ func Generate(cfg *config.Config, option ...Option) error {
return fmt.Errorf("failed to load schema: %w", err)
}
codegen.ClearInlineArgsMetadata()
if err := codegen.ExpandInlineArguments(cfg.Schema); err != nil {
return fmt.Errorf("failed to expand inline arguments: %w", err)
}
if err := cfg.Init(); err != nil {
return fmt.Errorf("generating core failed: %w", err)
}
@ -122,6 +133,7 @@ func Generate(cfg *config.Config, option ...Option) error {
}
}
}
// Merge again now that the generated models have been injected into the typemap
dataPlugins := make([]any, len(plugins))
for index := range plugins {

View File

@ -31,7 +31,8 @@ type FieldArgument struct {
func (f *FieldArgument) ImplDirectives() []*Directive {
d := make([]*Directive, 0)
for i := range f.Directives {
if !f.Directives[i].SkipRuntime && f.Directives[i].IsLocation(ast.LocationArgumentDefinition) {
if !f.Directives[i].SkipRuntime &&
f.Directives[i].IsLocation(ast.LocationArgumentDefinition) {
d = append(d, f.Directives[i])
}
}
@ -76,7 +77,11 @@ func (b *builder) buildArg(obj *Object, arg *ast.ArgumentDefinition) (*FieldArgu
return &newArg, nil
}
func (b *builder) bindArgs(field *Field, sig *types.Signature, params *types.Tuple) ([]*FieldArgument, error) {
func (b *builder) bindArgs(
field *Field,
sig *types.Signature,
params *types.Tuple,
) ([]*FieldArgument, error) {
n := params.Len()
newArgs := make([]*FieldArgument, 0, len(field.Args))
// Accept variadic methods (i.e. have optional parameters).

View File

@ -10,10 +10,18 @@ func (ec *executionContext) {{ $name }}(ctx context.Context, rawArgs map[string]
args := map[string]any{}
{{- range $i, $arg := . }}
{{ if $useFunctionSyntaxForExecutionContext -}}
arg{{$i}}, err := {{ $name }}{{$arg.Name | go}}(ctx, ec, rawArgs)
{{ if $arg.ImplDirectives }}
{{ if $useFunctionSyntaxForExecutionContext -}}
arg{{$i}}, err := {{ $name }}{{$arg.Name | go}}(ctx, ec, rawArgs)
{{- else -}}
arg{{$i}}, err := ec.{{ $name }}{{$arg.Name | go}}(ctx, rawArgs)
{{- end }}
{{- else -}}
arg{{$i}}, err := ec.{{ $name }}{{$arg.Name | go}}(ctx, rawArgs)
{{ if $useFunctionSyntaxForExecutionContext -}}
arg{{$i}}, err := graphql.ProcessArgFieldWithEC(ctx, ec, rawArgs, {{$arg.Name|quote}}, {{ $arg.TypeReference.UnmarshalFunc }})
{{- else -}}
arg{{$i}}, err := graphql.ProcessArgField(ctx, rawArgs, {{$arg.Name|quote}}, ec.{{ $arg.TypeReference.UnmarshalFunc }})
{{- end }}
{{- end }}
if err != nil {
return nil, err
@ -24,6 +32,9 @@ func (ec *executionContext) {{ $name }}(ctx context.Context, rawArgs map[string]
}
{{- range $i, $arg := . }}
{{ if not $arg.ImplDirectives -}}
{{- continue -}}
{{- end }}
{{ if $useFunctionSyntaxForExecutionContext -}}
func {{ $name }}{{$arg.Name | go}}(
ctx context.Context,

View File

@ -175,9 +175,10 @@ func indexDefs(pkg *packages.Package) map[string]types.Object {
}
if _, ok := res[astNode.Name]; !ok {
// The above check may not be really needed, it is only here to have a consistent behavior with
// previous implementation of FindObject() function which only honored the first inclusion of a def.
// If this is still needed, we can consider something like sync.Map.LoadOrStore() to avoid two lookups.
// The above check may not be really needed, it is only here to have a consistent
// behavior with previous implementation of FindObject() function which only honored the
// first inclusion of a def. If this is still needed, we can consider something like
// sync.Map.LoadOrStore() to avoid two lookups.
res[astNode.Name] = def
}
}
@ -192,7 +193,8 @@ func (b *Binder) PointerTo(ref *TypeReference) *TypeReference {
return &newRef
}
// TypeReference is used by args and field types. The Definition can refer to both input and output types.
// TypeReference is used by args and field types. The Definition can refer to both input and output
// types.
type TypeReference struct {
Definition *ast.Definition
GQL *ast.Type
@ -230,7 +232,8 @@ func (ref *TypeReference) IsPtr() bool {
return isPtr
}
// fix for https://github.com/golang/go/issues/31103 may make it possible to remove this (may still be useful)
// fix for https://github.com/golang/go/issues/31103 may make it possible to remove this (may still
// be useful)
func (ref *TypeReference) IsPtrToPtr() bool {
if p, isPtr := ref.GO.(*types.Pointer); isPtr {
_, isPtr := p.Elem().(*types.Pointer)
@ -293,7 +296,9 @@ func (ref *TypeReference) UniquenessKey() string {
// Fix for #896
elemNullability = "ᚄ"
}
return nullability + ref.Definition.Name + "2" + templates.TypeIdentifier(ref.GO) + elemNullability
return nullability + ref.Definition.Name + "2" + templates.TypeIdentifier(
ref.GO,
) + elemNullability
}
func (ref *TypeReference) MarshalFunc() string {
@ -370,7 +375,10 @@ func unwrapOmittable(t types.Type) (types.Type, bool) {
return named.TypeArgs().At(0), true
}
func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret *TypeReference, err error) {
func (b *Binder) TypeReference(
schemaType *ast.Type,
bindTarget types.Type,
) (ret *TypeReference, err error) {
if bindTarget != nil {
bindTarget = code.Unalias(bindTarget)
}
@ -464,7 +472,8 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
ref.GO = t
ref.IsMarshaler = true
} else if underlying := basicUnderlying(t); def.IsLeafType() && underlying != nil && underlying.Kind() == types.String {
// TODO delete before v1. Backwards compatibility case for named types wrapping strings (see #595)
// TODO delete before v1. Backwards compatibility case for named types wrapping strings
// (see #595)
ref.GO = t
ref.CastType = underlying
@ -485,8 +494,11 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
if bindTarget != nil {
if err = code.CompatibleTypes(ref.GO, bindTarget); err != nil {
// if the bind type implements the graphql.ContextMarshaler/graphql.ContextUnmarshaler/graphql.Marshaler/graphql.Unmarshaler interface, we can use it
if hasMethod(bindTarget, "MarshalGQLContext") && hasMethod(bindTarget, "UnmarshalGQLContext") {
// if the bind type implements the
// graphql.ContextMarshaler/graphql.ContextUnmarshaler/graphql.Marshaler/graphql.Unmarshaler
// interface, we can use it
if hasMethod(bindTarget, "MarshalGQLContext") &&
hasMethod(bindTarget, "UnmarshalGQLContext") {
ref.IsContext = true
ref.IsMarshaler = true
ref.Marshaler = nil
@ -523,7 +535,8 @@ func (b *Binder) CopyModifiersFromAst(t *ast.Type, base types.Type) types.Type {
base = types.Unalias(base)
if t.Elem != nil {
child := b.CopyModifiersFromAst(t.Elem, base)
if _, isStruct := child.Underlying().(*types.Struct); isStruct && !b.cfg.OmitSliceElementPointers {
if _, isStruct := child.Underlying().(*types.Struct); isStruct &&
!b.cfg.OmitSliceElementPointers {
child = types.NewPointer(child)
}
return types.NewSlice(child)
@ -613,7 +626,11 @@ func (b *Binder) enumValues(def *ast.Definition) map[string]EnumValue {
return model.EnumValues
}
func (b *Binder) enumReference(ref *TypeReference, obj types.Object, values map[string]EnumValue) error {
func (b *Binder) enumReference(
ref *TypeReference,
obj types.Object,
values map[string]EnumValue,
) error {
if len(ref.Definition.EnumValues) != len(values) {
return fmt.Errorf("not all enum values are binded for %v", ref.Definition.Name)
}
@ -637,7 +654,11 @@ func (b *Binder) enumReference(ref *TypeReference, obj types.Object, values map[
for _, value := range ref.Definition.EnumValues {
v, ok := values[value.Name]
if !ok {
return fmt.Errorf("enum value not found for: %v, of enum: %v", value.Name, ref.Definition.Name)
return fmt.Errorf(
"enum value not found for: %v, of enum: %v",
value.Name,
ref.Definition.Name,
)
}
pkgName, typeName := code.PkgAndType(v.Value)

View File

@ -12,10 +12,10 @@ import (
"sort"
"strings"
"github.com/goccy/go-yaml"
"github.com/vektah/gqlparser/v2"
"github.com/vektah/gqlparser/v2/ast"
"golang.org/x/tools/go/packages"
"gopkg.in/yaml.v3"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/99designs/gqlgen/internal/code"
@ -30,6 +30,7 @@ type Config struct {
AutoBind []string `yaml:"autobind"`
Models TypeMap `yaml:"models,omitempty"`
StructTag string `yaml:"struct_tag,omitempty"`
EmbeddedStructsPrefix string `yaml:"embedded_structs_prefix,omitempty"`
Directives map[string]DirectiveConfig `yaml:"directives,omitempty"`
LocalPrefix string `yaml:"local_prefix,omitempty"`
GoBuildTags StringList `yaml:"go_build_tags,omitempty"`
@ -68,8 +69,9 @@ type Config struct {
var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
// templatePackageNames is a list of packages names that the default templates use, in order to preload those for performance considerations
// any additional package added to the base templates should be added here to improve performance and load all packages in bulk
// templatePackageNames is a list of packages names that the default templates use, in order to
// preload those for performance considerations any additional package added to the base templates
// should be added here to improve performance and load all packages in bulk
var templatePackageNames = []string{
"context", "fmt", "io", "strconv", "time", "sync", "strings", "sync/atomic", "embed", "golang.org/x/sync/semaphore",
"errors", "bytes", "github.com/vektah/gqlparser/v2", "github.com/vektah/gqlparser/v2/ast",
@ -90,6 +92,7 @@ func DefaultConfig() *Config {
ResolversAlwaysReturnPointers: true,
NullableInputOmittable: false,
EnableModelJsonOmitzeroTag: &falseValue,
EmbeddedStructsPrefix: "Base",
}
}
@ -106,13 +109,17 @@ func LoadDefaultConfig() (*Config, error) {
return nil, fmt.Errorf("unable to open schema: %w", err)
}
config.Sources = append(config.Sources, &ast.Source{Name: filename, Input: string(schemaRaw)})
config.Sources = append(
config.Sources,
&ast.Source{Name: filename, Input: string(schemaRaw)},
)
}
return config, nil
}
// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent directories
// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent
// directories
// walking up the tree. The closest config file will be returned.
func LoadConfigFromDefaultLocations() (*Config, error) {
cfgFile, err := findCfg()
@ -144,11 +151,17 @@ func LoadConfig(filename string) (*Config, error) {
return ReadConfig(bytes.NewReader(b))
}
func ReadConfig(cfgFile io.Reader) (*Config, error) {
func ReadConfig(cfgFile io.Reader) (cfg *Config, err error) {
defer func() {
if r := recover(); r != nil {
cfg = nil
err = fmt.Errorf("unable to parse config: panic during decode: %v", r)
}
}()
config := DefaultConfig()
dec := yaml.NewDecoder(cfgFile)
dec.KnownFields(true)
dec := yaml.NewDecoder(cfgFile, yaml.DisallowUnknownField())
if err := dec.Decode(config); err != nil {
return nil, fmt.Errorf("unable to parse config: %w", err)
@ -189,20 +202,24 @@ func CompleteConfig(config *Config) error {
pathParts := strings.SplitN(f, "**", 2)
rest := strings.TrimPrefix(strings.TrimPrefix(pathParts[1], `\`), `/`)
// turn the rest of the glob into a regex, anchored only at the end because ** allows
// for any number of dirs in between and walk will let us match against the full path name
// for any number of dirs in between and walk will let us match against the full path
// name
globRe := regexp.MustCompile(path2regex.Replace(rest) + `$`)
if err := filepath.Walk(pathParts[0], func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if err := filepath.Walk(
pathParts[0],
func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if globRe.MatchString(strings.TrimPrefix(path, pathParts[0])) {
matches = append(matches, path)
}
if globRe.MatchString(strings.TrimPrefix(path, pathParts[0])) {
matches = append(matches, path)
}
return nil
}); err != nil {
return nil
},
); err != nil {
return fmt.Errorf("failed to walk schema at root %s: %w", pathParts[0], err)
}
} else {
@ -230,7 +247,10 @@ func CompleteConfig(config *Config) error {
return fmt.Errorf("unable to open schema: %w", err)
}
config.Sources = append(config.Sources, &ast.Source{Name: filename, Input: string(schemaRaw)})
config.Sources = append(
config.Sources,
&ast.Source{Name: filename, Input: string(schemaRaw)},
)
}
config.GoInitialisms.setInitialisms()
@ -296,7 +316,7 @@ func (c *Config) IsRoot(def *ast.Definition) bool {
}
func (c *Config) injectTypesFromSchema() error {
for _, d := range []string{"goModel", "goExtraField", "goField", "goTag", "goEnum"} {
for _, d := range []string{"goModel", "goExtraField", "goField", "goTag", "goEnum", "inlineArguments"} {
c.Directives[d] = DirectiveConfig{SkipRuntime: true}
}
@ -423,7 +443,10 @@ func (c *Config) injectTypesFromSchema() error {
if extraFieldName == "" {
// Embeddable fields
typeMapEntry.EmbedExtraFields = append(typeMapEntry.EmbedExtraFields, extraField)
typeMapEntry.EmbedExtraFields = append(
typeMapEntry.EmbedExtraFields,
extraField,
)
} else {
// Regular fields
if typeMapEntry.ExtraFields == nil {
@ -599,34 +622,43 @@ func (c *Config) check() error {
if err := c.Resolver.Check(); err != nil {
return fmt.Errorf("config.resolver: %w", err)
}
fileList[c.Resolver.ImportPath()] = append(fileList[c.Resolver.ImportPath()], FilenamePackage{
Filename: c.Resolver.Filename,
Package: c.Resolver.Package,
Declaree: "resolver",
})
fileList[c.Resolver.ImportPath()] = append(
fileList[c.Resolver.ImportPath()],
FilenamePackage{
Filename: c.Resolver.Filename,
Package: c.Resolver.Package,
Declaree: "resolver",
},
)
}
if c.Federation.IsDefined() {
if err := c.Federation.Check(); err != nil {
return fmt.Errorf("config.federation: %w", err)
}
fileList[c.Federation.ImportPath()] = append(fileList[c.Federation.ImportPath()], FilenamePackage{
Filename: c.Federation.Filename,
Package: c.Federation.Package,
Declaree: "federation",
})
fileList[c.Federation.ImportPath()] = append(
fileList[c.Federation.ImportPath()],
FilenamePackage{
Filename: c.Federation.Filename,
Package: c.Federation.Package,
Declaree: "federation",
},
)
if c.Federation.ImportPath() != c.Exec.ImportPath() {
return errors.New("federation and exec must be in the same package")
}
}
if c.Federated {
return errors.New("federated has been removed, instead use\nfederation:\n filename: path/to/federated.go")
return errors.New(
"federated has been removed, instead use\nfederation:\n filename: path/to/federated.go",
)
}
for importPath, pkg := range fileList {
for _, file1 := range pkg {
for _, file2 := range pkg {
if file1.Package != file2.Package {
return fmt.Errorf("%s and %s define the same import path (%s) with different package names (%s vs %s)",
return fmt.Errorf(
"%s and %s define the same import path (%s) with different package names (%s vs %s)",
file1.Declaree,
file2.Declaree,
importPath,
@ -657,14 +689,22 @@ func (tm TypeMap) Check() error {
for typeName, entry := range tm {
for _, model := range entry.Model {
if strings.LastIndex(model, ".") < strings.LastIndex(model, "/") {
return fmt.Errorf("model %s: invalid type specifier \"%s\" - you need to specify a struct to map to", typeName, entry.Model)
return fmt.Errorf(
"model %s: invalid type specifier \"%s\" - you need to specify a struct to map to",
typeName,
entry.Model,
)
}
}
if len(entry.Model) == 0 {
for enum, v := range entry.EnumValues {
if v.Value != "" {
return fmt.Errorf("model is empty for: %v, but enum value is specified for %v", typeName, enum)
return fmt.Errorf(
"model is empty for: %v, but enum value is specified for %v",
typeName,
enum,
)
}
}
}
@ -719,7 +759,8 @@ type DirectiveConfig struct {
// The function implemmentation should be provided here as a string.
//
// The function should have the following signature:
// func(ctx context.Context, obj any, next graphql.Resolver[, directive arguments if any]) (res any, err error)
// func(ctx context.Context, obj any, next graphql.Resolver[, directive arguments if any]) (res
// any, err error)
Implementation *string
}
@ -779,7 +820,10 @@ func (c *Config) autobind() error {
for i, p := range ps {
if p == nil || p.Module == nil {
return fmt.Errorf("unable to load %s - make sure you're using an import path to a package that exists", c.AutoBind[i])
return fmt.Errorf(
"unable to load %s - make sure you're using an import path to a package that exists",
c.AutoBind[i],
)
}
autobindType := c.lookupAutobindType(p, t)
@ -831,17 +875,31 @@ func (c *Config) lookupAutobindType(p *packages.Package, schemaType *ast.Definit
func (c *Config) injectBuiltins() {
builtins := TypeMap{
"__Directive": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Directive"}},
"__Directive": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Directive"},
},
"__DirectiveLocation": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
"__Type": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Type"}},
"__TypeKind": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
"__Field": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Field"}},
"__EnumValue": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.EnumValue"}},
"__InputValue": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.InputValue"}},
"__Schema": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Schema"}},
"Float": {Model: StringList{"github.com/99designs/gqlgen/graphql.FloatContext"}},
"String": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
"Boolean": {Model: StringList{"github.com/99designs/gqlgen/graphql.Boolean"}},
"__Type": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Type"},
},
"__TypeKind": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
"__Field": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Field"},
},
"__EnumValue": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.EnumValue"},
},
"__InputValue": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.InputValue"},
},
"__Schema": {
Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Schema"},
},
"Float": {
Model: StringList{"github.com/99designs/gqlgen/graphql.FloatContext"},
},
"String": {Model: StringList{"github.com/99designs/gqlgen/graphql.String"}},
"Boolean": {Model: StringList{"github.com/99designs/gqlgen/graphql.Boolean"}},
"Int": {
// FIXME: using int / int64 for Int is not spec compliant and introduces
// security risks. We should default to int32.
@ -880,7 +938,8 @@ func (c *Config) injectBuiltins() {
}
for typeName, entry := range extraBuiltins {
if t, ok := c.Schema.Types[typeName]; !c.Models.Exists(typeName) && ok && t.Kind == ast.Scalar {
if t, ok := c.Schema.Types[typeName]; !c.Models.Exists(typeName) && ok &&
t.Kind == ast.Scalar {
c.Models[typeName] = entry
}
}

View File

@ -33,7 +33,8 @@ type ExecLayout string
var (
// Write all generated code to a single file.
ExecLayoutSingleFile ExecLayout = "single-file"
// Write generated code to a directory, generating one Go source file for each GraphQL schema file.
// Write generated code to a directory, generating one Go source file for each GraphQL schema
// file.
ExecLayoutFollowSchema ExecLayout = "follow-schema"
)
@ -48,7 +49,9 @@ func (r *ExecConfig) Check() error {
return errors.New("filename must be specified when using single-file layout")
}
if !strings.HasSuffix(r.Filename, ".go") {
return errors.New("filename should be path to a go source file when using single-file layout")
return errors.New(
"filename should be path to a go source file when using single-file layout",
)
}
r.Filename = abs(r.Filename)
case ExecLayoutFollowSchema:
@ -61,7 +64,9 @@ func (r *ExecConfig) Check() error {
}
if strings.ContainsAny(r.Package, "./\\") {
return errors.New("package should be the output package name only, do not include the output filename")
return errors.New(
"package should be the output package name only, do not include the output filename",
)
}
if r.Package == "" && r.Dir() != "" {

View File

@ -6,7 +6,8 @@ import (
"github.com/99designs/gqlgen/codegen/templates"
)
// GoInitialismsConfig allows to modify the default behavior of naming Go methods, types and properties
// GoInitialismsConfig allows to modify the default behavior of naming Go methods, types and
// properties
type GoInitialismsConfig struct {
// If true, the Initialisms won't get appended to the default ones but replace them
ReplaceDefaults bool `yaml:"replace_defaults"`

View File

@ -44,7 +44,9 @@ func (c *PackageConfig) IsDefined() bool {
func (c *PackageConfig) Check() error {
if strings.ContainsAny(c.Package, "./\\") {
return errors.New("package should be the output package name only, do not include the output filename")
return errors.New(
"package should be the output package name only, do not include the output filename",
)
}
if c.Filename == "" {
return errors.New("filename must be specified")

View File

@ -43,7 +43,10 @@ func (r *ResolverConfig) Check() error {
return fmt.Errorf("filename must be specified with layout=%s", r.Layout)
}
if !strings.HasSuffix(r.Filename, ".go") {
return fmt.Errorf("filename should be path to a go source file with layout=%s", r.Layout)
return fmt.Errorf(
"filename should be path to a go source file with layout=%s",
r.Layout,
)
}
r.Filename = abs(r.Filename)
case LayoutFollowSchema:
@ -57,11 +60,18 @@ func (r *ResolverConfig) Check() error {
r.Filename = abs(r.Filename)
}
default:
return fmt.Errorf("invalid layout %s. must be %s or %s", r.Layout, LayoutSingleFile, LayoutFollowSchema)
return fmt.Errorf(
"invalid layout %s. must be %s or %s",
r.Layout,
LayoutSingleFile,
LayoutFollowSchema,
)
}
if strings.ContainsAny(r.Package, "./\\") {
return errors.New("package should be the output package name only, do not include the output filename")
return errors.New(
"package should be the output package name only, do not include the output filename",
)
}
if r.Package == "" && r.Dir() != "" {

View File

@ -13,7 +13,8 @@ import (
"github.com/99designs/gqlgen/codegen/config"
)
// Data is a unified model of the code to be generated. Plugins may modify this structure to do things like implement
// Data is a unified model of the code to be generated. Plugins may modify this structure to do
// things like implement
// resolvers or directives automatically (eg grpc, validation)
type Data struct {
Config *config.Config
@ -48,7 +49,8 @@ func (d *Data) HasEmbeddableSources() bool {
return hasEmbeddableSources
}
// AugmentedSource contains extra information about graphql schema files which is not known directly from the Config.Sources data
// AugmentedSource contains extra information about graphql schema files which is not known directly
// from the Config.Sources data
type AugmentedSource struct {
// path relative to Config.Exec.Filename
RelativePath string
@ -103,7 +105,6 @@ func (d *Data) Directives() DirectiveList {
}
func BuildData(cfg *config.Config, plugins ...any) (*Data, error) {
// We reload all packages to allow packages to be compared correctly.
cfg.ReloadAllPackages()
b := builder{
@ -195,10 +196,18 @@ func BuildData(cfg *config.Config, plugins ...any) (*Data, error) {
}
// otherwise show a generic error message
return nil, errors.New("invalid types were encountered while traversing the go source code, this probably means the invalid code generated isnt correct. add try adding -v to debug")
return nil, errors.New(
"invalid types were encountered while traversing the go source code, this probably means the invalid code generated isnt correct. add try adding -v to debug",
)
}
var sources []*ast.Source
sources, err = SerializeTransformedSchema(cfg.Schema, cfg.Sources)
if err != nil {
return nil, fmt.Errorf("failed to serialize transformed schema: %w", err)
}
aSources := []AugmentedSource{}
for _, s := range cfg.Sources {
for _, s := range sources {
wd, err := os.Getwd()
if err != nil {
return nil, fmt.Errorf("failed to get working directory: %w", err)
@ -207,7 +216,12 @@ func BuildData(cfg *config.Config, plugins ...any) (*Data, error) {
sourcePath := filepath.Join(wd, s.Name)
relative, err := filepath.Rel(outputDir, sourcePath)
if err != nil {
return nil, fmt.Errorf("failed to compute path of %s relative to %s: %w", sourcePath, outputDir, err)
return nil, fmt.Errorf(
"failed to compute path of %s relative to %s: %w",
sourcePath,
outputDir,
err,
)
}
relative = filepath.ToSlash(relative)
embeddable := true

View File

@ -38,7 +38,10 @@ func (d *Directive) IsLocation(location ...ast.DirectiveLocation) bool {
return false
}
func locationDirectives(directives DirectiveList, location ...ast.DirectiveLocation) map[string]*Directive {
func locationDirectives(
directives DirectiveList,
location ...ast.DirectiveLocation,
) map[string]*Directive {
mDirectives := make(map[string]*Directive)
for name, d := range directives {
if d.IsLocation(location...) {
@ -73,7 +76,12 @@ func (b *builder) buildDirectives() (map[string]*Directive, error) {
var err error
newArg.Default, err = arg.DefaultValue.Value(nil)
if err != nil {
return nil, fmt.Errorf("default value for directive argument %s(%s) is not valid: %w", dir.Name, arg.Name, err)
return nil, fmt.Errorf(
"default value for directive argument %s(%s) is not valid: %w",
dir.Name,
arg.Name,
err,
)
}
}
args = append(args, newArg)
@ -142,7 +150,14 @@ func (d *Directive) CallArgs() string {
args := []string{"ctx", "obj", "n"}
for _, arg := range d.Args {
args = append(args, fmt.Sprintf("args[%q].(%s)", arg.Name, templates.CurrentImports.LookupType(arg.TypeReference.GO)))
args = append(
args,
fmt.Sprintf(
"args[%q].(%s)",
arg.Name,
templates.CurrentImports.LookupType(arg.TypeReference.GO),
),
)
}
return strings.Join(args, ", ")
@ -170,9 +185,17 @@ func (d *Directive) CallName() string {
func (d *Directive) Declaration() string {
res := d.CallName() + " func(ctx context.Context, obj any, next graphql.Resolver"
var resSb173 strings.Builder
for _, arg := range d.Args {
res += fmt.Sprintf(", %s %s", templates.ToGoPrivate(arg.Name), templates.CurrentImports.LookupType(arg.TypeReference.GO))
resSb173.WriteString(
fmt.Sprintf(
", %s %s",
templates.ToGoPrivate(arg.Name),
templates.CurrentImports.LookupType(arg.TypeReference.GO),
),
)
}
res += resSb173.String()
res += ") (res any, err error)"
return res

View File

@ -73,7 +73,7 @@
if data, ok := tmp.(graphql.Marshaler); ok {
return data
}
ec.Errorf(ctx, `unexpected type %T from directive, should be graphql.Marshaler`, tmp)
graphql.AddErrorf(ctx, `unexpected type %T from directive, should be graphql.Marshaler`, tmp)
return graphql.Null
{{end}}
@ -147,7 +147,7 @@ func (ec *executionContext) _subscriptionMiddleware(ctx context.Context, obj *as
if data, ok := tmp.(func(ctx context.Context) graphql.Marshaler); ok {
return data
}
ec.Errorf(ctx, `unexpected type %T from directive, should be graphql.Marshaler`, tmp)
graphql.AddErrorf(ctx, `unexpected type %T from directive, should be graphql.Marshaler`, tmp)
return func(ctx context.Context) graphql.Marshaler {
return graphql.Null
}
@ -156,9 +156,9 @@ func (ec *executionContext) _subscriptionMiddleware(ctx context.Context, obj *as
{{ if .Directives.LocationDirectives "FIELD" }}
{{ if $useFunctionSyntaxForExecutionContext -}}
func _fieldMiddleware(ctx context.Context, ec *executionContext, obj any, next graphql.Resolver) any {
func _fieldMiddleware(ctx context.Context, ec *executionContext, obj any, next graphql.Resolver) graphql.Resolver {
{{- else -}}
func (ec *executionContext) _fieldMiddleware(ctx context.Context, obj any, next graphql.Resolver) any {
func (ec *executionContext) _fieldMiddleware(ctx context.Context, obj any, next graphql.Resolver) graphql.Resolver {
{{- end }}
{{- if .Directives.LocationDirectives "FIELD" }}
fc := graphql.GetFieldContext(ctx)
@ -186,11 +186,6 @@ func (ec *executionContext) _subscriptionMiddleware(ctx context.Context, obj *as
}
}
{{- end }}
res, err := ec.ResolverMiddleware(ctx, next)
if err != nil {
ec.Error(ctx, err)
return nil
}
return res
return next
}
{{ end }}

View File

@ -76,7 +76,8 @@ func (b *builder) buildField(obj *Object, field *ast.FieldDefinition) (*Field, e
log.Println(err.Error())
}
if f.IsResolver && b.Config.ResolversAlwaysReturnPointers && !f.TypeReference.IsPtr() && f.TypeReference.IsStruct() {
if f.IsResolver && b.Config.ResolversAlwaysReturnPointers && !f.TypeReference.IsPtr() &&
f.TypeReference.IsStruct() {
f.TypeReference = b.Binder.PointerTo(f.TypeReference)
}
@ -317,9 +318,13 @@ func (b *builder) findBindMethodTarget(in types.Type, name string) (types.Object
return nil, nil
}
func (b *builder) findBindMethoderTarget(methodFunc func(i int) *types.Func, methodCount int, name string) (types.Object, error) {
func (b *builder) findBindMethoderTarget(
methodFunc func(i int) *types.Func,
methodCount int,
name string,
) (types.Object, error) {
var found types.Object
for i := 0; i < methodCount; i++ {
for i := range methodCount {
method := methodFunc(i)
if !method.Exported() || !strings.EqualFold(method.Name(), name) {
continue
@ -373,7 +378,10 @@ func (b *builder) findBindEmbedsTarget(in types.Type, name string) (types.Object
return nil, nil
}
func (b *builder) findBindStructEmbedsTarget(strukt *types.Struct, name string) (types.Object, error) {
func (b *builder) findBindStructEmbedsTarget(
strukt *types.Struct,
name string,
) (types.Object, error) {
var found types.Object
for i := 0; i < strukt.NumFields(); i++ {
field := strukt.Field(i)
@ -403,7 +411,10 @@ func (b *builder) findBindStructEmbedsTarget(strukt *types.Struct, name string)
return found, nil
}
func (b *builder) findBindInterfaceEmbedsTarget(iface *types.Interface, name string) (types.Object, error) {
func (b *builder) findBindInterfaceEmbedsTarget(
iface *types.Interface,
name string,
) (types.Object, error) {
var found types.Object
for i := 0; i < iface.NumEmbeddeds(); i++ {
embeddedType := iface.EmbeddedType(i)
@ -518,6 +529,36 @@ func (f *Field) IsRoot() bool {
return f.Object.Root
}
// contains reports whether str is present in slice.
func contains(slice []string, str string) bool {
	for i := range slice {
		if slice[i] == str {
			return true
		}
	}
	return false
}
// formatGoType shortens a fully qualified Go type such as
// "github.com/foo/bar.Type" into "alias.Type" using the import aliases
// recorded in templates.CurrentImports. Inputs without a package path
// (no "/") or without a dot separator are returned unchanged; when no
// alias is registered for the package, only the bare type name is kept.
func formatGoType(goType string) string {
	if !strings.Contains(goType, "/") {
		return goType
	}
	lastDot := strings.LastIndex(goType, ".")
	if lastDot == -1 {
		return goType
	}
	packagePath, typeName := goType[:lastDot], goType[lastDot+1:]
	if alias := templates.CurrentImports.Lookup(packagePath); alias != "" {
		return alias + "." + typeName
	}
	return typeName
}
// ShortResolverDeclaration returns the resolver method signature for this
// field, delegating to ShortResolverSignature with no explicit func type.
func (f *Field) ShortResolverDeclaration() string {
	return f.ShortResolverSignature(nil)
}
@ -537,9 +578,33 @@ func (f *Field) ShortResolverSignature(ft *goast.FuncType) string {
if !f.Object.Root {
res += fmt.Sprintf(", obj %s", templates.CurrentImports.LookupType(f.Object.Reference()))
}
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.VarName, templates.CurrentImports.LookupType(arg.TypeReference.GO))
var resSb540 strings.Builder
var inlineInfo *InlineArgsInfo
if f.Object != nil && f.Object.Definition != nil {
inlineInfo = GetInlineArgsMetadata(f.Object.Name, f.Name)
}
if inlineInfo != nil {
goType := formatGoType(inlineInfo.GoType)
resSb540.WriteString(fmt.Sprintf(", %s %s", inlineInfo.OriginalArgName, goType))
for _, arg := range f.Args {
if !contains(inlineInfo.ExpandedArgs, arg.Name) {
resSb540.WriteString(
fmt.Sprintf(
", %s %s",
arg.VarName,
templates.CurrentImports.LookupType(arg.TypeReference.GO),
),
)
}
}
} else {
for _, arg := range f.Args {
resSb540.WriteString(fmt.Sprintf(", %s %s", arg.VarName, templates.CurrentImports.LookupType(arg.TypeReference.GO)))
}
}
res += resSb540.String()
result := templates.CurrentImports.LookupType(f.TypeReference.GO)
if f.Object.Stream {
@ -555,7 +620,11 @@ func (f *Field) ShortResolverSignature(ft *goast.FuncType) string {
namedE = ft.Results.List[1].Names[0].Name
}
}
res += fmt.Sprintf(") (%s %s, %s error)", namedV, result, namedE)
if namedV != "" || namedE != "" {
res += fmt.Sprintf(") (%s %s, %s error)", namedV, result, namedE)
} else {
res += fmt.Sprintf(") (%s, error)", result)
}
return res
}
@ -568,9 +637,17 @@ func (f *Field) GoResultName() (string, bool) {
func (f *Field) ComplexitySignature() string {
res := "func(childComplexity int"
var resSb571 strings.Builder
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.VarName, templates.CurrentImports.LookupType(arg.TypeReference.GO))
resSb571.WriteString(
fmt.Sprintf(
", %s %s",
arg.VarName,
templates.CurrentImports.LookupType(arg.TypeReference.GO),
),
)
}
res += resSb571.String()
res += ") int"
return res
}
@ -578,7 +655,11 @@ func (f *Field) ComplexitySignature() string {
func (f *Field) ComplexityArgs() string {
args := make([]string, len(f.Args))
for i, arg := range f.Args {
args[i] = "args[" + strconv.Quote(arg.Name) + "].(" + templates.CurrentImports.LookupType(arg.TypeReference.GO) + ")"
args[i] = "args[" + strconv.Quote(
arg.Name,
) + "].(" + templates.CurrentImports.LookupType(
arg.TypeReference.GO,
) + ")"
}
return strings.Join(args, ", ")
@ -588,7 +669,7 @@ func (f *Field) CallArgs() string {
args := make([]string, 0, len(f.Args)+2)
if f.IsResolver {
args = append(args, "rctx")
args = append(args, "ctx")
if !f.Object.Root {
args = append(args, "obj")
@ -597,21 +678,100 @@ func (f *Field) CallArgs() string {
args = append(args, "ctx")
}
for _, arg := range f.Args {
tmp := "fc.Args[" + strconv.Quote(arg.Name) + "].(" + templates.CurrentImports.LookupType(arg.TypeReference.GO) + ")"
var inlineInfo *InlineArgsInfo
if f.Object != nil && f.Object.Definition != nil {
inlineInfo = GetInlineArgsMetadata(f.Object.Name, f.Name)
}
if inlineInfo != nil {
isMap := strings.Contains(inlineInfo.GoType, "map[")
if iface, ok := arg.TypeReference.GO.(*types.Interface); ok && iface.Empty() {
tmp = fmt.Sprintf(`
var entries []string
for _, argName := range inlineInfo.ExpandedArgs {
var argRef *FieldArgument
for _, arg := range f.Args {
if arg.Name == argName {
argRef = arg
break
}
}
if argRef != nil {
goType := templates.CurrentImports.LookupType(argRef.TypeReference.GO)
var entry string
if isMap {
entry = fmt.Sprintf("%q: fc.Args[%q].(%s)", argName, argName, goType)
} else {
fieldName := templates.ToGo(argName)
entry = fmt.Sprintf("%s: fc.Args[%q].(%s)", fieldName, argName, goType)
}
entries = append(entries, entry)
}
}
goType := formatGoType(inlineInfo.GoType)
bundled := fmt.Sprintf("%s{\n\t\t%s,\n\t}", goType, strings.Join(entries, ",\n\t\t"))
args = append(args, bundled)
for _, arg := range f.Args {
if !contains(inlineInfo.ExpandedArgs, arg.Name) {
tmp := "fc.Args[" + strconv.Quote(
arg.Name,
) + "].(" + templates.CurrentImports.LookupType(
arg.TypeReference.GO,
) + ")"
if iface, ok := arg.TypeReference.GO.(*types.Interface); ok && iface.Empty() {
tmp = fmt.Sprintf(`
func () any {
if fc.Args["%s"] == nil {
return nil
}
return fc.Args["%s"].(any)
}()`, arg.Name, arg.Name,
)
}
)
}
args = append(args, tmp)
args = append(args, tmp)
}
}
} else {
for _, arg := range f.Args {
tmp := "fc.Args[" + strconv.Quote(arg.Name) + "].(" + templates.CurrentImports.LookupType(arg.TypeReference.GO) + ")"
if iface, ok := arg.TypeReference.GO.(*types.Interface); ok && iface.Empty() {
tmp = fmt.Sprintf(`
func () any {
if fc.Args["%s"] == nil {
return nil
}
return fc.Args["%s"].(any)
}()`, arg.Name, arg.Name,
)
}
args = append(args, tmp)
}
}
return strings.Join(args, ", ")
}
// StubCallArgs returns a comma-separated list of argument variable names for stub code.
func (f *Field) StubCallArgs() string {
args := make([]string, 0, len(f.Args)+2)
inlineInfo := GetInlineArgsMetadata(f.Object.Name, f.Name)
if inlineInfo != nil {
args = append(args, inlineInfo.OriginalArgName)
for _, arg := range f.Args {
if !contains(inlineInfo.ExpandedArgs, arg.Name) {
args = append(args, arg.VarName)
}
}
} else {
for _, arg := range f.Args {
args = append(args, arg.VarName)
}
}
return strings.Join(args, ", ")

View File

@ -11,24 +11,26 @@ func (ec *executionContext) _{{$object.Name}}_{{$field.Name}}(ctx context.Contex
{{- if $object.Stream }}
{{- $null = "nil" }}
{{- end }}
{{ if $useFunctionSyntaxForExecutionContext -}}
fc, err := {{ $field.FieldContextFunc }}(ctx, ec, field)
{{- else -}}
fc, err := ec.{{ $field.FieldContextFunc }}(ctx, field)
{{- end }}
if err != nil {
return {{ $null }}
}
ctx = graphql.WithFieldContext(ctx, fc)
{{- if not $.Config.OmitPanicHandler }}
defer func () {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = {{ $null }}
}
}()
{{- end }}
{{- if $field.TypeReference.IsRoot }}
{{ if $useFunctionSyntaxForExecutionContext -}}
fc, err := {{ $field.FieldContextFunc }}(ctx, ec, field)
{{- else -}}
fc, err := ec.{{ $field.FieldContextFunc }}(ctx, field)
{{- end }}
if err != nil {
return {{ $null }}
}
ctx = graphql.WithFieldContext(ctx, fc)
{{- if not $.Config.OmitPanicHandler }}
defer func () {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = {{ $null }}
}
}()
{{- end }}
{{- if $field.TypeReference.IsPtr }}
res := &{{ $field.TypeReference.Elem.GO | ref }}{}
{{- else }}
@ -41,62 +43,46 @@ func (ec *executionContext) _{{$object.Name}}_{{$field.Name}}(ctx context.Contex
return ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res)
{{- end }}
{{- else}}
{{- if $.AllDirectives.LocationDirectives "FIELD" }}
return graphql.ResolveField{{- if $object.Stream }}Stream{{- end }}(
ctx,
ec.OperationContext,
field,
{{ if $useFunctionSyntaxForExecutionContext -}}
resTmp := _fieldMiddleware(ctx, ec, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (any, error) {
func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) { return {{ $field.FieldContextFunc }}(ctx, ec, field) },
{{- else -}}
resTmp := ec._fieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (any, error) {
ec.{{ $field.FieldContextFunc }},
{{- end }}
{{ template "field" (dict "Field" $field "UseFunctionSyntaxForExecutionContext" $useFunctionSyntaxForExecutionContext) }}
})
{{ else }}
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
{{ template "field" (dict "Field" $field "UseFunctionSyntaxForExecutionContext" $useFunctionSyntaxForExecutionContext) }}
})
if err != nil {
ec.Error(ctx, err)
return {{ $null }}
}
{{- end }}
if resTmp == nil {
{{- if $field.TypeReference.GQL.NonNull }}
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
{{- end }}
return {{ $null }}
}
{{- if $object.Stream }}
return func(ctx context.Context) graphql.Marshaler {
select {
case res, ok := <-resTmp.(<-chan {{$field.TypeReference.GO | ref}}):
if !ok {
return nil
}
return graphql.WriterFunc(func(w io.Writer) {
w.Write([]byte{'{'})
graphql.MarshalString(field.Alias).MarshalGQL(w)
w.Write([]byte{':'})
func(ctx context.Context) (any, error) {
{{- template "fieldDefinition" $field }}
},
{{if or ($.AllDirectives.LocationDirectives "FIELD") $field.HasDirectives -}}
func(ctx context.Context, next graphql.Resolver) graphql.Resolver {
{{- if $field.HasDirectives -}}
directive0 := next
{{ template "implDirectives" (dict "Field" $field "UseFunctionSyntaxForExecutionContext" $useFunctionSyntaxForExecutionContext) }}
next = directive{{$field.ImplDirectives|len}}
{{end}}
{{- if $.AllDirectives.LocationDirectives "FIELD" -}}
{{ if $useFunctionSyntaxForExecutionContext -}}
{{ $field.TypeReference.MarshalFunc }}(ctx, ec, field.Selections, res).MarshalGQL(w)
return _fieldMiddleware(ctx, ec, {{if $object.Root}}nil{{else}}obj{{end}}, next)
{{- else -}}
ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res).MarshalGQL(w)
return ec._fieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, next)
{{- end }}
w.Write([]byte{'}'})
})
case <-ctx.Done():
return nil
}
}
{{- else }}
res := resTmp.({{$field.TypeReference.GO | ref}})
fc.Result = res
{{- else -}}
return next
{{end -}}
},
{{else -}}
nil,
{{end -}}
{{ if $useFunctionSyntaxForExecutionContext -}}
return {{ $field.TypeReference.MarshalFunc }}(ctx, ec, field.Selections, res)
func(ctx context.Context, selections ast.SelectionSet, v {{ $field.TypeReference.GO | ref }}) graphql.Marshaler { return {{ $field.TypeReference.MarshalFunc }}(ctx, ec, selections, v) },
{{- else -}}
return ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res)
ec.{{ $field.TypeReference.MarshalFunc }},
{{- end }}
{{- end }}
{{ not $.Config.OmitPanicHandler }},
{{ $field.TypeReference.GQL.NonNull }},
)
{{- end }}
}
@ -154,32 +140,12 @@ func (ec *executionContext) {{ $field.FieldContextFunc }}({{ if not $field.Args
{{- end }}{{- end}}
{{ define "field" }}
{{- $useFunctionSyntaxForExecutionContext := .UseFunctionSyntaxForExecutionContext -}}
{{- if .Field.HasDirectives -}}
directive0 := func(rctx context.Context) (any, error) {
ctx = rctx // use context from middleware stack in children
{{ template "fieldDefinition" .Field }}
}
{{ template "implDirectives" (dict "Field" .Field "UseFunctionSyntaxForExecutionContext" $useFunctionSyntaxForExecutionContext) }}
tmp, err := directive{{.Field.ImplDirectives|len}}(rctx)
if err != nil {
return nil, graphql.ErrorOnPath(ctx, err)
}
if tmp == nil {
return nil, nil
}
if data, ok := tmp.({{if .Field.Stream}}<-chan {{end}}{{ .Field.TypeReference.GO | ref }}) ; ok {
return data, nil
}
return nil, fmt.Errorf(`unexpected type %T from directive, should be {{if .Field.Stream}}<-chan {{end}}{{ .Field.TypeReference.GO }}`, tmp)
{{- else -}}
ctx = rctx // use context from middleware stack in children
{{ template "fieldDefinition" .Field }}
{{- end -}}
{{ end }}
{{ define "fieldDefinition" }}
{{- if or .IsResolver .IsMethod -}}
{{- if gt (len .Args) 0 -}}
fc := graphql.GetFieldContext(ctx)
{{- end }}
{{ end }}
{{- if .IsResolver -}}
return ec.resolvers.{{ .ShortInvocation }}
{{- else if .IsMap -}}

View File

@ -145,7 +145,7 @@
}
{{ end }}
return e.complexity.{{ucFirst $object.Name}}.{{$field.GoFieldName}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{ end }}), true
{{ end }}
{{- end }}
{{- end }}
{{- end }}
{{ end }}

View File

@ -0,0 +1,156 @@
package codegen
import (
"bytes"
"fmt"
"github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/formatter"
)
// InlineArgsInfo stores metadata about arguments that were inlined.
// Used during codegen to bundle expanded arguments back into a single resolver parameter.
type InlineArgsInfo struct {
	// OriginalArgName is the name of the input-object argument before expansion.
	OriginalArgName string
	// OriginalType is the GraphQL name of the input object type that was expanded.
	OriginalType string
	// OriginalASTType is the parsed AST type of the original argument.
	OriginalASTType *ast.Type
	// GoType is the Go type used to bundle the expanded arguments back together:
	// the @goModel(model:) override when present, otherwise the GraphQL type name.
	GoType string
	// ExpandedArgs lists the names of the individual arguments the expansion produced.
	ExpandedArgs []string
}
// inlineArgsMetadata maps "TypeName.FieldName" to inline args metadata.
var inlineArgsMetadata = make(map[string]*InlineArgsInfo)
// ExpandInlineArguments expands arguments marked with @inlineArguments
// and stores metadata for later codegen phase.
//
// For every field argument carrying the directive, the argument's input
// object type is replaced by one argument per input-object field, and an
// InlineArgsInfo record is stored under "TypeName.FieldName" so the later
// codegen phase can bundle the expanded arguments back into the original
// Go type. Returns an error when the annotated type is missing from the
// schema or is not an INPUT_OBJECT.
func ExpandInlineArguments(schema *ast.Schema) error {
	for typeName, typeDef := range schema.Types {
		// Only object and interface types declare field arguments.
		if typeDef.Kind != ast.Object && typeDef.Kind != ast.Interface {
			continue
		}
		for _, field := range typeDef.Fields {
			// expandedByIndex maps the position of an inlined argument to
			// the argument list that replaces it, avoiding the parallel
			// index-slice search of a naive implementation.
			expandedByIndex := make(map[int][]*ast.ArgumentDefinition)
			for i, arg := range field.Arguments {
				if arg.Directives.ForName("inlineArguments") == nil {
					continue
				}
				argTypeName := arg.Type.Name()
				inputType := schema.Types[argTypeName]
				if inputType == nil {
					return fmt.Errorf(
						"@inlineArguments on %s.%s(%s): type %s not found in schema",
						typeName, field.Name, arg.Name, argTypeName,
					)
				}
				if inputType.Kind != ast.InputObject {
					return fmt.Errorf(
						"@inlineArguments on %s.%s(%s): type %s must be an INPUT_OBJECT (input types only), got %s. The directive can only expand input object types into individual arguments",
						typeName,
						field.Name,
						arg.Name,
						argTypeName,
						inputType.Kind,
					)
				}
				// Build one argument per input-object field, preserving
				// description, default, directives, and source position.
				expanded := make([]*ast.ArgumentDefinition, 0, len(inputType.Fields))
				expandedNames := make([]string, 0, len(inputType.Fields))
				for _, inputField := range inputType.Fields {
					expanded = append(expanded, &ast.ArgumentDefinition{
						Name:         inputField.Name,
						Type:         inputField.Type,
						Description:  inputField.Description,
						DefaultValue: inputField.DefaultValue,
						Directives:   inputField.Directives,
						Position:     inputField.Position,
					})
					expandedNames = append(expandedNames, inputField.Name)
				}
				// Default to the GraphQL type name; honor an explicit
				// @goModel(model: ...) override when it resolves to a string.
				goType := argTypeName
				if goModelDir := inputType.Directives.ForName("goModel"); goModelDir != nil {
					if modelArg := goModelDir.Arguments.ForName("model"); modelArg != nil {
						if modelValue, err := modelArg.Value.Value(nil); err == nil {
							// Comma-ok guard: a non-string model value (e.g.
							// a list form of @goModel) must not panic here.
							if s, ok := modelValue.(string); ok {
								goType = s
							}
						}
					}
				}
				// NOTE: keyed per field — if a field inlines more than one
				// argument, the last one wins (matches existing behavior).
				key := fmt.Sprintf("%s.%s", typeName, field.Name)
				inlineArgsMetadata[key] = &InlineArgsInfo{
					OriginalArgName: arg.Name,
					OriginalType:    argTypeName,
					OriginalASTType: arg.Type,
					GoType:          goType,
					ExpandedArgs:    expandedNames,
				}
				expandedByIndex[i] = expanded
			}
			if len(expandedByIndex) > 0 {
				// Rebuild the argument list, splicing each expansion in at
				// the position of the original inlined argument.
				var newArgs ast.ArgumentDefinitionList
				for i, arg := range field.Arguments {
					if expanded, ok := expandedByIndex[i]; ok {
						newArgs = append(newArgs, expanded...)
					} else {
						newArgs = append(newArgs, arg)
					}
				}
				field.Arguments = newArgs
			}
		}
	}
	return nil
}
// GetInlineArgsMetadata retrieves metadata for a given type and field.
// It returns nil when no @inlineArguments expansion was recorded for the pair.
func GetInlineArgsMetadata(typeName, fieldName string) *InlineArgsInfo {
	// Key format matches the one written by ExpandInlineArguments: "Type.Field".
	return inlineArgsMetadata[typeName+"."+fieldName]
}
// ClearInlineArgsMetadata clears all stored metadata.
func ClearInlineArgsMetadata() {
inlineArgsMetadata = make(map[string]*InlineArgsInfo)
}
// SerializeTransformedSchema returns the schema sources to carry forward
// into code generation. When no @inlineArguments expansions were recorded
// it passes the original sources through unchanged; otherwise it re-renders
// the (transformed) schema into a single synthetic built-in source file.
func SerializeTransformedSchema(
	schema *ast.Schema,
	originalSources []*ast.Source,
) ([]*ast.Source, error) {
	if len(inlineArgsMetadata) == 0 {
		// Nothing was inlined, so the on-disk sources are still accurate.
		return originalSources, nil
	}
	var rendered bytes.Buffer
	formatter.NewFormatter(&rendered).FormatSchema(schema)
	transformed := &ast.Source{
		Name:    "inline_arguments_transformed_schema.graphql",
		Input:   rendered.String(),
		BuiltIn: true,
	}
	return []*ast.Source{transformed}, nil
}

View File

@ -89,7 +89,11 @@ func (b *builder) buildInterface(typ *ast.Definition) (*Interface, error) {
}
if !anyValid {
return nil, fmt.Errorf("%s does not satisfy the interface %s", implementorType.String(), i.Type.String())
return nil, fmt.Errorf(
"%s does not satisfy the interface %s",
implementorType.String(),
i.Type.String(),
)
}
}

View File

@ -97,9 +97,11 @@ type Objects []*Object
func (o *Object) Implementors() string {
satisfiedBy := strconv.Quote(o.Name)
var satisfiedBySb100 strings.Builder
for _, s := range o.Implements {
satisfiedBy += ", " + strconv.Quote(s.Name)
satisfiedBySb100.WriteString(", " + strconv.Quote(s.Name))
}
satisfiedBy += satisfiedBySb100.String()
return "[]string{" + satisfiedBy + "}"
}

View File

@ -15,7 +15,7 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
Object: {{$object.Name|quote}},
})
if len(fields) != 1 {
ec.Errorf(ctx, "must subscribe to exactly one stream")
graphql.AddErrorf(ctx, "must subscribe to exactly one stream")
return nil
}

View File

@ -32,12 +32,14 @@ func (i *Import) String() string {
// String renders all tracked imports, one per line, in declaration order.
//
// Fix: the block contained both the pre-refactor `res +=` concatenations and
// the post-refactor strings.Builder writes (diff residue), which would emit
// every import twice; only the builder path is kept.
func (s *Imports) String() string {
	var sb strings.Builder
	for i, imp := range s.imports {
		if i != 0 {
			sb.WriteString("\n")
		}
		sb.WriteString(imp.String())
	}
	return sb.String()
}

View File

@ -22,8 +22,9 @@ import (
"github.com/99designs/gqlgen/internal/imports"
)
// CurrentImports keeps track of all the import declarations that are needed during the execution of a plugin.
// this is done with a global because subtemplates currently get called in functions. Lets aim to remove this eventually.
// CurrentImports keeps track of all the import declarations that are needed during the execution of
// a plugin. this is done with a global because subtemplates currently get called in functions. Lets
// aim to remove this eventually.
var CurrentImports *Imports
// Options specify various parameters to rendering a template.
@ -542,7 +543,9 @@ func wordWalker(str string, f func(*wordInfo)) {
// do NOT count this as an initialism.
switch upperWord {
case "ID", "IP":
if remainingRunes := runes[w:]; word == string(remainingRunes[:2]) && !eow && len(remainingRunes) > 3 && unicode.IsUpper(remainingRunes[3]) {
if remainingRunes := runes[w:]; word == string(remainingRunes[:2]) && !eow &&
len(remainingRunes) > 3 &&
unicode.IsUpper(remainingRunes[3]) {
continue
}
}
@ -811,7 +814,8 @@ var CommonInitialisms = map[string]bool{
"GCP": true,
}
// GetInitialisms returns the initialisms to capitalize in Go names. If unchanged, default initialisms will be returned
// GetInitialisms returns the initialisms to capitalize in Go names. If unchanged, default
// initialisms will be returned
var GetInitialisms = func() map[string]bool {
return CommonInitialisms
}

View File

@ -21,7 +21,14 @@ func processType(ret map[string]*config.TypeReference, ref *config.TypeReference
existingGQL := fmt.Sprintf("%v", existing.GQL)
newGQL := fmt.Sprintf("%v", ref.GQL)
if existingGQL != newGQL {
panic(fmt.Sprintf("non-unique key \"%s\", trying to replace %s with %s", key, existingGQL, newGQL))
panic(
fmt.Sprintf(
"non-unique key \"%s\", trying to replace %s with %s",
key,
existingGQL,
newGQL,
),
)
}
}
ret[key] = ref

View File

@ -217,7 +217,7 @@
if v == nil {
{{- if $type.GQL.NonNull }}
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "the requested element is null which the schema does not allow")
graphql.AddErrorf(ctx, "the requested element is null which the schema does not allow")
}
{{- end }}
return graphql.Null
@ -253,7 +253,7 @@
{{- if $type.GQL.NonNull }}
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
ec.Errorf(ctx, "the requested element is null which the schema does not allow")
graphql.AddErrorf(ctx, "the requested element is null which the schema does not allow")
}
}
{{- end }}
@ -288,7 +288,7 @@
}
{{- end }}
{{- if $type.HasEnumValues }}
{{- if and $type.HasEnumValues (not $type.IsSlice) }}
{{- $enum := $type.GO }}
{{- if $type.IsNilable }}
{{- $enum = $type.GO.Elem }}

View File

@ -14,7 +14,11 @@ func findGoNamedType(def types.Type) (*types.Named, error) {
namedType, ok := def.(*types.Named)
//nolint:staticcheck // yes, it is bad to end in newline here
if !ok {
return nil, fmt.Errorf("expected %s to be a named type, instead found %T\n", def.String(), def)
return nil, fmt.Errorf(
"expected %s to be a named type, instead found %T\n",
def.String(),
def,
)
}
return namedType, nil
@ -34,7 +38,11 @@ func findGoInterface(def types.Type) (*types.Interface, error) {
underlying, ok := namedType.Underlying().(*types.Interface)
if !ok {
return nil, fmt.Errorf("expected %s to be a named interface, instead found %s", def.String(), namedType.String())
return nil, fmt.Errorf(
"expected %s to be a named interface, instead found %s",
def.String(),
namedType.String(),
)
}
return underlying, nil

View File

@ -8,7 +8,12 @@ import (
"github.com/99designs/gqlgen/graphql"
)
func Calculate(ctx context.Context, es graphql.ExecutableSchema, op *ast.OperationDefinition, vars map[string]any) int {
func Calculate(
ctx context.Context,
es graphql.ExecutableSchema,
op *ast.OperationDefinition,
vars map[string]any,
) int {
walker := complexityWalker{
es: es,
schema: es.Schema(),
@ -23,7 +28,10 @@ type complexityWalker struct {
vars map[string]any
}
func (cw complexityWalker) selectionSetComplexity(ctx context.Context, selectionSet ast.SelectionSet) int {
func (cw complexityWalker) selectionSetComplexity(
ctx context.Context,
selectionSet ast.SelectionSet,
) int {
var complexity int
for _, selection := range selectionSet {
switch s := selection.(type) {
@ -59,7 +67,13 @@ func (cw complexityWalker) selectionSetComplexity(ctx context.Context, selection
return complexity
}
func (cw complexityWalker) interfaceFieldComplexity(ctx context.Context, def *ast.Definition, field string, childComplexity int, args map[string]any) int {
func (cw complexityWalker) interfaceFieldComplexity(
ctx context.Context,
def *ast.Definition,
field string,
childComplexity int,
args map[string]any,
) int {
// Interfaces don't have their own separate field costs, so they have to assume the worst case.
// We iterate over all implementors and choose the most expensive one.
maxComplexity := 0
@ -73,8 +87,14 @@ func (cw complexityWalker) interfaceFieldComplexity(ctx context.Context, def *as
return maxComplexity
}
func (cw complexityWalker) fieldComplexity(ctx context.Context, object, field string, childComplexity int, args map[string]any) int {
if customComplexity, ok := cw.es.Complexity(ctx, object, field, childComplexity, args); ok && customComplexity >= childComplexity {
func (cw complexityWalker) fieldComplexity(
ctx context.Context,
object, field string,
childComplexity int,
args map[string]any,
) int {
if customComplexity, ok := cw.es.Complexity(ctx, object, field, childComplexity, args); ok &&
customComplexity >= 1 {
return customComplexity
}
// default complexity calculation

42
vendor/github.com/99designs/gqlgen/graphql/args.go generated vendored Normal file
View File

@ -0,0 +1,42 @@
package graphql
import (
"context"
)
// ProcessArgField Parses argument value without Execution Context
// This function is called from generated code
func ProcessArgField[T any](
	ctx context.Context,
	rawArgs map[string]any,
	fieldName string,
	valueMapperFn func(ctx context.Context, value any) (T, error),
) (T, error) {
	raw, present := rawArgs[fieldName]
	if !present {
		// Absent argument: return the zero value without invoking the mapper.
		var zero T
		return zero, nil
	}
	// Record the field on the path so errors from the mapper point at it.
	fieldCtx := WithPathContext(ctx, NewPathWithField(fieldName))
	return valueMapperFn(fieldCtx, raw)
}
// ProcessArgFieldWithEC Parses argument value with Execution Context
// This function is called from generated code
func ProcessArgFieldWithEC[T, EC any](
	ctx context.Context,
	ec EC,
	rawArgs map[string]any,
	fieldName string,
	valueMapperFn func(ctx context.Context, ec EC, value any) (T, error),
) (T, error) {
	raw, present := rawArgs[fieldName]
	if !present {
		// Absent argument: return the zero value without invoking the mapper.
		var zero T
		return zero, nil
	}
	// Record the field on the path so errors from the mapper point at it.
	fieldCtx := WithPathContext(ctx, NewPathWithField(fieldName))
	return valueMapperFn(fieldCtx, ec, raw)
}

View File

@ -11,7 +11,8 @@ type Cache[T any] interface {
Add(ctx context.Context, key string, value T)
}
// MapCache is the simplest implementation of a cache, because it can not evict it should only be used in tests
// MapCache is the simplest implementation of a cache, because it can not evict it should only be
// used in tests
type MapCache[T any] map[string]T
// Get looks up a key's value from the cache.

View File

@ -0,0 +1,79 @@
package graphql
import (
"hash/fnv"
"reflect"
"sync"
"github.com/vektah/gqlparser/v2/ast"
)
// collectFieldsCacheKey is the cache key for CollectFields results.
// Two calls share a key only when their selection sets alias the same
// backing array with the same length AND their satisfies slices hash to
// the same value (see makeCollectFieldsCacheKey).
type collectFieldsCacheKey struct {
	selectionPtr uintptr // Pointer to the underlying SelectionSet data
	selectionLen int     // Length of the selection set
	satisfiesHash uint64 // Hash of the satisfies array
}
// collectFieldsCacheStore manages CollectFields cache entries safely.
// The zero value is ready to use: items is allocated lazily on the first Add,
// and all access is guarded by mu.
type collectFieldsCacheStore struct {
	mu    sync.RWMutex
	items map[collectFieldsCacheKey][]CollectedField
}
// Get returns the cached result for the key if present.
func (s *collectFieldsCacheStore) Get(key collectFieldsCacheKey) ([]CollectedField, bool) {
	var (
		cached []CollectedField
		found  bool
	)
	s.mu.RLock()
	// A nil map means nothing has been cached yet; reading it is safe
	// but we skip the lookup entirely.
	if s.items != nil {
		cached, found = s.items[key]
	}
	s.mu.RUnlock()
	return cached, found
}
// Add stores the value when absent and returns the cached value.
// If another caller already cached an entry for the key, that existing
// entry wins and is returned instead of value.
func (s *collectFieldsCacheStore) Add(
	key collectFieldsCacheKey,
	value []CollectedField,
) []CollectedField {
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.items == nil {
		// Lazy initialization: first write allocates the map.
		s.items = map[collectFieldsCacheKey][]CollectedField{}
	}
	if existing, ok := s.items[key]; ok {
		return existing
	}
	s.items[key] = value
	return value
}
// Len returns the number of cached entries.
func (s *collectFieldsCacheStore) Len() int {
	s.mu.RLock()
	n := len(s.items) // len of a nil map is 0, so no nil check is needed
	s.mu.RUnlock()
	return n
}
// makeCollectFieldsCacheKey generates a cache key for CollectFields.
// The selection set is identified by the pointer to its backing array plus
// its length; the satisfies names are folded into an FNV-1a hash.
func makeCollectFieldsCacheKey(selSet ast.SelectionSet, satisfies []string) collectFieldsCacheKey {
	hasher := fnv.New64a()
	for _, name := range satisfies {
		hasher.Write([]byte(name))
		// NUL separator keeps e.g. ["ab"] distinct from ["a","b"].
		hasher.Write([]byte{0})
	}
	key := collectFieldsCacheKey{
		selectionLen:  len(selSet),
		satisfiesHash: hasher.Sum64(),
	}
	if selSet != nil {
		key.selectionPtr = reflect.ValueOf(selSet).Pointer()
	}
	return key
}

View File

@ -18,7 +18,8 @@ type FieldContext struct {
Parent *FieldContext
// The name of the type this field belongs to
Object string
// These are the args after processing, they can be mutated in middleware to change what the resolver will get.
// These are the args after processing, they can be mutated in middleware to change what the
// resolver will get.
Args map[string]any
// The raw field
Field CollectedField

View File

@ -27,6 +27,8 @@ type OperationContext struct {
RootResolverMiddleware RootFieldMiddleware
Stats Stats
collectFieldsCache collectFieldsCacheStore
}
func (c *OperationContext) Validate(ctx context.Context) error {
@ -84,8 +86,9 @@ func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField
return CollectFields(GetOperationContext(ctx), resctx.Field.Selections, satisfies)
}
// CollectAllFields returns a slice of all GraphQL field names that were selected for the current resolver context.
// The slice will contain the unique set of all field names requested regardless of fragment type conditions.
// CollectAllFields returns a slice of all GraphQL field names that were selected for the current
// resolver context. The slice will contain the unique set of all field names requested regardless
// of fragment type conditions.
func CollectAllFields(ctx context.Context) []string {
resctx := GetFieldContext(ctx)
collected := CollectFields(GetOperationContext(ctx), resctx.Field.Selections, nil)
@ -103,6 +106,7 @@ Next:
}
// Errorf sends an error string to the client, passing it through the formatter.
//
// Deprecated: use graphql.AddErrorf(ctx, err) instead
func (c *OperationContext) Errorf(ctx context.Context, format string, args ...any) {
AddErrorf(ctx, format, args...)

View File

@ -29,7 +29,11 @@ func getResponseContext(ctx context.Context) *responseContext {
return val
}
func WithResponseContext(ctx context.Context, presenterFunc ErrorPresenterFunc, recoverFunc RecoverFunc) context.Context {
func WithResponseContext(
ctx context.Context,
presenterFunc ErrorPresenterFunc,
recoverFunc RecoverFunc,
) context.Context {
return context.WithValue(ctx, resultCtx, &responseContext{
errorPresenter: presenterFunc,
recover: recoverFunc,
@ -57,6 +61,9 @@ func AddError(ctx context.Context, err error) {
c := getResponseContext(ctx)
presentedError := c.errorPresenter(ctx, ErrorOnPath(ctx, err))
if presentedError == nil {
return
}
c.errorsMu.Lock()
defer c.errorsMu.Unlock()

View File

@ -8,6 +8,18 @@ import (
)
// UnmarshalDuration returns the duration from a string in ISO8601 format
// PnDTnHnMn.nS with days considered to be exactly 24 hours.
// See https://en.wikipedia.org/wiki/ISO_8601#Durations
// P - Period
// D - D is the
// T - T is the time designator that precedes the time components
// H - H is the hour designator that follows the value for the number of hours.
// M - M is the minute designator that follows the value for the number of minutes.
// S - S is the second designator that follows the value for the number of seconds.
// "PT20.345S" -- parses as "20.345 seconds"
// "PT15M" -- parses as "15 minutes" (where a minute is 60 seconds)
// "PT10H" -- parses as "10 hours" (where an hour is 3600 seconds)
// "P2D" -- parses as "2 days" (where a day is 24 hours or 86400 seconds)
func UnmarshalDuration(v any) (time.Duration, error) {
input, ok := v.(string)
if !ok {
@ -21,7 +33,19 @@ func UnmarshalDuration(v any) (time.Duration, error) {
return d2.ToTimeDuration(), nil
}
// MarshalDuration returns the duration on ISO8601 format
// MarshalDuration returns the duration in ISO8601 format
// PnDTnHnMn.nS with days considered to be exactly 24 hours.
// See https://en.wikipedia.org/wiki/ISO_8601#Durations
// P - Period
// D - D is the
// T - T is the time designator that precedes the time components
// H - H is the hour designator that follows the value for the number of hours.
// M - M is the minute designator that follows the value for the number of minutes.
// S - S is the second designator that follows the value for the number of seconds.
// "PT20.345S" -- parses as "20.345 seconds"
// "PT15M" -- parses as "15 minutes" (where a minute is 60 seconds)
// "PT10H" -- parses as "10 hours" (where an hour is 3600 seconds)
// "P2D" -- parses as "2 days" (where a day is 24 hours or 86400 seconds)
// MarshalDuration returns the duration in ISO8601 format
// PnDTnHnMn.nS with days considered to be exactly 24 hours.
func MarshalDuration(d time.Duration) Marshaler {
	formatted := dur.Format(d)
	return MarshalString(formatted)
}

View File

@ -12,17 +12,40 @@ import (
type ExecutableSchema interface {
Schema() *ast.Schema
Complexity(ctx context.Context, typeName, fieldName string, childComplexity int, args map[string]any) (int, bool)
Complexity(
ctx context.Context,
typeName, fieldName string,
childComplexity int,
args map[string]any,
) (int, bool)
Exec(ctx context.Context) ResponseHandler
}
// CollectFields returns the set of fields from an ast.SelectionSet where all collected fields satisfy at least one of the GraphQL types
// passed through satisfies. Providing an empty slice for satisfies will collect all fields regardless of fragment type conditions.
func CollectFields(reqCtx *OperationContext, selSet ast.SelectionSet, satisfies []string) []CollectedField {
return collectFields(reqCtx, selSet, satisfies, map[string]bool{})
// CollectFields returns the set of fields from an ast.SelectionSet where all collected fields
// satisfy at least one of the GraphQL types passed through satisfies. Providing an empty slice for
// satisfies will collect all fields regardless of fragment type conditions.
func CollectFields(
reqCtx *OperationContext,
selSet ast.SelectionSet,
satisfies []string,
) []CollectedField {
cacheKey := makeCollectFieldsCacheKey(selSet, satisfies)
if cached, ok := reqCtx.collectFieldsCache.Get(cacheKey); ok {
return cached
}
result := collectFields(reqCtx, selSet, satisfies, map[string]bool{})
return reqCtx.collectFieldsCache.Add(cacheKey, result)
}
func collectFields(reqCtx *OperationContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField {
func collectFields(
reqCtx *OperationContext,
selSet ast.SelectionSet,
satisfies []string,
visited map[string]bool,
) []CollectedField {
groupedFields := make([]CollectedField, 0, len(selSet))
for _, sel := range selSet {
@ -127,7 +150,12 @@ func doesFragmentConditionMatch(typeCondition string, satisfies []string) bool {
return false
}
func getOrCreateAndAppendField(c *[]CollectedField, name, alias string, objectDefinition *ast.Definition, creator func() CollectedField) *CollectedField {
func getOrCreateAndAppendField(
c *[]CollectedField,
name, alias string,
objectDefinition *ast.Definition,
creator func() CollectedField,
) *CollectedField {
for i, cf := range *c {
if cf.Name == name && cf.Alias == alias {
if cf.ObjectDefinition == objectDefinition {
@ -179,7 +207,10 @@ func shouldIncludeNode(directives ast.DirectiveList, variables map[string]any) b
return !skip && include
}
func deferrable(directives ast.DirectiveList, variables map[string]any) (shouldDefer bool, label string) {
func deferrable(
directives ast.DirectiveList,
variables map[string]any,
) (shouldDefer bool, label string) {
d := directives.ForName("defer")
if d == nil {
return false, ""

View File

@ -27,6 +27,7 @@ type Executor struct {
parserTokenLimit int
disableSuggestion bool
defaultRulesFn func() *rules.Rules
}
var _ graphql.GraphExecutor = &Executor{}
@ -45,6 +46,11 @@ func New(es graphql.ExecutableSchema) *Executor {
return e
}
// SetDefaultRulesFn is to customize the Default GraphQL Validation Rules.
// The supplied factory is called during query parsing instead of
// rules.NewDefaultRules, so it should return a usable rule set on every call.
func (e *Executor) SetDefaultRulesFn(f func() *rules.Rules) {
	e.defaultRulesFn = f
}
func (e *Executor) CreateOperationContext(
ctx context.Context,
params *graphql.RawParams,
@ -86,7 +92,11 @@ func (e *Executor) CreateOperationContext(
}
var err error
opCtx.Variables, err = validator.VariableValues(e.es.Schema(), opCtx.Operation, params.Variables)
opCtx.Variables, err = validator.VariableValues(
e.es.Schema(),
opCtx.Operation,
params.Variables,
)
if err != nil {
gqlErr, ok := err.(*gqlerror.Error)
if ok {
@ -109,10 +119,9 @@ func (e *Executor) DispatchOperation(
ctx context.Context,
opCtx *graphql.OperationContext,
) (graphql.ResponseHandler, context.Context) {
ctx = graphql.WithOperationContext(ctx, opCtx)
innerCtx := graphql.WithOperationContext(ctx, opCtx)
var innerCtx context.Context
res := e.ext.operationMiddleware(ctx, func(ctx context.Context) graphql.ResponseHandler {
res := e.ext.operationMiddleware(innerCtx, func(ctx context.Context) graphql.ResponseHandler {
innerCtx = ctx
tmpResponseContext := graphql.WithResponseContext(ctx, e.errorPresenter, e.recoverFunc)
@ -223,8 +232,12 @@ func (e *Executor) parseQuery(
return nil, gqlerror.List{gqlErr}
}
currentRules := rules.NewDefaultRules()
var currentRules *rules.Rules
if e.defaultRulesFn == nil {
currentRules = rules.NewDefaultRules()
} else {
currentRules = e.defaultRulesFn()
}
// Customise rules as required
// TODO(steve): consider currentRules.RemoveRule(rules.MaxIntrospectionDepth.Name)

View File

@ -29,22 +29,26 @@ func (e *Executor) Use(extension graphql.HandlerExtension) {
}
}
// AroundFields is a convenience method for creating an extension that only implements field middleware
// AroundFields is a convenience method for creating an extension that only implements field
// middleware
func (e *Executor) AroundFields(f graphql.FieldMiddleware) {
e.Use(aroundFieldFunc(f))
}
// AroundRootFields is a convenience method for creating an extension that only implements root field middleware
// AroundRootFields is a convenience method for creating an extension that only implements root
// field middleware
func (e *Executor) AroundRootFields(f graphql.RootFieldMiddleware) {
e.Use(aroundRootFieldFunc(f))
}
// AroundOperations is a convenience method for creating an extension that only implements operation middleware
// AroundOperations is a convenience method for creating an extension that only implements operation
// middleware
func (e *Executor) AroundOperations(f graphql.OperationMiddleware) {
e.Use(aroundOpFunc(f))
}
// AroundResponses is a convenience method for creating an extension that only implements response middleware
// AroundResponses is a convenience method for creating an extension that only implements response
// middleware
func (e *Executor) AroundResponses(f graphql.ResponseMiddleware) {
e.Use(aroundRespFunc(f))
}
@ -140,7 +144,10 @@ func (r aroundOpFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (r aroundOpFunc) InterceptOperation(ctx context.Context, next graphql.OperationHandler) graphql.ResponseHandler {
func (r aroundOpFunc) InterceptOperation(
ctx context.Context,
next graphql.OperationHandler,
) graphql.ResponseHandler {
return r(ctx, next)
}
@ -157,7 +164,10 @@ func (r aroundRespFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (r aroundRespFunc) InterceptResponse(ctx context.Context, next graphql.ResponseHandler) *graphql.Response {
func (r aroundRespFunc) InterceptResponse(
ctx context.Context,
next graphql.ResponseHandler,
) *graphql.Response {
return r(ctx, next)
}
@ -174,7 +184,10 @@ func (f aroundFieldFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (f aroundFieldFunc) InterceptField(ctx context.Context, next graphql.Resolver) (res any, err error) {
func (f aroundFieldFunc) InterceptField(
ctx context.Context,
next graphql.Resolver,
) (res any, err error) {
return f(ctx, next)
}
@ -191,6 +204,9 @@ func (f aroundRootFieldFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (f aroundRootFieldFunc) InterceptRootField(ctx context.Context, next graphql.RootResolver) graphql.Marshaler {
func (f aroundRootFieldFunc) InterceptRootField(
ctx context.Context,
next graphql.RootResolver,
) graphql.Marshaler {
return f(ctx, next)
}

View File

@ -40,7 +40,8 @@ func (m *FieldSet) Dispatch(ctx context.Context) {
d := m.delayed[0]
m.Values[d.i] = d.f(ctx)
} else if len(m.delayed) > 1 {
// more than one concurrent task, use the main goroutine to do one, only spawn goroutines for the others
// more than one concurrent task, use the main goroutine to do one, only spawn goroutines
// for the others
var wg sync.WaitGroup
for _, d := range m.delayed[1:] {

View File

@ -33,13 +33,20 @@ type (
}
GraphExecutor interface {
CreateOperationContext(ctx context.Context, params *RawParams) (*OperationContext, gqlerror.List)
DispatchOperation(ctx context.Context, opCtx *OperationContext) (ResponseHandler, context.Context)
CreateOperationContext(
ctx context.Context,
params *RawParams,
) (*OperationContext, gqlerror.List)
DispatchOperation(
ctx context.Context,
opCtx *OperationContext,
) (ResponseHandler, context.Context)
DispatchError(ctx context.Context, list gqlerror.List) *Response
}
// HandlerExtension adds functionality to the http handler. See the list of possible hook points below
// Its important to understand the lifecycle of a graphql request and the terminology we use in gqlgen
// HandlerExtension adds functionality to the http handler. See the list of possible hook points
// below Its important to understand the lifecycle of a graphql request and the terminology we
// use in gqlgen
// before working with these
//
// +--- REQUEST POST /graphql --------------------------------------------+
@ -51,30 +58,36 @@ type (
// | +--------------------------------------------------------------------+ |
// +------------------------------------------------------------------------+
HandlerExtension interface {
// ExtensionName should be a CamelCase string version of the extension which may be shown in stats and logging.
// ExtensionName should be a CamelCase string version of the extension which may be shown in
// stats and logging.
ExtensionName() string
// Validate is called when adding an extension to the server, it allows validation against the servers schema.
// Validate is called when adding an extension to the server, it allows validation against
// the servers schema.
Validate(schema ExecutableSchema) error
}
// OperationParameterMutator is called before creating a request context. allows manipulating the raw query
// OperationParameterMutator is called before creating a request context. allows manipulating
// the raw query
// on the way in.
OperationParameterMutator interface {
MutateOperationParameters(ctx context.Context, request *RawParams) *gqlerror.Error
}
// OperationContextMutator is called after creating the request context, but before executing the root resolver.
// OperationContextMutator is called after creating the request context, but before executing
// the root resolver.
OperationContextMutator interface {
MutateOperationContext(ctx context.Context, opCtx *OperationContext) *gqlerror.Error
}
// OperationInterceptor is called for each incoming query, for basic requests the writer will be invoked once,
// OperationInterceptor is called for each incoming query, for basic requests the writer will be
// invoked once,
// for subscriptions it will be invoked multiple times.
OperationInterceptor interface {
InterceptOperation(ctx context.Context, next OperationHandler) ResponseHandler
}
// ResponseInterceptor is called around each graphql operation response. This can be called many times for a single
// ResponseInterceptor is called around each graphql operation response. This can be called many
// times for a single
// operation the case of subscriptions.
ResponseInterceptor interface {
InterceptResponse(ctx context.Context, next ResponseHandler) *Response
@ -89,7 +102,8 @@ type (
InterceptField(ctx context.Context, next Resolver) (res any, err error)
}
// Transport provides support for different wire level encodings of graphql requests, eg Form, Get, Post, Websocket
// Transport provides support for different wire level encodings of graphql requests, eg Form,
// Get, Post, Websocket
Transport interface {
Supports(r *http.Request) bool
Do(w http.ResponseWriter, r *http.Request, exec GraphExecutor)
@ -110,7 +124,11 @@ func (p *RawParams) AddUpload(upload Upload, key, path string) *gqlerror.Error {
for i, p := range parts[1:] {
last := i == len(parts)-2
if ptr == nil {
return gqlerror.Errorf("path is missing \"variables.\" prefix, key: %s, path: %s", key, path)
return gqlerror.Errorf(
"path is missing \"variables.\" prefix, key: %s, path: %s",
key,
path,
)
}
if index, parseNbrErr := strconv.Atoi(p); parseNbrErr == nil {
if last {

View File

@ -18,8 +18,9 @@ const (
errPersistedQueryNotFoundCode = "PERSISTED_QUERY_NOT_FOUND"
)
// AutomaticPersistedQuery saves client upload by optimistically sending only the hashes of queries, if the server
// does not yet know what the query is for the hash it will respond telling the client to send the query along with the
// AutomaticPersistedQuery saves client upload by optimistically sending only the hashes of queries,
// if the server does not yet know what the query is for the hash it will respond telling the client
// to send the query along with the
// hash in the next request.
// see https://github.com/apollographql/apollo-link-persisted-queries
type AutomaticPersistedQuery struct {
@ -52,7 +53,10 @@ func (a AutomaticPersistedQuery) Validate(schema graphql.ExecutableSchema) error
return nil
}
func (a AutomaticPersistedQuery) MutateOperationParameters(ctx context.Context, rawParams *graphql.RawParams) *gqlerror.Error {
func (a AutomaticPersistedQuery) MutateOperationParameters(
ctx context.Context,
rawParams *graphql.RawParams,
) *gqlerror.Error {
if rawParams.Extensions["persistedQuery"] == nil {
return nil
}

View File

@ -58,7 +58,10 @@ func (c *ComplexityLimit) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (c ComplexityLimit) MutateOperationContext(ctx context.Context, opCtx *graphql.OperationContext) *gqlerror.Error {
func (c ComplexityLimit) MutateOperationContext(
ctx context.Context,
opCtx *graphql.OperationContext,
) *gqlerror.Error {
op := opCtx.Doc.Operations.ForName(opCtx.OperationName)
complexityCalcs := complexity.Calculate(ctx, c.es, op, opCtx.Variables)
@ -70,7 +73,11 @@ func (c ComplexityLimit) MutateOperationContext(ctx context.Context, opCtx *grap
})
if complexityCalcs > limit {
err := gqlerror.Errorf("operation has complexity %d, which exceeds the limit of %d", complexityCalcs, limit)
err := gqlerror.Errorf(
"operation has complexity %d, which exceeds the limit of %d",
complexityCalcs,
limit,
)
errcode.Set(err, errComplexityLimit)
return err
}

View File

@ -24,7 +24,10 @@ func (c Introspection) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (c Introspection) MutateOperationContext(ctx context.Context, opCtx *graphql.OperationContext) *gqlerror.Error {
func (c Introspection) MutateOperationContext(
ctx context.Context,
opCtx *graphql.OperationContext,
) *gqlerror.Error {
opCtx.DisableIntrospection = false
return nil
}

View File

@ -10,6 +10,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/gqlerror"
"github.com/vektah/gqlparser/v2/validator/rules"
"github.com/99designs/gqlgen/graphql"
"github.com/99designs/gqlgen/graphql/executor"
@ -25,27 +26,31 @@ type (
}
)
// New returns a new Server for the given executable schema. The Server is not
// ready for use until the transports you require are added and configured with
// Server.AddTransport. See the implementation of [NewDefaultServer] for an
// example.
func New(es graphql.ExecutableSchema) *Server {
	srv := &Server{exec: executor.New(es)}
	return srv
}
// NewDefaultServer is a demonstration only. Not for prod.
// NewDefaultServer returns a Server for the given executable schema which is
// only suitable for use in examples.
//
// Currently, the server just picks the first available transport,
// so this example NewDefaultServer orders them, but it is just
// for demonstration purposes.
// You will likely want to tune and better configure Websocket transport
// since adding a new one (To configure it) doesn't have effect.
// Deprecated:
// The Server returned by NewDefaultServer is not suitable for production use.
// Use [New] instead and add transports configured for your use case,
// appropriate caches, and introspection if required. See the implementation of
// NewDefaultServer for an example of starting point to construct a Server.
//
// Also SSE support is not in here at all!
// SSE when used over HTTP/1.1 (but not HTTP/2 or HTTP/3),
// SSE suffers from a severe limitation to the maximum number
// of open connections of 6 per browser. See:
// SSE is not supported using this example. SSE when used over HTTP/1.1 (but not
// HTTP/2 or HTTP/3) suffers from a severe limitation to the maximum number of
// open connections of 6 per browser, see [Using server-sent events].
//
// [Using server-sent events]:
// https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#sect1
//
// Deprecated: This was and is just an example.
func NewDefaultServer(es graphql.ExecutableSchema) *Server {
srv := New(es)
@ -67,6 +72,9 @@ func NewDefaultServer(es graphql.ExecutableSchema) *Server {
return srv
}
// AddTransport adds a transport to the Server. The server picks the first
// supported transport. Adding a transport which has already been added has no
// effect.
func (s *Server) AddTransport(transport graphql.Transport) {
	s.transports = append(s.transports, transport)
}
@ -91,26 +99,32 @@ func (s *Server) SetDisableSuggestion(value bool) {
s.exec.SetDisableSuggestion(value)
}
// Use adds the given extension middleware to the server. Extensions are run in
// order from first to last added. The call is delegated to the underlying
// executor.
func (s *Server) Use(extension graphql.HandlerExtension) {
	s.exec.Use(extension)
}
// AroundFields is a convenience method for creating an extension that only
// implements field middleware.
func (s *Server) AroundFields(f graphql.FieldMiddleware) {
	s.exec.AroundFields(f)
}
// AroundRootFields is a convenience method for creating an extension that only
// implements root field middleware (the original comment said "field
// middleware"; the call delegates to AroundRootFields).
func (s *Server) AroundRootFields(f graphql.RootFieldMiddleware) {
	s.exec.AroundRootFields(f)
}
// AroundOperations is a convenience method for creating an extension that only
// implements operation middleware.
func (s *Server) AroundOperations(f graphql.OperationMiddleware) {
	s.exec.AroundOperations(f)
}
// AroundResponses is a convenience method for creating an extension that only
// implements response middleware.
func (s *Server) AroundResponses(f graphql.ResponseMiddleware) {
	s.exec.AroundResponses(f)
}
@ -124,6 +138,11 @@ func (s *Server) getTransport(r *http.Request) graphql.Transport {
return nil
}
// SetValidationRulesFn is to customize the Default GraphQL Validation Rules
func (s *Server) SetValidationRulesFn(f func() *rules.Rules) {
s.exec.SetDefaultRulesFn(f)
}
func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
defer func() {
if err := recover(); err != nil {
@ -173,7 +192,10 @@ func (r OperationFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (r OperationFunc) InterceptOperation(ctx context.Context, next graphql.OperationHandler) graphql.ResponseHandler {
func (r OperationFunc) InterceptOperation(
ctx context.Context,
next graphql.OperationHandler,
) graphql.ResponseHandler {
return r(ctx, next)
}
@ -190,7 +212,10 @@ func (r ResponseFunc) Validate(schema graphql.ExecutableSchema) error {
return nil
}
func (r ResponseFunc) InterceptResponse(ctx context.Context, next graphql.ResponseHandler) *graphql.Response {
func (r ResponseFunc) InterceptResponse(
ctx context.Context,
next graphql.ResponseHandler,
) *graphql.Response {
return r(ctx, next)
}

View File

@ -10,7 +10,8 @@ import (
"github.com/99designs/gqlgen/graphql"
)
// SendError sends a best effort error to a raw response writer. It assumes the client can understand the standard
// SendError sends a best effort error to a raw response writer. It assumes the client can
// understand the standard
// json error response
func SendError(w http.ResponseWriter, code int, errors ...*gqlerror.Error) {
w.WriteHeader(code)

View File

@ -11,7 +11,11 @@ const (
acceptApplicationGraphqlResponseJson = "application/graphql-response+json"
)
func determineResponseContentType(explicitHeaders map[string][]string, r *http.Request, useGrapQLResponseJsonByDefault bool) string {
func determineResponseContentType(
explicitHeaders map[string][]string,
r *http.Request,
useGrapQLResponseJsonByDefault bool,
) string {
for k, v := range explicitHeaders {
if strings.EqualFold(k, "Content-Type") {
return v[0]

View File

@ -10,7 +10,8 @@ import (
"github.com/99designs/gqlgen/graphql"
)
// MultipartForm the Multipart request spec https://github.com/jaydenseric/graphql-multipart-request-spec
// MultipartForm the Multipart request spec
// https://github.com/jaydenseric/graphql-multipart-request-spec
type MultipartForm struct {
// MaxUploadSize sets the maximum number of bytes used to parse a request body
// as multipart/form-data.
@ -38,7 +39,7 @@ func (f MultipartForm) Supports(r *http.Request) bool {
return false
}
return r.Method == "POST" && mediaType == "multipart/form-data"
return r.Method == http.MethodPost && mediaType == "multipart/form-data"
}
func (f MultipartForm) maxUploadSize() int64 {

View File

@ -31,7 +31,7 @@ func (h UrlEncodedForm) Supports(r *http.Request) bool {
return false
}
return r.Method == "POST" && mediaType == "application/x-www-form-urlencoded"
return r.Method == http.MethodPost && mediaType == "application/x-www-form-urlencoded"
}
func (h UrlEncodedForm) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {

View File

@ -20,7 +20,8 @@ type GET struct {
// Map of all headers that are added to graphql response. If not
// set, only one header: Content-Type: application/graphql-response+json will be set.
ResponseHeaders map[string][]string
// UseGrapQLResponseJsonByDefault determines whether to use 'application/graphql-response+json' as the response content type
// UseGrapQLResponseJsonByDefault determines whether to use 'application/graphql-response+json'
// as the response content type
// when the Accept header is empty or 'application/*' or '*/*'.
UseGrapQLResponseJsonByDefault bool
}
@ -32,7 +33,7 @@ func (h GET) Supports(r *http.Request) bool {
return false
}
return r.Method == "GET"
return r.Method == http.MethodGet
}
func (h GET) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {
@ -42,7 +43,11 @@ func (h GET) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecut
writeJsonError(w, err.Error())
return
}
contentType := determineResponseContentType(h.ResponseHeaders, r, h.UseGrapQLResponseJsonByDefault)
contentType := determineResponseContentType(
h.ResponseHeaders,
r,
h.UseGrapQLResponseJsonByDefault,
)
responseHeaders := mergeHeaders(
map[string][]string{
"Content-Type": {contentType},

View File

@ -33,7 +33,7 @@ func (h GRAPHQL) Supports(r *http.Request) bool {
return false
}
return r.Method == "POST" && mediaType == "application/graphql"
return r.Method == http.MethodPost && mediaType == "application/graphql"
}
func (h GRAPHQL) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {

View File

@ -40,16 +40,20 @@ func (t MultipartMixed) Supports(r *http.Request) bool {
// Do implements the multipart/mixed spec as a multipart/mixed response
func (t MultipartMixed) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {
// Implements the multipart/mixed spec as a multipart/mixed response:
// * https://github.com/graphql/graphql-wg/blob/e4ef5f9d5997815d9de6681655c152b6b7838b4c/rfcs/DeferStream.md
// *
// https://github.com/graphql/graphql-wg/blob/e4ef5f9d5997815d9de6681655c152b6b7838b4c/rfcs/DeferStream.md
// 2022/08/23 as implemented by gqlgen.
// * https://github.com/graphql/graphql-wg/blob/f22ea7748c6ebdf88fdbf770a8d9e41984ebd429/rfcs/DeferStream.md June 2023 Spec for the
// *
// https://github.com/graphql/graphql-wg/blob/f22ea7748c6ebdf88fdbf770a8d9e41984ebd429/rfcs/DeferStream.md
// June 2023 Spec for the
// `incremental` field
// * https://github.com/graphql/graphql-over-http/blob/main/rfcs/IncrementalDelivery.md
// multipart specification
// Follows the format that is used in the Apollo Client tests:
// https://github.com/apollographql/apollo-client/blob/v3.11.8/src/link/http/__tests__/responseIterator.ts#L68
// Apollo Client, despite mentioning in its requests that they require the 2022 spec, it wants the
// `incremental` field to be an array of responses, not a single response. Theoretically we could
// Apollo Client, despite mentioning in its requests that they require the 2022 spec, it wants
// the `incremental` field to be an array of responses, not a single response. Theoretically we
// could
// batch responses in the `incremental` field, if we wanted to optimize this code.
ctx := r.Context()
flusher, ok := w.(http.Flusher)
@ -178,7 +182,8 @@ func writeContentTypeHeader(w io.Writer) {
fmt.Fprintf(w, "Content-Type: application/json\r\n\r\n")
}
// multipartResponseAggregator helps us reduce the number of responses sent to the frontend by batching all the
// multipartResponseAggregator helps us reduce the number of responses sent to the frontend by
// batching all the
// incremental responses together.
type multipartResponseAggregator struct {
mu sync.Mutex

View File

@ -20,7 +20,8 @@ type POST struct {
// set, only one header: Content-Type: application/graphql-response+json will be set.
ResponseHeaders map[string][]string
// UseGrapQLResponseJsonByDefault determines whether to use 'application/graphql-response+json' as the response content type
// UseGrapQLResponseJsonByDefault determines whether to use 'application/graphql-response+json'
// as the response content type
// when the Accept header is empty or 'application/*' or '*/*'.
UseGrapQLResponseJsonByDefault bool
}
@ -37,7 +38,7 @@ func (h POST) Supports(r *http.Request) bool {
return false
}
return r.Method == "POST" && mediaType == "application/json"
return r.Method == http.MethodPost && mediaType == "application/json"
}
func getRequestBody(r *http.Request) (string, error) {
@ -59,7 +60,11 @@ var pool = sync.Pool{
func (h POST) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {
ctx := r.Context()
contentType := determineResponseContentType(h.ResponseHeaders, r, h.UseGrapQLResponseJsonByDefault)
contentType := determineResponseContentType(
h.ResponseHeaders,
r,
h.UseGrapQLResponseJsonByDefault,
)
responseHeaders := mergeHeaders(
map[string][]string{
"Content-Type": {contentType},

View File

@ -16,7 +16,7 @@ type Options struct {
var _ graphql.Transport = Options{}
func (o Options) Supports(r *http.Request) bool {
return r.Method == "HEAD" || r.Method == "OPTIONS"
return r.Method == http.MethodHead || r.Method == http.MethodOptions
}
func (o Options) Do(w http.ResponseWriter, r *http.Request, exec graphql.GraphExecutor) {

View File

@ -19,7 +19,7 @@ func (r *bytesReader) Read(b []byte) (n int, err error) {
}
n = copy(b, (*r.s)[r.i:])
r.i += int64(n)
return
return n, err
}
func (r *bytesReader) Seek(offset int64, whence int) (int64, error) {

View File

@ -103,11 +103,15 @@ func (t Websocket) Do(w http.ResponseWriter, r *http.Request, exec graphql.Graph
var me messageExchanger
switch ws.Subprotocol() {
default:
msg := websocket.FormatCloseMessage(websocket.CloseProtocolError, fmt.Sprintf("unsupported negotiated subprotocol %s", ws.Subprotocol()))
msg := websocket.FormatCloseMessage(
websocket.CloseProtocolError,
fmt.Sprintf("unsupported negotiated subprotocol %s", ws.Subprotocol()),
)
_ = ws.WriteMessage(websocket.CloseMessage, msg)
return
case graphqlwsSubprotocol, "":
// clients are required to send a subprotocol, to be backward compatible with the previous implementation we select
// clients are required to send a subprotocol, to be backward compatible with the previous
// implementation we select
// "graphql-ws" by default
me = graphqlwsMessageExchanger{c: ws}
case graphqltransportwsSubprotocol:
@ -245,7 +249,8 @@ func (c *wsConnection) run() {
// If we're running in graphql-ws mode, create a timer that will trigger a
// keep alive message every interval
if (c.conn.Subprotocol() == "" || c.conn.Subprotocol() == graphqlwsSubprotocol) && c.KeepAlivePingInterval != 0 {
if (c.conn.Subprotocol() == "" || c.conn.Subprotocol() == graphqlwsSubprotocol) &&
c.KeepAlivePingInterval != 0 {
c.mu.Lock()
c.keepAliveTicker = time.NewTicker(c.KeepAlivePingInterval)
c.mu.Unlock()
@ -498,7 +503,10 @@ func (c *wsConnection) close(closeCode int, message string) {
c.mu.Unlock()
return
}
_ = c.conn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(closeCode, message))
_ = c.conn.WriteMessage(
websocket.CloseMessage,
websocket.FormatCloseMessage(closeCode, message),
)
for _, closer := range c.active {
closer()
}

View File

@ -18,7 +18,8 @@ type subscriptionError struct {
errs []*gqlerror.Error
}
// AddSubscriptionError is used to let websocket return an error message after subscription resolver returns a channel.
// AddSubscriptionError is used to let websocket return an error message after subscription resolver
// returns a channel.
// for example:
//
// func (r *subscriptionResolver) Method(ctx context.Context) (<-chan *model.Message, error) {

View File

@ -22,7 +22,8 @@ func BuildUnmarshalerMap(unmarshaler ...any) map[reflect.Type]reflect.Value {
return maps
}
// WithUnmarshalerMap returns a new context with a map from input types to their unmarshaler functions.
// WithUnmarshalerMap returns a new context with a map from input types to their unmarshaler
// functions.
func WithUnmarshalerMap(ctx context.Context, maps map[reflect.Type]reflect.Value) context.Context {
return context.WithValue(ctx, unmarshalInputCtx, maps)
}

View File

@ -5,83 +5,84 @@ import (
"fmt"
"io"
"math"
"reflect"
"strconv"
)
func MarshalInt(i int) Marshaler {
return WriterFunc(func(w io.Writer) {
io.WriteString(w, strconv.Itoa(i))
_, _ = io.WriteString(w, strconv.FormatInt(int64(i), 10))
})
}
func UnmarshalInt(v any) (int, error) {
switch v := v.(type) {
case string:
return strconv.Atoi(v)
case int:
return v, nil
case int64:
return int(v), nil
case json.Number:
return strconv.Atoi(string(v))
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an int", v)
}
return interfaceToSignedNumber[int](v)
}
func MarshalInt64(i int64) Marshaler {
func MarshalInt8(i int8) Marshaler {
return WriterFunc(func(w io.Writer) {
io.WriteString(w, strconv.FormatInt(i, 10))
_, _ = io.WriteString(w, strconv.FormatInt(int64(i), 10))
})
}
func UnmarshalInt64(v any) (int64, error) {
switch v := v.(type) {
case string:
return strconv.ParseInt(v, 10, 64)
case int:
return int64(v), nil
case int64:
return v, nil
case json.Number:
return strconv.ParseInt(string(v), 10, 64)
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an int", v)
}
func UnmarshalInt8(v any) (int8, error) {
return interfaceToSignedNumber[int8](v)
}
func MarshalInt16(i int16) Marshaler {
return WriterFunc(func(w io.Writer) {
_, _ = io.WriteString(w, strconv.FormatInt(int64(i), 10))
})
}
func UnmarshalInt16(v any) (int16, error) {
return interfaceToSignedNumber[int16](v)
}
func MarshalInt32(i int32) Marshaler {
return WriterFunc(func(w io.Writer) {
io.WriteString(w, strconv.FormatInt(int64(i), 10))
_, _ = io.WriteString(w, strconv.FormatInt(int64(i), 10))
})
}
func UnmarshalInt32(v any) (int32, error) {
return interfaceToSignedNumber[int32](v)
}
func MarshalInt64(i int64) Marshaler {
return WriterFunc(func(w io.Writer) {
_, _ = io.WriteString(w, strconv.FormatInt(i, 10))
})
}
func UnmarshalInt64(v any) (int64, error) {
return interfaceToSignedNumber[int64](v)
}
type number interface {
int | int8 | int16 | int32 | int64 | uint | uint8 | uint16 | uint32 | uint64
}
func interfaceToSignedNumber[N number](v any) (N, error) {
switch v := v.(type) {
case int, int8, int16, int32, int64:
return safeCastSignedNumber[N](reflect.ValueOf(v).Int())
case string:
iv, err := strconv.ParseInt(v, 10, 64)
if err != nil {
return 0, err
}
return safeCastInt32(iv)
case int:
return safeCastInt32(int64(v))
case int64:
return safeCastInt32(v)
return safeCastSignedNumber[N](iv)
case json.Number:
iv, err := strconv.ParseInt(string(v), 10, 64)
if err != nil {
return 0, err
}
return safeCastInt32(iv)
return safeCastSignedNumber[N](iv)
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an int", v)
return 0, fmt.Errorf("%T is not an %T", v, N(0))
}
}
@ -98,27 +99,62 @@ func (e IntegerError) Error() string {
return e.Message
}
type Int32OverflowError struct {
Value int64
type NumberOverflowError struct {
Value any
*IntegerError
}
func newInt32OverflowError(i int64) *Int32OverflowError {
return &Int32OverflowError{
Value: i,
IntegerError: &IntegerError{
Message: fmt.Sprintf("%d overflows signed 32-bit integer", i),
},
type maxNumber interface {
int64 | uint64
}
func newNumberOverflowError[N maxNumber](i any, bitsize int) *NumberOverflowError {
switch v := i.(type) {
case int64:
return &NumberOverflowError{
Value: v,
IntegerError: &IntegerError{
Message: fmt.Sprintf("%d overflows signed %d-bit integer", i, bitsize),
},
}
default:
return &NumberOverflowError{
Value: v,
IntegerError: &IntegerError{
Message: fmt.Sprintf("%d overflows unsigned %d-bit integer", i, bitsize),
},
}
}
}
func (e *Int32OverflowError) Unwrap() error {
func (e *NumberOverflowError) Unwrap() error {
return e.IntegerError
}
func safeCastInt32(i int64) (int32, error) {
if i > math.MaxInt32 || i < math.MinInt32 {
return 0, newInt32OverflowError(i)
// safeCastSignedNumber converts an int64 to a number of type N.
func safeCastSignedNumber[N number](val int64) (N, error) {
var zero N
switch any(zero).(type) {
case int8:
if val > math.MaxInt8 || val < math.MinInt8 {
return 0, newNumberOverflowError[int64](val, 8)
}
case int16:
if val > math.MaxInt16 || val < math.MinInt16 {
return 0, newNumberOverflowError[int64](val, 16)
}
case int32:
if val > math.MaxInt32 || val < math.MinInt32 {
return 0, newNumberOverflowError[int64](val, 32)
}
case int:
if strconv.IntSize == 32 && (val > math.MaxInt32 || val < math.MinInt32) {
return 0, newNumberOverflowError[int64](val, 32)
}
case int64:
default:
return 0, fmt.Errorf("invalid type %T", zero)
}
return int32(i), nil
return N(val), nil
}

View File

@ -1,4 +1,5 @@
// introspection implements the spec defined in https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#schema-introspection
// introspection implements the spec defined in
// https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#schema-introspection
package introspection
import (

View File

@ -46,8 +46,9 @@ func (o Omittable[T]) IsSet() bool {
}
// IsZero returns true then json.Marshal will omit this value.
// > The "omitzero" option specifies that the field should be omitted from the encoding if the field has a zero value, according to rules:
// > 1) If the field type has an "IsZero() bool" method, that will be used to determine whether the value is zero.
// > The "omitzero" option specifies that the field should be omitted from the encoding if the field
// has a zero value, according to rules: > 1) If the field type has an "IsZero() bool" method, that
// will be used to determine whether the value is zero.
// > 2) Otherwise, the value is zero if it is the zero value for its type.
// https://pkg.go.dev/encoding/json#Marshal
func (o Omittable[T]) IsZero() bool {

View File

@ -7,6 +7,11 @@ import (
"net/http"
)
const (
apolloSandboxMainJs = "https://embeddable-sandbox.cdn.apollographql.com/02e2da0fccbe0240ef03d2396d6c98559bab5b06/embeddable-sandbox.umd.production.min.js"
apolloSandboxMainSri = "sha256-asj/scPAF8jmMGj1J+YwCHps3uI57AZ78cHs0bJkML4="
)
// NOTE: New version available at https://embeddable-sandbox.cdn.apollographql.com/ -->
var apolloSandboxPage = template.Must(template.New("ApolloSandbox").Parse(`<!doctype html>
<html>
@ -26,7 +31,7 @@ var apolloSandboxPage = template.Must(template.New("ApolloSandbox").Parse(`<!doc
<body>
<div style="width: 100vw; height: 100vh;" id='embedded-sandbox'></div>
<script rel="preload" as="script" crossorigin="anonymous" integrity="{{.mainSRI}}" type="text/javascript" src="https://embeddable-sandbox.cdn.apollographql.com/02e2da0fccbe0240ef03d2396d6c98559bab5b06/embeddable-sandbox.umd.production.min.js"></script>
<script rel="preload" as="script" crossorigin="anonymous" integrity="{{.mainSRI}}" type="text/javascript" src="{{.mainJs}}"></script>
<script>
{{- if .endpointIsAbsolute}}
const url = {{.endpoint}};
@ -46,11 +51,16 @@ var apolloSandboxPage = template.Must(template.New("ApolloSandbox").Parse(`<!doc
// ApolloSandboxHandler responsible for setting up the apollo sandbox playground
func ApolloSandboxHandler(title, endpoint string, opts ...ApolloSandboxOption) http.HandlerFunc {
options := &apolloSandboxOptions{
HideCookieToggle: true,
InitialState: apolloSandboxInitialState{
IncludeCookies: true,
PollForSchemaUpdates: false,
options := &apolloSandboxHandlerOptions{
MainJs: apolloSandboxMainJs,
MainSri: apolloSandboxMainSri,
ApolloSandboxOption: &apolloSandboxOptions{
HideCookieToggle: true,
EndpointIsEditable: false,
InitialState: &apolloSandboxInitialState{
IncludeCookies: true,
PollForSchemaUpdates: false,
},
},
}
@ -58,7 +68,7 @@ func ApolloSandboxHandler(title, endpoint string, opts ...ApolloSandboxOption) h
opt(options)
}
optionsBytes, err := json.Marshal(options)
optionsBytes, err := json.Marshal(options.ApolloSandboxOption)
if err != nil {
panic(fmt.Errorf("failed to marshal apollo sandbox options: %w", err))
}
@ -68,7 +78,8 @@ func ApolloSandboxHandler(title, endpoint string, opts ...ApolloSandboxOption) h
"title": title,
"endpoint": endpoint,
"endpointIsAbsolute": endpointHasScheme(endpoint),
"mainSRI": "sha256-pYhw/8TGkZxk960PMMpDtjhw9YtKXUzGv6XQQaMJSh8=",
"mainJs": options.MainJs,
"mainSRI": options.MainSri,
"options": string(optionsBytes),
})
if err != nil {
@ -77,11 +88,17 @@ func ApolloSandboxHandler(title, endpoint string, opts ...ApolloSandboxOption) h
}
}
type apolloSandboxHandlerOptions struct {
MainJs string
MainSri string
ApolloSandboxOption *apolloSandboxOptions
}
// See https://www.apollographql.com/docs/graphos/explorer/sandbox/#options -->
type apolloSandboxOptions struct {
HideCookieToggle bool `json:"hideCookieToggle"`
EndpointIsEditable bool `json:"endpointIsEditable"`
InitialState apolloSandboxInitialState `json:"initialState,omitempty"`
HideCookieToggle bool `json:"hideCookieToggle"`
EndpointIsEditable bool `json:"endpointIsEditable"`
InitialState *apolloSandboxInitialState `json:"initialState,omitempty"`
}
type apolloSandboxInitialState struct {
@ -95,92 +112,122 @@ type apolloSandboxInitialState struct {
SharedHeaders map[string]any `json:"sharedHeaders,omitempty"`
}
type ApolloSandboxOption func(options *apolloSandboxOptions)
type ApolloSandboxOption func(options *apolloSandboxHandlerOptions)
// WithApolloSandboxHideCookieToggle By default, the embedded Sandbox does not show the Include cookies toggle in its connection settings.
// WithApolloSandboxHideCookieToggle By default, the embedded Sandbox does not show the Include
// cookies toggle in its connection settings.
//
// Set hideCookieToggle to false to enable users of your embedded Sandbox instance to toggle the Include cookies setting.
// Set hideCookieToggle to false to enable users of your embedded Sandbox instance to toggle the
// Include cookies setting.
func WithApolloSandboxHideCookieToggle(hideCookieToggle bool) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.HideCookieToggle = hideCookieToggle
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.HideCookieToggle = hideCookieToggle
}
}
// WithApolloSandboxEndpointIsEditable By default, the embedded Sandbox has a URL input box that is editable by users.
// WithApolloSandboxEndpointIsEditable By default, the embedded Sandbox has a URL input box that is
// editable by users.
//
// Set endpointIsEditable to false to prevent users of your embedded Sandbox instance from changing the endpoint URL.
// Set endpointIsEditable to false to prevent users of your embedded Sandbox instance from changing
// the endpoint URL.
func WithApolloSandboxEndpointIsEditable(endpointIsEditable bool) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.EndpointIsEditable = endpointIsEditable
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.EndpointIsEditable = endpointIsEditable
}
}
// WithApolloSandboxInitialStateIncludeCookies Set this value to true if you want the Sandbox to pass { credentials: 'include' } for its requests by default.
// WithApolloSandboxInitialStateIncludeCookies Set this value to true if you want the Sandbox to
// pass { credentials: 'include' } for its requests by default.
//
// If you set hideCookieToggle to false, users can override this default setting with the Include cookies toggle. (By default, the embedded Sandbox does not show the Include cookies toggle in its connection settings.)
// If you set hideCookieToggle to false, users can override this default setting with the Include
// cookies toggle. (By default, the embedded Sandbox does not show the Include cookies toggle in its
// connection settings.)
//
// If you also pass the handleRequest option, this option is ignored.
//
// Read more about the fetch API and credentials here https://developer.mozilla.org/en-US/docs/Web/API/fetch#credentials
// Read more about the fetch API and credentials here
// https://developer.mozilla.org/en-US/docs/Web/API/fetch#credentials
func WithApolloSandboxInitialStateIncludeCookies(includeCookies bool) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.IncludeCookies = includeCookies
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.IncludeCookies = includeCookies
}
}
// WithApolloSandboxInitialStateDocument Document operation to populate in the Sandbox's editor on load.
// WithApolloSandboxInitialStateDocument Document operation to populate in the Sandbox's editor on
// load.
//
// If you omit this, the Sandbox initially loads an example query based on your schema.
func WithApolloSandboxInitialStateDocument(document string) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.Document = document
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.Document = document
}
}
// WithApolloSandboxInitialStateVariables Variables containing initial variable values to populate in the Sandbox on load.
// WithApolloSandboxInitialStateVariables Variables containing initial variable values to populate
// in the Sandbox on load.
//
// If provided, these variables should apply to the initial query you provide for document.
func WithApolloSandboxInitialStateVariables(variables map[string]any) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.Variables = variables
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.Variables = variables
}
}
// WithApolloSandboxInitialStateHeaders Headers containing initial variable values to populate in the Sandbox on load.
// WithApolloSandboxInitialStateHeaders Headers containing initial variable values to populate in
// the Sandbox on load.
//
// If provided, these variables should apply to the initial query you provide for document.
func WithApolloSandboxInitialStateHeaders(headers map[string]any) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.Headers = headers
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.Headers = headers
}
}
// WithApolloSandboxInitialStateCollectionIdAndOperationId The ID of a collection, paired with an operation ID to populate in the Sandbox on load.
// WithApolloSandboxInitialStateCollectionIdAndOperationId The ID of a collection, paired with an
// operation ID to populate in the Sandbox on load.
//
// You can find these values from a registered graph in Studio by clicking the ... menu next to an operation in the Explorer of that graph and selecting View operation details.
func WithApolloSandboxInitialStateCollectionIdAndOperationId(collectionId, operationId string) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.CollectionId = collectionId
options.InitialState.OperationId = operationId
// You can find these values from a registered graph in Studio by clicking the ... menu next to an
// operation in the Explorer of that graph and selecting View operation details.
func WithApolloSandboxInitialStateCollectionIdAndOperationId(
collectionId, operationId string,
) ApolloSandboxOption {
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.CollectionId = collectionId
options.ApolloSandboxOption.InitialState.OperationId = operationId
}
}
// WithApolloSandboxInitialStatePollForSchemaUpdates If true, the embedded Sandbox periodically polls your initialEndpoint for schema updates.
// WithApolloSandboxInitialStatePollForSchemaUpdates If true, the embedded Sandbox periodically
// polls your initialEndpoint for schema updates.
//
// The default value is false.
func WithApolloSandboxInitialStatePollForSchemaUpdates(pollForSchemaUpdates bool) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.PollForSchemaUpdates = pollForSchemaUpdates
func WithApolloSandboxInitialStatePollForSchemaUpdates(
pollForSchemaUpdates bool,
) ApolloSandboxOption {
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.PollForSchemaUpdates = pollForSchemaUpdates
}
}
// WithApolloSandboxInitialStateSharedHeaders Headers that are applied by default to every operation executed by the embedded Sandbox.
// WithApolloSandboxInitialStateSharedHeaders Headers that are applied by default to every operation
// executed by the embedded Sandbox.
//
// Users can disable the application of these headers, but they can't modify their values.
//
// The embedded Sandbox always includes these headers in its introspection queries to your initialEndpoint.
// The embedded Sandbox always includes these headers in its introspection queries to your
// initialEndpoint.
func WithApolloSandboxInitialStateSharedHeaders(sharedHeaders map[string]any) ApolloSandboxOption {
return func(options *apolloSandboxOptions) {
options.InitialState.SharedHeaders = sharedHeaders
return func(options *apolloSandboxHandlerOptions) {
options.ApolloSandboxOption.InitialState.SharedHeaders = sharedHeaders
}
}
// WithApolloSandboxJs The main javascript resource and its subresource integrity checksum
//
// You can change the version of apollo sandbox.
func WithApolloSandboxJs(mainJs, mainSri string) ApolloSandboxOption {
return func(options *apolloSandboxHandlerOptions) {
options.MainJs = mainJs
options.MainSri = mainSri
}
}

View File

@ -13,52 +13,59 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
<title>{{.Title}}</title>
<style>
body {
height: 100%;
margin: 0;
width: 100%;
overflow: hidden;
}
#graphiql {
height: 100vh;
}
.loading {
height: 100%;
display: flex;
align-items: center;
justify-content: center;
font-size: 4rem;
}
</style>
<script
src="https://cdn.jsdelivr.net/npm/react@18.2.0/umd/react.production.min.js"
src="{{.ReactUrl}}"
integrity="{{.ReactSRI}}"
crossorigin="anonymous"
></script>
<script
src="https://cdn.jsdelivr.net/npm/react-dom@18.2.0/umd/react-dom.production.min.js"
src="{{.ReactDOMUrl}}"
integrity="{{.ReactDOMSRI}}"
crossorigin="anonymous"
></script>
<link
rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/graphiql@{{.Version}}/graphiql.min.css"
href="{{.CssUrl}}"
integrity="{{.CssSRI}}"
crossorigin="anonymous"
/>
{{- if .EnablePluginExplorer}}
<link
rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/@graphiql/plugin-explorer@{{.PluginExplorerVersion}}/dist/style.css"
href="{{.PluginExplorerCssUrl}}"
integrity="{{.PluginExplorerCssSRI}}"
crossorigin="anonymous"
/>
{{- end}}
</head>
<body>
<div id="graphiql">Loading...</div>
<div id="graphiql">
<div class="loading">Loading</div>
</div>
<script
src="https://cdn.jsdelivr.net/npm/graphiql@{{.Version}}/graphiql.min.js"
src="{{.JsUrl}}"
integrity="{{.JsSRI}}"
crossorigin="anonymous"
></script>
{{- if .EnablePluginExplorer}}
<script
src="https://cdn.jsdelivr.net/npm/@graphiql/plugin-explorer@{{.PluginExplorerVersion}}/dist/index.umd.js"
src="{{.PluginExplorerJsUrl}}"
integrity="{{.PluginExplorerJsSRI}}"
crossorigin="anonymous"
></script>
@ -168,26 +175,26 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
`))
type GraphiqlConfig struct {
Title string
StoragePrefix string
Endpoint string
FetcherHeaders map[string]string
UiHeaders map[string]string
EndpointIsAbsolute bool
SubscriptionEndpoint string
Version string
EnablePluginExplorer bool
PluginExplorerVersion string
// https://www.jsdelivr.com/package/npm/@graphiql/plugin-explorer?tab=files
PluginExplorerCssSRI string
Title string
StoragePrefix string
Endpoint string
FetcherHeaders map[string]string
UiHeaders map[string]string
EndpointIsAbsolute bool
SubscriptionEndpoint string
JsUrl template.URL
JsSRI string
CssUrl template.URL
CssSRI string
ReactUrl template.URL
ReactSRI string
ReactDOMUrl template.URL
ReactDOMSRI string
EnablePluginExplorer bool
PluginExplorerJsUrl template.URL
PluginExplorerJsSRI string
// https://www.jsdelivr.com/package/npm/graphiql?tab=files
CssSRI string
JsSRI string
// https://www.jsdelivr.com/package/npm/react?tab=files
ReactSRI string
// https://www.jsdelivr.com/package/npm/react-dom?tab=files
ReactDOMSRI string
PluginExplorerCssUrl template.URL
PluginExplorerCssSRI string
}
type GraphiqlConfigOption func(*GraphiqlConfig)
@ -203,6 +210,35 @@ func WithGraphiqlUiHeaders(headers map[string]string) GraphiqlConfigOption {
}
}
func WithGraphiqlVersion(jsUrl, cssUrl, jsSri, cssSri string) GraphiqlConfigOption {
return func(config *GraphiqlConfig) {
config.JsUrl = template.URL(jsUrl)
config.CssUrl = template.URL(cssUrl)
config.JsSRI = jsSri
config.CssSRI = cssSri
}
}
func WithGraphiqlReactVersion(
reactJsUrl, reactDomJsUrl, reactJsSri, reactDomJsSri string,
) GraphiqlConfigOption {
return func(config *GraphiqlConfig) {
config.ReactUrl = template.URL(reactJsUrl)
config.ReactDOMUrl = template.URL(reactDomJsUrl)
config.ReactSRI = reactJsSri
config.ReactDOMSRI = reactDomJsSri
}
}
// WithGraphiqlPluginExplorerVersion overrides where the @graphiql/plugin-explorer
// JS and CSS assets are loaded from, together with their
// subresource-integrity hashes.
func WithGraphiqlPluginExplorerVersion(jsUrl, cssUrl, jsSri, cssSri string) GraphiqlConfigOption {
	return func(cfg *GraphiqlConfig) {
		cfg.PluginExplorerJsUrl = template.URL(jsUrl)
		cfg.PluginExplorerJsSRI = jsSri
		cfg.PluginExplorerCssUrl = template.URL(cssUrl)
		cfg.PluginExplorerCssSRI = cssSri
	}
}
func WithGraphiqlEnablePluginExplorer(enable bool) GraphiqlConfigOption {
return func(config *GraphiqlConfig) {
config.EnablePluginExplorer = enable
@ -217,27 +253,40 @@ func WithStoragePrefix(prefix string) GraphiqlConfigOption {
// Handler responsible for setting up the playground
func Handler(title, endpoint string, opts ...GraphiqlConfigOption) http.HandlerFunc {
data := GraphiqlConfig{
Title: title,
Endpoint: endpoint,
EndpointIsAbsolute: endpointHasScheme(endpoint),
SubscriptionEndpoint: getSubscriptionEndpoint(endpoint),
// https://www.jsdelivr.com/package/npm/graphiql?tab=files
JsUrl: "https://cdn.jsdelivr.net/npm/graphiql@4.1.2/graphiql.min.js",
JsSRI: "sha256-hnImuor1znlJkD/FOTL3jayfS/xsyNoP04abi8bFJWs=",
CssUrl: "https://cdn.jsdelivr.net/npm/graphiql@4.1.2/graphiql.min.css",
CssSRI: "sha256-MEh+B2NdMSpj9kexQNN3QKc8UzMrCXW/Sx/phcpuyIU=",
// https://www.jsdelivr.com/package/npm/react?tab=files
ReactUrl: "https://cdn.jsdelivr.net/npm/react@18.2.0/umd/react.production.min.js",
ReactSRI: "sha256-S0lp+k7zWUMk2ixteM6HZvu8L9Eh//OVrt+ZfbCpmgY=",
// https://www.jsdelivr.com/package/npm/react-dom?tab=files
ReactDOMUrl: "https://cdn.jsdelivr.net/npm/react-dom@18.2.0/umd/react-dom.production.min.js",
ReactDOMSRI: "sha256-IXWO0ITNDjfnNXIu5POVfqlgYoop36bDzhodR6LW5Pc=",
// https://www.jsdelivr.com/package/npm/@graphiql/plugin-explorer?tab=files
PluginExplorerJsUrl: template.URL(
"https://cdn.jsdelivr.net/npm/@graphiql/plugin-explorer@4.0.6/dist/index.umd.js",
),
PluginExplorerJsSRI: "sha256-UM8sWOS0Xa9yLY85q6Clh0pF4qpxX+TOcJ41flECqBs=",
PluginExplorerCssUrl: template.URL(
"https://cdn.jsdelivr.net/npm/@graphiql/plugin-explorer@4.0.6/dist/style.min.css",
),
PluginExplorerCssSRI: "sha256-b0izygy8aEMY3fCLmtNkm9PKdE3kRD4Qjn6Q8gw5xKI=",
}
for _, opt := range opts {
opt(&data)
}
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Add("Content-Type", "text/html; charset=UTF-8")
var data = GraphiqlConfig{
Title: title,
Endpoint: endpoint,
EndpointIsAbsolute: endpointHasScheme(endpoint),
SubscriptionEndpoint: getSubscriptionEndpoint(endpoint),
Version: "3.7.0",
CssSRI: "sha256-Dbkv2LUWis+0H4Z+IzxLBxM2ka1J133lSjqqtSu49o8=",
JsSRI: "sha256-qsScAZytFdTAEOM8REpljROHu8DvdvxXBK7xhoq5XD0=",
ReactSRI: "sha256-S0lp+k7zWUMk2ixteM6HZvu8L9Eh//OVrt+ZfbCpmgY=",
ReactDOMSRI: "sha256-IXWO0ITNDjfnNXIu5POVfqlgYoop36bDzhodR6LW5Pc=",
PluginExplorerVersion: "3.2.5",
PluginExplorerCssSRI: "sha256-+fdus37Qf3cEIKiD3VvTvgMdc8qOAT1NGUKEevz5l6k=",
PluginExplorerJsSRI: "sha256-minamf9GZIDrlzoMXDvU55DKk6DC5D6pNctIDWFMxS0=",
}
for _, opt := range opts {
opt(&data)
}
err := page.Execute(w, data)
if err != nil {
if err := page.Execute(w, data); err != nil {
panic(err)
}
}
@ -250,7 +299,12 @@ func HandlerWithHeaders(
title, endpoint string,
fetcherHeaders, uiHeaders map[string]string,
) http.HandlerFunc {
return Handler(title, endpoint, WithGraphiqlFetcherHeaders(fetcherHeaders), WithGraphiqlUiHeaders(uiHeaders))
return Handler(
title,
endpoint,
WithGraphiqlFetcherHeaders(fetcherHeaders),
WithGraphiqlUiHeaders(uiHeaders),
)
}
// endpointHasScheme checks if the endpoint has a scheme.

View File

@ -0,0 +1,142 @@
package graphql
import (
"context"
"io"
"github.com/vektah/gqlparser/v2/ast"
)
// ResolveField runs the shared field-resolution pipeline (field context
// initialization, middleware chain, resolver invocation, nil/non-null
// handling, dynamic type check) and marshals the typed result with the
// supplied marshal callback. On any failure it returns Null.
func ResolveField[T any](
	ctx context.Context,
	oc *OperationContext,
	field CollectedField,
	initializeFieldContext func(ctx context.Context, field CollectedField) (*FieldContext, error),
	fieldResolver func(ctx context.Context) (any, error),
	middlewareChain func(ctx context.Context, next Resolver) Resolver,
	marshal func(ctx context.Context, sel ast.SelectionSet, v T) Marshaler,
	recoverFromPanic bool,
	nonNull bool,
) Marshaler {
	// Adapt the typed marshal callback to the shape resolveField expects,
	// binding this field's selection set.
	toMarshaler := func(ctx context.Context, res T) Marshaler {
		return marshal(ctx, field.Selections, res)
	}
	return resolveField[T, Marshaler](
		ctx,
		oc,
		field,
		initializeFieldContext,
		fieldResolver,
		middlewareChain,
		recoverFromPanic,
		nonNull,
		Null, // default result when resolution fails or yields nil
		toMarshaler,
	)
}
// ResolveFieldStream resolves a subscription-style field whose resolver
// yields a channel of T. It returns a function that, on each invocation,
// receives the next value from that channel and marshals it as a one-field
// JSON object keyed by the field alias ({"alias":<value>}). The returned
// function yields nil once the channel is closed or ctx is cancelled.
// If resolution fails up front, resolveField returns the nil default and the
// caller receives a nil stream function.
func ResolveFieldStream[T any](
	ctx context.Context,
	oc *OperationContext,
	field CollectedField,
	initializeFieldContext func(ctx context.Context, field CollectedField) (*FieldContext, error),
	fieldResolver func(context.Context) (any, error),
	middlewareChain func(ctx context.Context, next Resolver) Resolver,
	marshal func(ctx context.Context, sel ast.SelectionSet, v T) Marshaler,
	recoverFromPanic bool,
	nonNull bool,
) func(context.Context) Marshaler {
	return resolveField(
		ctx,
		oc,
		field,
		initializeFieldContext,
		fieldResolver,
		middlewareChain,
		recoverFromPanic,
		nonNull,
		nil, // default result: a nil stream function signals failure
		func(ctx context.Context, res <-chan T) func(context.Context) Marshaler {
			return func(ctx context.Context) Marshaler {
				select {
				case v, ok := <-res:
					if !ok {
						// Channel closed by the resolver: end of stream.
						return nil
					}
					// Wrap the marshaled value in {"<alias>": ...} so each
					// event is a self-contained JSON object.
					return WriterFunc(func(w io.Writer) {
						w.Write([]byte{'{'})
						MarshalString(field.Alias).MarshalGQL(w)
						w.Write([]byte{':'})
						marshal(ctx, field.Selections, v).MarshalGQL(w)
						w.Write([]byte{'}'})
					})
				case <-ctx.Done():
					// Caller cancelled: stop streaming.
					return nil
				}
			}
		},
	)
}
// resolveField is the shared core of ResolveField and ResolveFieldStream.
// It initializes the per-field context, runs the resolver through the
// middleware chain, checks the dynamic type of the result against T, and
// hands the typed value to the result callback. defaultResult is returned on
// any failure: field-context init error, resolver error, nil result, type
// mismatch, or a recovered panic when recoverFromPanic is set.
func resolveField[T, R any](
	ctx context.Context,
	oc *OperationContext,
	field CollectedField,
	initializeFieldContext func(ctx context.Context, field CollectedField) (*FieldContext, error),
	fieldResolver func(ctx context.Context) (any, error),
	middlewareChain func(ctx context.Context, next Resolver) Resolver,
	recoverFromPanic bool,
	nonNull bool,
	defaultResult R,
	result func(ctx context.Context, res T) R,
	// ret is a named return so the deferred recover below can replace the
	// result after a panic.
) (ret R) {
	fc, err := initializeFieldContext(ctx, field)
	if err != nil {
		return defaultResult
	}
	ctx = WithFieldContext(ctx, fc)
	if recoverFromPanic {
		defer func() {
			if r := recover(); r != nil {
				// Report the panic through the operation context and fall
				// back to the default result instead of crashing.
				oc.Error(ctx, oc.Recover(ctx, r))
				ret = defaultResult
			}
		}()
	}
	next := func(rctx context.Context) (any, error) {
		ctx = rctx // use context from middleware stack in children
		return fieldResolver(rctx)
	}
	if middlewareChain != nil {
		next = middlewareChain(ctx, next)
	}
	resTmp, err := oc.ResolverMiddleware(ctx, next)
	if err != nil {
		oc.Error(ctx, err)
		return defaultResult
	}
	if resTmp == nil {
		if nonNull {
			// Only add the "must not be null" error if the resolver did not
			// already record an error for this field.
			if !HasFieldError(ctx, fc) {
				oc.Errorf(ctx, "must not be null")
			}
		}
		return defaultResult
	}
	// The middleware/directive chain works with `any`; assert back to the
	// statically expected type before marshaling.
	res, ok := resTmp.(T)
	if !ok {
		var t T
		oc.Errorf(
			ctx,
			`unexpected type %T from middleware/directive chain, should be %T`,
			resTmp,
			t,
		)
		return defaultResult
	}
	fc.Result = res
	return result(ctx, res)
}

View File

@ -12,7 +12,8 @@ type Stats struct {
Parsing TraceTiming
Validation TraceTiming
// Stats collected by handler extensions. Don't use directly, the extension should provide a type safe way to
// Stats collected by handler extensions. Don't use directly, the extension should provide a
// type safe way to
// access this.
extension map[string]any
}
@ -24,9 +25,10 @@ type TraceTiming struct {
var ctxTraceStart key = "trace_start"
// StartOperationTrace captures the current time and stores it in context. This will eventually be added to request
// context but we want to grab it as soon as possible. For transports that can only handle a single graphql query
// per http requests you don't need to call this at all, the server will do it for you. For transports that handle
// StartOperationTrace captures the current time and stores it in context. This will eventually be
// added to request context but we want to grab it as soon as possible. For transports that can only
// handle a single graphql query per http requests you don't need to call this at all, the server
// will do it for you. For transports that handle
// multiple (eg batching, subscriptions) this should be called before decoding each request.
func StartOperationTrace(ctx context.Context) context.Context {
return context.WithValue(ctx, ctxTraceStart, Now())

View File

@ -18,8 +18,57 @@ func MarshalTime(t time.Time) Marshaler {
}
// UnmarshalTime parses v into a time.Time. nil and the empty string both
// yield the zero time. Strings are tried against RFC3339Nano, then RFC3339,
// then the "2006-01-02 15:04:05" (time.DateTime) layout; anything else is an
// error.
//
// Note: the block as seen here contained an interleaved leftover line
// (`return time.Parse(time.RFC3339Nano, tmpStr)`) from the previous revision,
// which made the function syntactically invalid; this is the cleaned-up new
// implementation.
func UnmarshalTime(v any) (time.Time, error) {
	if v == nil {
		return time.Time{}, nil
	}
	if tmpStr, ok := v.(string); ok {
		if tmpStr == "" {
			return time.Time{}, nil
		}
		t, err := time.Parse(time.RFC3339Nano, tmpStr)
		if err == nil {
			return t, nil
		}
		t, err = time.Parse(time.RFC3339, tmpStr)
		if err == nil {
			return t, nil
		}
		t, err = time.Parse(time.DateTime, tmpStr)
		if err == nil {
			return t, nil
		}
	}
	return time.Time{}, errors.New("time should be RFC3339Nano formatted string")
}
// MarshalDate encodes t as a quoted DateOnly ("2006-01-02") string.
// The zero time marshals as JSON null.
func MarshalDate(t time.Time) Marshaler {
	if t.IsZero() {
		return Null
	}
	return WriterFunc(func(w io.Writer) {
		// Explicitly discard the WriteString result for consistency with the
		// other scalar marshalers in this package (writes to the underlying
		// response writer are best-effort here).
		_, _ = io.WriteString(w, strconv.Quote(t.Format(time.DateOnly)))
	})
}
// UnmarshalDate parses v as a DateOnly ("2006-01-02") string into a
// time.Time. nil and the empty string both yield the zero time; any other
// value that is not a parseable date string is an error.
func UnmarshalDate(v any) (time.Time, error) {
	switch s := v.(type) {
	case nil:
		return time.Time{}, nil
	case string:
		if s == "" {
			return time.Time{}, nil
		}
		if t, err := time.Parse(time.DateOnly, s); err == nil {
			return t, nil
		}
	}
	return time.Time{}, errors.New("time should be DateOnly formatted string")
}

View File

@ -6,6 +6,7 @@ import (
"fmt"
"io"
"math"
"reflect"
"strconv"
)
@ -16,87 +17,27 @@ func MarshalUint(i uint) Marshaler {
}
func UnmarshalUint(v any) (uint, error) {
switch v := v.(type) {
case string:
u64, err := strconv.ParseUint(v, 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(v) {
return 0, newUintSignError(v)
}
return 0, err
}
return uint(u64), err
case int:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(int64(v), 10))
}
return uint(v), nil
case int64:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(v, 10))
}
return uint(v), nil
case json.Number:
u64, err := strconv.ParseUint(string(v), 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(string(v)) {
return 0, newUintSignError(string(v))
}
return 0, err
}
return uint(u64), err
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an uint", v)
}
return interfaceToUnsignedNumber[uint](v)
}
func MarshalUint64(i uint64) Marshaler {
func MarshalUint8(i uint8) Marshaler {
return WriterFunc(func(w io.Writer) {
_, _ = io.WriteString(w, strconv.FormatUint(i, 10))
_, _ = io.WriteString(w, strconv.FormatUint(uint64(i), 10))
})
}
func UnmarshalUint64(v any) (uint64, error) {
switch v := v.(type) {
case string:
i, err := strconv.ParseUint(v, 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(v) {
return 0, newUintSignError(v)
}
return 0, err
}
return i, nil
case int:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(int64(v), 10))
}
return uint64(v), nil
case int64:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(v, 10))
}
return uint64(v), nil
case json.Number:
i, err := strconv.ParseUint(string(v), 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(string(v)) {
return 0, newUintSignError(string(v))
}
return 0, err
}
return i, nil
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an uint", v)
}
func UnmarshalUint8(v any) (uint8, error) {
return interfaceToUnsignedNumber[uint8](v)
}
// MarshalUint16 renders i as an unquoted base-10 GraphQL scalar.
func MarshalUint16(i uint16) Marshaler {
	return WriterFunc(func(w io.Writer) {
		_, _ = io.WriteString(w, strconv.FormatUint(uint64(i), 10))
	})
}
// UnmarshalUint16 converts v (string, json.Number, int/int64, unsigned
// integer, or nil) to a uint16, rejecting negative and overflowing values.
func UnmarshalUint16(v any) (uint16, error) {
	return interfaceToUnsignedNumber[uint16](v)
}
func MarshalUint32(i uint32) Marshaler {
@ -106,9 +47,30 @@ func MarshalUint32(i uint32) Marshaler {
}
// UnmarshalUint32 converts v (string, json.Number, int/int64, unsigned
// integer, or nil) to a uint32, rejecting negative and overflowing values.
func UnmarshalUint32(v any) (uint32, error) {
	return interfaceToUnsignedNumber[uint32](v)
}
// MarshalUint64 renders i as an unquoted base-10 GraphQL scalar.
func MarshalUint64(i uint64) Marshaler {
	return WriterFunc(func(w io.Writer) {
		_, _ = io.WriteString(w, strconv.FormatUint(i, 10))
	})
}
// UnmarshalUint64 converts v (string, json.Number, int/int64, unsigned
// integer, or nil) to a uint64, rejecting negative values.
func UnmarshalUint64(v any) (uint64, error) {
	return interfaceToUnsignedNumber[uint64](v)
}
func interfaceToUnsignedNumber[N number](v any) (N, error) {
switch v := v.(type) {
case int, int64:
if reflect.ValueOf(v).Int() < 0 {
return 0, newUintSignError(strconv.FormatInt(reflect.ValueOf(v).Int(), 10))
}
return safeCastUnsignedNumber[N](uint64(reflect.ValueOf(v).Int()))
case uint, uint8, uint16, uint32, uint64:
return safeCastUnsignedNumber[N](reflect.ValueOf(v).Uint())
case string:
iv, err := strconv.ParseUint(v, 10, 64)
uv, err := strconv.ParseUint(v, 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(v) {
@ -116,19 +78,9 @@ func UnmarshalUint32(v any) (uint32, error) {
}
return 0, err
}
return safeCastUint32(iv)
case int:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(int64(v), 10))
}
return safeCastUint32(uint64(v))
case int64:
if v < 0 {
return 0, newUintSignError(strconv.FormatInt(v, 10))
}
return safeCastUint32(uint64(v))
return safeCastUnsignedNumber[N](uv)
case json.Number:
iv, err := strconv.ParseUint(string(v), 10, 64)
uv, err := strconv.ParseUint(string(v), 10, 64)
if err != nil {
var strconvErr *strconv.NumError
if errors.As(err, &strconvErr) && isSignedInteger(string(v)) {
@ -136,11 +88,11 @@ func UnmarshalUint32(v any) (uint32, error) {
}
return 0, err
}
return safeCastUint32(iv)
return safeCastUnsignedNumber[N](uv)
case nil:
return 0, nil
default:
return 0, fmt.Errorf("%T is not an uint", v)
return 0, fmt.Errorf("%T is not an %T", v, N(0))
}
}
@ -160,6 +112,30 @@ func (e *UintSignError) Unwrap() error {
return e.IntegerError
}
// safeCastUnsignedNumber narrows a uint64 into the unsigned numeric type N.
// Targets narrower than 64 bits are range-checked and yield an overflow
// error when val does not fit; uint64/uint/int targets take the value as-is,
// and any other type is rejected outright.
func safeCastUnsignedNumber[N number](val uint64) (N, error) {
	var zero N
	var width int
	switch any(zero).(type) {
	case uint8:
		width = 8
	case uint16:
		width = 16
	case uint32:
		width = 32
	case uint64, uint, int:
		// No narrowing needed for full-width (or platform-width) targets.
		return N(val), nil
	default:
		return 0, fmt.Errorf("invalid type %T", zero)
	}
	if limit := uint64(1)<<width - 1; val > limit {
		return 0, newNumberOverflowError[uint64](val, width)
	}
	return N(val), nil
}
func isSignedInteger(v string) bool {
if v == "" {
return false
@ -172,28 +148,3 @@ func isSignedInteger(v string) bool {
}
return false
}
// Uint32OverflowError reports a value that does not fit into an unsigned
// 32-bit integer. It embeds *IntegerError so callers can match it with
// errors.As against either type.
type Uint32OverflowError struct {
	Value uint64
	*IntegerError
}
// newUint32OverflowError builds the overflow error for the value i.
func newUint32OverflowError(i uint64) *Uint32OverflowError {
	return &Uint32OverflowError{
		Value: i,
		IntegerError: &IntegerError{
			Message: fmt.Sprintf("%d overflows unsigned 32-bit integer", i),
		},
	}
}
// Unwrap exposes the embedded IntegerError to errors.Is / errors.As.
func (e *Uint32OverflowError) Unwrap() error {
	return e.IntegerError
}
// safeCastUint32 narrows i to uint32, returning an overflow error when i
// exceeds math.MaxUint32.
func safeCastUint32(i uint64) (uint32, error) {
	if i > math.MaxUint32 {
		return 0, newUint32OverflowError(i)
	}
	return uint32(i), nil
}

View File

@ -1,3 +1,3 @@
package graphql
// Version is the gqlgen library version string. The block as seen here
// declared Version twice (old "v0.17.76" and new "v0.17.84" — diff residue,
// a compile error); the updated value is kept.
const Version = "v0.17.84"

View File

@ -21,7 +21,8 @@ func init() {
}
}
// NameForDir manually looks for package stanzas in files located in the given directory. This can be
// NameForDir manually looks for package stanzas in files located in the given directory. This can
// be
// much faster than having to consult go list, because we already know exactly where to look.
func NameForDir(dir string) string {
dir, err := filepath.Abs(dir)
@ -55,7 +56,8 @@ type goModuleSearchResult struct {
var goModuleRootCache = map[string]goModuleSearchResult{}
// goModuleRoot returns the root of the current go module if there is a go.mod file in the directory tree
// goModuleRoot returns the root of the current go module if there is a go.mod file in the directory
// tree
// If not, it returns false
func goModuleRoot(dir string) (string, bool) {
dir, err := filepath.Abs(dir)
@ -99,7 +101,8 @@ func goModuleRoot(dir string) (string, bool) {
// create a cache for each path in a tree traversed, except the top one as it is already cached
for _, d := range dirs[:len(dirs)-1] {
if result.moduleName == "" {
// go.mod is not found in the tree, so the same sentinel value fits all the directories in a tree
// go.mod is not found in the tree, so the same sentinel value fits all the directories
// in a tree
goModuleRootCache[d] = result
} else {
relPath, err := filepath.Rel(result.goModPath, d)

View File

@ -20,7 +20,8 @@ var mode = packages.NeedName |
packages.NeedModule
type (
// Packages is a wrapper around x/tools/go/packages that maintains a (hopefully prewarmed) cache of packages
// Packages is a wrapper around x/tools/go/packages that maintains a (hopefully prewarmed) cache
// of packages
// that can be invalidated as writes are made and packages are known to change.
Packages struct {
packages map[string]*packages.Package
@ -58,7 +59,8 @@ func PackagePrefixToCache(prefixPath string) func(p *Packages) {
}
// NewPackages creates a new packages cache
// It will load all packages in the current module, and any packages that are passed to Load or LoadAll
// It will load all packages in the current module, and any packages that are passed to Load or
// LoadAll
func NewPackages(opts ...Option) *Packages {
p := &Packages{}
for _, opt := range opts {
@ -169,8 +171,9 @@ func (p *Packages) Load(importPath string) *packages.Package {
return pkgs[0]
}
// LoadWithTypes tries a standard load, which may not have enough type info (TypesInfo== nil) available if the imported package is a
// second order dependency. Fortunately this doesnt happen very often, so we can just issue a load when we detect it.
// LoadWithTypes tries a standard load, which may not have enough type info (TypesInfo== nil)
// available if the imported package is a second order dependency. Fortunately this doesnt happen
// very often, so we can just issue a load when we detect it.
func (p *Packages) LoadWithTypes(importPath string) *packages.Package {
pkg := p.Load(importPath)
if pkg == nil || pkg.TypesInfo == nil {
@ -189,8 +192,9 @@ func (p *Packages) LoadWithTypes(importPath string) *packages.Package {
return pkg
}
// LoadAllNames will call packages.Load with the NeedName mode only and will store the package name in a cache.
// it does not return any package data, but after calling this you can call NameForPackage to get the package name without loading the full package data.
// LoadAllNames will call packages.Load with the NeedName mode only and will store the package name
// in a cache. it does not return any package data, but after calling this you can call
// NameForPackage to get the package name without loading the full package data.
func (p *Packages) LoadAllNames(importPaths ...string) {
importPaths = dedupPackages(importPaths)
missing := make([]string, 0, len(importPaths))
@ -228,7 +232,6 @@ func (p *Packages) LoadAllNames(importPaths ...string) {
Mode: packages.NeedName,
BuildFlags: p.buildFlags,
}, missing...)
if err != nil {
p.loadErrors = append(p.loadErrors, err)
}
@ -243,7 +246,8 @@ func (p *Packages) LoadAllNames(importPaths ...string) {
}
}
// NameForPackage looks up the package name from the package stanza in the go files at the given import path.
// NameForPackage looks up the package name from the package stanza in the go files at the given
// import path.
func (p *Packages) NameForPackage(importPath string) string {
p.numNameCalls++
p.LoadAllNames(importPath)
@ -252,7 +256,8 @@ func (p *Packages) NameForPackage(importPath string) string {
return p.importToName[importPath]
}
// Evict removes a given package import path from the cache. Further calls to Load will fetch it from disk.
// Evict removes a given package import path from the cache. Further calls to Load will fetch it
// from disk.
func (p *Packages) Evict(importPath string) {
delete(p.packages, importPath)
}
@ -268,7 +273,8 @@ func (p *Packages) ModTidy() error {
return nil
}
// Errors returns any errors that were returned by Load, either from the call itself or any of the loaded packages.
// Errors returns any errors that were returned by Load, either from the call itself or any of the
// loaded packages.
func (p *Packages) Errors() PkgErrors {
res := append([]error{}, p.loadErrors...)
for _, pkg := range p.packages {

View File

@ -36,7 +36,8 @@ func NormalizeVendor(pkg string) string {
// github.com/99designs/gqlgen/graphql becomes
// github.com/foo/vendor/github.com/99designs/gqlgen/graphql
//
// x/tools/packages only supports 'qualified package paths' so this will need to be done prior to calling it
// x/tools/packages only supports 'qualified package paths' so this will need to be done prior to
// calling it
// See https://github.com/golang/go/issues/30289
func QualifyPackagePath(importPath string) string {
wd, _ := os.Getwd()

View File

@ -43,7 +43,11 @@ func Prune(filename string, src []byte, packages *code.Packages) ([]byte, error)
return nil, err
}
return imports.Process(filename, buf.Bytes(), &imports.Options{FormatOnly: true, Comments: true, TabIndent: true, TabWidth: 8})
return imports.Process(
filename,
buf.Bytes(),
&imports.Options{FormatOnly: true, Comments: true, TabIndent: true, TabWidth: 8},
)
}
func getUnusedImports(file ast.Node, packages *code.Packages) map[string]string {

View File

@ -4,6 +4,7 @@ package main
import (
"bytes"
"context"
_ "embed"
"errors"
"fmt"
@ -14,7 +15,7 @@ import (
"os"
"path/filepath"
"github.com/urfave/cli/v2"
"github.com/urfave/cli/v3"
"github.com/99designs/gqlgen/api"
"github.com/99designs/gqlgen/codegen/config"
@ -31,7 +32,8 @@ var configFileTemplate string
func getConfigFileContent(pkgName string) string {
var buf bytes.Buffer
if err := template.Must(template.New("gqlgen.yml").Parse(configFileTemplate)).Execute(&buf, pkgName); err != nil {
if err := template.Must(template.New("gqlgen.yml").Parse(configFileTemplate)).
Execute(&buf, pkgName); err != nil {
panic(err)
}
return buf.String()
@ -92,10 +94,10 @@ var initCmd = &cli.Command{
Value: "graph/schema.graphqls",
},
},
Action: func(ctx *cli.Context) error {
configFilename := ctx.String("config")
serverFilename := ctx.String("server")
schemaFilename := ctx.String("schema")
Action: func(ctx context.Context, c *cli.Command) error {
configFilename := c.String("config")
serverFilename := c.String("server")
schemaFilename := c.String("schema")
cwd, err := os.Getwd()
if err != nil {
@ -167,10 +169,10 @@ var generateCmd = &cli.Command{
&cli.BoolFlag{Name: "verbose, v", Usage: "show logs"},
&cli.StringFlag{Name: "config, c", Usage: "the config filename"},
},
Action: func(ctx *cli.Context) error {
Action: func(ctx context.Context, c *cli.Command) error {
var cfg *config.Config
var err error
if configFilename := ctx.String("config"); configFilename != "" {
if configFilename := c.String("config"); configFilename != "" {
cfg, err = config.LoadConfig(configFilename)
if err != nil {
return err
@ -193,27 +195,27 @@ var generateCmd = &cli.Command{
var versionCmd = &cli.Command{
Name: "version",
Usage: "print the version string",
Action: func(ctx *cli.Context) error {
Action: func(ctx context.Context, c *cli.Command) error {
fmt.Println(graphql.Version)
return nil
},
}
func main() {
app := cli.NewApp()
app := &cli.Command{}
app.Name = "gqlgen"
app.Usage = generateCmd.Usage
app.Description = "This is a library for quickly creating strictly typed graphql servers in golang. See https://gqlgen.com/ for a getting started guide."
app.HideVersion = true
app.Flags = generateCmd.Flags
app.Version = graphql.Version
app.Before = func(context *cli.Context) error {
if context.Bool("verbose") {
app.Before = func(ctx context.Context, c *cli.Command) (context.Context, error) {
if c.Bool("verbose") {
log.SetFlags(0)
} else {
log.SetOutput(io.Discard)
}
return nil
return ctx, nil
}
app.Action = generateCmd.Action
@ -223,7 +225,7 @@ func main() {
versionCmd,
}
if err := app.Run(os.Args); err != nil {
if err := app.Run(context.Background(), os.Args); err != nil {
fmt.Fprint(os.Stderr, err.Error()+"\n")
os.Exit(1)
}

View File

@ -49,7 +49,8 @@ type Requires struct {
func (e *Entity) allFieldsAreExternal(federationVersion int) bool {
for _, field := range e.Def.Fields {
if !e.isFieldImplicitlyExternal(field, federationVersion) && field.Directives.ForName("external") == nil {
if !e.isFieldImplicitlyExternal(field, federationVersion) &&
field.Directives.ForName("external") == nil {
return false
}
}
@ -62,8 +63,9 @@ func (e *Entity) isFieldImplicitlyExternal(field *ast.FieldDefinition, federatio
if federationVersion != 2 {
return false
}
// TODO: From the spec, it seems like if an entity is not resolvable then it should not only not have a resolver, but should not appear in the _Entity union.
// The current implementation is a less drastic departure from the previous behavior, but should probably be reviewed.
// TODO: From the spec, it seems like if an entity is not resolvable then it should not only not
// have a resolver, but should not appear in the _Entity union. The current implementation is a
// less drastic departure from the previous behavior, but should probably be reviewed.
// See https://www.apollographql.com/docs/federation/subgraph-spec/
if e.isResolvable() {
return false
@ -88,7 +90,8 @@ func (e *Entity) isResolvable() bool {
// If there is no resolvable argument, the entity is resolvable.
return true
}
// only if resolvable: false has been set on the @key directive do we consider the entity non-resolvable.
// only if resolvable: false has been set on the @key directive do we consider the entity
// non-resolvable.
return resolvable.Value.Raw != "false"
}

View File

@ -42,6 +42,10 @@ type PackageOptions struct {
// ComputedRequires generates resolver functions to compute values for
// fields using the @required directive.
ComputedRequires bool
// EntityResolverMulti is default engine for entityResolver generation.
// This can be overriding by @entityResolver(multi: Boolean) directive.
// false by default.
EntityResolverMulti bool
}
// New returns a federation plugin that injects
@ -64,30 +68,63 @@ func New(version int, cfg *config.Config) (*Federation, error) {
// buildPackageOptions validates and extracts the federation package options
// from cfg.Federation.Options. Recognized keys are explicit_requires,
// computed_requires, and entity_resolver_multi; any other key is an error.
// explicit_requires and computed_requires are mutually exclusive, and
// computed_requires additionally demands Federation 2 together with
// call_argument_directives_with_null.
//
// The block as seen here interleaved the pre-refactor map lookups and error
// strings with the new option-loop implementation (invalid Go); this is the
// reconstructed new implementation built from the visible added lines.
func buildPackageOptions(cfg *config.Config) (PackageOptions, error) {
	packageOptions := cfg.Federation.Options

	const (
		optionExplicitRequires    = "explicit_requires"
		optionComputedRequires    = "computed_requires"
		optionEntityResolverMulti = "entity_resolver_multi"
	)

	var explicitRequires,
		computedRequires,
		entityResolverMulti bool
	for k, v := range packageOptions {
		switch k {
		case optionExplicitRequires:
			explicitRequires = v
		case optionComputedRequires:
			computedRequires = v
		case optionEntityResolverMulti:
			entityResolverMulti = v
		default:
			// Fail fast on typos instead of silently ignoring an option.
			return PackageOptions{}, fmt.Errorf("unknown package option: %s", k)
		}
	}

	if explicitRequires && computedRequires {
		return PackageOptions{}, fmt.Errorf(
			"only one of %s or %s can be set to true",
			optionExplicitRequires,
			optionComputedRequires,
		)
	}

	if computedRequires {
		if cfg.Federation.Version != 2 {
			return PackageOptions{}, fmt.Errorf(
				"when using federation.options.%s you must be using Federation 2",
				optionComputedRequires,
			)
		}

		// We rely on injecting a null argument with a directives for fields with @requires, so we
		// need to ensure
		// our directive is always called.
		if !cfg.CallArgumentDirectivesWithNull {
			return PackageOptions{}, fmt.Errorf(
				"when using federation.options.%s, call_argument_directives_with_null must be set to true",
				optionComputedRequires,
			)
		}
	}

	return PackageOptions{
		ExplicitRequires:    explicitRequires,
		ComputedRequires:    computedRequires,
		EntityResolverMulti: entityResolverMulti,
	}, nil
}
@ -100,7 +137,10 @@ func (f *Federation) Name() string {
func (f *Federation) MutateConfig(cfg *config.Config) error {
for typeName, entry := range builtins {
if cfg.Models.Exists(typeName) {
return fmt.Errorf("%v already exists which must be reserved when Federation is enabled", typeName)
return fmt.Errorf(
"%v already exists which must be reserved when Federation is enabled",
typeName,
)
}
cfg.Models[typeName] = entry
}
@ -127,7 +167,9 @@ func (f *Federation) MutateConfig(cfg *config.Config) error {
if f.usesRequires && f.PackageOptions.ComputedRequires {
cfg.Schema.Directives[dirPopulateFromRepresentations.Name] = dirPopulateFromRepresentations
cfg.Directives[dirPopulateFromRepresentations.Name] = config.DirectiveConfig{Implementation: &populateFromRepresentationsImplementation}
cfg.Directives[dirPopulateFromRepresentations.Name] = config.DirectiveConfig{
Implementation: &populateFromRepresentationsImplementation,
}
cfg.Schema.Directives[dirEntityReference.Name] = dirEntityReference
cfg.Directives[dirEntityReference.Name] = config.DirectiveConfig{SkipRuntime: true}
@ -142,7 +184,8 @@ func (f *Federation) MutateConfig(cfg *config.Config) error {
func (f *Federation) InjectSourcesEarly() ([]*ast.Source, error) {
input := ``
// add version-specific changes on key directive, as well as adding the new directives for federation 2
// add version-specific changes on key directive, as well as adding the new directives for
// federation 2
switch f.version {
case 1:
input += federationVersion1Schema
@ -157,7 +200,7 @@ func (f *Federation) InjectSourcesEarly() ([]*ast.Source, error) {
}}, nil
}
// InjectSourceLate creates a GraphQL Entity type with all
// InjectSourcesLate creates a GraphQL Entity type with all
// the fields that had the @key directive
func (f *Federation) InjectSourcesLate(schema *ast.Schema) ([]*ast.Source, error) {
f.Entities = f.buildEntities(schema, f.version)
@ -178,7 +221,10 @@ func (f *Federation) InjectSourcesLate(schema *ast.Schema) ([]*ast.Source, error
resolverSDL, entityResolverInputSDL := buildResolverSDL(r, e.Multi)
resolvers = append(resolvers, resolverSDL)
if entityResolverInputSDL != "" {
entityResolverInputDefinitions = append(entityResolverInputDefinitions, entityResolverInputSDL)
entityResolverInputDefinitions = append(
entityResolverInputDefinitions,
entityResolverInputSDL,
)
}
}
}
@ -393,7 +439,10 @@ func (f *Federation) buildEntity(
}
if (schemaType.Kind == ast.Interface) && (len(schema.GetPossibleTypes(schemaType)) == 0) {
fmt.Printf("@key directive found on unused \"interface %s\". Will be ignored.\n", schemaType.Name)
fmt.Printf(
"@key directive found on unused \"interface %s\". Will be ignored.\n",
schemaType.Name,
)
return nil
}
@ -402,7 +451,7 @@ func (f *Federation) buildEntity(
Def: schemaType,
Resolvers: nil,
Requires: nil,
Multi: isMultiEntity(schemaType),
Multi: f.isMultiEntity(schemaType),
}
// If our schema has a field with a type defined in
@ -436,10 +485,12 @@ func (f *Federation) buildEntity(
return entity
}
func isMultiEntity(schemaType *ast.Definition) bool {
// isMultiEntity returns @entityResolver(multi) value, if directive is not defined,
// then global configuration parameter will be used.
func (f *Federation) isMultiEntity(schemaType *ast.Definition) bool {
dir := schemaType.Directives.ForName(dirNameEntityResolver)
if dir == nil {
return false
return f.PackageOptions.EntityResolverMulti
}
if dirArg := dir.Arguments.ForName("multi"); dirArg != nil {
@ -448,7 +499,7 @@ func isMultiEntity(schemaType *ast.Definition) bool {
}
}
return false
return f.PackageOptions.EntityResolverMulti
}
func buildResolvers(
@ -493,7 +544,8 @@ func extractFields(
) (string, error) {
var arg *ast.Argument
// since directives are able to now have multiple arguments, we need to check both possible for a possible @key(fields="" fields="")
// since directives are able to now have multiple arguments, we need to check both possible for
// a possible @key(fields="" fields="")
for _, a := range dir.Arguments {
if a.Name == DirArgFields {
if arg != nil {
@ -611,12 +663,22 @@ func (f *Federation) generateExplicitRequires(
Entity: entity,
}
populator.Comment = strings.TrimSpace(strings.TrimLeft(rewriter.GetMethodComment("executionContext", populator.FuncName), `\`))
populator.Implementation = strings.TrimSpace(rewriter.GetMethodBody("executionContext", populator.FuncName))
populator.Comment = strings.TrimSpace(
strings.TrimLeft(
rewriter.GetMethodComment("executionContext", populator.FuncName),
`\`,
),
)
populator.Implementation = strings.TrimSpace(
rewriter.GetMethodBody("executionContext", populator.FuncName),
)
if populator.Implementation == "" {
populator.Exists = false
populator.Implementation = fmt.Sprintf("panic(fmt.Errorf(\"not implemented: %v\"))", populator.FuncName)
populator.Implementation = fmt.Sprintf(
"panic(fmt.Errorf(\"not implemented: %v\"))",
populator.FuncName,
)
}
populators = append(populators, populator)
}
@ -660,27 +722,43 @@ func buildResolverSDL(
) (resolverSDL, entityResolverInputSDL string) {
if multi {
entityResolverInputSDL = buildEntityResolverInputDefinitionSDL(resolver)
resolverSDL := fmt.Sprintf("\t%s(reps: [%s]!): [%s]", resolver.ResolverName, resolver.InputTypeName, resolver.ReturnTypeName)
resolverSDL := fmt.Sprintf(
"\t%s(reps: [%s]!): [%s]",
resolver.ResolverName,
resolver.InputTypeName,
resolver.ReturnTypeName,
)
return resolverSDL, entityResolverInputSDL
}
resolverArgs := ""
var resolverArgsSb705 strings.Builder
for _, keyField := range resolver.KeyFields {
resolverArgs += fmt.Sprintf("%s: %s,", keyField.Field.ToGoPrivate(), keyField.Definition.Type.String())
resolverArgsSb705.WriteString(
fmt.Sprintf("%s: %s,", keyField.Field.ToGoPrivate(), keyField.Definition.Type.String()),
)
}
resolverSDL = fmt.Sprintf("\t%s(%s): %s!", resolver.ResolverName, resolverArgs, resolver.ReturnTypeName)
resolverArgs += resolverArgsSb705.String()
resolverSDL = fmt.Sprintf(
"\t%s(%s): %s!",
resolver.ResolverName,
resolverArgs,
resolver.ReturnTypeName,
)
return resolverSDL, ""
}
func buildEntityResolverInputDefinitionSDL(resolver *EntityResolver) string {
entityResolverInputDefinition := "input " + resolver.InputTypeName + " {\n"
var entityResolverInputDefinitionSb714 strings.Builder
for _, keyField := range resolver.KeyFields {
entityResolverInputDefinition += fmt.Sprintf(
entityResolverInputDefinitionSb714.WriteString(fmt.Sprintf(
"\t%s: %s\n",
keyField.Field.ToGo(),
keyField.Definition.Type.String(),
)
))
}
entityResolverInputDefinition += entityResolverInputDefinitionSb714.String()
return entityResolverInputDefinition + "}"
}

View File

@ -46,7 +46,10 @@ func New(raw string, prefix []string) Set {
}
// FieldDefinition looks up a field in the type.
func (f Field) FieldDefinition(schemaType *ast.Definition, schema *ast.Schema) *ast.FieldDefinition {
func (f Field) FieldDefinition(
schemaType *ast.Definition,
schema *ast.Schema,
) *ast.FieldDefinition {
objType := schemaType
def := objType.Fields.ForName(f[0])
@ -88,9 +91,11 @@ func (f Field) TypeReference(obj *codegen.Object, objects codegen.Objects) *code
func (f Field) ToGo() string {
var ret string
var retSb91 strings.Builder
for _, field := range f {
ret += templates.ToGo(field)
retSb91.WriteString(templates.ToGo(field))
}
ret += retSb91.String()
return ret
}
@ -98,14 +103,16 @@ func (f Field) ToGo() string {
func (f Field) ToGoPrivate() string {
var ret string
var retSb101 strings.Builder
for i, field := range f {
if i == 0 {
field = trimArgumentFromFieldName(field)
ret += templates.ToGoPrivate(field)
retSb101.WriteString(templates.ToGoPrivate(field))
continue
}
ret += templates.ToGo(field)
retSb101.WriteString(templates.ToGo(field))
}
ret += retSb101.String()
return ret
}
@ -114,7 +121,8 @@ func (f Field) Join(str string) string {
return strings.Join(f, str)
}
// JoinGo concatenates the Go name of field parts with a string separator between. Useful in templates.
// JoinGo concatenates the Go name of field parts with a string separator between. Useful in
// templates.
func (f Field) JoinGo(str string) string {
strs := []string{}
@ -149,7 +157,8 @@ func parseUnnestedKeyFieldSet(raw string, prefix []string) Set {
unionField = false
}
next := append(prefix[0:len(prefix):len(prefix)], s) //nolint:gocritic // set cap=len in order to force slice reallocation
next := prefix[0:len(prefix):len(prefix)]
next = append(next, s)
ret = append(ret, next)
}
return ret
@ -163,7 +172,13 @@ func extractSubs(str string) (string, string, string) {
if start < 0 || end < 0 {
panic("invalid key fieldSet: " + str)
}
return trimArgumentFromFieldName(strings.TrimSpace(str[:start])), strings.TrimSpace(str[start+1 : end]), strings.TrimSpace(str[end+1:])
return trimArgumentFromFieldName(
strings.TrimSpace(str[:start]),
), strings.TrimSpace(
str[start+1 : end],
), strings.TrimSpace(
str[end+1:],
)
}
// matchingBracketIndex returns the index of the closing bracket, assuming an open bracket at start.

View File

@ -0,0 +1,179 @@
package modelgen
import (
"errors"
"github.com/vektah/gqlparser/v2/ast"
)
// interfaceGraph tracks interface implementation relationships.
type interfaceGraph struct {
	schema           *ast.Schema
	parentInterfaces map[string][]string // interface -> interfaces it implements
	childInterfaces  map[string][]string // interface -> interfaces that implement it
}

// newInterfaceGraph builds the parent/child implementation maps for every
// interface type in the schema. Non-interface types are ignored. An interface
// with no parents still gets an (empty) entry in parentInterfaces so it shows
// up when callers iterate over all known interfaces.
func newInterfaceGraph(schema *ast.Schema) *interfaceGraph {
	g := &interfaceGraph{
		schema:           schema,
		parentInterfaces: make(map[string][]string),
		childInterfaces:  make(map[string][]string),
	}
	for _, schemaType := range schema.Types {
		if schemaType.Kind != ast.Interface {
			continue
		}
		if len(schemaType.Interfaces) == 0 {
			g.parentInterfaces[schemaType.Name] = []string{}
		} else {
			// Copy the AST slice so the graph does not alias schema data.
			g.parentInterfaces[schemaType.Name] = append([]string{}, schemaType.Interfaces...)
			for _, parent := range schemaType.Interfaces {
				g.childInterfaces[parent] = append(g.childInterfaces[parent], schemaType.Name)
			}
		}
	}
	return g
}
// topologicalSort returns interfaces ordered with parents before children.
// Only relationships between interfaces in the provided list are considered;
// an error is returned when the implementation graph contains a cycle.
func (g *interfaceGraph) topologicalSort(interfaces []string) ([]string, error) {
	members := make(map[string]bool, len(interfaces))
	for _, name := range interfaces {
		members[name] = true
	}

	// For each interface, count how many of its parents are also in the list.
	pending := make(map[string]int, len(interfaces))
	for _, name := range interfaces {
		n := 0
		for _, parent := range g.parentInterfaces[name] {
			if members[parent] {
				n++
			}
		}
		pending[name] = n
	}

	// Kahn's algorithm: seed the worklist with interfaces that have no
	// in-list parents, then release children as their parents are emitted.
	var ready []string
	for _, name := range interfaces {
		if pending[name] == 0 {
			ready = append(ready, name)
		}
	}

	ordered := make([]string, 0, len(interfaces))
	for i := 0; i < len(ready); i++ {
		name := ready[i]
		ordered = append(ordered, name)
		for _, child := range g.childInterfaces[name] {
			if !members[child] {
				continue
			}
			pending[child]--
			if pending[child] == 0 {
				ready = append(ready, child)
			}
		}
	}

	// Any leftover interface was unreachable, which means a cycle exists.
	if len(ordered) != len(interfaces) {
		return nil, errors.New("cycle detected in interface implementations")
	}
	return ordered, nil
}
// embeddingInfo contains information about interface embedding relationships
// for a single interface, as computed by interfaceGraph.getEmbeddingInfo.
type embeddingInfo struct {
	Parents       []string               // embeddable parent interfaces with goEmbedInterface directive
	SkippedFields []*ast.FieldDefinition // fields from intermediate parents without the directive
}
// getInterfaceOwnFields returns only the fields that are not inherited from
// parent interfaces. Returns nil for unknown or non-interface types.
func (g *interfaceGraph) getInterfaceOwnFields(interfaceName string) []*ast.FieldDefinition {
	def := g.schema.Types[interfaceName]
	if def == nil || def.Kind != ast.Interface {
		return nil
	}

	parents := g.parentInterfaces[interfaceName]
	if len(parents) == 0 {
		// Nothing to inherit from, so every field is the interface's own.
		return def.Fields
	}

	// Collect the names of all fields declared by any direct parent.
	inherited := map[string]bool{}
	for _, parentName := range parents {
		parent := g.schema.Types[parentName]
		if parent == nil {
			continue
		}
		for _, f := range parent.Fields {
			inherited[f.Name] = true
		}
	}

	// Keep only fields not declared by a parent.
	own := []*ast.FieldDefinition{}
	for _, f := range def.Fields {
		if inherited[f.Name] {
			continue
		}
		own = append(own, f)
	}
	return own
}
// getEmbeddingInfo returns information about embeddable parent interfaces and fields
// from intermediate parents that don't have the goEmbedInterface directive.
//
// The walk goes upward from the direct parents: an embeddable parent is
// recorded and its ancestors are NOT walked (presumably its own Base struct
// already covers them — confirm against generateBaseStructForInterface), while
// a non-embeddable parent contributes its own fields and the walk continues to
// its parents. The visited set guards against revisiting in diamond hierarchies.
func (g *interfaceGraph) getEmbeddingInfo(interfaceName string) embeddingInfo {
	info := embeddingInfo{
		Parents:       []string{},
		SkippedFields: []*ast.FieldDefinition{},
	}

	visited := make(map[string]bool)
	var walkParents func(name string)
	walkParents = func(name string) {
		if visited[name] {
			return
		}
		visited[name] = true

		parentDef := g.schema.Types[name]
		if parentDef == nil || parentDef.Kind != ast.Interface {
			return
		}

		// Check if this parent has the directive (is embeddable)
		if g.isEmbeddable(name) {
			info.Parents = append(info.Parents, name)
		} else {
			// Not embeddable - collect its fields and walk up
			info.SkippedFields = append(info.SkippedFields, g.getInterfaceOwnFields(name)...)
			for _, grandparent := range parentDef.Interfaces {
				walkParents(grandparent)
			}
		}
	}

	currentDef := g.schema.Types[interfaceName]
	if currentDef != nil {
		for _, parent := range currentDef.Interfaces {
			walkParents(parent)
		}
	}

	return info
}
// isEmbeddable reports whether the named type is an interface carrying the
// goEmbedInterface directive.
func (g *interfaceGraph) isEmbeddable(interfaceName string) bool {
	def := g.schema.Types[interfaceName]
	if def == nil || def.Kind != ast.Interface {
		return false
	}
	return def.Directives.ForName("goEmbedInterface") != nil
}

View File

@ -24,7 +24,8 @@ type (
FieldMutateHook = func(td *ast.Definition, fd *ast.FieldDefinition, f *Field) (*Field, error)
)
// DefaultFieldMutateHook is the default hook for the Plugin which applies the GoFieldHook and GoTagFieldHook.
// DefaultFieldMutateHook is the default hook for the Plugin which applies the GoFieldHook and
// GoTagFieldHook.
func DefaultFieldMutateHook(td *ast.Definition, fd *ast.FieldDefinition, f *Field) (*Field, error) {
return GoTagFieldHook(td, fd, f)
}
@ -104,73 +105,53 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
PackageName: cfg.Model.Package,
}
cfg.Directives["goEmbedInterface"] = config.DirectiveConfig{SkipRuntime: true}
binder := cfg.NewBinder()
// Generate Base structs for interfaces if embedded structs are enabled
embedder := newEmbeddedInterfaceGenerator(cfg, binder, nil, b)
specs, err := embedder.generateAllInterfaceBaseStructs()
if err != nil {
return err
}
for _, spec := range specs {
obj, err := m.buildBaseObjectFromSpec(cfg, binder, spec)
if err != nil {
return err
}
if obj != nil {
b.Models = append(b.Models, obj)
}
}
for _, schemaType := range cfg.Schema.Types {
userDefined := cfg.Models.UserDefined(schemaType.Name)
switch schemaType.Kind {
case ast.Interface, ast.Union:
if !userDefined {
it, err := m.getInterface(cfg, schemaType)
if err != nil {
return err
}
b.Interfaces = append(b.Interfaces, it)
}
}
}
for _, schemaType := range cfg.Schema.Types {
if cfg.Models.UserDefined(schemaType.Name) {
continue
}
switch schemaType.Kind {
case ast.Interface, ast.Union:
var fields []*Field
var err error
if !cfg.OmitGetters {
fields, err = m.generateFields(cfg, schemaType)
if err != nil {
return err
}
}
it := &Interface{
Description: schemaType.Description,
Name: schemaType.Name,
Implements: schemaType.Interfaces,
Fields: fields,
OmitCheck: cfg.OmitInterfaceChecks,
}
// if the interface has a key directive as an entity interface, allow it to implement _Entity
if schemaType.Directives.ForName("key") != nil {
it.Implements = append(it.Implements, "_Entity")
}
b.Interfaces = append(b.Interfaces, it)
case ast.Object, ast.InputObject:
if cfg.IsRoot(schemaType) {
if !cfg.OmitRootModels {
b.Models = append(b.Models, &Object{
Description: schemaType.Description,
Name: schemaType.Name,
})
}
continue
}
fields, err := m.generateFields(cfg, schemaType)
it, err := m.getObject(cfg, schemaType, b)
if err != nil {
return err
}
it := &Object{
Description: schemaType.Description,
Name: schemaType.Name,
Fields: fields,
}
// If Interface A implements interface B, and Interface C also implements interface B
// then both A and C have methods of B.
// The reason for checking unique is to prevent the same method B from being generated twice.
uniqueMap := map[string]bool{}
for _, implementor := range cfg.Schema.GetImplements(schemaType) {
if !uniqueMap[implementor.Name] {
it.Implements = append(it.Implements, implementor.Name)
uniqueMap[implementor.Name] = true
}
// for interface implements
for _, iface := range implementor.Interfaces {
if !uniqueMap[iface] {
it.Implements = append(it.Implements, iface)
uniqueMap[iface] = true
}
}
if it == nil {
continue
}
b.Models = append(b.Models, it)
@ -192,21 +173,26 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
b.Scalars = append(b.Scalars, schemaType.Name)
}
}
sort.Slice(b.Enums, func(i, j int) bool { return b.Enums[i].Name < b.Enums[j].Name })
sort.Slice(b.Models, func(i, j int) bool { return b.Models[i].Name < b.Models[j].Name })
sort.Slice(b.Interfaces, func(i, j int) bool { return b.Interfaces[i].Name < b.Interfaces[j].Name })
sort.Slice(
b.Interfaces,
func(i, j int) bool { return b.Interfaces[i].Name < b.Interfaces[j].Name },
)
// if we are not just turning all struct-type fields in generated structs into pointers, we need to at least
// if we are not just turning all struct-type fields in generated structs into pointers, we need
// to at least
// check for cyclical relationships and recursive structs
if !cfg.StructFieldsAlwaysPointers {
findAndHandleCyclicalRelationships(b)
}
for _, it := range b.Enums {
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGoModelName(it.Name))
}
for _, it := range b.Models {
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGoModelName(it.Name))
}
for _, it := range b.Interfaces {
// On a given interface we want to keep a reference to all the models that implement it
@ -221,7 +207,7 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
}
}
}
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGo(it.Name))
cfg.Models.Add(it.Name, cfg.Model.ImportPath()+"."+templates.ToGoModelName(it.Name))
}
for _, it := range b.Scalars {
cfg.Models.Add(it, "github.com/99designs/gqlgen/graphql.String")
@ -274,10 +260,22 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
}
goType := templates.CurrentImports.LookupType(field.Type)
if strings.HasPrefix(goType, "[]") {
getter := fmt.Sprintf("func (this %s) Get%s() %s {\n", templates.ToGo(model.Name), field.GoName, goType)
getter := fmt.Sprintf(
"func (this %s) Get%s() %s {\n",
templates.ToGoModelName(model.Name),
field.GoName,
goType,
)
getter += fmt.Sprintf("\tif this.%s == nil { return nil }\n", field.GoName)
getter += fmt.Sprintf("\tinterfaceSlice := make(%s, 0, len(this.%s))\n", goType, field.GoName)
getter += fmt.Sprintf("\tfor _, concrete := range this.%s { interfaceSlice = append(interfaceSlice, ", field.GoName)
getter += fmt.Sprintf(
"\tinterfaceSlice := make(%s, 0, len(this.%s))\n",
goType,
field.GoName,
)
getter += fmt.Sprintf(
"\tfor _, concrete := range this.%s { interfaceSlice = append(interfaceSlice, ",
field.GoName,
)
if interfaceFieldTypeIsPointer && !structFieldTypeIsPointer {
getter += "&"
} else if !interfaceFieldTypeIsPointer && structFieldTypeIsPointer {
@ -288,7 +286,12 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
getter += "}"
return getter
}
getter := fmt.Sprintf("func (this %s) Get%s() %s { return ", templates.ToGo(model.Name), field.GoName, goType)
getter := fmt.Sprintf(
"func (this %s) Get%s() %s { return ",
templates.ToGoModelName(model.Name),
field.GoName,
goType,
)
if interfaceFieldTypeIsPointer && !structFieldTypeIsPointer {
getter += "&"
@ -308,7 +311,7 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
newModelTemplate = readModelTemplate(cfg.Model.ModelTemplate)
}
err := templates.Render(templates.Options{
err = templates.Render(templates.Options{
PackageName: cfg.Model.Package,
Filename: cfg.Model.Filename,
Data: b,
@ -328,11 +331,28 @@ func (m *Plugin) MutateConfig(cfg *config.Config) error {
return nil
}
func (m *Plugin) generateFields(cfg *config.Config, schemaType *ast.Definition) ([]*Field, error) {
func (m *Plugin) generateFields(
cfg *config.Config,
schemaType *ast.Definition,
model *ModelBuild,
) ([]*Field, error) {
binder := cfg.NewBinder()
embeddedGen := newEmbeddedInterfaceGenerator(cfg, binder, schemaType, model)
fields := make([]*Field, 0)
embeddedFieldMap := embeddedGen.generateEmbeddedFields(schemaType.Fields)
for _, field := range schemaType.Fields {
if embeddedField, isEmbedded := embeddedFieldMap[field.Name]; isEmbedded {
// First field of interface gets the embedded base struct
if embeddedField != nil {
fields = append(fields, embeddedField)
}
// Skip this field (either it's first with embedded field, or subsequent field from same
// interface)
continue
}
f, err := m.generateField(cfg, binder, schemaType, field)
if err != nil {
return nil, err
@ -394,7 +414,7 @@ func (m *Plugin) generateField(
case ast.Object, ast.InputObject:
// no user defined model, must reference a generated struct
typ = types.NewNamed(
types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGo(field.Type.Name()), nil),
types.NewTypeName(0, cfg.Model.Pkg(), templates.ToGoModelName(field.Type.Name()), nil),
types.NewStruct(nil, nil),
nil,
)
@ -428,8 +448,9 @@ func (m *Plugin) generateField(
Type: typ,
Description: field.Description,
Tag: getStructTagFromField(cfg, field),
Omittable: cfg.NullableInputOmittable && schemaType.Kind == ast.InputObject && !field.Type.NonNull,
IsResolver: cfg.Models[schemaType.Name].Fields[field.Name].Resolver,
Omittable: cfg.NullableInputOmittable && schemaType.Kind == ast.InputObject &&
!field.Type.NonNull,
IsResolver: cfg.Models[schemaType.Name].Fields[field.Name].Resolver,
}
if omittable := cfg.Models[schemaType.Name].Fields[field.Name].Omittable; omittable != nil {
@ -441,6 +462,12 @@ func (m *Plugin) generateField(
if err != nil {
return nil, fmt.Errorf("generror: field %v.%v: %w", schemaType.Name, field.Name, err)
}
if mf == nil {
// the field hook wants to omit the field
return nil, nil
}
f = mf
}
@ -450,10 +477,16 @@ func (m *Plugin) generateField(
if f.Omittable {
if schemaType.Kind != ast.InputObject || field.Type.NonNull {
return nil, fmt.Errorf("generror: field %v.%v: omittable is only applicable to nullable input fields", schemaType.Name, field.Name)
return nil, fmt.Errorf(
"generror: field %v.%v: omittable is only applicable to nullable input fields",
schemaType.Name,
field.Name,
)
}
omittableType, err := binder.FindTypeFromName("github.com/99designs/gqlgen/graphql.Omittable")
omittableType, err := binder.FindTypeFromName(
"github.com/99designs/gqlgen/graphql.Omittable",
)
if err != nil {
return nil, err
}
@ -528,11 +561,13 @@ func getExtraFields(cfg *config.Config, modelName string) []*Field {
func getStructTagFromField(cfg *config.Config, field *ast.FieldDefinition) string {
var tags []string
if !field.Type.NonNull && (cfg.EnableModelJsonOmitemptyTag == nil || *cfg.EnableModelJsonOmitemptyTag) {
if !field.Type.NonNull &&
(cfg.EnableModelJsonOmitemptyTag == nil || *cfg.EnableModelJsonOmitemptyTag) {
tags = append(tags, "omitempty")
}
if !field.Type.NonNull && (cfg.EnableModelJsonOmitzeroTag == nil || *cfg.EnableModelJsonOmitzeroTag) {
if !field.Type.NonNull &&
(cfg.EnableModelJsonOmitzeroTag == nil || *cfg.EnableModelJsonOmitzeroTag) {
tags = append(tags, "omitzero")
}
@ -634,10 +669,16 @@ func removeDuplicateTags(t string) string {
// iterate backwards through tags so appended goTag directives are prioritized
for i := len(tt) - 1; i >= 0; i-- {
ti := tt[i]
// check if ti contains ":", and not contains any empty space. if not, tag is in wrong format
// check if ti contains ":", and not contains any empty space. if not, tag is in wrong
// format
// correct example: json:"name"
if !strings.Contains(ti, ":") {
panic(fmt.Errorf("wrong format of tags: %s. goTag directive should be in format: @goTag(key: \"something\", value:\"value\"), ", t))
panic(
fmt.Errorf(
"wrong format of tags: %s. goTag directive should be in format: @goTag(key: \"something\", value:\"value\"), ",
t,
),
)
}
kv := strings.Split(ti, ":")
@ -654,7 +695,12 @@ func removeDuplicateTags(t string) string {
isContained := containsInvalidSpace(value)
if isContained {
panic(fmt.Errorf("tag value should not contain any leading or trailing spaces: %s", value))
panic(
fmt.Errorf(
"tag value should not contain any leading or trailing spaces: %s",
value,
),
)
}
returnTags = key + ":" + value + returnTags
@ -674,7 +720,8 @@ func isStruct(t types.Type) bool {
return is
}
// findAndHandleCyclicalRelationships checks for cyclical relationships between generated structs and replaces them
// findAndHandleCyclicalRelationships checks for cyclical relationships between generated structs
// and replaces them
// with pointers. These relationships will produce compilation errors if they are not pointers.
// Also handles recursive structs.
func findAndHandleCyclicalRelationships(b *ModelBuild) {
@ -687,9 +734,11 @@ func findAndHandleCyclicalRelationships(b *ModelBuild) {
continue
}
// the field Type string will be in the form "github.com/99designs/gqlgen/codegen/testserver/followschema.LoopA"
// the field Type string will be in the form
// "github.com/99designs/gqlgen/codegen/testserver/followschema.LoopA"
// we only want the part after the last dot: "LoopA"
// this could lead to false positives, as we are only checking the name of the struct type, but these
// this could lead to false positives, as we are only checking the name of the struct
// type, but these
// should be extremely rare, if it is even possible at all.
fieldAStructNameParts := strings.Split(fieldA.Type.String(), ".")
fieldAStructName := fieldAStructNameParts[len(fieldAStructNameParts)-1]
@ -716,7 +765,8 @@ func findAndHandleCyclicalRelationships(b *ModelBuild) {
}
}
// if this is a recursive struct (i.e. structA == structB), ensure that we only change this field to a pointer once
// if this is a recursive struct (i.e. structA == structB), ensure that we only
// change this field to a pointer once
if cyclicalReferenceFound && ii != jj {
fieldA.Type = types.NewPointer(fieldA.Type)
break
@ -733,3 +783,116 @@ func readModelTemplate(customModelTemplate string) string {
}
return string(contentBytes)
}
// getInterface builds the Interface model for a schema interface or union
// type. Getter fields are generated only when cfg.OmitGetters is unset, and an
// interface carrying a @key directive is additionally marked as implementing
// the federation _Entity type.
func (m *Plugin) getInterface(
	cfg *config.Config,
	schemaType *ast.Definition,
) (*Interface, error) {
	var fields []*Field
	var err error
	if !cfg.OmitGetters {
		// A nil model disables embedded-field generation inside generateFields.
		fields, err = m.generateFields(cfg, schemaType, nil)
		if err != nil {
			return nil, err
		}
	}

	it := &Interface{
		Description: schemaType.Description,
		Name:        schemaType.Name,
		Implements:  schemaType.Interfaces,
		Fields:      fields,
		OmitCheck:   cfg.OmitInterfaceChecks,
	}

	// if the interface has a key directive as an entity interface, allow it to implement _Entity
	if schemaType.Directives.ForName("key") != nil {
		it.Implements = append(it.Implements, "_Entity")
	}

	return it, nil
}
// getObject builds the Object model for a schema object or input object type.
// Root types yield a bare Object (description + name only) unless
// cfg.OmitRootModels is set, in which case (nil, nil) is returned and the
// caller skips the type.
func (m *Plugin) getObject(
	cfg *config.Config,
	schemaType *ast.Definition,
	b *ModelBuild,
) (*Object, error) {
	if cfg.IsRoot(schemaType) {
		if !cfg.OmitRootModels {
			return &Object{
				Description: schemaType.Description,
				Name:        schemaType.Name,
			}, nil
		}
		// nil, nil signals "no model to emit"; callers check for nil.
		return nil, nil
	}

	fields, err := m.generateFields(cfg, schemaType, b)
	if err != nil {
		return nil, err
	}

	it := &Object{
		Description: schemaType.Description,
		Name:        schemaType.Name,
		Fields:      fields,
	}

	// If Interface A implements interface B, and Interface C also implements interface B
	// then both A and C have methods of B.
	// The reason for checking unique is to prevent the same method B from being generated twice.
	uniqueMap := map[string]bool{}
	for _, implementor := range cfg.Schema.GetImplements(schemaType) {
		if !uniqueMap[implementor.Name] {
			it.Implements = append(it.Implements, implementor.Name)
			uniqueMap[implementor.Name] = true
		}
		// for interface implements
		for _, iface := range implementor.Interfaces {
			if !uniqueMap[iface] {
				it.Implements = append(it.Implements, iface)
				uniqueMap[iface] = true
			}
		}
	}

	return it, nil
}
// buildBaseObjectFromSpec turns a baseStructSpec into a model Object named
// with the configured embedded-structs prefix plus the interface name. Parent
// Base structs are embedded anonymously (empty GoName), followed by the
// interface's own generated fields and any configured extra fields.
func (m *Plugin) buildBaseObjectFromSpec(
	cfg *config.Config,
	binder *config.Binder,
	spec *baseStructSpec,
) (*Object, error) {
	fields := make([]*Field, 0, len(spec.ParentEmbeddings)+len(spec.FieldsToGenerate))

	// An empty GoName makes the model template emit an anonymous (embedded) field.
	for _, parentType := range spec.ParentEmbeddings {
		fields = append(fields, &Field{
			Name:   "",
			GoName: "",
			Type:   parentType,
		})
	}

	// Generate fields from schema definitions. generateField may return a nil
	// field when a field hook chooses to omit it.
	for _, fieldDef := range spec.FieldsToGenerate {
		f, err := m.generateField(cfg, binder, spec.SchemaType, fieldDef)
		if err != nil {
			return nil, err
		}
		if f != nil {
			fields = append(fields, f)
		}
	}

	fields = append(fields, getExtraFields(cfg, spec.SchemaType.Name)...)

	return &Object{
		Description: spec.SchemaType.Description,
		// Plain concatenation instead of fmt.Sprintf("%s%s", ...) — same
		// result, no boxing/reflection.
		Name:       cfg.EmbeddedStructsPrefix + spec.SchemaType.Name,
		Fields:     fields,
		Implements: spec.ImplementsInterfaces,
	}, nil
}

View File

@ -37,7 +37,7 @@
{{- with .Description }}
{{.|prefixLines "// "}}
{{- end}}
{{ $field.GoName }} {{$field.Type | ref}} `{{$field.Tag}}`
{{ if $field.GoName }}{{ $field.GoName }} {{ end }}{{$field.Type | ref}}{{ if $field.Tag }} `{{$field.Tag}}`{{ end }}
{{- end }}
}

View File

@ -0,0 +1,301 @@
package modelgen
import (
"fmt"
"go/types"
"log"
"sort"
"github.com/vektah/gqlparser/v2/ast"
"github.com/99designs/gqlgen/codegen/config"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/99designs/gqlgen/internal/code"
)
// embeddedInterfaceGenerator generates Base structs for interfaces to enable embedding.
type embeddedInterfaceGenerator struct {
	cfg        *config.Config
	binder     *config.Binder
	schemaType *ast.Definition // type currently being generated; may be nil when only building specs
	model      *ModelBuild     // model build in progress; nil disables embedded-field generation
	graph      *interfaceGraph // implementation relationships derived from cfg.Schema
}

// newEmbeddedInterfaceGenerator wires up a generator for the given schema type.
// Both schemaType and model may be nil when the caller only needs Base struct
// specs rather than per-object embedded fields.
func newEmbeddedInterfaceGenerator(
	cfg *config.Config,
	binder *config.Binder,
	schemaType *ast.Definition,
	model *ModelBuild,
) *embeddedInterfaceGenerator {
	return &embeddedInterfaceGenerator{
		cfg:        cfg,
		binder:     binder,
		schemaType: schemaType,
		model:      model,
		graph:      newInterfaceGraph(cfg.Schema),
	}
}
// generateAllInterfaceBaseStructs returns Base struct specs ordered with
// parents before children.
//
// Only interfaces that carry the goEmbedInterface directive and are not bound
// to external packages are considered — externally bound interfaces already
// have their Base structs generated in that package.
func (g *embeddedInterfaceGenerator) generateAllInterfaceBaseStructs() ([]*baseStructSpec, error) {
	var interfaceNames []string
	for name := range g.graph.parentInterfaces {
		if g.graph.isEmbeddable(name) && !g.cfg.Models.UserDefined(name) {
			interfaceNames = append(interfaceNames, name)
		}
	}

	// Map iteration order is random; sort so that spec generation (and any
	// warnings emitted while building specs) is deterministic across runs.
	// topologicalSort preserves its input order among unrelated interfaces.
	sort.Strings(interfaceNames)

	sorted, err := g.graph.topologicalSort(interfaceNames)
	if err != nil {
		return nil, fmt.Errorf("failed to sort interfaces: %w", err)
	}

	specs := make([]*baseStructSpec, 0, len(sorted))
	for _, name := range sorted {
		spec, err := g.generateBaseStructForInterface(g.cfg.Schema.Types[name])
		if err != nil {
			return nil, err
		}
		if spec != nil {
			specs = append(specs, spec)
		}
	}
	return specs, nil
}
// baseStructSpec defines Base struct structure for an interface.
type baseStructSpec struct {
	SchemaType           *ast.Definition        // the interface this Base struct is generated for
	ParentEmbeddings     []types.Type           // parent Base struct types to embed anonymously
	FieldsToGenerate     []*ast.FieldDefinition // fields the Base struct declares itself
	ImplementsInterfaces []string               // interface names the Base struct satisfies
}
// generateBaseStructForInterface builds the baseStructSpec for a single
// interface: its own (non-inherited) fields, the parent Base structs to embed,
// and fields collected from non-embeddable intermediate parents. Returns an
// error when called on a non-interface type.
func (g *embeddedInterfaceGenerator) generateBaseStructForInterface(
	schemaType *ast.Definition,
) (*baseStructSpec, error) {
	if schemaType.Kind != ast.Interface {
		return nil, fmt.Errorf(
			"generateBaseStructForInterface called on non-interface type: %s",
			schemaType.Name,
		)
	}

	spec := &baseStructSpec{
		SchemaType:           schemaType,
		FieldsToGenerate:     g.graph.getInterfaceOwnFields(schemaType.Name),
		ImplementsInterfaces: []string{schemaType.Name},
	}

	// Get embeddable parents and fields from skipped intermediate parents
	embedInfo := g.graph.getEmbeddingInfo(schemaType.Name)

	if len(embedInfo.Parents) > 1 {
		// Embedding multiple parents can promote conflicting fields; warn but continue.
		log.Printf(
			"WARN: Base%s: implements %d interfaces %v (potential diamond problem)",
			schemaType.Name,
			len(embedInfo.Parents),
			embedInfo.Parents,
		)
	}

	for _, parent := range embedInfo.Parents {
		spec.ParentEmbeddings = append(spec.ParentEmbeddings, g.createParentBaseType(parent))
		spec.ImplementsInterfaces = append(spec.ImplementsInterfaces, parent)
	}

	// Add fields from intermediate parents without the directive
	spec.FieldsToGenerate = append(spec.FieldsToGenerate, embedInfo.SkippedFields...)

	return spec, nil
}
// createParentBaseType resolves the Base struct type for a parent interface.
// If the parent is bound to an external package and that package already
// declares the Base struct, the external type is reused; otherwise a named
// type in the local model package is synthesized.
func (g *embeddedInterfaceGenerator) createParentBaseType(interfaceName string) types.Type {
	baseName := templates.ToGo(fmt.Sprintf("%s%s", g.cfg.EmbeddedStructsPrefix, interfaceName))

	// Check if interface is bound to external package
	if g.cfg.Models.UserDefined(interfaceName) {
		if models := g.cfg.Models[interfaceName]; len(models.Model) > 0 {
			if extType, err := g.binder.FindTypeFromName(models.Model[0]); err == nil {
				if named, ok := extType.(*types.Named); ok {
					if pkg := named.Obj().Pkg(); pkg != nil {
						// Look for BaseXxx in the external package's scope.
						if obj := pkg.Scope().Lookup(baseName); obj != nil {
							if typeObj, ok := obj.(*types.TypeName); ok {
								return typeObj.Type()
							}
						}
					}
				}
			}
		}
	}

	// Default: reference local package type
	return types.NewNamed(
		types.NewTypeName(0, g.cfg.Model.Pkg(), baseName, nil),
		types.NewStruct(nil, nil),
		nil,
	)
}
// generateEmbeddedFields returns map: field name -> embedded Base struct (or nil for subsequent
// fields).
// Covariant overrides prevent embedding and require explicit field generation.
// Returns nil (no embedding at all) when no model build is in progress or the
// current schema type is not an object.
func (g *embeddedInterfaceGenerator) generateEmbeddedFields(
	fields []*ast.FieldDefinition,
) map[string]*Field {
	if g.model == nil || g.schemaType.Kind != ast.Object {
		return nil
	}

	covariantInterfaces := g.findInterfacesWithCovariantOverrides(fields)

	result := make(map[string]*Field)
	processed := make(map[string]bool)

	for _, field := range fields {
		interfaceName := g.findInterfaceForField(field)
		// Fields that belong to no embeddable interface, or whose interface
		// has a covariant override, are left for explicit generation.
		if interfaceName == "" || covariantInterfaces[interfaceName] {
			continue
		}

		if processed[interfaceName] {
			result[field.Name] = nil // subsequent field from same interface
		} else {
			// First field of interface gets the embedded base struct
			result[field.Name] = &Field{Type: g.createEmbeddedBaseType(interfaceName)}
			processed[interfaceName] = true
		}
	}

	return result
}
// findInterfacesWithCovariantOverrides returns the set of embeddable
// interfaces for which the implementing type redeclares a field with a
// different (covariant) type. Embedding the Base struct would then carry the
// wrong field type, so those interfaces must be excluded from embedding.
func (g *embeddedInterfaceGenerator) findInterfacesWithCovariantOverrides(
	fields []*ast.FieldDefinition,
) map[string]bool {
	result := make(map[string]bool)
	for _, implField := range fields {
		for _, interfaceName := range g.schemaType.Interfaces {
			if !g.graph.isEmbeddable(interfaceName) {
				continue
			}
			iface := g.cfg.Schema.Types[interfaceName]
			if iface == nil {
				continue
			}
			for _, ifaceField := range iface.Fields {
				// Only same-named fields with differing types count as overrides.
				if ifaceField.Name != implField.Name ||
					typesMatch(ifaceField.Type, implField.Type) {
					continue
				}
				if !result[interfaceName] {
					// Warn only once per interface.
					log.Printf(
						"WARN: %s.%s: covariant override %s -> %s (skipping Base%s embedding)",
						g.schemaType.Name,
						implField.Name,
						ifaceField.Type.Name(),
						implField.Type.Name(),
						interfaceName,
					)
				}
				result[interfaceName] = true
				break
			}
		}
	}
	return result
}
// findInterfaceForField returns deepest interface containing this field with matching type.
// Returns "" when the field does not come from any embeddable interface.
func (g *embeddedInterfaceGenerator) findInterfaceForField(field *ast.FieldDefinition) string {
	interfaces := g.schemaType.Interfaces
	if len(interfaces) == 0 {
		return ""
	}

	// Sort deepest-first (child interfaces before parent interfaces).
	// Depth is approximated by how many interfaces each candidate implements —
	// NOTE(review): confirm this ordering suffices for chains deeper than two levels.
	if len(interfaces) > 1 {
		// Sort a copy so the AST's interface list is left untouched.
		sorted := make([]string, len(interfaces))
		copy(sorted, interfaces)
		sort.Slice(sorted, func(i, j int) bool {
			depthI := len(g.cfg.Schema.Types[sorted[i]].Interfaces)
			depthJ := len(g.cfg.Schema.Types[sorted[j]].Interfaces)
			return depthI > depthJ
		})
		interfaces = sorted
	}

	for _, ifaceName := range interfaces {
		if iface := g.cfg.Schema.Types[ifaceName]; iface != nil &&
			(iface.Kind == ast.Interface || iface.Kind == ast.Union) {
			if !g.graph.isEmbeddable(ifaceName) {
				continue
			}
			for _, ifaceField := range iface.Fields {
				if ifaceField.Name == field.Name && typesMatch(ifaceField.Type, field.Type) {
					return ifaceName
				}
			}
		}
	}
	return ""
}
// typesMatch reports whether two GraphQL types are identical: same base type
// name, same nullability, and the same list wrapping at every level.
func typesMatch(a, b *ast.Type) bool {
	switch {
	case a.Name() != b.Name(), a.NonNull != b.NonNull:
		return false
	case a.NamedType != "" && b.NamedType != "":
		// Both are bare named types; nothing deeper to compare.
		return true
	case (a.Elem == nil) != (b.Elem == nil):
		// One side is a list and the other is not.
		return false
	case a.Elem != nil:
		// Both are lists; compare element types recursively.
		return typesMatch(a.Elem, b.Elem)
	default:
		return true
	}
}
// createEmbeddedBaseType resolves the Go type used when embedding the
// generated Base struct for the given interface. If the interface is bound to
// a user-defined model in an external package and that package already
// declares the Base type, the bound type is returned; otherwise a named
// empty-struct type in the locally generated models package is synthesized.
func (g *embeddedInterfaceGenerator) createEmbeddedBaseType(interfaceName string) types.Type {
	baseName := templates.ToGo(g.cfg.EmbeddedStructsPrefix + interfaceName)

	// Prefer a matching type from the package the interface model is bound to.
	if g.cfg.Models.UserDefined(interfaceName) {
		pkgPath, _ := code.PkgAndType(g.cfg.Models[interfaceName].Model[0])
		if pkgPath != "" {
			bound, _ := g.binder.FindTypeFromName(pkgPath + "." + baseName)
			if bound != nil {
				return bound
			}
		}
	}

	// Fall back to a fresh named type in the local models package.
	local := types.NewTypeName(0, g.cfg.Model.Pkg(), baseName, nil)
	return types.NewNamed(local, types.NewStruct(nil, nil), nil)
}

View File

@ -29,6 +29,7 @@ type CodeGenerator interface {
}
// EarlySourceInjector is used to inject things that are required for user schema files to compile.
//
// Deprecated: Use EarlySourcesInjector instead
type EarlySourceInjector interface {
InjectSourceEarly() *ast.Source
@ -40,7 +41,7 @@ type EarlySourcesInjector interface {
}
// LateSourceInjector is used to inject more sources, after we have loaded the users schema.
// Deprecated: Use LateSourcesInjector instead
//
// Deprecated: Use LateSourcesInjector instead
type LateSourceInjector interface {
InjectSourceLate(schema *ast.Schema) *ast.Source
}

View File

@ -69,7 +69,9 @@ func (m *Plugin) generateSingleFile(data *codegen.Data) error {
for _, o := range data.Objects {
if o.HasResolvers() {
caser := cases.Title(language.English, cases.NoLower)
rewriter.MarkStructCopied(templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type))
rewriter.MarkStructCopied(
templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type),
)
rewriter.GetMethodBody(data.Config.Resolver.Type, caser.String(o.Name))
file.Objects = append(file.Objects, o)
@ -81,10 +83,19 @@ func (m *Plugin) generateSingleFile(data *codegen.Data) error {
}
structName := templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type)
comment := strings.TrimSpace(strings.TrimLeft(rewriter.GetMethodComment(structName, f.GoFieldName), `\`))
comment := strings.TrimSpace(
strings.TrimLeft(rewriter.GetMethodComment(structName, f.GoFieldName), `\`),
)
implementation := strings.TrimSpace(rewriter.GetMethodBody(structName, f.GoFieldName))
if implementation != "" {
resolver := Resolver{o, f, rewriter.GetPrevDecl(structName, f.GoFieldName), comment, implementation, nil}
resolver := Resolver{
o,
f,
rewriter.GetPrevDecl(structName, f.GoFieldName),
comment,
implementation,
nil,
}
file.Resolvers = append(file.Resolvers, &resolver)
} else {
resolver := Resolver{o, f, nil, "", `panic("not implemented")`, nil}
@ -141,7 +152,11 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
for _, o := range objects {
if o.HasResolvers() {
fnCase := gqlToResolverName(data.Config.Resolver.Dir(), o.Position.Src.Name, data.Config.Resolver.FilenameTemplate)
fnCase := gqlToResolverName(
data.Config.Resolver.Dir(),
o.Position.Src.Name,
data.Config.Resolver.FilenameTemplate,
)
fn := strings.ToLower(fnCase)
if files[fn] == nil {
files[fn] = &File{
@ -150,7 +165,9 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
}
caser := cases.Title(language.English, cases.NoLower)
rewriter.MarkStructCopied(templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type))
rewriter.MarkStructCopied(
templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type),
)
rewriter.GetMethodBody(data.Config.Resolver.Type, caser.String(o.Name))
files[fn].Objects = append(files[fn].Objects, o)
}
@ -160,9 +177,18 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
}
structName := templates.LcFirst(o.Name) + templates.UcFirst(data.Config.Resolver.Type)
// TODO(steve): Why do we need to trimLeft "\" here? Some bazel thing?
comment := strings.TrimSpace(strings.TrimLeft(rewriter.GetMethodComment(structName, f.GoFieldName), `\`))
comment := strings.TrimSpace(
strings.TrimLeft(rewriter.GetMethodComment(structName, f.GoFieldName), `\`),
)
implementation := strings.TrimSpace(rewriter.GetMethodBody(structName, f.GoFieldName))
resolver := Resolver{o, f, rewriter.GetPrevDecl(structName, f.GoFieldName), comment, implementation, nil}
resolver := Resolver{
o,
f,
rewriter.GetPrevDecl(structName, f.GoFieldName),
comment,
implementation,
nil,
}
var implExists bool
for _, p := range data.Plugins {
rImpl, ok := p.(plugin.ResolverImplementer)
@ -175,7 +201,11 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
implExists = true
resolver.ImplementationRender = rImpl.Implement
}
fnCase := gqlToResolverName(data.Config.Resolver.Dir(), f.Position.Src.Name, data.Config.Resolver.FilenameTemplate)
fnCase := gqlToResolverName(
data.Config.Resolver.Dir(),
f.Position.Src.Name,
data.Config.Resolver.FilenameTemplate,
)
fn := strings.ToLower(fnCase)
if files[fn] == nil {
files[fn] = &File{
@ -196,7 +226,8 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
allImports = append(allImports, i.ImportPath)
}
}
data.Config.Packages.LoadAllNames(allImports...) // Preload all names in one Load call for performance reasons
data.Config.Packages.LoadAllNames(
allImports...) // Preload all names in one Load call for performance reasons
newResolverTemplate := resolverTemplate
if data.Config.Resolver.ResolverTemplate != "" {
@ -219,7 +250,8 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
var fileNotice strings.Builder
if !data.Config.OmitGQLGenFileNotice {
fileNotice.WriteString(`
// This file will be automatically regenerated based on the schema, any resolver implementations
// This file will be automatically regenerated based on the schema, any resolver
// implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen`,
)
@ -248,7 +280,8 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
FileNotice: `
// This file will not be regenerated automatically.
//
// It serves as dependency injection for your app, add any dependencies you require here.`,
// It serves as dependency injection for your app, add any dependencies you require
// here.`,
Template: `type {{.}} struct {}`,
Filename: data.Config.Resolver.Filename,
Data: data.Config.Resolver.Type,
@ -271,8 +304,9 @@ type ResolverBuild struct {
type File struct {
name string
// These are separated because the type definition of the resolver object may live in a different file from the
// resolver method implementations, for example when extending a type in a different graphql schema file
// These are separated because the type definition of the resolver object may live in a
// different file from the resolver method implementations, for example when extending a type in
// a different graphql schema file
Objects []*codegen.Object
Resolvers []*Resolver
imports []rewrite.Import
@ -307,7 +341,11 @@ func (r *Resolver) Implementation() string {
// if not implementation was previously used, use default implementation
if r.ImplementationStr == "" {
// use default implementation, if no implementation was previously used
return fmt.Sprintf("panic(fmt.Errorf(\"not implemented: %v - %v\"))", r.Field.GoFieldName, r.Field.Name)
return fmt.Sprintf(
"panic(fmt.Errorf(\"not implemented: %v - %v\"))",
r.Field.GoFieldName,
r.Field.Name,
)
}
// use previously used implementation
return r.ImplementationStr

View File

@ -113,7 +113,7 @@ CertMagic - Automatic HTTPS using Let's Encrypt
- Cross-platform support! Mac, Windows, Linux, BSD, Android...
- Scales to hundreds of thousands of names/certificates per instance
- Use in conjunction with your own certificates
- Full support for [draft-ietf-acme-ari](https://datatracker.ietf.org/doc/draft-ietf-acme-ari/) (ACME Renewal Information; ARI) extension
- Full support for [RFC 9773](https://datatracker.ietf.org/doc/html/rfc9773) (ACME Renewal Information; ARI) extension
## Requirements

View File

@ -36,9 +36,27 @@ import (
"go.uber.org/zap"
)
// getAccount either loads or creates a new account, depending on if
// getAccountToUse either loads or creates the ACME account the issuer should
// operate with. A configured account key PEM, when present, pins the account
// to the one matching that key; otherwise the account for the configured email
// is loaded from storage, or created if none exists yet.
func (iss *ACMEIssuer) getAccountToUse(ctx context.Context, directory string) (acme.Account, error) {
	var (
		account acme.Account
		err     error
	)
	if iss.AccountKeyPEM == "" {
		account, err = iss.loadOrCreateAccount(ctx, directory, iss.getEmail())
	} else {
		iss.Logger.Info("using configured ACME account")
		account, err = iss.GetAccount(ctx, []byte(iss.AccountKeyPEM))
	}
	if err != nil {
		return acme.Account{}, fmt.Errorf("getting ACME account: %v", err)
	}
	return account, nil
}
// loadOrCreateAccount either loads or creates a new account, depending on if
// an account can be found in storage for the given CA + email combo.
func (am *ACMEIssuer) getAccount(ctx context.Context, ca, email string) (acme.Account, error) {
func (am *ACMEIssuer) loadOrCreateAccount(ctx context.Context, ca, email string) (acme.Account, error) {
acct, err := am.loadAccount(ctx, ca, email)
if errors.Is(err, fs.ErrNotExist) {
am.Logger.Info("creating new account because no account for configured email is known to us",
@ -407,7 +425,7 @@ func (am *ACMEIssuer) mostRecentAccountEmail(ctx context.Context, caURL string)
return "", false
}
account, err := am.getAccount(ctx, caURL, path.Base(accountList[0]))
account, err := am.loadOrCreateAccount(ctx, caURL, path.Base(accountList[0]))
if err != nil {
return "", false
}

View File

@ -55,23 +55,7 @@ func (iss *ACMEIssuer) newACMEClientWithAccount(ctx context.Context, useTestCA,
// we try loading the account from storage before a potential
// lock, and after obtaining the lock as well, to ensure we don't
// repeat work done by another instance or goroutine
getAccount := func() (acme.Account, error) {
// look up or create the ACME account
var account acme.Account
if iss.AccountKeyPEM != "" {
iss.Logger.Info("using configured ACME account")
account, err = iss.GetAccount(ctx, []byte(iss.AccountKeyPEM))
} else {
account, err = iss.getAccount(ctx, client.Directory, iss.getEmail())
}
if err != nil {
return acme.Account{}, fmt.Errorf("getting ACME account: %v", err)
}
return account, nil
}
// first try getting the account
account, err := getAccount()
account, err := iss.getAccountToUse(ctx, client.Directory)
if err != nil {
return nil, err
}
@ -95,7 +79,7 @@ func (iss *ACMEIssuer) newACMEClientWithAccount(ctx context.Context, useTestCA,
}()
// if we're not the only one waiting for this account, then by this point it should already be registered and in storage; reload it
account, err = getAccount()
account, err = iss.getAccountToUse(ctx, client.Directory)
if err != nil {
return nil, err
}
@ -207,26 +191,34 @@ func (iss *ACMEIssuer) newACMEClient(useTestCA bool) (*acmez.Client, error) {
if iss.DNS01Solver == nil {
// enable HTTP-01 challenge
if !iss.DisableHTTPChallenge {
client.ChallengeSolvers[acme.ChallengeTypeHTTP01] = distributedSolver{
storage: iss.config.Storage,
storageKeyIssuerPrefix: iss.storageKeyCAPrefix(client.Directory),
solver: &httpSolver{
handler: iss.HTTPChallengeHandler(http.NewServeMux()),
address: net.JoinHostPort(iss.ListenHost, strconv.Itoa(iss.getHTTPPort())),
},
var solver acmez.Solver = &httpSolver{
handler: iss.HTTPChallengeHandler(http.NewServeMux()),
address: net.JoinHostPort(iss.ListenHost, strconv.Itoa(iss.getHTTPPort())),
}
if !iss.DisableDistributedSolvers {
solver = distributedSolver{
storage: iss.config.Storage,
storageKeyIssuerPrefix: iss.storageKeyCAPrefix(client.Directory),
solver: solver,
}
}
client.ChallengeSolvers[acme.ChallengeTypeHTTP01] = solver
}
// enable TLS-ALPN-01 challenge
if !iss.DisableTLSALPNChallenge {
client.ChallengeSolvers[acme.ChallengeTypeTLSALPN01] = distributedSolver{
storage: iss.config.Storage,
storageKeyIssuerPrefix: iss.storageKeyCAPrefix(client.Directory),
solver: &tlsALPNSolver{
config: iss.config,
address: net.JoinHostPort(iss.ListenHost, strconv.Itoa(iss.getTLSALPNPort())),
},
var solver acmez.Solver = &tlsALPNSolver{
config: iss.config,
address: net.JoinHostPort(iss.ListenHost, strconv.Itoa(iss.getTLSALPNPort())),
}
if !iss.DisableDistributedSolvers {
solver = distributedSolver{
storage: iss.config.Storage,
storageKeyIssuerPrefix: iss.storageKeyCAPrefix(client.Directory),
solver: solver,
}
}
client.ChallengeSolvers[acme.ChallengeTypeTLSALPN01] = solver
}
} else {
// use DNS challenge exclusively

View File

@ -93,6 +93,15 @@ type ACMEIssuer struct {
// Disable all TLS-ALPN challenges
DisableTLSALPNChallenge bool
// Disable distributed solving; avoids writing
// challenge info to storage backend and will
// only use data in memory to solve the HTTP and
// TLS-ALPN challenges; will still attempt to
// solve distributed HTTP challenges blindly by
// using available account and challenge token
// as read from request URI
DisableDistributedSolvers bool
// The host (ONLY the host, not port) to listen
// on if necessary to start a listener to solve
// an ACME challenge
@ -340,7 +349,7 @@ func (iss *ACMEIssuer) isAgreed() bool {
// IP certificates via ACME are defined in RFC 8738.
func (am *ACMEIssuer) PreCheck(ctx context.Context, names []string, interactive bool) error {
publicCAsAndIPCerts := map[string]bool{ // map of public CAs to whether they support IP certificates (last updated: Q1 2024)
"api.letsencrypt.org": false, // https://community.letsencrypt.org/t/certificate-for-static-ip/84/2?u=mholt
"api.letsencrypt.org": true, // https://letsencrypt.org/2025/07/01/issuing-our-first-ip-address-certificate/
"acme.zerossl.com": false, // only supported via their API, not ACME endpoint
"api.pki.goog": true, // https://pki.goog/faq/#faq-IPCerts
"api.buypass.com": false, // https://community.buypass.com/t/h7hm76w/buypass-support-for-rfc-8738

View File

@ -247,7 +247,7 @@ func (certCache *Cache) unsyncedCacheCertificate(cert Certificate) {
rnd := weakrand.Intn(cacheSize)
i := 0
for _, randomCert := range certCache.cache {
if i == rnd {
if i >= rnd && randomCert.managed { // don't evict manually-loaded certs
certCache.logger.Debug("cache full; evicting random certificate",
zap.Strings("removing_subjects", randomCert.Names),
zap.String("removing_hash", randomCert.hash),
@ -365,6 +365,10 @@ func (certCache *Cache) getConfig(cert Certificate) (*Config, error) {
if err != nil {
return nil, err
}
if cfg == nil {
// this is bad if this happens, probably a programmer error (oops)
return nil, fmt.Errorf("no configuration associated with certificate: %v;", cert.Names)
}
if cfg.certCache == nil {
return nil, fmt.Errorf("config returned for certificate %v has nil cache; expected %p (this one)",
cert.Names, certCache)

View File

@ -19,6 +19,7 @@ import (
"crypto/tls"
"crypto/x509"
"encoding/json"
"errors"
"fmt"
"math/rand"
"net"
@ -346,7 +347,11 @@ func (cfg *Config) CacheUnmanagedTLSCertificate(ctx context.Context, tlsCert tls
}
err = stapleOCSP(ctx, cfg.OCSP, cfg.Storage, &cert, nil)
if err != nil {
cfg.Logger.Warn("stapling OCSP", zap.Error(err))
if errors.Is(err, ErrNoOCSPServerSpecified) {
cfg.Logger.Debug("stapling OCSP", zap.Error(err))
} else {
cfg.Logger.Warn("stapling OCSP", zap.Error(err))
}
}
cfg.emit(ctx, "cached_unmanaged_cert", map[string]any{"sans": cert.Names})
cert.Tags = tags
@ -370,6 +375,37 @@ func (cfg *Config) CacheUnmanagedCertificatePEMBytes(ctx context.Context, certBy
return cert.hash, nil
}
// CacheUnmanagedCertificatePEMBytesAsReplacement is the same as CacheUnmanagedCertificatePEMBytes,
// but it also removes any other loaded certificates for the SANs on the certificate being cached.
// This has the effect of using this certificate exclusively and immediately for its SANs. The SANs
// for which the certificate should apply may optionally be passed in as well. By default, a cert
// is used for any of its SANs.
//
// This method is safe for concurrent use.
//
// EXPERIMENTAL: Subject to change/removal.
func (cfg *Config) CacheUnmanagedCertificatePEMBytesAsReplacement(ctx context.Context, certBytes, keyBytes []byte, tags, sans []string) (string, error) {
cert, err := cfg.makeCertificateWithOCSP(ctx, certBytes, keyBytes)
if err != nil {
return "", err
}
cert.Tags = tags
if len(sans) > 0 {
cert.Names = sans
}
cfg.certCache.mu.Lock()
for _, san := range cert.Names {
existingCerts := cfg.certCache.getAllMatchingCerts(san)
for _, existingCert := range existingCerts {
cfg.certCache.removeCertificate(existingCert)
}
}
cfg.certCache.unsyncedCacheCertificate(cert)
cfg.certCache.mu.Unlock()
cfg.emit(ctx, "cached_unmanaged_cert", map[string]any{"sans": cert.Names, "replacement": true})
return cert.hash, nil
}
// makeCertificateFromDiskWithOCSP makes a Certificate by loading the
// certificate and key files. It fills out all the fields in
// the certificate except for the Managed and OnDemand flags.
@ -394,7 +430,9 @@ func (cfg Config) makeCertificateWithOCSP(ctx context.Context, certPEMBlock, key
return cert, err
}
err = stapleOCSP(ctx, cfg.OCSP, cfg.Storage, &cert, certPEMBlock)
if err != nil {
if errors.Is(err, ErrNoOCSPServerSpecified) {
cfg.Logger.Debug("stapling OCSP", zap.Error(err), zap.Strings("identifiers", cert.Names))
} else {
cfg.Logger.Warn("stapling OCSP", zap.Error(err), zap.Strings("identifiers", cert.Names))
}
return cert, nil

View File

@ -490,6 +490,33 @@ func (cfg *Config) manageOne(ctx context.Context, domainName string, async bool)
return renew()
}
// renewLockLease extends the lease on an existing lock when the storage
// backend implements LockLeaseRenewer; it is a no-op otherwise. The lease
// length is the retry interval for the given attempt (or maxRetryDuration when
// the attempt index is out of range) plus the certificate obtain timeout, so
// the lock cannot expire mid-operation. On success, the storage is recorded as
// the holder of this lock.
func (cfg *Config) renewLockLease(ctx context.Context, storage Storage, lockKey string, attempt int) error {
	renewer, supported := storage.(LockLeaseRenewer)
	if !supported {
		return nil
	}

	lease := maxRetryDuration
	if attempt >= 0 && attempt < len(retryIntervals) {
		lease = retryIntervals[attempt]
	}
	lease += DefaultACME.CertObtainTimeout

	logger := cfg.Logger.Named("renewLockLease")
	logger.Debug("renewing lock lease", zap.String("lockKey", lockKey), zap.Int("attempt", attempt))

	if err := renewer.RenewLockLease(ctx, lockKey, lease); err != nil {
		return err
	}
	locksMu.Lock()
	locks[lockKey] = storage
	locksMu.Unlock()
	return nil
}
// ObtainCertSync generates a new private key and obtains a certificate for
// name using cfg in the foreground; i.e. interactively and without retries.
// It stows the renewed certificate and its assets in storage if successful.
@ -546,6 +573,15 @@ func (cfg *Config) obtainCert(ctx context.Context, name string, interactive bool
log.Info("lock acquired", zap.String("identifier", name))
f := func(ctx context.Context) error {
// renew lease on the lock if the certificate store supports it
attempt, ok := ctx.Value(AttemptsCtxKey).(*int)
if ok {
err = cfg.renewLockLease(ctx, cfg.Storage, lockKey, *attempt)
if err != nil {
return fmt.Errorf("unable to renew lock lease '%s': %v", lockKey, err)
}
}
// check if obtain is still needed -- might have been obtained during lock
if cfg.storageHasCertResourcesAnyIssuer(ctx, name) {
log.Info("certificate already exists in storage", zap.String("identifier", name))
@ -805,6 +841,16 @@ func (cfg *Config) renewCert(ctx context.Context, name string, force, interactiv
log.Info("lock acquired", zap.String("identifier", name))
f := func(ctx context.Context) error {
// renew lease on the certificate store lock if the store implementation supports it;
// prevents the lock from being acquired by another process/instance while we're renewing
attempt, ok := ctx.Value(AttemptsCtxKey).(*int)
if ok {
err = cfg.renewLockLease(ctx, cfg.Storage, lockKey, *attempt)
if err != nil {
return fmt.Errorf("unable to renew lock lease '%s': %v", lockKey, err)
}
}
// prepare for renewal (load PEM cert, key, and meta)
certRes, err := cfg.loadCertResourceAnyIssuer(ctx, name)
if err != nil {
@ -1112,20 +1158,29 @@ func (cfg *Config) TLSConfig() *tls.Config {
}
}
// getChallengeInfo loads the challenge info from either the internal challenge memory
// getACMEChallengeInfo loads the challenge info from either the internal challenge memory
// or the external storage (implying distributed solving). The second return value
// indicates whether challenge info was loaded from external storage. If true, the
// challenge is being solved in a distributed fashion; if false, from internal memory.
// If no matching challenge information can be found, an error is returned.
func (cfg *Config) getChallengeInfo(ctx context.Context, identifier string) (Challenge, bool, error) {
func (cfg *Config) getACMEChallengeInfo(ctx context.Context, identifier string, allowDistributed bool) (Challenge, bool, error) {
// first, check if our process initiated this challenge; if so, just return it
chalData, ok := GetACMEChallenge(identifier)
if ok {
return chalData, false, nil
}
// if distributed solving is disabled, and we don't have it in memory, return an error
if !allowDistributed {
return Challenge{}, false, fmt.Errorf("distributed solving disabled and no challenge information found internally for identifier: %s", identifier)
}
// otherwise, perhaps another instance in the cluster initiated it; check
// the configured storage to retrieve challenge data
// the configured storage to retrieve challenge data (requires storage)
if cfg.Storage == nil {
return Challenge{}, false, errors.New("challenge was not initiated internally and no storage is configured for distributed solving")
}
var chalInfo acme.Challenge
var chalInfoBytes []byte

View File

@ -21,7 +21,6 @@ import (
"fmt"
"io"
"io/fs"
"log"
"os"
"path"
"path/filepath"
@ -213,7 +212,7 @@ func (s *FileStorage) Lock(ctx context.Context, name string) error {
// the previous acquirer either crashed or had some sort of failure that
// caused them to be unable to fully acquire or retain the lock, therefore
// we should treat it as if the lockfile did not exist
log.Printf("[INFO][%s] %s: Empty lockfile (%v) - likely previous process crashed or storage medium failure; treating as stale", s, filename, err2)
defaultLogger.Sugar().Infof("[%s] %s: Empty lockfile (%v) - likely previous process crashed or storage medium failure; treating as stale", s, filename, err2)
}
} else if err2 != nil {
return fmt.Errorf("decoding lockfile contents: %w", err2)
@ -235,8 +234,7 @@ func (s *FileStorage) Lock(ctx context.Context, name string) error {
// either have potential to cause infinite loops, as in caddyserver/caddy#4448,
// or must give up on perfect mutual exclusivity; however, these cases are rare,
// so we prefer the simpler solution that avoids infinite loops)
log.Printf("[INFO][%s] Lock for '%s' is stale (created: %s, last update: %s); removing then retrying: %s",
s, name, meta.Created, meta.Updated, filename)
defaultLogger.Sugar().Infof("[%s] Lock for '%s' is stale (created: %s, last update: %s); removing then retrying: %s", s, name, meta.Created, meta.Updated, filename)
if err = os.Remove(filename); err != nil { // hopefully we can replace the lock file quickly!
if !errors.Is(err, fs.ErrNotExist) {
return fmt.Errorf("unable to delete stale lockfile; deadlocked: %w", err)
@ -311,7 +309,7 @@ func keepLockfileFresh(filename string) {
if err := recover(); err != nil {
buf := make([]byte, stackTraceBufferSize)
buf = buf[:runtime.Stack(buf, false)]
log.Printf("panic: active locking: %v\n%s", err, buf)
defaultLogger.Sugar().Errorf("active locking: %v\n%s", err, buf)
}
}()
@ -319,7 +317,7 @@ func keepLockfileFresh(filename string) {
time.Sleep(lockFreshnessInterval)
done, err := updateLockfileFreshness(filename)
if err != nil {
log.Printf("[ERROR] Keeping lock file fresh: %v - terminating lock maintenance (lockfile: %s)", err, filename)
defaultLogger.Sugar().Errorf("Keeping lock file fresh: %v - terminating lock maintenance (lockfile: %s)", err, filename)
return
}
if done {

View File

@ -599,7 +599,11 @@ func (cfg *Config) handshakeMaintenance(ctx context.Context, hello *tls.ClientHe
if err != nil {
// An error with OCSP stapling is not the end of the world, and in fact, is
// quite common considering not all certs have issuer URLs that support it.
logger.Warn("stapling OCSP", zap.Error(err))
if errors.Is(err, ErrNoOCSPServerSpecified) {
logger.Debug("stapling OCSP", zap.Error(err))
} else {
logger.Warn("stapling OCSP", zap.Error(err))
}
} else {
logger.Debug("successfully stapled new OCSP response",
zap.Int("ocsp_status", cert.ocsp.Status),
@ -849,7 +853,7 @@ func (cfg *Config) getCertFromAnyCertManager(ctx context.Context, hello *tls.Cli
// solving). True is returned if the challenge is being solved distributed (there
// is no semantic difference with distributed solving; it is mainly for logging).
func (cfg *Config) getTLSALPNChallengeCert(clientHello *tls.ClientHelloInfo) (*tls.Certificate, bool, error) {
chalData, distributed, err := cfg.getChallengeInfo(clientHello.Context(), clientHello.ServerName)
chalData, distributed, err := cfg.getACMEChallengeInfo(clientHello.Context(), clientHello.ServerName, true)
if err != nil {
return nil, distributed, err
}

View File

@ -15,6 +15,7 @@
package certmagic
import (
"fmt"
"net/http"
"net/url"
"strings"
@ -72,18 +73,104 @@ func (am *ACMEIssuer) distributedHTTPChallengeSolver(w http.ResponseWriter, r *h
return false
}
host := hostOnly(r.Host)
chalInfo, distributed, err := am.config.getChallengeInfo(r.Context(), host)
chalInfo, distributed, err := am.config.getACMEChallengeInfo(r.Context(), host, !am.DisableDistributedSolvers)
if err != nil {
if am.DisableDistributedSolvers {
// Distributed solvers are disabled, so the only way an error can be returned is if
// this instance didn't initiate the challenge (or if the process exited after, but
// either way, we don't have the challenge info). Assuming this is a legitimate
// challenge request, we may still be able to solve it if we can present the correct
// account thumbprint with the token, since the token is given to us in the URL path.
//
// NOTE: About doing this, RFC 8555 section 8.3 says:
//
// Note that because the token appears both in the request sent by the
// ACME server and in the key authorization in the response, it is
// possible to build clients that copy the token from request to
// response. Clients should avoid this behavior because it can lead to
// cross-site scripting vulnerabilities; instead, clients should be
// explicitly configured on a per-challenge basis. A client that does
// copy tokens from requests to responses MUST validate that the token
// in the request matches the token syntax above (e.g., that it includes
// only characters from the base64url alphabet).
//
// Also, since we're just blindly solving a challenge, we're unable to mitigate DNS
// rebinding attacks, because we don't know what host to expect in the URL. So this
// is not ideal, but we do at least validate the copied token is in the base64url set.
if strings.HasPrefix(r.URL.Path, acmeHTTPChallengeBasePath) &&
strings.Count(r.URL.Path, "/") == 3 &&
r.Method == http.MethodGet {
tokenStart := strings.LastIndex(r.URL.Path, "/") + 1
token := r.URL.Path[tokenStart:]
if allBase64URL(token) {
if err := am.solveHTTPChallengeBlindly(w, r); err != nil {
am.Logger.Error("solving http-01 challenge blindly",
zap.String("identifier", host),
zap.Error(err))
}
return true
}
}
}
// couldn't get challenge info even with distributed solver
am.Logger.Warn("looking up info for HTTP challenge",
zap.String("host", host),
zap.String("uri", r.RequestURI),
zap.String("identifier", host),
zap.String("remote_addr", r.RemoteAddr),
zap.String("user_agent", r.Header.Get("User-Agent")),
zap.Error(err))
return false
}
return solveHTTPChallenge(am.Logger, w, r, chalInfo.Challenge, distributed)
}
// solveHTTPChallengeBlindly attempts an http-01 challenge response without any
// stored challenge information. Because the requesting ACME CA cannot be known
// for certain, the issuer's configured account is loaded (assuming the
// configured CA) and its thumbprint is used for the key authorization. It is a
// no-op when the last path component contains characters outside the base64url
// alphabet; otherwise the response body is "<token>.<thumbprint>".
func (am *ACMEIssuer) solveHTTPChallengeBlindly(w http.ResponseWriter, r *http.Request) error {
	token := r.URL.Path[strings.LastIndex(r.URL.Path, "/")+1:]
	if !allBase64URL(token) {
		return nil
	}

	// assume production CA, I guess
	acct, err := am.getAccountToUse(r.Context(), am.CA)
	if err != nil {
		return fmt.Errorf("getting an account to use: %v", err)
	}
	thumbprint, err := acct.Thumbprint()
	if err != nil {
		return fmt.Errorf("could not encode account thumbprint: %v", err)
	}

	w.Header().Add("Content-Type", "text/plain")
	_, _ = w.Write([]byte(token + "." + thumbprint))
	r.Close = true

	am.Logger.Info("served key authentication",
		zap.String("identifier", hostOnly(r.Host)),
		zap.String("challenge", "http-01"),
		zap.String("remote", r.RemoteAddr),
		zap.Bool("distributed", false),
		zap.Bool("blind", true),
		zap.String("ca", am.CA))
	return nil
}
// allBase64URL reports whether every character of s belongs to the base64url
// alphabet (A-Z, a-z, 0-9, '-', '_'). The empty string vacuously qualifies.
func allBase64URL(s string) bool {
	for _, r := range s {
		switch {
		case r >= 'A' && r <= 'Z':
		case r >= 'a' && r <= 'z':
		case r >= '0' && r <= '9':
		case r == '-', r == '_':
		default:
			return false
		}
	}
	return true
}
// solveHTTPChallenge solves the HTTP challenge using the given challenge information.
// If the challenge is being solved in a distributed fahsion, set distributed to true for logging purposes.
// It returns true the properties of the request check out in relation to the HTTP challenge.

View File

@ -111,7 +111,7 @@ func (certCache *Cache) RenewManagedCertificates(ctx context.Context) error {
}
// the list of names on this cert should never be empty... programmer error?
if cert.Names == nil || len(cert.Names) == 0 {
if len(cert.Names) == 0 {
log.Warn("certificate has no names; removing from cache", zap.String("cert_key", certKey))
deleteQueue = append(deleteQueue, cert)
continue
@ -125,12 +125,6 @@ func (certCache *Cache) RenewManagedCertificates(ctx context.Context) error {
zap.Error(err))
continue
}
if cfg == nil {
// this is bad if this happens, probably a programmer error (oops)
log.Error("no configuration associated with certificate; unable to manage",
zap.Strings("identifiers", cert.Names))
continue
}
if cfg.OnDemand != nil {
continue
}

View File

@ -149,6 +149,16 @@ type Locker interface {
Unlock(ctx context.Context, name string) error
}
// LockLeaseRenewer is an optional interface that can be implemented by a Storage
// implementation to support renewing the lease on a lock. This is useful for
// long-running operations that need to be synchronized across a cluster.
type LockLeaseRenewer interface {
// RenewLockLease renews the lease on the lock for the given lockKey for the
// given leaseDuration. This is used to prevent the lock from being acquired
// by another process.
RenewLockLease(ctx context.Context, lockKey string, leaseDuration time.Duration) error
}
// KeyInfo holds information about a key in storage.
// Key and IsTerminal are required; Modified and Size
// are optional if the storage implementation is not

View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) 2014 Brian Goff
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,62 +0,0 @@
package md2man
import (
"fmt"
"io"
"os"
"strings"
"github.com/russross/blackfriday/v2"
)
// fmtListFlags renders a blackfriday.ListType bitmask as a "|"-separated
// list of flag names; any bits that do not match a known flag are
// reported as Unknown(0x...).
func fmtListFlags(flags blackfriday.ListType) string {
	type flagName struct {
		flag blackfriday.ListType
		name string
	}
	known := []flagName{
		{blackfriday.ListTypeOrdered, "ListTypeOrdered"},
		{blackfriday.ListTypeDefinition, "ListTypeDefinition"},
		{blackfriday.ListTypeTerm, "ListTypeTerm"},
		{blackfriday.ListItemContainsBlock, "ListItemContainsBlock"},
		{blackfriday.ListItemBeginningOfList, "ListItemBeginningOfList"},
		{blackfriday.ListItemEndOfList, "ListItemEndOfList"},
	}
	var parts []string
	remaining := flags
	for _, k := range known {
		if remaining&k.flag == 0 {
			continue
		}
		parts = append(parts, k.name)
		remaining &^= k.flag
	}
	if remaining != 0 {
		parts = append(parts, fmt.Sprintf("Unknown(%#x)", remaining))
	}
	return strings.Join(parts, "|")
}
// debugDecorator wraps a blackfriday.Renderer and, through its RenderNode
// override, traces every node visit (and the output it produced) to stderr.
type debugDecorator struct {
blackfriday.Renderer
}
// depth reports the nesting level of node within the document tree,
// i.e. the number of ancestors it has (0 for the root).
func depth(node *blackfriday.Node) int {
	level := 0
	for ancestor := node.Parent; ancestor != nil; ancestor = ancestor.Parent {
		level++
	}
	return level
}
// RenderNode logs the node being visited to stderr — indented by tree
// depth, prefixed with "+" on enter and "-" on exit, with its list flags —
// then delegates to the wrapped Renderer. Whatever the wrapped renderer
// wrote for this node is echoed to stderr as well.
func (d *debugDecorator) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
	marker := "-"
	if entering {
		marker = "+"
	}
	fmt.Fprintf(os.Stderr, "%s%s %v %v\n",
		strings.Repeat(" ", depth(node)),
		marker,
		node,
		fmtListFlags(node.ListFlags))
	var captured strings.Builder
	status := d.Renderer.RenderNode(io.MultiWriter(&captured, w), node, entering)
	if captured.Len() > 0 {
		fmt.Fprintf(os.Stderr, ">> %q\n", captured.String())
	}
	return status
}

View File

@ -1,24 +0,0 @@
// Package md2man aims in converting markdown into roff (man pages).
package md2man
import (
"os"
"strconv"
"github.com/russross/blackfriday/v2"
)
// Render converts a markdown document into a roff formatted document.
// Setting the MD2MAN_DEBUG environment variable to a truthy value wraps
// the renderer in a decorator that traces rendering to stderr.
func Render(doc []byte) []byte {
	roff := NewRoffRenderer()
	var renderer blackfriday.Renderer = roff
	if debug, _ := strconv.ParseBool(os.Getenv("MD2MAN_DEBUG")); debug {
		renderer = &debugDecorator{Renderer: renderer}
	}
	opts := []blackfriday.Option{
		blackfriday.WithRenderer(renderer),
		blackfriday.WithExtensions(roff.GetExtensions()),
	}
	return blackfriday.Run(doc, opts...)
}

View File

@ -1,416 +0,0 @@
package md2man
import (
"bufio"
"bytes"
"fmt"
"io"
"os"
"strings"
"github.com/russross/blackfriday/v2"
)
// roffRenderer implements the blackfriday.Renderer interface for creating
// roff format (manpages) from markdown text
type roffRenderer struct {
// listCounters holds the next item number for each open ordered list;
// one entry is pushed/popped per nested ordered-list level.
listCounters []int
// firstHeader records whether the first level-1 heading (the .TH title
// line) has already been emitted.
firstHeader bool
// listDepth tracks how many lists are currently open.
listDepth int
}
// roff/tbl control sequences emitted by the renderer, one per markdown
// construct.
const (
titleHeader = ".TH "
topLevelHeader = "\n\n.SH "
secondLevelHdr = "\n.SH "
otherHeader = "\n.SS "
crTag = "\n"
emphTag = "\\fI"
emphCloseTag = "\\fP"
strongTag = "\\fB"
strongCloseTag = "\\fP"
breakTag = "\n.br\n"
paraTag = "\n.PP\n"
hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n"
linkTag = "\n\\[la]"
linkCloseTag = "\\[ra]"
codespanTag = "\\fB"
codespanCloseTag = "\\fR"
codeTag = "\n.EX\n"
codeCloseTag = ".EE\n" // Do not prepend a newline character since code blocks, by definition, already include a newline (or at least as blackfriday gives them to us).
quoteTag = "\n.PP\n.RS\n"
quoteCloseTag = "\n.RE\n"
listTag = "\n.RS\n"
listCloseTag = ".RE\n"
dtTag = "\n.TP\n"
dd2Tag = "\n"
tableStart = "\n.TS\nallbox;\n"
tableEnd = ".TE\n"
tableCellStart = "T{\n"
tableCellEnd = "\nT}"
tablePreprocessor = `'\" t`
)
// NewRoffRenderer creates a new blackfriday Renderer for generating roff
// documents from markdown.
func NewRoffRenderer() *roffRenderer {
	return new(roffRenderer)
}
// GetExtensions returns the set of blackfriday extensions this renderer
// relies on when parsing markdown.
func (*roffRenderer) GetExtensions() blackfriday.Extensions {
	extensions := blackfriday.NoIntraEmphasis
	extensions |= blackfriday.Tables
	extensions |= blackfriday.FencedCode
	extensions |= blackfriday.SpaceHeadings
	extensions |= blackfriday.Footnotes
	extensions |= blackfriday.Titleblock
	extensions |= blackfriday.DefinitionLists
	return extensions
}
// RenderHeader handles outputting the header at document start. If the
// document contains any table, the roff "t" preprocessor line is emitted
// first so the page is piped through tbl; hyphenation is always disabled.
func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) {
	// Walk the tree to check whether there are any tables.
	hasTable := false
	ast.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
		if node.Type != blackfriday.Table {
			return blackfriday.GoToNext
		}
		hasTable = true
		return blackfriday.Terminate
	})
	if hasTable {
		out(w, tablePreprocessor+"\n")
	}
	// disable hyphenation
	out(w, ".nh\n")
}
// RenderFooter handles outputting the footer at the document end; the roff
// renderer has no footer information, so this is deliberately a no-op.
func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) {
}
// RenderNode is called for each node in a markdown document; based on the node
// type the equivalent roff output is sent to the writer
func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus {
// walkAction tells blackfriday how to continue; cases that render a whole
// subtree themselves switch it to SkipChildren.
walkAction := blackfriday.GoToNext
switch node.Type {
case blackfriday.Text:
// Special case: format the NAME section as required for proper whatis parsing.
// Refer to the lexgrog(1) and groff_man(7) manual pages for details.
if node.Parent != nil &&
node.Parent.Type == blackfriday.Paragraph &&
node.Parent.Prev != nil &&
node.Parent.Prev.Type == blackfriday.Heading &&
node.Parent.Prev.FirstChild != nil &&
bytes.EqualFold(node.Parent.Prev.FirstChild.Literal, []byte("NAME")) {
before, after, found := bytesCut(node.Literal, []byte(" - "))
escapeSpecialChars(w, before)
if found {
// emit the name/description separator as an escaped hyphen
out(w, ` \- `)
escapeSpecialChars(w, after)
}
} else {
escapeSpecialChars(w, node.Literal)
}
case blackfriday.Softbreak:
out(w, crTag)
case blackfriday.Hardbreak:
out(w, breakTag)
case blackfriday.Emph:
if entering {
out(w, emphTag)
} else {
out(w, emphCloseTag)
}
case blackfriday.Strong:
if entering {
out(w, strongTag)
} else {
out(w, strongCloseTag)
}
case blackfriday.Link:
// Don't render the link text for automatic links, because this
// will only duplicate the URL in the roff output.
// See https://daringfireball.net/projects/markdown/syntax#autolink
if !bytes.Equal(node.LinkData.Destination, node.FirstChild.Literal) {
out(w, string(node.FirstChild.Literal))
}
// Hyphens in a link must be escaped to avoid word-wrap in the rendered man page.
escapedLink := strings.ReplaceAll(string(node.LinkData.Destination), "-", "\\-")
out(w, linkTag+escapedLink+linkCloseTag)
walkAction = blackfriday.SkipChildren
case blackfriday.Image:
// ignore images
walkAction = blackfriday.SkipChildren
case blackfriday.Code:
out(w, codespanTag)
escapeSpecialChars(w, node.Literal)
out(w, codespanCloseTag)
case blackfriday.Document:
break
case blackfriday.Paragraph:
if entering {
if r.listDepth > 0 {
// roff .PP markers break lists
if node.Prev != nil { // continued paragraph
if node.Prev.Type == blackfriday.List && node.Prev.ListFlags&blackfriday.ListTypeDefinition == 0 {
out(w, ".IP\n")
} else {
out(w, crTag)
}
}
} else if node.Prev != nil && node.Prev.Type == blackfriday.Heading {
out(w, crTag)
} else {
out(w, paraTag)
}
} else {
// suppress the trailing newline when a list follows immediately,
// as the list emits its own leading separator
if node.Next == nil || node.Next.Type != blackfriday.List {
out(w, crTag)
}
}
case blackfriday.BlockQuote:
if entering {
out(w, quoteTag)
} else {
out(w, quoteCloseTag)
}
case blackfriday.Heading:
r.handleHeading(w, node, entering)
case blackfriday.HorizontalRule:
out(w, hruleTag)
case blackfriday.List:
r.handleList(w, node, entering)
case blackfriday.Item:
r.handleItem(w, node, entering)
case blackfriday.CodeBlock:
out(w, codeTag)
escapeSpecialChars(w, node.Literal)
out(w, codeCloseTag)
case blackfriday.Table:
r.handleTable(w, node, entering)
case blackfriday.TableHead:
case blackfriday.TableBody:
case blackfriday.TableRow:
// no action as cell entries do all the nroff formatting
return blackfriday.GoToNext
case blackfriday.TableCell:
r.handleTableCell(w, node, entering)
case blackfriday.HTMLSpan:
// ignore other HTML tags
case blackfriday.HTMLBlock:
if bytes.HasPrefix(node.Literal, []byte("<!--")) {
break // ignore comments, no warning
}
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
default:
fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String())
}
return walkAction
}
// handleHeading emits the roff macro introducing a heading: the very first
// level-1 heading becomes the .TH title line, later level-1 and level-2
// headings become .SH sections, and anything deeper becomes .SS. Nothing
// is written when leaving the node.
func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) {
	if !entering {
		return
	}
	switch {
	case node.Level == 1 && !r.firstHeader:
		r.firstHeader = true
		out(w, titleHeader)
	case node.Level == 1:
		out(w, topLevelHeader)
	case node.Level == 2:
		out(w, secondLevelHdr)
	default:
		out(w, otherHeader)
	}
}
// handleList opens or closes a list. The outermost list is introduced by a
// bare newline instead of .RS/.RE indentation, and definition lists emit no
// tags at all (their items do the formatting). Ordered lists additionally
// push/pop an item counter.
func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) {
	ordered := node.ListFlags&blackfriday.ListTypeOrdered != 0
	definition := node.ListFlags&blackfriday.ListTypeDefinition != 0
	if entering {
		r.listDepth++
		if ordered {
			r.listCounters = append(r.listCounters, 1)
		}
		switch {
		case definition:
			// tags for definition lists handled within Item node
		case r.listDepth == 1:
			out(w, crTag)
		default:
			out(w, listTag)
		}
		return
	}
	if ordered {
		r.listCounters = r.listCounters[:len(r.listCounters)-1]
	}
	switch {
	case definition:
		// tags for definition lists handled within Item node
	case r.listDepth == 1:
		// outermost list: nothing to close
	default:
		out(w, listCloseTag)
	}
	r.listDepth--
}
// handleItem emits the roff tag that starts a list item: a numbered .IP
// for ordered lists, .TP for definition terms, a context-dependent
// separator for definition descriptions, and a bullet .IP otherwise.
// Nothing is written when leaving the node.
func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) {
	if !entering {
		return
	}
	flags := node.ListFlags
	switch {
	case flags&blackfriday.ListTypeOrdered != 0:
		last := len(r.listCounters) - 1
		out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[last]))
		r.listCounters[last]++
	case flags&blackfriday.ListTypeTerm != 0:
		// DT (definition term): line just before DD (see below).
		out(w, dtTag)
	case flags&blackfriday.ListTypeDefinition != 0:
		// DD (definition description): line that starts with ": ".
		//
		// There must be no vertical whitespace between the DT and the
		// first DD, so only subsequent DDs (whose previous sibling is
		// itself a DD, not a DT) get a separator.
		prev := node.Prev
		if prev == nil || prev.ListFlags&(blackfriday.ListTypeTerm|blackfriday.ListTypeDefinition) != blackfriday.ListTypeDefinition {
			return
		}
		if prev.Type == blackfriday.Item &&
			prev.LastChild != nil &&
			prev.LastChild.Type == blackfriday.List &&
			prev.LastChild.ListFlags&blackfriday.ListTypeDefinition == 0 {
			out(w, ".IP\n")
		} else {
			out(w, dd2Tag)
		}
	default:
		out(w, ".IP \\(bu 2\n")
	}
}
// handleTable opens (.TS plus the column format section) or closes (.TE)
// a tbl table. Roff needs the column count before any data, so the table
// subtree is walked up front to count cells.
func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) {
	if !entering {
		out(w, tableEnd)
		return
	}
	out(w, tableStart)
	// call walker to count cells (and rows?) so format section can be produced
	format := strings.Repeat("l ", countColumns(node))
	out(w, format+"\n")
	out(w, format+".\n")
}
// handleTableCell writes the delimiters around one table cell: a tab
// between sibling cells, bold markers for header cells, T{/T} wrappers
// for long (>30 byte) cells, and a newline after the last cell of a row.
func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) {
	if entering {
		var prefix string
		if node.Prev != nil && node.Prev.Type == blackfriday.TableCell {
			prefix = "\t"
		}
		switch {
		case node.IsHeader:
			prefix += strongTag
		case nodeLiteralSize(node) > 30:
			prefix += tableCellStart
		}
		out(w, prefix)
		return
	}
	var suffix string
	switch {
	case node.IsHeader:
		suffix = strongCloseTag
	case nodeLiteralSize(node) > 30:
		suffix = tableCellEnd
	}
	if node.Next == nil {
		// Last cell: need to carriage return if we are at the end of the header row.
		suffix += crTag
	}
	out(w, suffix)
}
// nodeLiteralSize returns the combined length of the Literal text found
// by descending the chain of first children below node; sibling nodes
// are not visited, so this is an approximation of the cell's content size.
func nodeLiteralSize(node *blackfriday.Node) int {
	size := 0
	for child := node.FirstChild; child != nil; child = child.FirstChild {
		size += len(child.Literal)
	}
	return size
}
// countColumns counts the cells of a table's first row; roff requires
// knowing the column count before outputting any table data, so the
// subtree is walked ahead of rendering.
func countColumns(node *blackfriday.Node) int {
	count := 0
	node.Walk(func(n *blackfriday.Node, entering bool) blackfriday.WalkStatus {
		if n.Type == blackfriday.TableRow && !entering {
			// end of the first row: every row has the same width, so stop
			return blackfriday.Terminate
		}
		if n.Type == blackfriday.TableCell && entering {
			count++
		}
		return blackfriday.GoToNext
	})
	return count
}
func out(w io.Writer, output string) {
io.WriteString(w, output) //nolint:errcheck
}
// escapeSpecialChars escapes roff-special characters in text line by
// line, preserving the newlines between lines (no newline is added after
// a final line that the input did not terminate).
//
// It panics if the scanner fails (e.g. on a line exceeding the scanner's
// default token size).
func escapeSpecialChars(w io.Writer, text []byte) {
	// The scanner strips line separators from the tokens it returns, so
	// count the newlines up front to know how many to re-emit.
	newlines := bytes.Count(text, []byte{'\n'})
	scanner := bufio.NewScanner(bytes.NewReader(text))
	emitted := 0
	for scanner.Scan() {
		escapeSpecialCharsLine(w, scanner.Bytes())
		// Write the separator directly instead of appending it to
		// scanner.Bytes(): appending would write into the scanner's
		// internal buffer, which the scanner still owns.
		if emitted < newlines {
			emitted++
			out(w, "\n")
		}
	}
	if err := scanner.Err(); err != nil {
		panic(err)
	}
}
func escapeSpecialCharsLine(w io.Writer, text []byte) {
for i := 0; i < len(text); i++ {
// escape initial apostrophe or period
if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') {
out(w, "\\&")
}
// directly copy normal characters
org := i
for i < len(text) && text[i] != '\\' {
i++
}
if i > org {
w.Write(text[org:i]) //nolint:errcheck
}
// escape a character
if i >= len(text) {
break
}
w.Write([]byte{'\\', text[i]}) //nolint:errcheck
}
}
// bytesCut is a thin wrapper kept so existing callers keep working; it
// delegates to [bytes.Cut], which is available since go1.18 (this module
// requires a far newer Go, so the hand-rolled copy is unnecessary).
func bytesCut(s, sep []byte) (before, after []byte, found bool) {
	return bytes.Cut(s, sep)
}

View File

@ -0,0 +1,9 @@
# GitHub obeys this ignore file by default.
# Run this command locally to ignore formatting commits in `git blame`
# git config blame.ignoreRevsFile .git-blame-ignore-revs
# Added a new column to supported_mimes.md
# The supported_mimes.md file was a nice way to find when a file format was
# introduced. However, when I added a new column to the table, the
# whole git blame for the file was poisoned.
eb497f9bc5d31c6eab2929a112051218670137ba

Some files were not shown because too many files have changed in this diff Show More