Update dependencies

This commit is contained in:
Ingo Oppermann 2025-07-08 11:59:04 +02:00
parent 2ff8ce2c44
commit ede6debf71
No known key found for this signature in database
GPG Key ID: 2AB32426E9DD229E
84 changed files with 1960 additions and 1004 deletions

20
go.mod
View File

@ -5,17 +5,17 @@ go 1.23.0
toolchain go1.24.2
require (
github.com/99designs/gqlgen v0.17.75
github.com/Masterminds/semver/v3 v3.3.1
github.com/99designs/gqlgen v0.17.76
github.com/Masterminds/semver/v3 v3.4.0
github.com/adhocore/gronx v1.19.6
github.com/andybalholm/brotli v1.1.1
github.com/andybalholm/brotli v1.2.0
github.com/atrox/haikunatorgo/v2 v2.0.1
github.com/caddyserver/certmagic v0.23.0
github.com/datarhei/gosrt v0.9.0
github.com/datarhei/joy4 v0.0.0-20240603190808-b1407345907e
github.com/dolthub/swiss v0.2.1
github.com/fujiwara/shapeio v1.0.0
github.com/go-playground/validator/v10 v10.26.0
github.com/go-playground/validator/v10 v10.27.0
github.com/gobwas/glob v0.2.3
github.com/golang-jwt/jwt/v5 v5.2.2
github.com/google/gops v0.3.28
@ -26,7 +26,7 @@ require (
github.com/invopop/jsonschema v0.4.0
github.com/joho/godotenv v1.5.1
github.com/klauspost/compress v1.18.0
github.com/klauspost/cpuid/v2 v2.2.10
github.com/klauspost/cpuid/v2 v2.2.11
github.com/labstack/echo/v4 v4.13.4
github.com/lestrrat-go/strftime v1.1.0
github.com/lithammer/shortuuid/v4 v4.2.0
@ -40,9 +40,9 @@ require (
github.com/swaggo/echo-swagger v1.4.1
github.com/swaggo/swag v1.16.4
github.com/tklauser/go-sysconf v0.3.15
github.com/vektah/gqlparser/v2 v2.5.28
github.com/vektah/gqlparser/v2 v2.5.30
github.com/xeipuuv/gojsonschema v1.2.0
go.etcd.io/bbolt v1.4.1
go.etcd.io/bbolt v1.4.2
go.uber.org/automaxprocs v1.6.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.39.0
@ -94,12 +94,12 @@ require (
github.com/minio/crc64nvme v1.0.2 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.64.0 // indirect
github.com/prometheus/procfs v0.16.1 // indirect
github.com/prometheus/common v0.65.0 // indirect
github.com/prometheus/procfs v0.17.0 // indirect
github.com/rs/xid v1.6.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect

40
go.sum
View File

@ -1,11 +1,11 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/99designs/gqlgen v0.17.75 h1:GwHJsptXWLHeY7JO8b7YueUI4w9Pom6wJTICosDtQuI=
github.com/99designs/gqlgen v0.17.75/go.mod h1:p7gbTpdnHyl70hmSpM8XG8GiKwmCv+T5zkdY8U8bLog=
github.com/99designs/gqlgen v0.17.76 h1:YsJBcfACWmXWU2t1yCjoGdOmqcTfOFpjbLAE443fmYI=
github.com/99designs/gqlgen v0.17.76/go.mod h1:miiU+PkAnTIDKMQ1BseUOIVeQHoiwYDZGCswoxl7xec=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/Masterminds/semver/v3 v3.3.1 h1:QtNSWtVZ3nBfk8mAOu/B6v7FMJ+NHTIgUPi7rj+4nv4=
github.com/Masterminds/semver/v3 v3.3.1/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
github.com/adhocore/gronx v1.19.6 h1:5KNVcoR9ACgL9HhEqCm5QXsab/gI4QDIybTAWcXDKDc=
@ -19,8 +19,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
github.com/andybalholm/cascadia v1.3.3 h1:AG2YHrzJIm4BZ19iwJ/DAua6Btl3IwJX+VI4kktS1LM=
github.com/andybalholm/cascadia v1.3.3/go.mod h1:xNd9bqTn98Ln4DwST8/nG+H0yuB8Hmgu1YHNnWw0GeA=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
@ -98,8 +98,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.26.0 h1:SP05Nqhjcvz81uJaRfEV0YBSSSGMc/iMaVtFbr3Sw2k=
github.com/go-playground/validator/v10 v10.26.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk=
github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
@ -179,8 +179,8 @@ github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE=
github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4OxRzU=
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
@ -240,8 +240,8 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c h1:dAMKvw0MlJT1GshSTtih8C2gDs04w8dReiOGXrGLNoY=
github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@ -268,15 +268,15 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/common v0.64.0 h1:pdZeA+g617P7oGv1CzdTzyeShxAGrTBsolKNOLQPGO4=
github.com/prometheus/common v0.64.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
github.com/prometheus/common v0.65.0 h1:QDwzd+G1twt//Kwj/Ww6E9FQq1iVMmODnILtW1t2VzE=
github.com/prometheus/common v0.65.0/go.mod h1:0gZns+BLRQ3V6NdaerOhMbwwRbNh9hkGINtQAsP5GS8=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
github.com/prometheus/procfs v0.17.0 h1:FuLQ+05u4ZI+SS/w9+BWEM2TXiHKsUQ9TADiRH7DuK0=
github.com/prometheus/procfs v0.17.0/go.mod h1:oPQLaDAMRbA+u8H5Pbfq+dl3VDAvHxMUOVhe0wYB2zw=
github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg=
github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
@ -326,8 +326,8 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.5.28 h1:bIulcl3LF69ba6EiZVGD88y4MkM+Jxrf3P2MX8xLRkY=
github.com/vektah/gqlparser/v2 v2.5.28/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo=
github.com/vektah/gqlparser/v2 v2.5.30 h1:EqLwGAFLIzt1wpx1IPpY67DwUujF1OfzgEyDsLrN6kE=
github.com/vektah/gqlparser/v2 v2.5.30/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
@ -347,8 +347,8 @@ github.com/zeebo/blake3 v0.2.4 h1:KYQPkhpRtcqh0ssGYcKLG1JYvddkEA8QwCM/yBqhaZI=
github.com/zeebo/blake3 v0.2.4/go.mod h1:7eeQ6d2iXWRGF6npfaxl2CU+xy2Fjo2gxeyZGCRUjcE=
github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
go.etcd.io/bbolt v1.4.1 h1:5mOV+HWjIPLEAlUGMsveaUvK2+byZMFOzojoi7bh7uI=
go.etcd.io/bbolt v1.4.1/go.mod h1:c8zu2BnXWTu2XM4XcICtbGSl9cFwsXtcf9zLt2OncM8=
go.etcd.io/bbolt v1.4.2 h1:IrUHp260R8c+zYx/Tm8QZr04CX+qWS5PGfPdevhdm1I=
go.etcd.io/bbolt v1.4.2/go.mod h1:Is8rSHO/b4f3XigBC0lL0+4FwAQv3HXEEIgFMuKHceM=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=

View File

@ -93,8 +93,8 @@ func (b *Binder) InstantiateType(orig types.Type, targs []types.Type) (types.Typ
}
var (
MapType = types.NewMap(types.Typ[types.String], types.NewInterfaceType(nil, nil).Complete())
InterfaceType = types.NewInterfaceType(nil, nil)
MapType = types.NewMap(types.Typ[types.String], types.Universe.Lookup("any").Type())
InterfaceType = types.Universe.Lookup("any").Type()
)
func (b *Binder) DefaultUserObject(name string) (types.Type, error) {
@ -308,6 +308,10 @@ func (ref *TypeReference) MarshalFunc() string {
return "marshal" + ref.UniquenessKey()
}
func (ref *TypeReference) MarshalFuncFunctionSyntax() string {
return ref.MarshalFunc() + "F"
}
func (ref *TypeReference) UnmarshalFunc() string {
if ref.Definition == nil {
panic(errors.New("Definition missing for " + ref.GQL.Name()))
@ -320,6 +324,10 @@ func (ref *TypeReference) UnmarshalFunc() string {
return "unmarshal" + ref.UniquenessKey()
}
func (ref *TypeReference) UnmarshalFuncFunctionSyntax() string {
return ref.UnmarshalFunc() + "F"
}
func (ref *TypeReference) IsTargetNilable() bool {
return IsNilable(ref.Target)
}

View File

@ -68,6 +68,14 @@ type Config struct {
var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
// templatePackageNames is a list of packages names that the default templates use, in order to preload those for performance considerations
// any additional package added to the base templates should be added here to improve performance and load all packages in bulk
var templatePackageNames = []string{
"context", "fmt", "io", "strconv", "time", "sync", "strings", "sync/atomic", "embed", "golang.org/x/sync/semaphore",
"errors", "bytes", "github.com/vektah/gqlparser/v2", "github.com/vektah/gqlparser/v2/ast",
"github.com/99designs/gqlgen/graphql", "github.com/99designs/gqlgen/graphql/introspection",
}
// DefaultConfig creates a copy of the default config
func DefaultConfig() *Config {
falseValue := false
@ -235,6 +243,7 @@ func (c *Config) Init() error {
c.Packages = code.NewPackages(
code.WithBuildTags(c.GoBuildTags...),
code.PackagePrefixToCache("github.com/99designs/gqlgen/graphql"),
code.WithPreloadNames(templatePackageNames...),
)
}
@ -249,14 +258,15 @@ func (c *Config) Init() error {
return err
}
// prefetch all packages in one big packages.Load call
c.Packages.LoadAll(c.packageList()...)
err = c.autobind()
if err != nil {
return err
}
c.injectBuiltins()
// prefetch all packages in one big packages.Load call
c.Packages.LoadAll(c.packageList()...)
// check everything is valid on the way out
err = c.check()
@ -881,6 +891,7 @@ func (c *Config) LoadSchema() error {
c.Packages = code.NewPackages(
code.WithBuildTags(c.GoBuildTags...),
code.PackagePrefixToCache("github.com/99designs/gqlgen/graphql"),
code.WithPreloadNames(templatePackageNames...),
)
}

View File

@ -51,7 +51,11 @@
{{- if $type.Unmarshaler }}
{{- if $type.HasEnumValues }}
tmp, err := {{ $type.Unmarshaler | call }}(v)
{{ if $useFunctionSyntaxForExecutionContext -}}
res := {{ $type.UnmarshalFuncFunctionSyntax }}[tmp]
{{- else -}}
res := {{ $type.UnmarshalFunc }}[tmp]
{{- end -}}
{{- else if $type.CastType }}
{{- if $type.IsContext }}
tmp, err := {{ $type.Unmarshaler | call }}(ctx, v)
@ -237,7 +241,11 @@
{{- $v = "*v" }}
{{- end }}
{{- if $type.HasEnumValues }}
{{- if $useFunctionSyntaxForExecutionContext -}}
{{- $v = printf "%v[%v]" $type.MarshalFuncFunctionSyntax $v }}
{{- else -}}
{{- $v = printf "%v[%v]" $type.MarshalFunc $v }}
{{- end -}}
{{- else if $type.CastType }}
{{- $v = printf "%v(%v)" ($type.CastType | ref) $v}}
{{- end }}
@ -286,12 +294,20 @@
{{- $enum = $type.GO.Elem }}
{{- end }}
var (
{{ if $useFunctionSyntaxForExecutionContext -}}
{{ $type.UnmarshalFuncFunctionSyntax }} = map[string]{{ $enum | ref }}{
{{- else -}}
{{ $type.UnmarshalFunc }} = map[string]{{ $enum | ref }}{
{{- end -}}
{{- range $value := $type.EnumValues }}
"{{ $value.Definition.Name }}": {{ $value.Object | obj }},
{{- end }}
}
{{ if $useFunctionSyntaxForExecutionContext -}}
{{ $type.MarshalFuncFunctionSyntax }} = map[{{ $enum | ref }}]string{
{{- else -}}
{{ $type.MarshalFunc }} = map[{{ $enum | ref }}]string{
{{- end -}}
{{- range $value := $type.EnumValues }}
{{ $value.Object | obj }}: "{{ $value.Definition.Name }}",
{{- end }}

View File

@ -216,23 +216,30 @@ func (e *Executor) parseQuery(
stats.Validation.Start = graphql.Now()
if len(doc.Operations) == 0 {
if doc == nil || len(doc.Operations) == 0 {
err = gqlerror.Errorf("no operation provided")
gqlErr, _ := err.(*gqlerror.Error)
errcode.Set(err, errcode.ValidationFailed)
return nil, gqlerror.List{gqlErr}
}
currentRules := rules.NewDefaultRules()
// Customise rules as required
// TODO(steve): consider currentRules.RemoveRule(rules.MaxIntrospectionDepth.Name)
// swap out the FieldsOnCorrectType rule with one that doesn't provide suggestions
if e.disableSuggestion {
validator.RemoveRule("FieldsOnCorrectType")
currentRules.RemoveRule("FieldsOnCorrectType")
rule := rules.FieldsOnCorrectTypeRuleWithoutSuggestions
// rule may already have been added
validator.ReplaceRule(rule.Name, rule.RuleFunc)
currentRules.AddRule(rule.Name, rule.RuleFunc)
} else { // or vice versa
currentRules.RemoveRule("FieldsOnCorrectTypeWithoutSuggestions")
rule := rules.FieldsOnCorrectTypeRule
currentRules.AddRule(rule.Name, rule.RuleFunc)
}
listErr := validator.Validate(e.es.Schema(), doc)
listErr := validator.ValidateWithRules(e.es.Schema(), doc, currentRules)
if len(listErr) != 0 {
for _, e := range listErr {
errcode.Set(e, errcode.ValidationFailed)

View File

@ -1,3 +1,3 @@
package graphql
const Version = "v0.17.75"
const Version = "v0.17.76"

View File

@ -43,6 +43,12 @@ func WithBuildTags(tags ...string) func(p *Packages) {
}
}
func WithPreloadNames(importPaths ...string) func(p *Packages) {
return func(p *Packages) {
p.LoadAllNames(importPaths...)
}
}
// PackagePrefixToCache option for NewPackages
// will not reset gqlgen packages in packages.Load call
func PackagePrefixToCache(prefixPath string) func(p *Packages) {
@ -61,6 +67,20 @@ func NewPackages(opts ...Option) *Packages {
return p
}
func dedupPackages(packages []string) []string {
packageMap := make(map[string]struct{})
dedupedPackages := make([]string, 0, len(packageMap))
for _, p := range packages {
if _, ok := packageMap[p]; ok {
continue
}
packageMap[p] = struct{}{}
dedupedPackages = append(dedupedPackages, p)
}
return dedupedPackages
}
func (p *Packages) CleanupUserPackages() {
if p.packagesToCachePrefix == "" {
// Cleanup all packages if we don't know which ones to keep
@ -169,46 +189,67 @@ func (p *Packages) LoadWithTypes(importPath string) *packages.Package {
return pkg
}
// NameForPackage looks up the package name from the package stanza in the go files at the given import path.
func (p *Packages) NameForPackage(importPath string) string {
if importPath == "" {
panic(errors.New("import path can not be empty"))
}
if p.importToName == nil {
p.importToName = map[string]string{}
// LoadAllNames will call packages.Load with the NeedName mode only and will store the package name in a cache.
// it does not return any package data, but after calling this you can call NameForPackage to get the package name without loading the full package data.
func (p *Packages) LoadAllNames(importPaths ...string) {
importPaths = dedupPackages(importPaths)
missing := make([]string, 0, len(importPaths))
for _, importPath := range importPaths {
if importPath == "" {
panic(errors.New("import path can not be empty"))
}
if p.importToName == nil {
p.importToName = map[string]string{}
}
importPath = NormalizeVendor(importPath)
// if it's in the name cache use it
if name := p.importToName[importPath]; name != "" {
continue
}
// otherwise we might have already loaded the full package data for it cached
pkg := p.packages[importPath]
if pkg != nil {
if _, ok := p.importToName[importPath]; !ok {
p.importToName[importPath] = pkg.Name
}
continue
}
missing = append(missing, importPath)
}
importPath = NormalizeVendor(importPath)
// if its in the name cache use it
if name := p.importToName[importPath]; name != "" {
return name
}
// otherwise we might have already loaded the full package data for it cached
pkg := p.packages[importPath]
if pkg == nil {
// otherwise do a name only lookup for it but don't put it in the package cache.
p.numNameCalls++
if len(missing) > 0 {
pkgs, err := packages.Load(&packages.Config{
Mode: packages.NeedName,
BuildFlags: p.buildFlags,
}, importPath)
}, missing...)
if err != nil {
p.loadErrors = append(p.loadErrors, err)
} else {
pkg = pkgs[0]
}
for _, pkg := range pkgs {
if pkg.Name == "" {
pkg.Name = SanitizePackageName(filepath.Base(pkg.PkgPath))
}
p.importToName[pkg.PkgPath] = pkg.Name
}
}
}
if pkg == nil || pkg.Name == "" {
return SanitizePackageName(filepath.Base(importPath))
}
// NameForPackage looks up the package name from the package stanza in the go files at the given import path.
func (p *Packages) NameForPackage(importPath string) string {
p.numNameCalls++
p.LoadAllNames(importPath)
p.importToName[importPath] = pkg.Name
return pkg.Name
importPath = NormalizeVendor(importPath)
return p.importToName[importPath]
}
// Evict removes a given package import path from the cache. Further calls to Load will fetch it from disk.

View File

@ -23,8 +23,9 @@ var federationTemplate string
var explicitRequiresTemplate string
type Federation struct {
Entities []*Entity
PackageOptions PackageOptions
Entities []*Entity
RequiresEntities map[string]*Entity
PackageOptions PackageOptions
version int
@ -320,6 +321,8 @@ func (f *Federation) GenerateCode(data *codegen.Data) error {
}
}
f.RequiresEntities = requiresEntities
return templates.Render(templates.Options{
PackageName: data.Config.Federation.Package,
Filename: data.Config.Federation.Filename,

View File

@ -264,15 +264,27 @@ func (ec *executionContext) resolveManyEntities(
}
for i, entity := range entities {
{{- if and $.PackageOptions.ExplicitRequires (index $.RequiresEntities $entity.Def.Name) }}
err = ec.Populate{{$entity.Def.Name}}Requires(ctx, {{- if not $usePointers -}}&{{- end -}}entity, reps[i].entity)
if err != nil {
return fmt.Errorf(`populating requires for Entity "{{$entity.Def.Name}}": %w`, err)
}
{{- end }}
{{- range $entity.Requires }}
{{ if $useFunctionSyntaxForExecutionContext -}}
entity.{{.Field.JoinGo `.`}}, err = {{.Type.UnmarshalFunc}}(ctx, ec, reps[i].entity["{{.Field.Join `"].(map[string]any)["`}}"])
{{- else -}}
entity.{{.Field.JoinGo `.`}}, err = ec.{{.Type.UnmarshalFunc}}(ctx, reps[i].entity["{{.Field.Join `"].(map[string]any)["`}}"])
{{- end }}
{{- if $options.ComputedRequires }}
{{/* We don't do anything in this case, computed requires are handled by standard resolvers */}}
{{- else if $.PackageOptions.ExplicitRequires }}
{{/* already handled above */}}
{{- else }}
{{ if $useFunctionSyntaxForExecutionContext -}}
entity.{{.Field.JoinGo `.`}}, err = {{.Type.UnmarshalFunc}}(ctx, ec, reps[i].entity["{{.Field.Join `"].(map[string]any)["`}}"])
{{- else -}}
entity.{{.Field.JoinGo `.`}}, err = ec.{{.Type.UnmarshalFunc}}(ctx, reps[i].entity["{{.Field.Join `"].(map[string]any)["`}}"])
{{- end }}
if err != nil {
return err
}
{{- end}}
{{- end}}
list[reps[i].index] = entity
}

View File

@ -187,10 +187,17 @@ func (m *Plugin) generatePerSchema(data *codegen.Data) error {
}
}
var allImports []string
for _, file := range files {
file.imports = rewriter.ExistingImports(file.name)
file.RemainingSource = rewriter.RemainingSource(file.name)
for _, i := range file.imports {
allImports = append(allImports, i.ImportPath)
}
}
data.Config.Packages.LoadAllNames(allImports...) // Preload all names in one Load call for performance reasons
newResolverTemplate := resolverTemplate
if data.Config.Resolver.ResolverTemplate != "" {
newResolverTemplate = readResolverTemplate(data.Config.Resolver.ResolverTemplate)

View File

@ -1,5 +1,31 @@
# Changelog
## 3.4.0 (2025-06-27)
### Added
- #268: Added property to Constraints to include prereleases for Check and Validate
### Changed
- #263: Updated Go testing for 1.24, 1.23, and 1.22
- #269: Updated the error message handling for message case and wrapping errors
- #266: Restore the ability to have leading 0's when parsing with NewVersion.
Opt-out of this by setting CoerceNewVersion to false.
### Fixed
- #257: Fixed the CodeQL link (thanks @dmitris)
- #262: Restored detailed errors when failed to parse with NewVersion. Opt-out
of this by setting DetailedNewVersionErrors to false for faster performance.
- #267: Handle pre-releases for an "and" group if one constraint includes them
## 3.3.1 (2024-11-19)
### Fixed
- #253: Fix for allowing some version that were invalid
## 3.3.0 (2024-08-27)
### Added
@ -137,7 +163,7 @@ functions. These are described in the added and changed sections below.
- #78: Fix unchecked error in example code (thanks @ravron)
- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
- #97: Fixed copyright file for proper display on GitHub
- #107: Fix handling prerelease when sorting alphanum and num
- #107: Fix handling prerelease when sorting alphanum and num
- #109: Fixed where Validate sometimes returns wrong message on error
## 1.4.2 (2018-04-10)

View File

@ -50,6 +50,18 @@ other versions, convert the version back into a string, and get the original
string. Getting the original string is useful if the semantic version was coerced
into a valid form.
There are package level variables that affect how `NewVersion` handles parsing.
- `CoerceNewVersion` is `true` by default. When set to `true` it coerces non-compliant
versions into SemVer. For example, allowing a leading 0 in a major, minor, or patch
part. This enables the use of CalVer in versions even when not compliant with SemVer.
When set to `false` less coercion work is done.
- `DetailedNewVersionErrors` provides more detailed errors. It only has an affect when
`CoerceNewVersion` is set to `false`. When `DetailedNewVersionErrors` is set to `true`
it can provide some more insight into why a version is invalid. Setting
`DetailedNewVersionErrors` to `false` is faster on performance but provides less
detailed error messages if a version fails to parse.
## Sorting Semantic Versions
A set of versions can be sorted using the `sort` package from the standard library.
@ -160,6 +172,10 @@ means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case
sensitivity doesn't apply here. This is due to ASCII sort ordering which is what
the spec specifies.
The `Constraints` instance returned from `semver.NewConstraint()` has a property
`IncludePrerelease` that, when set to true, will return prerelease versions when calls
to `Check()` and `Validate()` are made.
### Hyphen Range Comparisons
There are multiple methods to handle ranges and the first is hyphens ranges.
@ -250,7 +266,7 @@ or [create a pull request](https://github.com/Masterminds/semver/pulls).
Security is an important consideration for this project. The project currently
uses the following tools to help discover security issues:
* [CodeQL](https://github.com/Masterminds/semver)
* [CodeQL](https://codeql.github.com)
* [gosec](https://github.com/securego/gosec)
* Daily Fuzz testing

View File

@ -12,6 +12,13 @@ import (
// checked against.
type Constraints struct {
constraints [][]*constraint
containsPre []bool
// IncludePrerelease specifies if pre-releases should be included in
// the results. Note, if a constraint range has a prerelease than
// prereleases will be included for that AND group even if this is
// set to false.
IncludePrerelease bool
}
// NewConstraint returns a Constraints instance that a Version instance can
@ -22,11 +29,10 @@ func NewConstraint(c string) (*Constraints, error) {
c = rewriteRange(c)
ors := strings.Split(c, "||")
or := make([][]*constraint, len(ors))
lenors := len(ors)
or := make([][]*constraint, lenors)
hasPre := make([]bool, lenors)
for k, v := range ors {
// TODO: Find a way to validate and fetch all the constraints in a simpler form
// Validate the segment
if !validConstraintRegex.MatchString(v) {
return nil, fmt.Errorf("improper constraint: %s", v)
@ -43,12 +49,22 @@ func NewConstraint(c string) (*Constraints, error) {
return nil, err
}
// If one of the constraints has a prerelease record this.
// This information is used when checking all in an "and"
// group to ensure they all check for prereleases.
if pc.con.pre != "" {
hasPre[k] = true
}
result[i] = pc
}
or[k] = result
}
o := &Constraints{constraints: or}
o := &Constraints{
constraints: or,
containsPre: hasPre,
}
return o, nil
}
@ -57,10 +73,10 @@ func (cs Constraints) Check(v *Version) bool {
// TODO(mattfarina): For v4 of this library consolidate the Check and Validate
// functions as the underlying functions make that possible now.
// loop over the ORs and check the inner ANDs
for _, o := range cs.constraints {
for i, o := range cs.constraints {
joy := true
for _, c := range o {
if check, _ := c.check(v); !check {
if check, _ := c.check(v, (cs.IncludePrerelease || cs.containsPre[i])); !check {
joy = false
break
}
@ -83,12 +99,12 @@ func (cs Constraints) Validate(v *Version) (bool, []error) {
// Capture the prerelease message only once. When it happens the first time
// this var is marked
var prerelesase bool
for _, o := range cs.constraints {
for i, o := range cs.constraints {
joy := true
for _, c := range o {
// Before running the check handle the case there the version is
// a prerelease and the check is not searching for prereleases.
if c.con.pre == "" && v.pre != "" {
if !(cs.IncludePrerelease || cs.containsPre[i]) && v.pre != "" {
if !prerelesase {
em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
e = append(e, em)
@ -98,7 +114,7 @@ func (cs Constraints) Validate(v *Version) (bool, []error) {
} else {
if _, err := c.check(v); err != nil {
if _, err := c.check(v, (cs.IncludePrerelease || cs.containsPre[i])); err != nil {
e = append(e, err)
joy = false
}
@ -227,8 +243,8 @@ type constraint struct {
}
// Check if a version meets the constraint
func (c *constraint) check(v *Version) (bool, error) {
return constraintOps[c.origfunc](v, c)
func (c *constraint) check(v *Version, includePre bool) (bool, error) {
return constraintOps[c.origfunc](v, c, includePre)
}
// String prints an individual constraint into a string
@ -236,7 +252,7 @@ func (c *constraint) string() string {
return c.origfunc + c.orig
}
type cfunc func(v *Version, c *constraint) (bool, error)
type cfunc func(v *Version, c *constraint, includePre bool) (bool, error)
func parseConstraint(c string) (*constraint, error) {
if len(c) > 0 {
@ -272,7 +288,7 @@ func parseConstraint(c string) (*constraint, error) {
// The constraintRegex should catch any regex parsing errors. So,
// we should never get here.
return nil, errors.New("constraint Parser Error")
return nil, errors.New("constraint parser error")
}
cs.con = con
@ -290,7 +306,7 @@ func parseConstraint(c string) (*constraint, error) {
// The constraintRegex should catch any regex parsing errors. So,
// we should never get here.
return nil, errors.New("constraint Parser Error")
return nil, errors.New("constraint parser error")
}
cs := &constraint{
@ -305,16 +321,14 @@ func parseConstraint(c string) (*constraint, error) {
}
// Constraint functions
func constraintNotEqual(v *Version, c *constraint) (bool, error) {
func constraintNotEqual(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
if c.dirty {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
if c.con.Major() != v.Major() {
return true, nil
}
@ -345,12 +359,11 @@ func constraintNotEqual(v *Version, c *constraint) (bool, error) {
return true, nil
}
func constraintGreaterThan(v *Version, c *constraint) (bool, error) {
func constraintGreaterThan(v *Version, c *constraint, includePre bool) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
@ -391,11 +404,10 @@ func constraintGreaterThan(v *Version, c *constraint) (bool, error) {
return false, fmt.Errorf("%s is less than or equal to %s", v, c.orig)
}
func constraintLessThan(v *Version, c *constraint) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
func constraintLessThan(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
@ -406,12 +418,11 @@ func constraintLessThan(v *Version, c *constraint) (bool, error) {
return false, fmt.Errorf("%s is greater than or equal to %s", v, c.orig)
}
func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) {
func constraintGreaterThanEqual(v *Version, c *constraint, includePre bool) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
@ -422,11 +433,10 @@ func constraintGreaterThanEqual(v *Version, c *constraint) (bool, error) {
return false, fmt.Errorf("%s is less than %s", v, c.orig)
}
func constraintLessThanEqual(v *Version, c *constraint) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
func constraintLessThanEqual(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
@ -455,11 +465,10 @@ func constraintLessThanEqual(v *Version, c *constraint) (bool, error) {
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
func constraintTilde(v *Version, c *constraint) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
func constraintTilde(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
@ -487,16 +496,15 @@ func constraintTilde(v *Version, c *constraint) (bool, error) {
// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
// it's a straight =
func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
func constraintTildeOrEqual(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}
if c.dirty {
return constraintTilde(v, c)
return constraintTilde(v, c, includePre)
}
eq := v.Equal(c.con)
@ -516,11 +524,10 @@ func constraintTildeOrEqual(v *Version, c *constraint) (bool, error) {
// ^0.0.3 --> >=0.0.3 <0.0.4
// ^0.0 --> >=0.0.0 <0.1.0
// ^0 --> >=0.0.0 <1.0.0
func constraintCaret(v *Version, c *constraint) (bool, error) {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
func constraintCaret(v *Version, c *constraint, includePre bool) (bool, error) {
// The existence of prereleases is checked at the group level and passed in.
// Exit early if the version has a prerelease but those are to be ignored.
if v.Prerelease() != "" && !includePre {
return false, fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
}

View File

@ -14,28 +14,40 @@ import (
// The compiled version of the regex created at init() is cached here so it
// only needs to be created once.
var versionRegex *regexp.Regexp
var looseVersionRegex *regexp.Regexp
// CoerceNewVersion sets if leading 0's are allowd in the version part. Leading 0's are
// not allowed in a valid semantic version. When set to true, NewVersion will coerce
// leading 0's into a valid version.
var CoerceNewVersion = true
// DetailedNewVersionErrors specifies if detailed errors are returned from the NewVersion
// function. This is used when CoerceNewVersion is set to false. If set to false
// ErrInvalidSemVer is returned for an invalid version. This does not apply to
// StrictNewVersion. Setting this function to false returns errors more quickly.
var DetailedNewVersionErrors = true
var (
// ErrInvalidSemVer is returned a version is found to be invalid when
// being parsed.
ErrInvalidSemVer = errors.New("Invalid Semantic Version")
ErrInvalidSemVer = errors.New("invalid semantic version")
// ErrEmptyString is returned when an empty string is passed in for parsing.
ErrEmptyString = errors.New("Version string empty")
ErrEmptyString = errors.New("version string empty")
// ErrInvalidCharacters is returned when invalid characters are found as
// part of a version
ErrInvalidCharacters = errors.New("Invalid characters in version")
ErrInvalidCharacters = errors.New("invalid characters in version")
// ErrSegmentStartsZero is returned when a version segment starts with 0.
// This is invalid in SemVer.
ErrSegmentStartsZero = errors.New("Version segment starts with 0")
ErrSegmentStartsZero = errors.New("version segment starts with 0")
// ErrInvalidMetadata is returned when the metadata is an invalid format
ErrInvalidMetadata = errors.New("Invalid Metadata string")
ErrInvalidMetadata = errors.New("invalid metadata string")
// ErrInvalidPrerelease is returned when the pre-release is an invalid format
ErrInvalidPrerelease = errors.New("Invalid Prerelease string")
ErrInvalidPrerelease = errors.New("invalid prerelease string")
)
// semVerRegex is the regular expression used to parse a semantic version.
@ -45,6 +57,12 @@ const semVerRegex string = `v?(0|[1-9]\d*)(?:\.(0|[1-9]\d*))?(?:\.(0|[1-9]\d*))?
`(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?` +
`(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?`
// looseSemVerRegex is a regular expression that lets invalid semver expressions through
// with enough detail that certain errors can be checked for.
const looseSemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
// Version represents a single semantic version.
type Version struct {
major, minor, patch uint64
@ -55,6 +73,7 @@ type Version struct {
func init() {
versionRegex = regexp.MustCompile("^" + semVerRegex + "$")
looseVersionRegex = regexp.MustCompile("^" + looseSemVerRegex + "$")
}
const (
@ -142,8 +161,27 @@ func StrictNewVersion(v string) (*Version, error) {
// attempts to convert it to SemVer. If you want to validate it was a strict
// semantic version at parse time see StrictNewVersion().
func NewVersion(v string) (*Version, error) {
if CoerceNewVersion {
return coerceNewVersion(v)
}
m := versionRegex.FindStringSubmatch(v)
if m == nil {
// Disabling detailed errors is first so that it is in the fast path.
if !DetailedNewVersionErrors {
return nil, ErrInvalidSemVer
}
// Check for specific errors with the semver string and return a more detailed
// error.
m = looseVersionRegex.FindStringSubmatch(v)
if m == nil {
return nil, ErrInvalidSemVer
}
err := validateVersion(m)
if err != nil {
return nil, err
}
return nil, ErrInvalidSemVer
}
@ -156,13 +194,13 @@ func NewVersion(v string) (*Version, error) {
var err error
sv.major, err = strconv.ParseUint(m[1], 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
if m[2] != "" {
sv.minor, err = strconv.ParseUint(m[2], 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
} else {
sv.minor = 0
@ -171,7 +209,61 @@ func NewVersion(v string) (*Version, error) {
if m[3] != "" {
sv.patch, err = strconv.ParseUint(m[3], 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
} else {
sv.patch = 0
}
// Perform some basic due diligence on the extra parts to ensure they are
// valid.
if sv.pre != "" {
if err = validatePrerelease(sv.pre); err != nil {
return nil, err
}
}
if sv.metadata != "" {
if err = validateMetadata(sv.metadata); err != nil {
return nil, err
}
}
return sv, nil
}
func coerceNewVersion(v string) (*Version, error) {
m := looseVersionRegex.FindStringSubmatch(v)
if m == nil {
return nil, ErrInvalidSemVer
}
sv := &Version{
metadata: m[8],
pre: m[5],
original: v,
}
var err error
sv.major, err = strconv.ParseUint(m[1], 10, 64)
if err != nil {
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
if m[2] != "" {
sv.minor, err = strconv.ParseUint(strings.TrimPrefix(m[2], "."), 10, 64)
if err != nil {
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
} else {
sv.minor = 0
}
if m[3] != "" {
sv.patch, err = strconv.ParseUint(strings.TrimPrefix(m[3], "."), 10, 64)
if err != nil {
return nil, fmt.Errorf("error parsing version segment: %w", err)
}
} else {
sv.patch = 0
@ -615,7 +707,7 @@ func validatePrerelease(p string) error {
eparts := strings.Split(p, ".")
for _, p := range eparts {
if p == "" {
return ErrInvalidMetadata
return ErrInvalidPrerelease
} else if containsOnly(p, num) {
if len(p) > 1 && p[0] == '0' {
return ErrSegmentStartsZero
@ -643,3 +735,54 @@ func validateMetadata(m string) error {
}
return nil
}
// validateVersion checks a looseVersionRegex submatch for common validation
// issues but may not catch all errors.
//
// m is the submatch slice: m[1..3] are the major/minor/patch segments (the
// minor and patch segments still carry their leading "." from the regex),
// m[5] is the prerelease, and m[8] is the build metadata.
func validateVersion(m []string) error {
	// The three numeric segments share the same rules: no leading zeros and
	// they must parse as unsigned integers. m[1] never starts with ".", so
	// trimming the prefix is a no-op for it.
	for _, seg := range m[1:4] {
		if seg == "" {
			continue
		}
		v := strings.TrimPrefix(seg, ".")
		if len(v) > 1 && v[0] == '0' {
			return ErrSegmentStartsZero
		}
		if _, err := strconv.ParseUint(v, 10, 64); err != nil {
			return fmt.Errorf("error parsing version segment: %w", err)
		}
	}
	if m[5] != "" {
		if err := validatePrerelease(m[5]); err != nil {
			return err
		}
	}
	if m[8] != "" {
		if err := validateMetadata(m[8]); err != nil {
			return err
		}
	}
	return nil
}

2
vendor/github.com/andybalholm/brotli/.gitignore generated vendored Normal file
View File

@ -0,0 +1,2 @@
cpu.out
brotli.test

View File

@ -229,6 +229,8 @@ func storeMetaBlockHeaderBW(len uint, is_uncompressed bool, bw *bitWriter) {
nibbles = 4
} else if len <= 1<<20 {
nibbles = 5
} else if len > 1<<24 {
panic("metablock too long")
}
bw.writeBits(2, uint64(nibbles)-4)

View File

@ -17,7 +17,7 @@ func findMatchLengthWithLimit(s1 []byte, s2 []byte, limit uint) uint {
var matched uint = 0
_, _ = s1[limit-1], s2[limit-1] // bounds check
switch runtime.GOARCH {
case "amd64":
case "amd64", "arm64":
// Compare 8 bytes at at time.
for matched+8 <= limit {
w1 := binary.LittleEndian.Uint64(s1[matched:])

View File

@ -1,6 +1,7 @@
package matchfinder
import (
"bytes"
"encoding/binary"
"math/bits"
"runtime"
@ -42,7 +43,7 @@ type M4 struct {
DistanceBitCost int
table []uint32
chain []uint16
chain []uint32
history []byte
}
@ -100,7 +101,7 @@ func (q *M4) FindMatches(dst []Match, src []byte) []Match {
e.NextEmit = len(q.history)
q.history = append(q.history, src...)
if q.ChainLength > 0 {
q.chain = append(q.chain, make([]uint16, len(src))...)
q.chain = append(q.chain, make([]uint32, len(src))...)
}
src = q.history
@ -123,15 +124,25 @@ func (q *M4) FindMatches(dst []Match, src []byte) []Match {
matches = [3]absoluteMatch{}
}
// Look for a repeat match one byte after the current position.
if matches[0] == (absoluteMatch{}) && len(e.Dst) > 0 {
prevDistance := e.Dst[len(e.Dst)-1].Distance
if binary.LittleEndian.Uint32(src[i+1:]) == binary.LittleEndian.Uint32(src[i+1-prevDistance:]) {
// We have a 4-byte match.
m := extendMatch2(src, i+1, i+1-prevDistance, e.NextEmit+1)
if m.End-m.Start >= q.MinLength {
matches[0] = m
}
}
}
// Calculate and store the hash.
h := ((binary.LittleEndian.Uint64(src[i:]) & (1<<(8*q.HashLen) - 1)) * hashMul64) >> (64 - q.TableBits)
candidate := int(q.table[h])
q.table[h] = uint32(i)
if q.ChainLength > 0 && candidate != 0 {
delta := i - candidate
if delta < 1<<16 {
q.chain[i] = uint16(delta)
}
q.chain[i] = uint32(delta)
}
if i < matches[0].End && i != matches[0].End+2-q.HashLen {
@ -220,6 +231,25 @@ func (q *M4) FindMatches(dst []Match, src []byte) []Match {
// Emit the first match, shortening it if necessary to avoid overlap with the second.
if matches[2].End > matches[1].Start {
matches[2].End = matches[1].Start
if q.ChainLength > 0 && matches[2].End-matches[2].Start >= q.MinLength {
// Since the match length was trimmed, we may be able to find a closer match
// to replace it.
pos := matches[2].Start
for {
delta := int(q.chain[pos])
if delta == 0 {
break
}
pos -= delta
if pos <= matches[2].Match {
break
}
if bytes.Equal(src[matches[2].Start:matches[2].End], src[pos:pos+matches[2].End-matches[2].Start]) {
matches[2].Match = pos
break
}
}
}
}
if matches[2].End-matches[2].Start >= q.MinLength && q.score(matches[2]) > 0 {
e.emit(matches[2])
@ -261,7 +291,7 @@ const hashMul64 = 0x1E35A7BD1E35A7BD
// 0 <= i && i < j && j <= len(src)
func extendMatch(src []byte, i, j int) int {
switch runtime.GOARCH {
case "amd64":
case "amd64", "arm64":
// As long as we are 8 or more bytes before the end of src, we can load and
// compare 8 bytes at a time. If those 8 bytes are equal, repeat.
for j+8 < len(src) {

View File

@ -0,0 +1,328 @@
package matchfinder
import (
"encoding/binary"
"math"
"math/bits"
"slices"
)
// Pathfinder is a MatchFinder that uses hash chains to find matches, and a
// shortest-path optimizer to choose which matches to use.
type Pathfinder struct {
	// MaxDistance is the maximum distance (in bytes) to look back for
	// a match. The default is 65535.
	MaxDistance int
	// MinLength is the length of the shortest match to return.
	// The default is 4.
	MinLength int
	// HashLen is the number of bytes to use to calculate the hashes.
	// The maximum is 8 and the default is 6.
	HashLen int
	// TableBits is the number of bits in the hash table indexes.
	// The default is 17 (128K entries).
	TableBits int
	// ChainLength is how many entries to search on the "match chain" of older
	// locations with the same hash as the current location.
	ChainLength int
	// table maps a hash to the most recent position where that hash was seen.
	table []uint32
	// chain[i] is the distance back from position i to the previous position
	// with the same hash (0 if there is none).
	chain []uint32
	// history retains previously processed data so matches can refer back
	// across calls to FindMatches.
	history []byte
	// holding onto buffers to reduce allocations:
	arrivals []arrival
	foundMatches []absoluteMatch
	matches []Match
}
// Reset discards all remembered history so the Pathfinder can be reused on an
// unrelated stream. The backing storage of the buffers is retained so that a
// subsequent FindMatches call does not have to reallocate.
func (q *Pathfinder) Reset() {
	clear(q.table)
	q.chain = q.chain[:0]
	q.history = q.history[:0]
}
// An arrival represents how we got to a certain byte position.
// The cost is the total cost to get there from the beginning of the block.
// If distance > 0, the arrival is with a match.
// If distance == 0, the arrival is with a run of literals.
type arrival struct {
	// length is the match length, or the length of the literal run when
	// distance == 0.
	length uint32
	// distance is the match distance; 0 marks a literal-run arrival.
	distance uint32
	// cost is the estimated cumulative encoding cost from the start of the
	// block to this position (literal costs are log2-frequency based).
	cost float32
}
const (
	// baseMatchCost is the fixed cost charged for every match arrival in
	// Pathfinder's cost model, before distance and length adjustments.
	baseMatchCost float32 = 4
)
// FindMatches appends the matches it finds in src to dst and returns dst.
//
// The search runs in three phases over the concatenation of the retained
// history and src:
//  1. hash every position and link equal-hash positions via q.chain,
//  2. collect candidate matches (including repeat-distance matches) into
//     q.foundMatches,
//  3. run a shortest-path cost optimization over per-position "arrivals" to
//     choose which matches to emit, then walk the cheapest path backward to
//     build the result.
func (q *Pathfinder) FindMatches(dst []Match, src []byte) []Match {
	// Fill in the documented defaults for any unset tuning parameters.
	if q.MaxDistance == 0 {
		q.MaxDistance = 65535
	}
	if q.MinLength == 0 {
		q.MinLength = 4
	}
	if q.HashLen == 0 {
		q.HashLen = 6
	}
	if q.TableBits == 0 {
		q.TableBits = 17
	}
	if len(q.table) < 1<<q.TableBits {
		q.table = make([]uint32, 1<<q.TableBits)
	}
	// Estimate a per-byte literal cost from the byte frequencies in src:
	// rarer bytes cost more (log2 of inverse frequency, floored at 1).
	var histogram [256]uint32
	for _, b := range src {
		histogram[b]++
	}
	var byteCost [256]float32
	for b, n := range histogram {
		// NOTE(review): n == 0 yields +Inf here, but byteCost is only read
		// for byte values that actually occur in src, so that entry is
		// never used.
		cost := max(math.Log2(float64(len(src))/float64(n)), 1)
		byteCost[b] = float32(cost)
	}
	// Each element in arrivals corresponds to the position just after
	// the corresponding byte in src.
	arrivals := q.arrivals
	if len(arrivals) < len(src) {
		arrivals = make([]arrival, len(src))
		q.arrivals = arrivals
	} else {
		// Reuse the buffer from a previous call, clearing its contents.
		arrivals = arrivals[:len(src)]
		for i := range arrivals {
			arrivals[i] = arrival{}
		}
	}
	if len(q.history) > q.MaxDistance*2 {
		// Trim down the history buffer.
		delta := len(q.history) - q.MaxDistance
		copy(q.history, q.history[delta:])
		q.history = q.history[:q.MaxDistance]
		q.chain = q.chain[:q.MaxDistance]
		// Shift stored table positions by the trimmed amount; positions that
		// fall off the front clamp to 0 (treated as "no entry").
		for i, v := range q.table {
			newV := max(int(v)-delta, 0)
			q.table[i] = uint32(newV)
		}
	}
	// Append src to the history buffer.
	historyLen := len(q.history)
	q.history = append(q.history, src...)
	q.chain = append(q.chain, make([]uint32, len(src))...)
	src = q.history
	// Calculate hashes and build the chain.
	for i := historyLen; i < len(src)-7; i++ {
		h := ((binary.LittleEndian.Uint64(src[i:]) & (1<<(8*q.HashLen) - 1)) * hashMul64) >> (64 - q.TableBits)
		candidate := int(q.table[h])
		q.table[h] = uint32(i)
		if candidate != 0 {
			delta := i - candidate
			q.chain[i] = uint32(delta)
		}
	}
	// Look for matches, and collect them in foundMatches. Later we'll figure out
	// which ones to actually use.
	foundMatches := q.foundMatches[:0]
	var prevMatch absoluteMatch
	i := historyLen
	for i < len(src)-7 {
		delta := q.chain[i]
		if delta == 0 {
			i++
			continue
		}
		candidate := i - int(delta)
		if candidate <= 0 || i-candidate > q.MaxDistance {
			i++
			continue
		}
		var currentMatch absoluteMatch
		if i >= prevMatch.End && prevMatch != (absoluteMatch{}) {
			// Look for a repeat match at i+1.
			prevDistance := prevMatch.Start - prevMatch.Match
			if binary.LittleEndian.Uint32(src[i+1:]) == binary.LittleEndian.Uint32(src[i+1-prevDistance:]) {
				m := extendMatch2(src, i+1, i+1-prevDistance, i+1)
				if m.End-m.Start > q.MinLength {
					currentMatch = m
					foundMatches = append(foundMatches, m)
				}
			}
		}
		// Try the most recent position with the same hash.
		if binary.LittleEndian.Uint32(src[candidate:]) == binary.LittleEndian.Uint32(src[i:]) {
			m := extendMatch2(src, i, candidate, max(historyLen, prevMatch.Start))
			if m.End-m.Start > q.MinLength {
				currentMatch = m
				foundMatches = append(foundMatches, m)
			}
		}
		// Walk further back along the hash chain looking for longer matches.
		for range q.ChainLength {
			delta := q.chain[candidate]
			if delta == 0 {
				break
			}
			candidate -= int(delta)
			if candidate <= 0 || i-candidate > q.MaxDistance {
				break
			}
			if binary.LittleEndian.Uint32(src[candidate:]) == binary.LittleEndian.Uint32(src[i:]) {
				m := extendMatch2(src, i, candidate, max(historyLen, prevMatch.Start))
				if m.End-m.Start > q.MinLength && m.End-m.Start > currentMatch.End-currentMatch.Start {
					currentMatch = m
					foundMatches = append(foundMatches, m)
				}
			}
		}
		if i < prevMatch.End && currentMatch.End-currentMatch.Start <= prevMatch.End-prevMatch.Start {
			// We were looking for an overlapping match, but we didn't find one longer
			// than the previous match. So we'll go back to sequential search,
			// starting right after the previous match.
			i = prevMatch.End
			continue
		}
		if currentMatch == (absoluteMatch{}) {
			// No match found. Continue with sequential search.
			i++
			continue
		}
		// We've found a match; now look for matches overlapping the end of it.
		prevMatch = currentMatch
		i = currentMatch.End + 2 - q.HashLen
	}
	q.foundMatches = foundMatches
	slices.SortFunc(foundMatches, func(a, b absoluteMatch) int { return a.Start - b.Start })
	// Phase 3: dynamic programming over arrivals. arrivals[k] records the
	// cheapest known way to reach the position just after src byte
	// historyLen+k, either by a literal run or by (a prefix of) a match.
	matchIndex := 0
	var pending absoluteMatch
	for i := historyLen; i < len(src); i++ {
		var arrivedHere arrival
		if i > historyLen {
			arrivedHere = arrivals[i-historyLen-1]
		}
		// unmatched is the length of the literal run ending here, if the
		// cheapest arrival at this position was by literals.
		unmatched := 0
		if arrivedHere.distance == 0 {
			unmatched = int(arrivedHere.length)
		}
		// prevDistance is the distance of the match preceding that literal
		// run; repeating it makes a new match cheaper below.
		prevDistance := 0
		if i-unmatched > historyLen {
			prevDistance = int(arrivals[i-historyLen-1-unmatched].distance)
		}
		// Option 1: extend with one more literal.
		literalCost := byteCost[src[i]]
		nextArrival := &arrivals[i-historyLen]
		if nextArrival.cost == 0 || arrivedHere.cost+literalCost < nextArrival.cost {
			*nextArrival = arrival{
				cost: arrivedHere.cost + literalCost,
				length: uint32(unmatched + 1),
			}
		}
		// Option 2: take a match starting exactly here, at every usable
		// length from MinLength up to its full length.
		for matchIndex < len(foundMatches) && foundMatches[matchIndex].Start == i {
			m := foundMatches[matchIndex]
			matchIndex++
			if m.End > pending.End {
				pending = m
			}
			matchCost := baseMatchCost + float32(bits.Len(uint(unmatched)))
			if m.Start-m.Match != prevDistance {
				matchCost += float32(bits.Len(uint(m.Start - m.Match)))
			}
			for j := m.Start + q.MinLength; j <= m.End; j++ {
				adjustedCost := matchCost
				if j-m.Start < 6 {
					// Matches shorter than 6 are comparatively rare, and therefore
					// have longer codes.
					adjustedCost += float32(6-(j-m.Start)) * 2
				}
				a := &arrivals[j-historyLen-1]
				if a.cost == 0 || arrivedHere.cost+adjustedCost < a.cost {
					*a = arrival{
						length: uint32(j - m.Start),
						distance: uint32(m.Start - m.Match),
						cost: arrivedHere.cost + adjustedCost,
					}
				}
			}
		}
		// If a match from an earlier position extends far enough past the current
		// position, try using the tail of it, starting from here.
		if unmatched == 0 && pending.Start != i && pending.End >= i+q.MinLength &&
			!(arrivedHere.length != 0 && arrivedHere.distance == uint32(pending.Start-pending.Match)) {
			matchCost := baseMatchCost + float32(bits.Len(uint(pending.Start-pending.Match)))
			for j := i + q.MinLength; j <= pending.End; j++ {
				adjustedCost := matchCost
				if j-i < 6 {
					// Matches shorter than 6 are comparatively rare, and therefore
					// have longer codes.
					adjustedCost += float32(6-(j-i)) * 2
				}
				a := &arrivals[j-historyLen-1]
				if a.cost == 0 || arrivedHere.cost+adjustedCost < a.cost {
					*a = arrival{
						length: uint32(j - i),
						distance: uint32(pending.Start - pending.Match),
						cost: arrivedHere.cost + adjustedCost,
					}
				}
			}
		}
		// NOTE(review): the remainder of this loop body computes candidate
		// but uses it only in the guard conditions, so it has no observable
		// effect; this looks like leftover code — confirm against upstream.
		delta := q.chain[i]
		if delta == 0 {
			continue
		}
		candidate := i - int(delta)
		if candidate <= 0 || i-candidate > q.MaxDistance {
			continue
		}
	}
	// We've found the shortest path; now walk it backward and store the matches.
	matches := q.matches[:0]
	i = len(arrivals) - 1
	for i >= 0 {
		a := arrivals[i]
		if a.distance > 0 {
			// Arrived by a match: emit it and jump back over its length.
			matches = append(matches, Match{
				Length: int(a.length),
				Distance: int(a.distance),
			})
			i -= int(a.length)
		} else {
			// Arrived by a literal run: record it as the Unmatched count of
			// the most recently appended match (matches are built in reverse,
			// so that is the match that follows the literals in the output).
			if len(matches) == 0 {
				matches = append(matches, Match{})
			}
			matches[len(matches)-1].Unmatched = int(a.length)
			i -= int(a.length)
		}
	}
	q.matches = matches
	slices.Reverse(matches)
	return append(dst, matches...)
}

View File

@ -49,6 +49,9 @@ func (r *Reader) Read(p []byte) (n int, err error) {
if !decoderHasMoreOutput(r) && len(r.in) == 0 {
m, readErr := r.src.Read(r.buf)
if m == 0 {
if readErr == io.EOF && r.state != stateDone {
readErr = io.ErrUnexpectedEOF
}
// If readErr is `nil`, we just proxy underlying stream behavior.
return 0, readErr
}

View File

@ -121,18 +121,18 @@ type nopCloser struct {
func (nopCloser) Close() error { return nil }
// NewWriterV2 is like NewWriterLevel, but it uses the new implementation
// based on the matchfinder package. It currently supports up to level 7;
// if a higher level is specified, level 7 will be used.
// based on the matchfinder package. It currently supports up to level 9;
// if a higher level is specified, level 9 will be used.
func NewWriterV2(dst io.Writer, level int) *matchfinder.Writer {
var mf matchfinder.MatchFinder
if level < 2 {
mf = matchfinder.M0{Lazy: level == 1}
} else {
} else if level < 8 {
hashLen := 6
if level >= 6 {
hashLen = 5
}
chainLen := 64
chainLen := 16
switch level {
case 2:
chainLen = 0
@ -149,7 +149,19 @@ func NewWriterV2(dst io.Writer, level int) *matchfinder.Writer {
MaxDistance: 1 << 20,
ChainLength: chainLen,
HashLen: hashLen,
DistanceBitCost: 57,
DistanceBitCost: 66,
}
} else {
chainLen := 32
hashLen := 5
if level == 8 {
chainLen = 4
hashLen = 6
}
mf = &matchfinder.Pathfinder{
MaxDistance: 1 << 20,
ChainLength: chainLen,
HashLen: hashLen,
}
}

View File

@ -2,101 +2,53 @@ version: "2"
linters:
default: all
disable:
- asasalint
- asciicheck
- bidichk
- bodyclose
- canonicalheader
- containedctx
- contextcheck
- noinlineerr
- wsl_v5
- copyloopvar
- cyclop
- decorder
- depguard
- dogsled
- dupl
- dupword
- durationcheck
- err113
- errcheck
- errchkjson
- errname
- errorlint
- exhaustive
- exhaustruct
- exptostd
- fatcontext
- forbidigo
- forcetypeassert
- funlen
- ginkgolinter
- gocheckcompilerdirectives
- gochecknoglobals
- gochecknoinits
- gochecksumtype
- gocognit
- goconst
- gocritic
- gocyclo
- godot
- godox
- goheader
- gomoddirectives
- gomodguard
- goprintffuncname
- gosec
- gosmopolitan
- govet
- grouper
- iface
- importas
- inamedparam
- ineffassign
- interfacebloat
- intrange
- ireturn
- lll
- loggercheck
- maintidx
- makezero
- mirror
- misspell
- mnd
- musttag
- nakedret
- nestif
- nilerr
- nilnesserr
- nilnil
- nlreturn
- noctx
- nolintlint
- nonamedreturns
- nosprintfhostport
- paralleltest
- perfsprint
- prealloc
- predeclared
- promlinter
- protogetter
- reassign
- recvcheck
- revive
- rowserrcheck
- sloglint
- spancheck
- sqlclosecheck
- staticcheck
- tagalign
- tagliatelle
- testableexamples
- testifylint
- testpackage
- thelper
- tparallel
- unparam
- varnamelen
- whitespace
- wrapcheck
- wsl
- zerologlint

View File

@ -1,6 +1,6 @@
Package validator
=================
<img align="right" src="logo.png">![Project status](https://img.shields.io/badge/version-10.25.0-green.svg)
<img align="right" src="logo.png">[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/go-playground/validator)](https://github.com/go-playground/validator/releases)
[![Build Status](https://github.com/go-playground/validator/actions/workflows/workflow.yml/badge.svg)](https://github.com/go-playground/validator/actions)
[![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=master&service=github)](https://coveralls.io/github/go-playground/validator?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator)
@ -262,6 +262,8 @@ validate := validator.New(validator.WithRequiredStructEnabled())
| excluded_without | Excluded Without |
| excluded_without_all | Excluded Without All |
| unique | Unique |
| validateFn | Verify if the method `Validate() error` does not return an error (or any specified method) |
#### Aliases:
| Tag | Description |

View File

@ -2,10 +2,12 @@ package validator
import (
"bytes"
"cmp"
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io/fs"
"net"
@ -244,6 +246,7 @@ var (
"cron": isCron,
"spicedb": isSpiceDB,
"ein": isEIN,
"validateFn": isValidateFn,
}
)
@ -294,7 +297,7 @@ func isOneOf(fl FieldLevel) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
v = strconv.FormatUint(field.Uint(), 10)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
for i := 0; i < len(vals); i++ {
if vals[i] == v {
@ -310,7 +313,7 @@ func isOneOfCI(fl FieldLevel) bool {
field := fl.Field()
if field.Kind() != reflect.String {
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
v := field.String()
for _, val := range vals {
@ -384,13 +387,13 @@ func isUnique(fl FieldLevel) bool {
}
if uniqueField.Kind() != field.Kind() {
panic(fmt.Sprintf("Bad field type %T:%T", field.Interface(), uniqueField.Interface()))
panic(fmt.Sprintf("Bad field type %s:%s", field.Type(), uniqueField.Type()))
}
return field.Interface() != uniqueField.Interface()
return getValue(field) != getValue(uniqueField)
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
}
@ -471,7 +474,7 @@ func isLongitude(fl FieldLevel) bool {
case reflect.Float64:
v = strconv.FormatFloat(field.Float(), 'f', -1, 64)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
return longitudeRegex().MatchString(v)
@ -494,7 +497,7 @@ func isLatitude(fl FieldLevel) bool {
case reflect.Float64:
v = strconv.FormatFloat(field.Float(), 'f', -1, 64)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
return latitudeRegex().MatchString(v)
@ -945,7 +948,6 @@ func isNeField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() != currentField.Int()
@ -966,9 +968,8 @@ func isNeField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Interface().(time.Time)
fieldTime := field.Interface().(time.Time)
t := getValue(currentField).(time.Time)
fieldTime := getValue(field).(time.Time)
return !fieldTime.Equal(t)
}
@ -1005,7 +1006,6 @@ func isLteCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() <= topField.Int()
@ -1023,9 +1023,8 @@ func isLteCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
fieldTime := field.Convert(timeType).Interface().(time.Time)
topTime := topField.Convert(timeType).Interface().(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
topTime := getValue(topField.Convert(timeType)).(time.Time)
return fieldTime.Before(topTime) || fieldTime.Equal(topTime)
}
@ -1052,7 +1051,6 @@ func isLtCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() < topField.Int()
@ -1070,9 +1068,8 @@ func isLtCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
fieldTime := field.Convert(timeType).Interface().(time.Time)
topTime := topField.Convert(timeType).Interface().(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
topTime := getValue(topField.Convert(timeType)).(time.Time)
return fieldTime.Before(topTime)
}
@ -1098,7 +1095,6 @@ func isGteCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() >= topField.Int()
@ -1116,9 +1112,8 @@ func isGteCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
fieldTime := field.Convert(timeType).Interface().(time.Time)
topTime := topField.Convert(timeType).Interface().(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
topTime := getValue(topField.Convert(timeType)).(time.Time)
return fieldTime.After(topTime) || fieldTime.Equal(topTime)
}
@ -1144,7 +1139,6 @@ func isGtCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() > topField.Int()
@ -1162,9 +1156,8 @@ func isGtCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
fieldTime := field.Convert(timeType).Interface().(time.Time)
topTime := topField.Convert(timeType).Interface().(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
topTime := getValue(topField.Convert(timeType)).(time.Time)
return fieldTime.After(topTime)
}
@ -1190,7 +1183,6 @@ func isNeCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return topField.Int() != field.Int()
@ -1211,9 +1203,8 @@ func isNeCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
t := field.Convert(timeType).Interface().(time.Time)
fieldTime := topField.Convert(timeType).Interface().(time.Time)
t := getValue(field.Convert(timeType)).(time.Time)
fieldTime := getValue(topField.Convert(timeType)).(time.Time)
return !fieldTime.Equal(t)
}
@ -1239,7 +1230,6 @@ func isEqCrossStructField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return topField.Int() == field.Int()
@ -1260,9 +1250,8 @@ func isEqCrossStructField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && topField.Type().ConvertibleTo(timeType) {
t := field.Convert(timeType).Interface().(time.Time)
fieldTime := topField.Convert(timeType).Interface().(time.Time)
t := getValue(field.Convert(timeType)).(time.Time)
fieldTime := getValue(topField.Convert(timeType)).(time.Time)
return fieldTime.Equal(t)
}
@ -1288,7 +1277,6 @@ func isEqField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() == currentField.Int()
@ -1309,9 +1297,8 @@ func isEqField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Convert(timeType).Interface().(time.Time)
fieldTime := field.Convert(timeType).Interface().(time.Time)
t := getValue(currentField.Convert(timeType)).(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
return fieldTime.Equal(t)
}
@ -1332,7 +1319,6 @@ func isEq(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
return field.String() == param
@ -1367,7 +1353,7 @@ func isEq(fl FieldLevel) bool {
return field.Bool() == p
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isEqIgnoreCase is the validation function for validating if the current field's string value is
@ -1382,7 +1368,7 @@ func isEqIgnoreCase(fl FieldLevel) bool {
return strings.EqualFold(field.String(), param)
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isPostcodeByIso3166Alpha2 validates by value which is country code in iso 3166 alpha 2
@ -1416,7 +1402,7 @@ func isPostcodeByIso3166Alpha2Field(fl FieldLevel) bool {
}
if kind != reflect.String {
panic(fmt.Sprintf("Bad field type %T", currentField.Interface()))
panic(fmt.Sprintf("Bad field type %s", currentField.Type()))
}
postcodeRegexInit.Do(initPostcodes)
@ -1472,16 +1458,7 @@ func isURI(fl FieldLevel) bool {
return err == nil
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
}
// isFileURL is the helper function for validating if the `path` is a valid file URL as per RFC 8089
func isFileURL(path string) bool {
if !strings.HasPrefix(path, "file:/") {
return false
}
_, err := url.ParseRequestURI(path)
return err == nil
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isURL is the validation function for validating if the current field's value is a valid URL.
@ -1497,23 +1474,20 @@ func isURL(fl FieldLevel) bool {
return false
}
if isFileURL(s) {
return true
}
url, err := url.Parse(s)
if err != nil || url.Scheme == "" {
return false
}
isFileScheme := url.Scheme == "file"
if url.Host == "" && url.Fragment == "" && url.Opaque == "" {
if (isFileScheme && (len(url.Path) == 0 || url.Path == "/")) || (!isFileScheme && len(url.Host) == 0 && len(url.Fragment) == 0 && len(url.Opaque) == 0) {
return false
}
return true
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isHttpURL is the validation function for validating if the current field's value is a valid HTTP(s) URL.
@ -1536,7 +1510,7 @@ func isHttpURL(fl FieldLevel) bool {
return url.Scheme == "http" || url.Scheme == "https"
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isUrnRFC2141 is the validation function for validating if the current field's value is a valid URN as per RFC 2141.
@ -1553,7 +1527,7 @@ func isUrnRFC2141(fl FieldLevel) bool {
return match
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isFile is the validation function for validating if the current field's value is a valid existing file path.
@ -1570,7 +1544,7 @@ func isFile(fl FieldLevel) bool {
return !fileInfo.IsDir()
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isImage is the validation function for validating if the current field's value contains the path to a valid image file
@ -1632,7 +1606,8 @@ func isImage(fl FieldLevel) bool {
return true
}
}
return false
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isFilePath is the validation function for validating if the current field's value is a valid file path.
@ -1686,7 +1661,7 @@ func isFilePath(fl FieldLevel) bool {
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isE164 is the validation function for validating if the current field's value is a valid e.164 formatted phone number.
@ -1796,7 +1771,7 @@ func hasValue(fl FieldLevel) bool {
case reflect.Slice, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
return !field.IsNil()
default:
if fl.(*validate).fldIsPointer && field.Interface() != nil {
if fl.(*validate).fldIsPointer && getValue(field) != nil {
return true
}
return field.IsValid() && !field.IsZero()
@ -1807,10 +1782,13 @@ func hasValue(fl FieldLevel) bool {
func hasNotZeroValue(fl FieldLevel) bool {
field := fl.Field()
switch field.Kind() {
case reflect.Slice, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
case reflect.Slice, reflect.Map:
// For slices and maps, consider them "not zero" only if they're both non-nil AND have elements
return !field.IsNil() && field.Len() > 0
case reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
return !field.IsNil()
default:
if fl.(*validate).fldIsPointer && field.Interface() != nil {
if fl.(*validate).fldIsPointer && getValue(field) != nil {
return !field.IsZero()
}
return field.IsValid() && !field.IsZero()
@ -1834,7 +1812,7 @@ func requireCheckFieldKind(fl FieldLevel, param string, defaultNotFoundValue boo
case reflect.Slice, reflect.Map, reflect.Ptr, reflect.Interface, reflect.Chan, reflect.Func:
return field.IsNil()
default:
if nullable && field.Interface() != nil {
if nullable && getValue(field) != nil {
return false
}
return field.IsValid() && field.IsZero()
@ -1851,7 +1829,6 @@ func requireCheckFieldValue(
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() == asInt(value)
@ -1864,7 +1841,13 @@ func requireCheckFieldValue(
case reflect.Float64:
return field.Float() == asFloat64(value)
case reflect.Slice, reflect.Map, reflect.Array:
case reflect.Slice, reflect.Map:
if value == "nil" {
return field.IsNil()
}
return int64(field.Len()) == asInt(value)
case reflect.Array:
// Arrays can't be nil, so only compare lengths
return int64(field.Len()) == asInt(value)
case reflect.Bool:
@ -2019,8 +2002,11 @@ func excludedWithout(fl FieldLevel) bool {
// requiredWithout is the validation function
// The field under validation must be present and not empty only when any of the other specified fields are not present.
func requiredWithout(fl FieldLevel) bool {
if requireCheckFieldKind(fl, strings.TrimSpace(fl.Param()), true) {
return hasValue(fl)
params := parseOneOfParam2(fl.Param())
for _, param := range params {
if requireCheckFieldKind(fl, param, true) {
return hasValue(fl)
}
}
return true
}
@ -2060,7 +2046,6 @@ func isGteField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() >= currentField.Int()
@ -2078,9 +2063,8 @@ func isGteField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Convert(timeType).Interface().(time.Time)
fieldTime := field.Convert(timeType).Interface().(time.Time)
t := getValue(currentField.Convert(timeType)).(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
return fieldTime.After(t) || fieldTime.Equal(t)
}
@ -2106,7 +2090,6 @@ func isGtField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() > currentField.Int()
@ -2124,9 +2107,8 @@ func isGtField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Convert(timeType).Interface().(time.Time)
fieldTime := field.Convert(timeType).Interface().(time.Time)
t := getValue(currentField.Convert(timeType)).(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
return fieldTime.After(t)
}
@ -2147,7 +2129,6 @@ func isGte(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
p := asInt(param)
@ -2181,15 +2162,14 @@ func isGte(fl FieldLevel) bool {
case reflect.Struct:
if field.Type().ConvertibleTo(timeType) {
now := time.Now().UTC()
t := field.Convert(timeType).Interface().(time.Time)
t := getValue(field.Convert(timeType)).(time.Time)
return t.After(now) || t.Equal(now)
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isGt is the validation function for validating if the current field's value is greater than the param's value.
@ -2198,7 +2178,6 @@ func isGt(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
p := asInt(param)
@ -2232,11 +2211,11 @@ func isGt(fl FieldLevel) bool {
case reflect.Struct:
if field.Type().ConvertibleTo(timeType) {
return field.Convert(timeType).Interface().(time.Time).After(time.Now().UTC())
return getValue(field.Convert(timeType)).(time.Time).After(time.Now().UTC())
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// hasLengthOf is the validation function for validating if the current field's value is equal to the param's value.
@ -2245,7 +2224,6 @@ func hasLengthOf(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
p := asInt(param)
@ -2277,7 +2255,7 @@ func hasLengthOf(fl FieldLevel) bool {
return field.Float() == p
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// hasMinOf is the validation function for validating if the current field's value is greater than or equal to the param's value.
@ -2296,7 +2274,6 @@ func isLteField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() <= currentField.Int()
@ -2314,9 +2291,8 @@ func isLteField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Convert(timeType).Interface().(time.Time)
fieldTime := field.Convert(timeType).Interface().(time.Time)
t := getValue(currentField.Convert(timeType)).(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
return fieldTime.Before(t) || fieldTime.Equal(t)
}
@ -2342,7 +2318,6 @@ func isLtField(fl FieldLevel) bool {
}
switch kind {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return field.Int() < currentField.Int()
@ -2360,9 +2335,8 @@ func isLtField(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(timeType) && currentField.Type().ConvertibleTo(timeType) {
t := currentField.Convert(timeType).Interface().(time.Time)
fieldTime := field.Convert(timeType).Interface().(time.Time)
t := getValue(currentField.Convert(timeType)).(time.Time)
fieldTime := getValue(field.Convert(timeType)).(time.Time)
return fieldTime.Before(t)
}
@ -2383,7 +2357,6 @@ func isLte(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
p := asInt(param)
@ -2417,15 +2390,14 @@ func isLte(fl FieldLevel) bool {
case reflect.Struct:
if field.Type().ConvertibleTo(timeType) {
now := time.Now().UTC()
t := field.Convert(timeType).Interface().(time.Time)
t := getValue(field.Convert(timeType)).(time.Time)
return t.Before(now) || t.Equal(now)
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isLt is the validation function for validating if the current field's value is less than the param's value.
@ -2434,7 +2406,6 @@ func isLt(fl FieldLevel) bool {
param := fl.Param()
switch field.Kind() {
case reflect.String:
p := asInt(param)
@ -2468,11 +2439,11 @@ func isLt(fl FieldLevel) bool {
case reflect.Struct:
if field.Type().ConvertibleTo(timeType) {
return field.Convert(timeType).Interface().(time.Time).Before(time.Now().UTC())
return getValue(field.Convert(timeType)).(time.Time).Before(time.Now().UTC())
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// hasMaxOf is the validation function for validating if the current field's value is less than or equal to the param's value.
@ -2642,7 +2613,7 @@ func isDir(fl FieldLevel) bool {
return fileInfo.IsDir()
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isDirPath is the validation function for validating if the current field's value is a valid directory.
@ -2699,7 +2670,7 @@ func isDirPath(fl FieldLevel) bool {
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isJSON is the validation function for validating if the current field's value is a valid json string.
@ -2714,12 +2685,12 @@ func isJSON(fl FieldLevel) bool {
fieldType := field.Type()
if fieldType.ConvertibleTo(byteSliceType) {
b := field.Convert(byteSliceType).Interface().([]byte)
b := getValue(field.Convert(byteSliceType)).([]byte)
return json.Valid(b)
}
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isJWT is the validation function for validating if the current field's value is a valid JWT string.
@ -2766,7 +2737,7 @@ func isLowercase(fl FieldLevel) bool {
return field.String() == strings.ToLower(field.String())
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isUppercase is the validation function for validating if the current field's value is an uppercase string.
@ -2780,7 +2751,7 @@ func isUppercase(fl FieldLevel) bool {
return field.String() == strings.ToUpper(field.String())
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isDatetime is the validation function for validating if the current field's value is a valid datetime string.
@ -2794,7 +2765,7 @@ func isDatetime(fl FieldLevel) bool {
return err == nil
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isTimeZone is the validation function for validating if the current field's value is a valid time zone string.
@ -2816,7 +2787,7 @@ func isTimeZone(fl FieldLevel) bool {
return err == nil
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isIso3166Alpha2 is the validation function for validating if the current field's value is a valid iso3166-1 alpha-2 country code.
@ -2860,7 +2831,7 @@ func isIso3166AlphaNumeric(fl FieldLevel) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
code = int(field.Uint() % 1000)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
_, ok := iso3166_1_alpha_numeric[code]
@ -2884,7 +2855,7 @@ func isIso3166AlphaNumericEU(fl FieldLevel) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
code = int(field.Uint() % 1000)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
_, ok := iso3166_1_alpha_numeric_eu[code]
@ -2914,7 +2885,7 @@ func isIso4217Numeric(fl FieldLevel) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
code = int(field.Uint())
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
_, ok := iso4217_numeric[code]
@ -2930,7 +2901,7 @@ func isBCP47LanguageTag(fl FieldLevel) bool {
return err == nil
}
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
// isIsoBicFormat is the validation function for validating if the current field's value is a valid Business Identifier Code (SWIFT code), defined in ISO 9362
@ -3053,7 +3024,7 @@ func hasLuhnChecksum(fl FieldLevel) bool {
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
str = strconv.FormatUint(field.Uint(), 10)
default:
panic(fmt.Sprintf("Bad field type %T", field.Interface()))
panic(fmt.Sprintf("Bad field type %s", field.Type()))
}
size := len(str)
if size < 2 { // there has to be at least one digit that carries a meaning + the checksum
@ -3079,3 +3050,60 @@ func isEIN(fl FieldLevel) bool {
return einRegex().MatchString(field.String())
}
// isValidateFn implements the "validateFn" tag: it invokes a validation
// method on the field's value — the method named by the tag parameter, or
// "Validate" when the parameter is empty — and reports whether that call
// indicated a valid value. Lookup or invocation failures count as invalid.
func isValidateFn(fl FieldLevel) bool {
	const defaultParam = `Validate`

	methodName := cmp.Or(fl.Param(), defaultParam)

	valid, err := tryCallValidateFn(fl.Field(), methodName)
	if err != nil {
		return false
	}
	return valid
}
// Sentinel errors returned (wrapped with call-site context) by
// tryCallValidateFn; callers should match them with errors.Is.
var (
	errMethodNotFound          = errors.New(`method not found`)
	errMethodReturnNoValues    = errors.New(`method returns no values (void)`)
	errMethodReturnInvalidType = errors.New(`method returned invalid type`)
)
// tryCallValidateFn looks up the method named validateFn on field (falling
// back to the pointer receiver when the value is addressable) and invokes it
// with no arguments. A bool result is returned as-is; an error-typed result
// is treated as valid when nil. Any other return shape produces a
// descriptive error wrapping one of the errMethod* sentinels.
func tryCallValidateFn(field reflect.Value, validateFn string) (bool, error) {
	fn := field.MethodByName(validateFn)
	if !fn.IsValid() && field.CanAddr() {
		// The method may be declared on the pointer receiver.
		fn = field.Addr().MethodByName(validateFn)
	}
	if !fn.IsValid() {
		return false, fmt.Errorf("unable to call %q on type %q: %w",
			validateFn, field.Type().String(), errMethodNotFound)
	}

	results := fn.Call(nil)
	if len(results) == 0 {
		return false, fmt.Errorf("unable to use result of method %q on type %q: %w",
			validateFn, field.Type().String(), errMethodReturnNoValues)
	}

	out := results[0]
	switch out.Kind() {
	case reflect.Bool:
		return out.Bool(), nil
	case reflect.Interface:
		// Only the error interface is accepted; nil means the value is valid.
		if out.Type().Implements(reflect.TypeOf((*error)(nil)).Elem()) {
			return out.IsNil(), nil
		}
		return false, fmt.Errorf("unable to use result of method %q on type %q: %w (got interface %v expect error)",
			validateFn, field.Type().String(), errMethodReturnInvalidType, out.Type().String())
	default:
		return false, fmt.Errorf("unable to use result of method %q on type %q: %w (got %v expect error or bool)",
			validateFn, field.Type().String(), errMethodReturnInvalidType, out.Type().String())
	}
}

View File

@ -124,7 +124,6 @@ func (v *Validate) extractStructCache(current reflect.Value, sName string) *cStr
var customName string
for i := 0; i < numFields; i++ {
fld = typ.Field(i)
if !v.privateFieldValidation && !fld.Anonymous && len(fld.PkgPath) > 0 {
@ -191,7 +190,6 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
} else {
next, curr := v.parseFieldTagsRecursive(tagsVal, fieldName, t, true)
current.next, current = next, curr
}
continue
}
@ -210,7 +208,6 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
switch t {
case diveTag:
current.typeof = typeDive
continue
case keysTag:
current.typeof = typeKeys
@ -219,8 +216,6 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
panic(fmt.Sprintf("'%s' tag must be immediately preceded by the '%s' tag", keysTag, diveTag))
}
current.typeof = typeKeys
// need to pass along only keys tag
// need to increment i to skip over the keys tags
b := make([]byte, 0, 64)
@ -228,7 +223,6 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
i++
for ; i < len(tags); i++ {
b = append(b, tags[i]...)
b = append(b, ',')
@ -238,7 +232,6 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
}
current.keys, _ = v.parseFieldTagsRecursive(string(b[:len(b)-1]), fieldName, "", false)
continue
case endKeysTag:
current.typeof = typeEndKeys
@ -256,19 +249,15 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
case omitempty:
current.typeof = typeOmitEmpty
continue
case omitnil:
current.typeof = typeOmitNil
continue
case structOnlyTag:
current.typeof = typeStructOnly
continue
case noStructLevelTag:
current.typeof = typeNoStructLevel
continue
default:
if t == isdefault {

View File

@ -188,7 +188,7 @@ Same as structonly tag except that any struct level validations will not run.
# Omit Empty
Allows conditional validation, for example if a field is not set with
Allows conditional validation, for example, if a field is not set with
a value (Determined by the "required" validator) then other validation
such as min or max won't run, but if a value is set validation will run.
@ -756,6 +756,20 @@ in a field of the struct specified via a parameter.
// For slices of struct:
Usage: unique=field
# ValidateFn
This validates that an object responds to a method that can return error or bool.
By default it expects a method `Validate() error` and checks that the method
does not return an error. Other methods can be specified using two signatures:
If the method returns an error, it checks if the return value is nil.
If the method returns a boolean, it checks if the value is true.
// to use the default method Validate() error
Usage: validateFn
// to use the custom method IsValid() bool (or error)
Usage: validateFn=IsValid
# Alpha Only
This validates that a string value contains ASCII alpha characters only

View File

@ -24,7 +24,6 @@ type InvalidValidationError struct {
// Error returns InvalidValidationError message
func (e *InvalidValidationError) Error() string {
if e.Type == nil {
return "validator: (nil)"
}
@ -41,11 +40,9 @@ type ValidationErrors []FieldError
// All information to create an error message specific to your application is contained within
// the FieldError found within the ValidationErrors array
func (ve ValidationErrors) Error() string {
buff := bytes.NewBufferString("")
for i := 0; i < len(ve); i++ {
buff.WriteString(ve[i].Error())
buff.WriteString("\n")
}
@ -55,7 +52,6 @@ func (ve ValidationErrors) Error() string {
// Translate translates all of the ValidationErrors
func (ve ValidationErrors) Translate(ut ut.Translator) ValidationErrorsTranslations {
trans := make(ValidationErrorsTranslations)
var fe *fieldError
@ -109,22 +105,24 @@ type FieldError interface {
// StructNamespace returns the namespace for the field error, with the field's
// actual name.
//
// eq. "User.FirstName" see Namespace for comparison
// eg. "User.FirstName" see Namespace for comparison
//
// NOTE: this field can be blank when validating a single primitive field
// using validate.Field(...) as there is no way to extract its name
StructNamespace() string
// Field returns the fields name with the tag name taking precedence over the
// Field returns the field's name with the tag name taking precedence over the
// field's actual name.
//
// eq. JSON name "fname"
// `RegisterTagNameFunc` must be registered to get tag value.
//
// eg. JSON name "fname"
// see StructField for comparison
Field() string
// StructField returns the field's actual name from the struct, when able to determine.
//
// eq. "FirstName"
// eg. "FirstName"
// see Field for comparison
StructField() string
@ -204,7 +202,6 @@ func (fe *fieldError) StructNamespace() string {
// Field returns the field's name with the tag name taking precedence over the
// field's actual name.
func (fe *fieldError) Field() string {
return fe.ns[len(fe.ns)-int(fe.fieldLen):]
// // return fe.field
// fld := fe.ns[len(fe.ns)-int(fe.fieldLen):]

View File

@ -107,7 +107,6 @@ func (v *validate) ExtractType(field reflect.Value) (reflect.Value, reflect.Kind
// ReportError reports an error just by passing the field and tag information
func (v *validate) ReportError(field interface{}, fieldName, structFieldName, tag, param string) {
fv, kind, _ := v.extractTypeInternal(reflect.ValueOf(field), false)
if len(structFieldName) == 0 {
@ -123,7 +122,6 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
}
if kind == reflect.Invalid {
v.errs = append(v.errs,
&fieldError{
v: v.v,
@ -149,7 +147,7 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
structNs: v.str2,
fieldLen: uint8(len(fieldName)),
structfieldLen: uint8(len(structFieldName)),
value: fv.Interface(),
value: getValue(fv),
param: param,
kind: kind,
typ: fv.Type(),
@ -161,11 +159,9 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
//
// NOTE: this function prepends the current namespace to the relative ones.
func (v *validate) ReportValidationErrors(relativeNamespace, relativeStructNamespace string, errs ValidationErrors) {
var err *fieldError
for i := 0; i < len(errs); i++ {
err = errs[i].(*fieldError)
err.ns = string(append(append(v.ns, relativeNamespace...), err.ns...))
err.structNs = string(append(append(v.actualNs, relativeStructNamespace...), err.structNs...))

View File

@ -13,7 +13,6 @@ import (
// It will dive into pointers, customTypes and return you the
// underlying value and it's kind.
func (v *validate) extractTypeInternal(current reflect.Value, nullable bool) (reflect.Value, reflect.Kind, bool) {
BEGIN:
switch current.Kind() {
case reflect.Ptr:
@ -44,7 +43,6 @@ BEGIN:
default:
if v.v.hasCustomFuncs {
if fn, ok := v.v.customFuncs[current.Type()]; ok {
current = reflect.ValueOf(fn(current))
goto BEGIN
@ -61,7 +59,6 @@ BEGIN:
// NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field
// could not be retrieved because it didn't exist.
func (v *validate) getStructFieldOKInternal(val reflect.Value, namespace string) (current reflect.Value, kind reflect.Kind, nullable bool, found bool) {
BEGIN:
current, kind, nullable = v.ExtractType(val)
if kind == reflect.Invalid {
@ -74,7 +71,6 @@ BEGIN:
}
switch kind {
case reflect.Ptr, reflect.Interface:
return
@ -85,7 +81,6 @@ BEGIN:
var ns string
if !typ.ConvertibleTo(timeType) {
idx := strings.Index(namespace, namespaceSeparator)
if idx != -1 {
@ -222,7 +217,7 @@ BEGIN:
panic("Invalid field namespace")
}
// asInt returns the parameter as a int64
// asInt returns the parameter as an int64
// or panics if it can't convert
func asInt(param string) int64 {
i, err := strconv.ParseInt(param, 0, 64)
@ -256,7 +251,6 @@ func asIntFromType(t reflect.Type, param string) int64 {
// asUint returns the parameter as a uint64
// or panics if it can't convert
func asUint(param string) uint64 {
i, err := strconv.ParseUint(param, 0, 64)
panicIf(err)
@ -282,7 +276,6 @@ func asFloat32(param string) float64 {
// asBool returns the parameter as a bool
// or panics if it can't convert
func asBool(param string) bool {
i, err := strconv.ParseBool(param)
panicIf(err)
@ -303,7 +296,7 @@ func fieldMatchesRegexByStringerValOrString(regexFn func() *regexp.Regexp, fl Fi
case reflect.String:
return regex.MatchString(fl.Field().String())
default:
if stringer, ok := fl.Field().Interface().(fmt.Stringer); ok {
if stringer, ok := getValue(fl.Field()).(fmt.Stringer); ok {
return regex.MatchString(stringer.String())
} else {
return regex.MatchString(fl.Field().String())

View File

@ -32,14 +32,12 @@ type validate struct {
// parent and current will be the same the first run of validateStruct
func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, current reflect.Value, typ reflect.Type, ns []byte, structNs []byte, ct *cTag) {
cs, ok := v.v.structCache.Get(typ)
if !ok {
cs = v.v.extractStructCache(current, typ.Name())
}
if len(ns) == 0 && len(cs.name) != 0 {
ns = append(ns, cs.name...)
ns = append(ns, '.')
@ -50,21 +48,17 @@ func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, cur
// ct is nil on top level struct, and structs as fields that have no tag info
// so if nil or if not nil and the structonly tag isn't present
if ct == nil || ct.typeof != typeStructOnly {
var f *cField
for i := 0; i < len(cs.fields); i++ {
f = cs.fields[i]
if v.isPartial {
if v.ffn != nil {
// used with StructFiltered
if v.ffn(append(structNs, f.name...)) {
continue
}
} else {
// used with StructPartial & StructExcept
_, ok = v.includeExclude[string(append(structNs, f.name...))]
@ -83,7 +77,6 @@ func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, cur
// first iteration will have no info about nostructlevel tag, and is checked prior to
// calling the next iteration of validateStruct called from traverseField.
if cs.fn != nil {
v.slflParent = parent
v.slCurrent = current
v.ns = ns
@ -267,7 +260,7 @@ OUTER:
return
}
default:
if v.fldIsPointer && field.Interface() == nil {
if v.fldIsPointer && getValue(field) == nil {
return
}
}
@ -291,7 +284,6 @@ OUTER:
reusableCF := &cField{}
for i := 0; i < current.Len(); i++ {
i64 = int64(i)
v.misc = append(v.misc[0:0], cf.name...)
@ -304,7 +296,6 @@ OUTER:
if cf.namesEqual {
reusableCF.altName = reusableCF.name
} else {
v.misc = append(v.misc[0:0], cf.altName...)
v.misc = append(v.misc, '[')
v.misc = strconv.AppendInt(v.misc, i64, 10)
@ -321,8 +312,7 @@ OUTER:
reusableCF := &cField{}
for _, key := range current.MapKeys() {
pv = fmt.Sprintf("%v", key.Interface())
pv = fmt.Sprintf("%v", key)
v.misc = append(v.misc[0:0], cf.name...)
v.misc = append(v.misc, '[')
@ -347,6 +337,18 @@ OUTER:
// can be nil when just keys being validated
if ct.next != nil {
v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct.next)
} else {
// Struct fallback when map values are structs
val := current.MapIndex(key)
switch val.Kind() {
case reflect.Ptr:
if val.Elem().Kind() == reflect.Struct {
// Dive into the struct so its own tags run
v.traverseField(ctx, parent, val, ns, structNs, reusableCF, nil)
}
case reflect.Struct:
v.traverseField(ctx, parent, val, ns, structNs, reusableCF, nil)
}
}
} else {
v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct)
@ -366,7 +368,6 @@ OUTER:
v.misc = v.misc[0:0]
for {
// set Field Level fields
v.slflParent = parent
v.flField = current
@ -381,7 +382,6 @@ OUTER:
// drain rest of the 'or' values, then continue or leave
for {
ct = ct.next
if ct == nil {
@ -418,7 +418,6 @@ OUTER:
}
if ct.hasAlias {
v.errs = append(v.errs,
&fieldError{
v: v.v,
@ -434,9 +433,7 @@ OUTER:
typ: typ,
},
)
} else {
tVal := string(v.misc)[1:]
v.errs = append(v.errs,
@ -500,7 +497,6 @@ OUTER:
ct = ct.next
}
}
}
func getValue(val reflect.Value) interface{} {

View File

@ -104,7 +104,6 @@ type Validate struct {
// in essence only parsing your validation tags once per struct type.
// Using multiple instances neglects the benefit of caching.
func New(options ...Option) *Validate {
tc := new(tagCache)
tc.m.Store(make(map[string]*cTag))
@ -126,7 +125,6 @@ func New(options ...Option) *Validate {
// must copy validators for separate validations to be used in each instance
for k, val := range bakedInValidators {
switch k {
// these require that even if the value is nil that the validation should run, omitempty still overrides this behaviour
case requiredIfTag, requiredUnlessTag, requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag,
@ -233,30 +231,12 @@ func (v *Validate) RegisterValidationCtx(tag string, fn FuncCtx, callValidationE
return v.registerValidation(tag, fn, false, nilCheckable)
}
// registerValidation stores fn under tag in the validator's lookup table.
// bakedIn bypasses the restricted-tag guard for built-in validators;
// nilCheckable records that fn should run even when the value is nil
// (stored in runValidationOnNil).
// It returns an error for an empty tag or nil fn, and panics when a
// non-baked-in registration uses a restricted tag or restricted characters.
func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error {
	if len(tag) == 0 {
		return errors.New("function Key cannot be empty")
	}
	if fn == nil {
		return errors.New("function cannot be empty")
	}
	// Restricted tags are reserved for the library itself; user code may
	// not register over them (panic mirrors the RegisterAlias behaviour).
	_, ok := restrictedTags[tag]
	if !bakedIn && (ok || strings.ContainsAny(tag, restrictedTagChars)) {
		panic(fmt.Sprintf(restrictedTagErr, tag))
	}
	v.validations[tag] = internalValidationFuncWrapper{fn: fn, runValidationOnNil: nilCheckable}
	return nil
}
// RegisterAlias registers a mapping of a single validation tag that
// defines a common or complex set of validation(s) to simplify adding validation
// to structs.
//
// NOTE: this function is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterAlias(alias, tags string) {
_, ok := restrictedTags[alias]
if ok || strings.ContainsAny(alias, restrictedTagChars) {
@ -280,7 +260,6 @@ func (v *Validate) RegisterStructValidation(fn StructLevelFunc, types ...interfa
// NOTE:
// - this method is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterStructValidationCtx(fn StructLevelFuncCtx, types ...interface{}) {
if v.structLevelFuncs == nil {
v.structLevelFuncs = make(map[reflect.Type]StructLevelFuncCtx)
}
@ -327,7 +306,6 @@ func (v *Validate) RegisterStructValidationMapRules(rules map[string]string, typ
//
// NOTE: this method is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{}) {
if v.customFuncs == nil {
v.customFuncs = make(map[reflect.Type]CustomTypeFunc)
}
@ -341,7 +319,6 @@ func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{
// RegisterTranslation registers translations against the provided tag.
func (v *Validate) RegisterTranslation(tag string, trans ut.Translator, registerFn RegisterTranslationsFunc, translationFn TranslationFunc) (err error) {
if v.transTagFunc == nil {
v.transTagFunc = make(map[ut.Translator]map[string]TranslationFunc)
}
@ -375,7 +352,6 @@ func (v *Validate) Struct(s interface{}) error {
// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
func (v *Validate) StructCtx(ctx context.Context, s interface{}) (err error) {
val := reflect.ValueOf(s)
top := val
@ -492,10 +468,8 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
name := typ.Name()
for _, k := range fields {
flds := strings.Split(k, namespaceSeparator)
if len(flds) > 0 {
vd.misc = append(vd.misc[0:0], name...)
// Don't append empty name for unnamed structs
if len(vd.misc) != 0 {
@ -503,7 +477,6 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
}
for _, s := range flds {
idx := strings.Index(s, leftBracket)
if idx != -1 {
@ -519,7 +492,6 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
idx = strings.Index(s, leftBracket)
}
} else {
vd.misc = append(vd.misc, s...)
vd.includeExclude[string(vd.misc)] = struct{}{}
}
@ -582,7 +554,6 @@ func (v *Validate) StructExceptCtx(ctx context.Context, s interface{}, fields ..
name := typ.Name()
for _, key := range fields {
vd.misc = vd.misc[0:0]
if len(name) > 0 {
@ -709,3 +680,20 @@ func (v *Validate) VarWithValueCtx(ctx context.Context, field interface{}, other
v.pool.Put(vd)
return
}
// registerValidation adds fn to the tag → validation-function map.
// bakedIn exempts built-in validators from the restricted-tag guard;
// nilCheckable marks fn as runnable even for nil values.
// An empty tag or nil fn yields an error; registering a restricted tag
// (or one containing restricted characters) from user code panics.
func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error {
	switch {
	case len(tag) == 0:
		return errors.New("function Key cannot be empty")
	case fn == nil:
		return errors.New("function cannot be empty")
	}

	// Reserved tags may only be (re)registered by the library itself.
	_, restricted := restrictedTags[tag]
	if !bakedIn && (restricted || strings.ContainsAny(tag, restrictedTagChars)) {
		panic(fmt.Sprintf(restrictedTagErr, tag))
	}

	v.validations[tag] = internalValidationFuncWrapper{fn: fn, runValidationOnNil: nilCheckable}
	return nil
}

View File

@ -285,6 +285,7 @@ Exit Code 1
| AMXCOMPLEX | Tile computational operations on complex numbers |
| AMXTILE | Tile architecture |
| AMXTF32 | Matrix Multiplication of TF32 Tiles into Packed Single Precision Tile |
| AMXTRANSPOSE | Tile multiply where the first operand is transposed |
| APX_F | Intel APX |
| AVX | AVX functions |
| AVX10 | If set the Intel AVX10 Converged Vector ISA is supported |
@ -420,6 +421,8 @@ Exit Code 1
| SHA | Intel SHA Extensions |
| SME | AMD Secure Memory Encryption supported |
| SME_COHERENT | AMD Hardware cache coherency across encryption domains enforced |
| SM3_X86 | SM3 instructions |
| SM4_X86 | SM4 instructions |
| SPEC_CTRL_SSBD | Speculative Store Bypass Disable |
| SRBDS_CTRL | SRBDS mitigation MSR available |
| SSE | SSE functions |

View File

@ -85,6 +85,7 @@ const (
AMXTILE // Tile architecture
AMXTF32 // Matrix Multiplication of TF32 Tiles into Packed Single Precision Tile
AMXCOMPLEX // Tile computational operations on complex numbers
AMXTRANSPOSE // Tile multiply where the first operand is transposed
APX_F // Intel APX
AVX // AVX functions
AVX10 // If set the Intel AVX10 Converged Vector ISA is supported
@ -222,6 +223,8 @@ const (
SHA // Intel SHA Extensions
SME // AMD Secure Memory Encryption supported
SME_COHERENT // AMD Hardware cache coherency across encryption domains enforced
SM3_X86 // SM3 instructions
SM4_X86 // SM4 instructions
SPEC_CTRL_SSBD // Speculative Store Bypass Disable
SRBDS_CTRL // SRBDS mitigation MSR available
SRSO_MSR_FIX // Indicates that software may use MSR BP_CFG[BpSpecReduce] to mitigate SRSO.
@ -283,7 +286,7 @@ const (
CRC32 // CRC32/CRC32C instructions
DCPOP // Data cache clean to Point of Persistence (DC CVAP)
EVTSTRM // Generic timer
FCMA // Floatin point complex number addition and multiplication
FCMA // Floating point complex number addition and multiplication
FHM // FMLAL and FMLSL instructions
FP // Single-precision and double-precision floating point
FPHP // Half-precision floating point
@ -878,7 +881,12 @@ func physicalCores() int {
v, _ := vendorID()
switch v {
case Intel:
return logicalCores() / threadsPerCore()
lc := logicalCores()
tpc := threadsPerCore()
if lc > 0 && tpc > 0 {
return lc / tpc
}
return 0
case AMD, Hygon:
lc := logicalCores()
tpc := threadsPerCore()
@ -1279,6 +1287,8 @@ func support() flagSet {
// CPUID.(EAX=7, ECX=1).EAX
eax1, _, _, edx1 := cpuidex(7, 1)
fs.setIf(fs.inSet(AVX) && eax1&(1<<4) != 0, AVXVNNI)
fs.setIf(eax1&(1<<1) != 0, SM3_X86)
fs.setIf(eax1&(1<<2) != 0, SM4_X86)
fs.setIf(eax1&(1<<7) != 0, CMPCCXADD)
fs.setIf(eax1&(1<<10) != 0, MOVSB_ZL)
fs.setIf(eax1&(1<<11) != 0, STOSB_SHORT)
@ -1290,6 +1300,7 @@ func support() flagSet {
// CPUID.(EAX=7, ECX=1).EDX
fs.setIf(edx1&(1<<4) != 0, AVXVNNIINT8)
fs.setIf(edx1&(1<<5) != 0, AVXNECONVERT)
fs.setIf(edx1&(1<<6) != 0, AMXTRANSPOSE)
fs.setIf(edx1&(1<<7) != 0, AMXTF32)
fs.setIf(edx1&(1<<8) != 0, AMXCOMPLEX)
fs.setIf(edx1&(1<<10) != 0, AVXVNNIINT16)

View File

@ -19,227 +19,230 @@ func _() {
_ = x[AMXTILE-9]
_ = x[AMXTF32-10]
_ = x[AMXCOMPLEX-11]
_ = x[APX_F-12]
_ = x[AVX-13]
_ = x[AVX10-14]
_ = x[AVX10_128-15]
_ = x[AVX10_256-16]
_ = x[AVX10_512-17]
_ = x[AVX2-18]
_ = x[AVX512BF16-19]
_ = x[AVX512BITALG-20]
_ = x[AVX512BW-21]
_ = x[AVX512CD-22]
_ = x[AVX512DQ-23]
_ = x[AVX512ER-24]
_ = x[AVX512F-25]
_ = x[AVX512FP16-26]
_ = x[AVX512IFMA-27]
_ = x[AVX512PF-28]
_ = x[AVX512VBMI-29]
_ = x[AVX512VBMI2-30]
_ = x[AVX512VL-31]
_ = x[AVX512VNNI-32]
_ = x[AVX512VP2INTERSECT-33]
_ = x[AVX512VPOPCNTDQ-34]
_ = x[AVXIFMA-35]
_ = x[AVXNECONVERT-36]
_ = x[AVXSLOW-37]
_ = x[AVXVNNI-38]
_ = x[AVXVNNIINT8-39]
_ = x[AVXVNNIINT16-40]
_ = x[BHI_CTRL-41]
_ = x[BMI1-42]
_ = x[BMI2-43]
_ = x[CETIBT-44]
_ = x[CETSS-45]
_ = x[CLDEMOTE-46]
_ = x[CLMUL-47]
_ = x[CLZERO-48]
_ = x[CMOV-49]
_ = x[CMPCCXADD-50]
_ = x[CMPSB_SCADBS_SHORT-51]
_ = x[CMPXCHG8-52]
_ = x[CPBOOST-53]
_ = x[CPPC-54]
_ = x[CX16-55]
_ = x[EFER_LMSLE_UNS-56]
_ = x[ENQCMD-57]
_ = x[ERMS-58]
_ = x[F16C-59]
_ = x[FLUSH_L1D-60]
_ = x[FMA3-61]
_ = x[FMA4-62]
_ = x[FP128-63]
_ = x[FP256-64]
_ = x[FSRM-65]
_ = x[FXSR-66]
_ = x[FXSROPT-67]
_ = x[GFNI-68]
_ = x[HLE-69]
_ = x[HRESET-70]
_ = x[HTT-71]
_ = x[HWA-72]
_ = x[HYBRID_CPU-73]
_ = x[HYPERVISOR-74]
_ = x[IA32_ARCH_CAP-75]
_ = x[IA32_CORE_CAP-76]
_ = x[IBPB-77]
_ = x[IBPB_BRTYPE-78]
_ = x[IBRS-79]
_ = x[IBRS_PREFERRED-80]
_ = x[IBRS_PROVIDES_SMP-81]
_ = x[IBS-82]
_ = x[IBSBRNTRGT-83]
_ = x[IBSFETCHSAM-84]
_ = x[IBSFFV-85]
_ = x[IBSOPCNT-86]
_ = x[IBSOPCNTEXT-87]
_ = x[IBSOPSAM-88]
_ = x[IBSRDWROPCNT-89]
_ = x[IBSRIPINVALIDCHK-90]
_ = x[IBS_FETCH_CTLX-91]
_ = x[IBS_OPDATA4-92]
_ = x[IBS_OPFUSE-93]
_ = x[IBS_PREVENTHOST-94]
_ = x[IBS_ZEN4-95]
_ = x[IDPRED_CTRL-96]
_ = x[INT_WBINVD-97]
_ = x[INVLPGB-98]
_ = x[KEYLOCKER-99]
_ = x[KEYLOCKERW-100]
_ = x[LAHF-101]
_ = x[LAM-102]
_ = x[LBRVIRT-103]
_ = x[LZCNT-104]
_ = x[MCAOVERFLOW-105]
_ = x[MCDT_NO-106]
_ = x[MCOMMIT-107]
_ = x[MD_CLEAR-108]
_ = x[MMX-109]
_ = x[MMXEXT-110]
_ = x[MOVBE-111]
_ = x[MOVDIR64B-112]
_ = x[MOVDIRI-113]
_ = x[MOVSB_ZL-114]
_ = x[MOVU-115]
_ = x[MPX-116]
_ = x[MSRIRC-117]
_ = x[MSRLIST-118]
_ = x[MSR_PAGEFLUSH-119]
_ = x[NRIPS-120]
_ = x[NX-121]
_ = x[OSXSAVE-122]
_ = x[PCONFIG-123]
_ = x[POPCNT-124]
_ = x[PPIN-125]
_ = x[PREFETCHI-126]
_ = x[PSFD-127]
_ = x[RDPRU-128]
_ = x[RDRAND-129]
_ = x[RDSEED-130]
_ = x[RDTSCP-131]
_ = x[RRSBA_CTRL-132]
_ = x[RTM-133]
_ = x[RTM_ALWAYS_ABORT-134]
_ = x[SBPB-135]
_ = x[SERIALIZE-136]
_ = x[SEV-137]
_ = x[SEV_64BIT-138]
_ = x[SEV_ALTERNATIVE-139]
_ = x[SEV_DEBUGSWAP-140]
_ = x[SEV_ES-141]
_ = x[SEV_RESTRICTED-142]
_ = x[SEV_SNP-143]
_ = x[SGX-144]
_ = x[SGXLC-145]
_ = x[SHA-146]
_ = x[SME-147]
_ = x[SME_COHERENT-148]
_ = x[SPEC_CTRL_SSBD-149]
_ = x[SRBDS_CTRL-150]
_ = x[SRSO_MSR_FIX-151]
_ = x[SRSO_NO-152]
_ = x[SRSO_USER_KERNEL_NO-153]
_ = x[SSE-154]
_ = x[SSE2-155]
_ = x[SSE3-156]
_ = x[SSE4-157]
_ = x[SSE42-158]
_ = x[SSE4A-159]
_ = x[SSSE3-160]
_ = x[STIBP-161]
_ = x[STIBP_ALWAYSON-162]
_ = x[STOSB_SHORT-163]
_ = x[SUCCOR-164]
_ = x[SVM-165]
_ = x[SVMDA-166]
_ = x[SVMFBASID-167]
_ = x[SVML-168]
_ = x[SVMNP-169]
_ = x[SVMPF-170]
_ = x[SVMPFT-171]
_ = x[SYSCALL-172]
_ = x[SYSEE-173]
_ = x[TBM-174]
_ = x[TDX_GUEST-175]
_ = x[TLB_FLUSH_NESTED-176]
_ = x[TME-177]
_ = x[TOPEXT-178]
_ = x[TSCRATEMSR-179]
_ = x[TSXLDTRK-180]
_ = x[VAES-181]
_ = x[VMCBCLEAN-182]
_ = x[VMPL-183]
_ = x[VMSA_REGPROT-184]
_ = x[VMX-185]
_ = x[VPCLMULQDQ-186]
_ = x[VTE-187]
_ = x[WAITPKG-188]
_ = x[WBNOINVD-189]
_ = x[WRMSRNS-190]
_ = x[X87-191]
_ = x[XGETBV1-192]
_ = x[XOP-193]
_ = x[XSAVE-194]
_ = x[XSAVEC-195]
_ = x[XSAVEOPT-196]
_ = x[XSAVES-197]
_ = x[AESARM-198]
_ = x[ARMCPUID-199]
_ = x[ASIMD-200]
_ = x[ASIMDDP-201]
_ = x[ASIMDHP-202]
_ = x[ASIMDRDM-203]
_ = x[ATOMICS-204]
_ = x[CRC32-205]
_ = x[DCPOP-206]
_ = x[EVTSTRM-207]
_ = x[FCMA-208]
_ = x[FHM-209]
_ = x[FP-210]
_ = x[FPHP-211]
_ = x[GPA-212]
_ = x[JSCVT-213]
_ = x[LRCPC-214]
_ = x[PMULL-215]
_ = x[RNDR-216]
_ = x[TLB-217]
_ = x[TS-218]
_ = x[SHA1-219]
_ = x[SHA2-220]
_ = x[SHA3-221]
_ = x[SHA512-222]
_ = x[SM3-223]
_ = x[SM4-224]
_ = x[SVE-225]
_ = x[lastID-226]
_ = x[AMXTRANSPOSE-12]
_ = x[APX_F-13]
_ = x[AVX-14]
_ = x[AVX10-15]
_ = x[AVX10_128-16]
_ = x[AVX10_256-17]
_ = x[AVX10_512-18]
_ = x[AVX2-19]
_ = x[AVX512BF16-20]
_ = x[AVX512BITALG-21]
_ = x[AVX512BW-22]
_ = x[AVX512CD-23]
_ = x[AVX512DQ-24]
_ = x[AVX512ER-25]
_ = x[AVX512F-26]
_ = x[AVX512FP16-27]
_ = x[AVX512IFMA-28]
_ = x[AVX512PF-29]
_ = x[AVX512VBMI-30]
_ = x[AVX512VBMI2-31]
_ = x[AVX512VL-32]
_ = x[AVX512VNNI-33]
_ = x[AVX512VP2INTERSECT-34]
_ = x[AVX512VPOPCNTDQ-35]
_ = x[AVXIFMA-36]
_ = x[AVXNECONVERT-37]
_ = x[AVXSLOW-38]
_ = x[AVXVNNI-39]
_ = x[AVXVNNIINT8-40]
_ = x[AVXVNNIINT16-41]
_ = x[BHI_CTRL-42]
_ = x[BMI1-43]
_ = x[BMI2-44]
_ = x[CETIBT-45]
_ = x[CETSS-46]
_ = x[CLDEMOTE-47]
_ = x[CLMUL-48]
_ = x[CLZERO-49]
_ = x[CMOV-50]
_ = x[CMPCCXADD-51]
_ = x[CMPSB_SCADBS_SHORT-52]
_ = x[CMPXCHG8-53]
_ = x[CPBOOST-54]
_ = x[CPPC-55]
_ = x[CX16-56]
_ = x[EFER_LMSLE_UNS-57]
_ = x[ENQCMD-58]
_ = x[ERMS-59]
_ = x[F16C-60]
_ = x[FLUSH_L1D-61]
_ = x[FMA3-62]
_ = x[FMA4-63]
_ = x[FP128-64]
_ = x[FP256-65]
_ = x[FSRM-66]
_ = x[FXSR-67]
_ = x[FXSROPT-68]
_ = x[GFNI-69]
_ = x[HLE-70]
_ = x[HRESET-71]
_ = x[HTT-72]
_ = x[HWA-73]
_ = x[HYBRID_CPU-74]
_ = x[HYPERVISOR-75]
_ = x[IA32_ARCH_CAP-76]
_ = x[IA32_CORE_CAP-77]
_ = x[IBPB-78]
_ = x[IBPB_BRTYPE-79]
_ = x[IBRS-80]
_ = x[IBRS_PREFERRED-81]
_ = x[IBRS_PROVIDES_SMP-82]
_ = x[IBS-83]
_ = x[IBSBRNTRGT-84]
_ = x[IBSFETCHSAM-85]
_ = x[IBSFFV-86]
_ = x[IBSOPCNT-87]
_ = x[IBSOPCNTEXT-88]
_ = x[IBSOPSAM-89]
_ = x[IBSRDWROPCNT-90]
_ = x[IBSRIPINVALIDCHK-91]
_ = x[IBS_FETCH_CTLX-92]
_ = x[IBS_OPDATA4-93]
_ = x[IBS_OPFUSE-94]
_ = x[IBS_PREVENTHOST-95]
_ = x[IBS_ZEN4-96]
_ = x[IDPRED_CTRL-97]
_ = x[INT_WBINVD-98]
_ = x[INVLPGB-99]
_ = x[KEYLOCKER-100]
_ = x[KEYLOCKERW-101]
_ = x[LAHF-102]
_ = x[LAM-103]
_ = x[LBRVIRT-104]
_ = x[LZCNT-105]
_ = x[MCAOVERFLOW-106]
_ = x[MCDT_NO-107]
_ = x[MCOMMIT-108]
_ = x[MD_CLEAR-109]
_ = x[MMX-110]
_ = x[MMXEXT-111]
_ = x[MOVBE-112]
_ = x[MOVDIR64B-113]
_ = x[MOVDIRI-114]
_ = x[MOVSB_ZL-115]
_ = x[MOVU-116]
_ = x[MPX-117]
_ = x[MSRIRC-118]
_ = x[MSRLIST-119]
_ = x[MSR_PAGEFLUSH-120]
_ = x[NRIPS-121]
_ = x[NX-122]
_ = x[OSXSAVE-123]
_ = x[PCONFIG-124]
_ = x[POPCNT-125]
_ = x[PPIN-126]
_ = x[PREFETCHI-127]
_ = x[PSFD-128]
_ = x[RDPRU-129]
_ = x[RDRAND-130]
_ = x[RDSEED-131]
_ = x[RDTSCP-132]
_ = x[RRSBA_CTRL-133]
_ = x[RTM-134]
_ = x[RTM_ALWAYS_ABORT-135]
_ = x[SBPB-136]
_ = x[SERIALIZE-137]
_ = x[SEV-138]
_ = x[SEV_64BIT-139]
_ = x[SEV_ALTERNATIVE-140]
_ = x[SEV_DEBUGSWAP-141]
_ = x[SEV_ES-142]
_ = x[SEV_RESTRICTED-143]
_ = x[SEV_SNP-144]
_ = x[SGX-145]
_ = x[SGXLC-146]
_ = x[SHA-147]
_ = x[SME-148]
_ = x[SME_COHERENT-149]
_ = x[SM3_X86-150]
_ = x[SM4_X86-151]
_ = x[SPEC_CTRL_SSBD-152]
_ = x[SRBDS_CTRL-153]
_ = x[SRSO_MSR_FIX-154]
_ = x[SRSO_NO-155]
_ = x[SRSO_USER_KERNEL_NO-156]
_ = x[SSE-157]
_ = x[SSE2-158]
_ = x[SSE3-159]
_ = x[SSE4-160]
_ = x[SSE42-161]
_ = x[SSE4A-162]
_ = x[SSSE3-163]
_ = x[STIBP-164]
_ = x[STIBP_ALWAYSON-165]
_ = x[STOSB_SHORT-166]
_ = x[SUCCOR-167]
_ = x[SVM-168]
_ = x[SVMDA-169]
_ = x[SVMFBASID-170]
_ = x[SVML-171]
_ = x[SVMNP-172]
_ = x[SVMPF-173]
_ = x[SVMPFT-174]
_ = x[SYSCALL-175]
_ = x[SYSEE-176]
_ = x[TBM-177]
_ = x[TDX_GUEST-178]
_ = x[TLB_FLUSH_NESTED-179]
_ = x[TME-180]
_ = x[TOPEXT-181]
_ = x[TSCRATEMSR-182]
_ = x[TSXLDTRK-183]
_ = x[VAES-184]
_ = x[VMCBCLEAN-185]
_ = x[VMPL-186]
_ = x[VMSA_REGPROT-187]
_ = x[VMX-188]
_ = x[VPCLMULQDQ-189]
_ = x[VTE-190]
_ = x[WAITPKG-191]
_ = x[WBNOINVD-192]
_ = x[WRMSRNS-193]
_ = x[X87-194]
_ = x[XGETBV1-195]
_ = x[XOP-196]
_ = x[XSAVE-197]
_ = x[XSAVEC-198]
_ = x[XSAVEOPT-199]
_ = x[XSAVES-200]
_ = x[AESARM-201]
_ = x[ARMCPUID-202]
_ = x[ASIMD-203]
_ = x[ASIMDDP-204]
_ = x[ASIMDHP-205]
_ = x[ASIMDRDM-206]
_ = x[ATOMICS-207]
_ = x[CRC32-208]
_ = x[DCPOP-209]
_ = x[EVTSTRM-210]
_ = x[FCMA-211]
_ = x[FHM-212]
_ = x[FP-213]
_ = x[FPHP-214]
_ = x[GPA-215]
_ = x[JSCVT-216]
_ = x[LRCPC-217]
_ = x[PMULL-218]
_ = x[RNDR-219]
_ = x[TLB-220]
_ = x[TS-221]
_ = x[SHA1-222]
_ = x[SHA2-223]
_ = x[SHA3-224]
_ = x[SHA512-225]
_ = x[SM3-226]
_ = x[SM4-227]
_ = x[SVE-228]
_ = x[lastID-229]
_ = x[firstID-0]
}
const _FeatureID_name = "firstIDADXAESNIAMD3DNOWAMD3DNOWEXTAMXBF16AMXFP16AMXINT8AMXFP8AMXTILEAMXTF32AMXCOMPLEXAPX_FAVXAVX10AVX10_128AVX10_256AVX10_512AVX2AVX512BF16AVX512BITALGAVX512BWAVX512CDAVX512DQAVX512ERAVX512FAVX512FP16AVX512IFMAAVX512PFAVX512VBMIAVX512VBMI2AVX512VLAVX512VNNIAVX512VP2INTERSECTAVX512VPOPCNTDQAVXIFMAAVXNECONVERTAVXSLOWAVXVNNIAVXVNNIINT8AVXVNNIINT16BHI_CTRLBMI1BMI2CETIBTCETSSCLDEMOTECLMULCLZEROCMOVCMPCCXADDCMPSB_SCADBS_SHORTCMPXCHG8CPBOOSTCPPCCX16EFER_LMSLE_UNSENQCMDERMSF16CFLUSH_L1DFMA3FMA4FP128FP256FSRMFXSRFXSROPTGFNIHLEHRESETHTTHWAHYBRID_CPUHYPERVISORIA32_ARCH_CAPIA32_CORE_CAPIBPBIBPB_BRTYPEIBRSIBRS_PREFERREDIBRS_PROVIDES_SMPIBSIBSBRNTRGTIBSFETCHSAMIBSFFVIBSOPCNTIBSOPCNTEXTIBSOPSAMIBSRDWROPCNTIBSRIPINVALIDCHKIBS_FETCH_CTLXIBS_OPDATA4IBS_OPFUSEIBS_PREVENTHOSTIBS_ZEN4IDPRED_CTRLINT_WBINVDINVLPGBKEYLOCKERKEYLOCKERWLAHFLAMLBRVIRTLZCNTMCAOVERFLOWMCDT_NOMCOMMITMD_CLEARMMXMMXEXTMOVBEMOVDIR64BMOVDIRIMOVSB_ZLMOVUMPXMSRIRCMSRLISTMSR_PAGEFLUSHNRIPSNXOSXSAVEPCONFIGPOPCNTPPINPREFETCHIPSFDRDPRURDRANDRDSEEDRDTSCPRRSBA_CTRLRTMRTM_ALWAYS_ABORTSBPBSERIALIZESEVSEV_64BITSEV_ALTERNATIVESEV_DEBUGSWAPSEV_ESSEV_RESTRICTEDSEV_SNPSGXSGXLCSHASMESME_COHERENTSPEC_CTRL_SSBDSRBDS_CTRLSRSO_MSR_FIXSRSO_NOSRSO_USER_KERNEL_NOSSESSE2SSE3SSE4SSE42SSE4ASSSE3STIBPSTIBP_ALWAYSONSTOSB_SHORTSUCCORSVMSVMDASVMFBASIDSVMLSVMNPSVMPFSVMPFTSYSCALLSYSEETBMTDX_GUESTTLB_FLUSH_NESTEDTMETOPEXTTSCRATEMSRTSXLDTRKVAESVMCBCLEANVMPLVMSA_REGPROTVMXVPCLMULQDQVTEWAITPKGWBNOINVDWRMSRNSX87XGETBV1XOPXSAVEXSAVECXSAVEOPTXSAVESAESARMARMCPUIDASIMDASIMDDPASIMDHPASIMDRDMATOMICSCRC32DCPOPEVTSTRMFCMAFHMFPFPHPGPAJSCVTLRCPCPMULLRNDRTLBTSSHA1SHA2SHA3SHA512SM3SM4SVElastID"
const _FeatureID_name = "firstIDADXAESNIAMD3DNOWAMD3DNOWEXTAMXBF16AMXFP16AMXINT8AMXFP8AMXTILEAMXTF32AMXCOMPLEXAMXTRANSPOSEAPX_FAVXAVX10AVX10_128AVX10_256AVX10_512AVX2AVX512BF16AVX512BITALGAVX512BWAVX512CDAVX512DQAVX512ERAVX512FAVX512FP16AVX512IFMAAVX512PFAVX512VBMIAVX512VBMI2AVX512VLAVX512VNNIAVX512VP2INTERSECTAVX512VPOPCNTDQAVXIFMAAVXNECONVERTAVXSLOWAVXVNNIAVXVNNIINT8AVXVNNIINT16BHI_CTRLBMI1BMI2CETIBTCETSSCLDEMOTECLMULCLZEROCMOVCMPCCXADDCMPSB_SCADBS_SHORTCMPXCHG8CPBOOSTCPPCCX16EFER_LMSLE_UNSENQCMDERMSF16CFLUSH_L1DFMA3FMA4FP128FP256FSRMFXSRFXSROPTGFNIHLEHRESETHTTHWAHYBRID_CPUHYPERVISORIA32_ARCH_CAPIA32_CORE_CAPIBPBIBPB_BRTYPEIBRSIBRS_PREFERREDIBRS_PROVIDES_SMPIBSIBSBRNTRGTIBSFETCHSAMIBSFFVIBSOPCNTIBSOPCNTEXTIBSOPSAMIBSRDWROPCNTIBSRIPINVALIDCHKIBS_FETCH_CTLXIBS_OPDATA4IBS_OPFUSEIBS_PREVENTHOSTIBS_ZEN4IDPRED_CTRLINT_WBINVDINVLPGBKEYLOCKERKEYLOCKERWLAHFLAMLBRVIRTLZCNTMCAOVERFLOWMCDT_NOMCOMMITMD_CLEARMMXMMXEXTMOVBEMOVDIR64BMOVDIRIMOVSB_ZLMOVUMPXMSRIRCMSRLISTMSR_PAGEFLUSHNRIPSNXOSXSAVEPCONFIGPOPCNTPPINPREFETCHIPSFDRDPRURDRANDRDSEEDRDTSCPRRSBA_CTRLRTMRTM_ALWAYS_ABORTSBPBSERIALIZESEVSEV_64BITSEV_ALTERNATIVESEV_DEBUGSWAPSEV_ESSEV_RESTRICTEDSEV_SNPSGXSGXLCSHASMESME_COHERENTSM3_X86SM4_X86SPEC_CTRL_SSBDSRBDS_CTRLSRSO_MSR_FIXSRSO_NOSRSO_USER_KERNEL_NOSSESSE2SSE3SSE4SSE42SSE4ASSSE3STIBPSTIBP_ALWAYSONSTOSB_SHORTSUCCORSVMSVMDASVMFBASIDSVMLSVMNPSVMPFSVMPFTSYSCALLSYSEETBMTDX_GUESTTLB_FLUSH_NESTEDTMETOPEXTTSCRATEMSRTSXLDTRKVAESVMCBCLEANVMPLVMSA_REGPROTVMXVPCLMULQDQVTEWAITPKGWBNOINVDWRMSRNSX87XGETBV1XOPXSAVEXSAVECXSAVEOPTXSAVESAESARMARMCPUIDASIMDASIMDDPASIMDHPASIMDRDMATOMICSCRC32DCPOPEVTSTRMFCMAFHMFPFPHPGPAJSCVTLRCPCPMULLRNDRTLBTSSHA1SHA2SHA3SHA512SM3SM4SVElastID"
var _FeatureID_index = [...]uint16{0, 7, 10, 15, 23, 34, 41, 48, 55, 61, 68, 75, 85, 90, 93, 98, 107, 116, 125, 129, 139, 151, 159, 167, 175, 183, 190, 200, 210, 218, 228, 239, 247, 257, 275, 290, 297, 309, 316, 323, 334, 346, 354, 358, 362, 368, 373, 381, 386, 392, 396, 405, 423, 431, 438, 442, 446, 460, 466, 470, 474, 483, 487, 491, 496, 501, 505, 509, 516, 520, 523, 529, 532, 535, 545, 555, 568, 581, 585, 596, 600, 614, 631, 634, 644, 655, 661, 669, 680, 688, 700, 716, 730, 741, 751, 766, 774, 785, 795, 802, 811, 821, 825, 828, 835, 840, 851, 858, 865, 873, 876, 882, 887, 896, 903, 911, 915, 918, 924, 931, 944, 949, 951, 958, 965, 971, 975, 984, 988, 993, 999, 1005, 1011, 1021, 1024, 1040, 1044, 1053, 1056, 1065, 1080, 1093, 1099, 1113, 1120, 1123, 1128, 1131, 1134, 1146, 1160, 1170, 1182, 1189, 1208, 1211, 1215, 1219, 1223, 1228, 1233, 1238, 1243, 1257, 1268, 1274, 1277, 1282, 1291, 1295, 1300, 1305, 1311, 1318, 1323, 1326, 1335, 1351, 1354, 1360, 1370, 1378, 1382, 1391, 1395, 1407, 1410, 1420, 1423, 1430, 1438, 1445, 1448, 1455, 1458, 1463, 1469, 1477, 1483, 1489, 1497, 1502, 1509, 1516, 1524, 1531, 1536, 1541, 1548, 1552, 1555, 1557, 1561, 1564, 1569, 1574, 1579, 1583, 1586, 1588, 1592, 1596, 1600, 1606, 1609, 1612, 1615, 1621}
var _FeatureID_index = [...]uint16{0, 7, 10, 15, 23, 34, 41, 48, 55, 61, 68, 75, 85, 97, 102, 105, 110, 119, 128, 137, 141, 151, 163, 171, 179, 187, 195, 202, 212, 222, 230, 240, 251, 259, 269, 287, 302, 309, 321, 328, 335, 346, 358, 366, 370, 374, 380, 385, 393, 398, 404, 408, 417, 435, 443, 450, 454, 458, 472, 478, 482, 486, 495, 499, 503, 508, 513, 517, 521, 528, 532, 535, 541, 544, 547, 557, 567, 580, 593, 597, 608, 612, 626, 643, 646, 656, 667, 673, 681, 692, 700, 712, 728, 742, 753, 763, 778, 786, 797, 807, 814, 823, 833, 837, 840, 847, 852, 863, 870, 877, 885, 888, 894, 899, 908, 915, 923, 927, 930, 936, 943, 956, 961, 963, 970, 977, 983, 987, 996, 1000, 1005, 1011, 1017, 1023, 1033, 1036, 1052, 1056, 1065, 1068, 1077, 1092, 1105, 1111, 1125, 1132, 1135, 1140, 1143, 1146, 1158, 1165, 1172, 1186, 1196, 1208, 1215, 1234, 1237, 1241, 1245, 1249, 1254, 1259, 1264, 1269, 1283, 1294, 1300, 1303, 1308, 1317, 1321, 1326, 1331, 1337, 1344, 1349, 1352, 1361, 1377, 1380, 1386, 1396, 1404, 1408, 1417, 1421, 1433, 1436, 1446, 1449, 1456, 1464, 1471, 1474, 1481, 1484, 1489, 1495, 1503, 1509, 1515, 1523, 1528, 1535, 1542, 1550, 1557, 1562, 1567, 1574, 1578, 1581, 1583, 1587, 1590, 1595, 1600, 1605, 1609, 1612, 1614, 1618, 1622, 1626, 1632, 1635, 1638, 1641, 1647}
func (i FeatureID) String() string {
if i < 0 || i >= FeatureID(len(_FeatureID_index)-1) {

View File

@ -65,9 +65,16 @@ func sysctlGetInt64(unknown int, names ...string) int {
return unknown
}
func setFeature(c *CPUInfo, name string, feature FeatureID) {
c.featureSet.setIf(sysctlGetBool(name), feature)
// setFeature marks feature as supported when any of the given sysctl
// identifiers reports true. Some Apple Silicon features are published
// under more than one sysctl name, so each alias is consulted in order
// and probing stops at the first positive match.
func setFeature(c *CPUInfo, feature FeatureID, aliases ...string) {
	for _, name := range aliases {
		enabled := sysctlGetBool(name)
		c.featureSet.setIf(enabled, feature)
		if enabled {
			return
		}
	}
}
func tryToFillCPUInfoFomSysctl(c *CPUInfo) {
c.BrandName = sysctlGetString("machdep.cpu.brand_string")
@ -87,41 +94,36 @@ func tryToFillCPUInfoFomSysctl(c *CPUInfo) {
c.Cache.L2 = sysctlGetInt64(-1, "hw.l2cachesize")
c.Cache.L3 = sysctlGetInt64(-1, "hw.l3cachesize")
// from https://developer.arm.com/downloads/-/exploration-tools/feature-names-for-a-profile
setFeature(c, "hw.optional.arm.FEAT_AES", AESARM)
setFeature(c, "hw.optional.AdvSIMD", ASIMD)
setFeature(c, "hw.optional.arm.FEAT_DotProd", ASIMDDP)
setFeature(c, "hw.optional.arm.FEAT_RDM", ASIMDRDM)
setFeature(c, "hw.optional.FEAT_CRC32", CRC32)
setFeature(c, "hw.optional.arm.FEAT_DPB", DCPOP)
// setFeature(c, "", EVTSTRM)
setFeature(c, "hw.optional.arm.FEAT_FCMA", FCMA)
setFeature(c, "hw.optional.arm.FEAT_FHM", FHM)
setFeature(c, "hw.optional.arm.FEAT_FP", FP)
setFeature(c, "hw.optional.arm.FEAT_FP16", FPHP)
setFeature(c, "hw.optional.arm.FEAT_PAuth", GPA)
setFeature(c, "hw.optional.arm.FEAT_RNG", RNDR)
setFeature(c, "hw.optional.arm.FEAT_JSCVT", JSCVT)
setFeature(c, "hw.optional.arm.FEAT_LRCPC", LRCPC)
setFeature(c, "hw.optional.arm.FEAT_PMULL", PMULL)
setFeature(c, "hw.optional.arm.FEAT_SHA1", SHA1)
setFeature(c, "hw.optional.arm.FEAT_SHA256", SHA2)
setFeature(c, "hw.optional.arm.FEAT_SHA3", SHA3)
setFeature(c, "hw.optional.arm.FEAT_SHA512", SHA512)
setFeature(c, "hw.optional.arm.FEAT_TLBIOS", TLB)
setFeature(c, "hw.optional.arm.FEAT_TLBIRANGE", TLB)
setFeature(c, "hw.optional.arm.FEAT_FlagM", TS)
setFeature(c, "hw.optional.arm.FEAT_FlagM2", TS)
// setFeature(c, "", SM3)
// setFeature(c, "", SM4)
setFeature(c, "hw.optional.arm.FEAT_SVE", SVE)
// from empirical observation
setFeature(c, "hw.optional.AdvSIMD_HPFPCvt", ASIMDHP)
setFeature(c, "hw.optional.armv8_1_atomics", ATOMICS)
setFeature(c, "hw.optional.floatingpoint", FP)
setFeature(c, "hw.optional.armv8_2_sha3", SHA3)
setFeature(c, "hw.optional.armv8_2_sha512", SHA512)
setFeature(c, "hw.optional.armv8_3_compnum", FCMA)
setFeature(c, "hw.optional.armv8_crc32", CRC32)
// ARM features:
//
// Note: On some Apple Silicon system, some feats have aliases. See:
// https://developer.apple.com/documentation/kernel/1387446-sysctlbyname/determining_instruction_set_characteristics
// When so, we look at all aliases and consider a feature available when at least one identifier matches.
setFeature(c, AESARM, "hw.optional.arm.FEAT_AES") // AES instructions
setFeature(c, ASIMD, "hw.optional.arm.AdvSIMD", "hw.optional.neon") // Advanced SIMD
setFeature(c, ASIMDDP, "hw.optional.arm.FEAT_DotProd") // SIMD Dot Product
setFeature(c, ASIMDHP, "hw.optional.arm.AdvSIMD_HPFPCvt", "hw.optional.neon_hpfp") // Advanced SIMD half-precision floating point
setFeature(c, ASIMDRDM, "hw.optional.arm.FEAT_RDM") // Rounding Double Multiply Accumulate/Subtract
setFeature(c, ATOMICS, "hw.optional.arm.FEAT_LSE", "hw.optional.armv8_1_atomics") // Large System Extensions (LSE)
setFeature(c, CRC32, "hw.optional.arm.FEAT_CRC32", "hw.optional.armv8_crc32") // CRC32/CRC32C instructions
setFeature(c, DCPOP, "hw.optional.arm.FEAT_DPB") // Data cache clean to Point of Persistence (DC CVAP)
setFeature(c, EVTSTRM, "hw.optional.arm.FEAT_ECV") // Generic timer
setFeature(c, FCMA, "hw.optional.arm.FEAT_FCMA", "hw.optional.armv8_3_compnum") // Floating point complex number addition and multiplication
setFeature(c, FHM, "hw.optional.armv8_2_fhm", "hw.optional.arm.FEAT_FHM") // FMLAL and FMLSL instructions
setFeature(c, FP, "hw.optional.floatingpoint") // Single-precision and double-precision floating point
setFeature(c, FPHP, "hw.optional.arm.FEAT_FP16", "hw.optional.neon_fp16") // Half-precision floating point
setFeature(c, GPA, "hw.optional.arm.FEAT_PAuth") // Generic Pointer Authentication
setFeature(c, JSCVT, "hw.optional.arm.FEAT_JSCVT") // Javascript-style double->int convert (FJCVTZS)
setFeature(c, LRCPC, "hw.optional.arm.FEAT_LRCPC") // Weaker release consistency (LDAPR, etc)
setFeature(c, PMULL, "hw.optional.arm.FEAT_PMULL") // Polynomial Multiply instructions (PMULL/PMULL2)
setFeature(c, RNDR, "hw.optional.arm.FEAT_RNG") // Random Number instructions
setFeature(c, TLB, "hw.optional.arm.FEAT_TLBIOS", "hw.optional.arm.FEAT_TLBIRANGE") // Outer Shareable and TLB range maintenance instructions
setFeature(c, TS, "hw.optional.arm.FEAT_FlagM", "hw.optional.arm.FEAT_FlagM2") // Flag manipulation instructions
setFeature(c, SHA1, "hw.optional.arm.FEAT_SHA1") // SHA-1 instructions (SHA1C, etc)
setFeature(c, SHA2, "hw.optional.arm.FEAT_SHA256") // SHA-2 instructions (SHA256H, etc)
setFeature(c, SHA3, "hw.optional.arm.FEAT_SHA3") // SHA-3 instructions (EOR3, RAXI, XAR, BCAX)
setFeature(c, SHA512, "hw.optional.arm.FEAT_SHA512") // SHA512 instructions
setFeature(c, SM3, "hw.optional.arm.FEAT_SM3") // SM3 instructions
setFeature(c, SM4, "hw.optional.arm.FEAT_SM4") // SM4 instructions
setFeature(c, SVE, "hw.optional.arm.FEAT_SVE") // Scalable Vector Extension
}

View File

@ -201,6 +201,7 @@ var unitMap = map[string]struct {
// ParseDuration parses a string into a time.Duration, assuming that a year
// always has 365d, a week always has 7d, and a day always has 24h.
// Negative durations are not supported.
func ParseDuration(s string) (Duration, error) {
switch s {
case "0":
@ -253,18 +254,36 @@ func ParseDuration(s string) (Duration, error) {
return 0, errors.New("duration out of range")
}
}
return Duration(dur), nil
}
// ParseDurationAllowNegative is like ParseDuration but also accepts
// negative durations: a leading '-' negates the result of parsing the
// remainder of the string.
func ParseDurationAllowNegative(s string) (Duration, error) {
	if len(s) > 0 && s[0] == '-' {
		d, err := ParseDuration(s[1:])
		return -d, err
	}
	return ParseDuration(s)
}
func (d Duration) String() string {
var (
ms = int64(time.Duration(d) / time.Millisecond)
r = ""
ms = int64(time.Duration(d) / time.Millisecond)
r = ""
sign = ""
)
if ms == 0 {
return "0s"
}
if ms < 0 {
sign, ms = "-", -ms
}
f := func(unit string, mult int64, exact bool) {
if exact && ms%mult != 0 {
return
@ -286,7 +305,7 @@ func (d Duration) String() string {
f("s", 1000, false)
f("ms", 1, false)
return r
return sign + r
}
// MarshalJSON implements the json.Marshaler interface.

View File

@ -33,7 +33,7 @@ GOHOSTOS ?= $(shell $(GO) env GOHOSTOS)
GOHOSTARCH ?= $(shell $(GO) env GOHOSTARCH)
GO_VERSION ?= $(shell $(GO) version)
GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION))
GO_VERSION_NUMBER ?= $(word 3, $(GO_VERSION))
PRE_GO_111 ?= $(shell echo $(GO_VERSION_NUMBER) | grep -E 'go1\.(10|[0-9])\.')
PROMU := $(FIRST_GOPATH)/bin/promu
@ -61,7 +61,8 @@ PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_
SKIP_GOLANGCI_LINT :=
GOLANGCI_LINT :=
GOLANGCI_LINT_OPTS ?=
GOLANGCI_LINT_VERSION ?= v2.0.2
GOLANGCI_LINT_VERSION ?= v2.1.5
GOLANGCI_FMT_OPTS ?=
# golangci-lint only supports linux, darwin and windows platforms on i386/amd64/arm64.
# windows isn't included here because of the path separator being different.
ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin))
@ -156,9 +157,13 @@ $(GOTEST_DIR):
@mkdir -p $@
.PHONY: common-format
common-format:
common-format: $(GOLANGCI_LINT)
@echo ">> formatting code"
$(GO) fmt $(pkgs)
ifdef GOLANGCI_LINT
@echo ">> formatting code with golangci-lint"
$(GOLANGCI_LINT) fmt $(GOLANGCI_FMT_OPTS)
endif
.PHONY: common-vet
common-vet:
@ -248,8 +253,8 @@ $(PROMU):
cp $(PROMU_TMP)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM)/promu $(FIRST_GOPATH)/bin/promu
rm -r $(PROMU_TMP)
.PHONY: proto
proto:
.PHONY: common-proto
common-proto:
@echo ">> generating code from proto files"
@./scripts/genproto.sh

View File

@ -123,13 +123,16 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) {
finish := float64(0)
pct := float64(0)
recovering := strings.Contains(lines[syncLineIdx], "recovery")
reshaping := strings.Contains(lines[syncLineIdx], "reshape")
resyncing := strings.Contains(lines[syncLineIdx], "resync")
checking := strings.Contains(lines[syncLineIdx], "check")
// Append recovery and resyncing state info.
if recovering || resyncing || checking {
if recovering || resyncing || checking || reshaping {
if recovering {
state = "recovering"
} else if reshaping {
state = "reshaping"
} else if checking {
state = "checking"
} else {

View File

@ -66,6 +66,10 @@ type Meminfo struct {
// Memory which has been evicted from RAM, and is temporarily
// on the disk
SwapFree *uint64
// Memory consumed by the zswap backend (compressed size)
Zswap *uint64
// Amount of anonymous memory stored in zswap (original size)
Zswapped *uint64
// Memory which is waiting to get written back to the disk
Dirty *uint64
// Memory which is actively being written back to the disk
@ -85,6 +89,8 @@ type Meminfo struct {
// amount of memory dedicated to the lowest level of page
// tables.
PageTables *uint64
// secondary page tables.
SecPageTables *uint64
// NFS pages sent to the server, but not yet committed to
// stable storage
NFSUnstable *uint64
@ -129,15 +135,18 @@ type Meminfo struct {
Percpu *uint64
HardwareCorrupted *uint64
AnonHugePages *uint64
FileHugePages *uint64
ShmemHugePages *uint64
ShmemPmdMapped *uint64
CmaTotal *uint64
CmaFree *uint64
Unaccepted *uint64
HugePagesTotal *uint64
HugePagesFree *uint64
HugePagesRsvd *uint64
HugePagesSurp *uint64
Hugepagesize *uint64
Hugetlb *uint64
DirectMap4k *uint64
DirectMap2M *uint64
DirectMap1G *uint64
@ -161,6 +170,8 @@ type Meminfo struct {
MlockedBytes *uint64
SwapTotalBytes *uint64
SwapFreeBytes *uint64
ZswapBytes *uint64
ZswappedBytes *uint64
DirtyBytes *uint64
WritebackBytes *uint64
AnonPagesBytes *uint64
@ -171,6 +182,7 @@ type Meminfo struct {
SUnreclaimBytes *uint64
KernelStackBytes *uint64
PageTablesBytes *uint64
SecPageTablesBytes *uint64
NFSUnstableBytes *uint64
BounceBytes *uint64
WritebackTmpBytes *uint64
@ -182,11 +194,14 @@ type Meminfo struct {
PercpuBytes *uint64
HardwareCorruptedBytes *uint64
AnonHugePagesBytes *uint64
FileHugePagesBytes *uint64
ShmemHugePagesBytes *uint64
ShmemPmdMappedBytes *uint64
CmaTotalBytes *uint64
CmaFreeBytes *uint64
UnacceptedBytes *uint64
HugepagesizeBytes *uint64
HugetlbBytes *uint64
DirectMap4kBytes *uint64
DirectMap2MBytes *uint64
DirectMap1GBytes *uint64
@ -287,6 +302,12 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) {
case "SwapFree:":
m.SwapFree = &val
m.SwapFreeBytes = &valBytes
case "Zswap:":
	m.Zswap = &val
	m.ZswapBytes = &valBytes
case "Zswapped:":
	m.Zswapped = &val
	// Fix: this previously assigned m.ZswapBytes, clobbering the value set
	// by the "Zswap:" case and leaving ZswappedBytes permanently nil.
	m.ZswappedBytes = &valBytes
case "Dirty:":
m.Dirty = &val
m.DirtyBytes = &valBytes
@ -317,6 +338,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) {
case "PageTables:":
m.PageTables = &val
m.PageTablesBytes = &valBytes
case "SecPageTables:":
m.SecPageTables = &val
m.SecPageTablesBytes = &valBytes
case "NFS_Unstable:":
m.NFSUnstable = &val
m.NFSUnstableBytes = &valBytes
@ -350,6 +374,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) {
case "AnonHugePages:":
m.AnonHugePages = &val
m.AnonHugePagesBytes = &valBytes
case "FileHugePages:":
m.FileHugePages = &val
m.FileHugePagesBytes = &valBytes
case "ShmemHugePages:":
m.ShmemHugePages = &val
m.ShmemHugePagesBytes = &valBytes
@ -362,6 +389,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) {
case "CmaFree:":
m.CmaFree = &val
m.CmaFreeBytes = &valBytes
case "Unaccepted:":
m.Unaccepted = &val
m.UnacceptedBytes = &valBytes
case "HugePages_Total:":
m.HugePagesTotal = &val
case "HugePages_Free:":
@ -373,6 +403,9 @@ func parseMemInfo(r io.Reader) (*Meminfo, error) {
case "Hugepagesize:":
m.Hugepagesize = &val
m.HugepagesizeBytes = &valBytes
case "Hugetlb:":
m.Hugetlb = &val
m.HugetlbBytes = &valBytes
case "DirectMap4k:":
m.DirectMap4k = &val
m.DirectMap4kBytes = &valBytes

View File

@ -101,6 +101,12 @@ type ProcStat struct {
RSS int
// Soft limit in bytes on the rss of the process.
RSSLimit uint64
// The address above which program text can run.
StartCode uint64
// The address below which program text can run.
EndCode uint64
// The address of the start (i.e., bottom) of the stack.
StartStack uint64
// CPU number last executed on.
Processor uint
// Real-time scheduling priority, a number in the range 1 to 99 for processes
@ -177,9 +183,9 @@ func (p Proc) Stat() (ProcStat, error) {
&s.VSize,
&s.RSS,
&s.RSSLimit,
&ignoreUint64,
&ignoreUint64,
&ignoreUint64,
&s.StartCode,
&s.EndCode,
&s.StartStack,
&ignoreUint64,
&ignoreUint64,
&ignoreUint64,

116
vendor/github.com/prometheus/procfs/proc_statm.go generated vendored Normal file
View File

@ -0,0 +1,116 @@
// Copyright 2025 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package procfs
import (
	"fmt"
	"os"
	"strconv"
	"strings"

	"github.com/prometheus/procfs/internal/util"
)
// ProcStatm provides memory usage information for a process, measured in
// memory pages. Read from /proc/[pid]/statm.
//
// See https://man7.org/linux/man-pages/man5/proc_pid_statm.5.html
type ProcStatm struct {
	// The process ID.
	PID int
	// Total program size (same as VmSize in /proc/[pid]/status), in pages.
	Size uint64
	// Resident set size (same as VmRSS in /proc/[pid]/status), in pages.
	Resident uint64
	// Number of resident shared pages (i.e., backed by a file), in pages.
	Shared uint64
	// Text (code) pages.
	Text uint64
	// Library pages (unused since Linux 2.6; always 0).
	Lib uint64
	// Data + stack pages.
	Data uint64
	// Dirty pages (unused since Linux 2.6; always 0).
	Dt uint64
}
// NewStatm returns the current status information of the process.
//
// Deprecated: Use p.Statm() instead.
func (p Proc) NewStatm() (ProcStatm, error) {
	return p.Statm()
}
// Statm returns the current memory usage information of the process,
// read from /proc/[pid]/statm.
//
// It returns an error if the file cannot be read, a field cannot be
// parsed, or fewer than the expected 7 fields are present.
func (p Proc) Statm() (ProcStatm, error) {
	data, err := util.ReadFileNoStat(p.path("statm"))
	if err != nil {
		return ProcStatm{}, err
	}

	statmSlice, err := parseStatm(data)
	if err != nil {
		return ProcStatm{}, err
	}

	// /proc/[pid]/statm has 7 fields; guard against malformed content so a
	// short slice returns an error instead of panicking on the indexing below.
	if len(statmSlice) < 7 {
		return ProcStatm{}, fmt.Errorf("unexpected statm field count %d", len(statmSlice))
	}

	procStatm := ProcStatm{
		PID:      p.PID,
		Size:     statmSlice[0],
		Resident: statmSlice[1],
		Shared:   statmSlice[2],
		Text:     statmSlice[3],
		Lib:      statmSlice[4],
		Data:     statmSlice[5],
		Dt:       statmSlice[6],
	}
	return procStatm, nil
}
// parseStatm parses the whitespace-separated fields of /proc/[pid]/statm
// content into a slice of uint64 values, in field order.
// It returns an error if any field is not a valid unsigned integer.
func parseStatm(data []byte) ([]uint64, error) {
	statmItems := strings.Fields(string(data))
	// Pre-size to avoid repeated growth; statm normally has 7 fields.
	statmSlice := make([]uint64, 0, len(statmItems))
	for _, item := range statmItems {
		v, err := strconv.ParseUint(item, 10, 64)
		if err != nil {
			return nil, err
		}
		statmSlice = append(statmSlice, v)
	}
	return statmSlice, nil
}
// SizeBytes returns the total program size of the process in bytes
// (Size pages multiplied by the system page size).
func (s ProcStatm) SizeBytes() uint64 {
	return s.Size * uint64(os.Getpagesize())
}

// ResidentBytes returns the resident set size of the process in bytes.
func (s ProcStatm) ResidentBytes() uint64 {
	return s.Resident * uint64(os.Getpagesize())
}

// SHRBytes returns the shared memory size of the process in bytes.
func (s ProcStatm) SHRBytes() uint64 {
	return s.Shared * uint64(os.Getpagesize())
}

// TextBytes returns the text (code) size of the process in bytes.
func (s ProcStatm) TextBytes() uint64 {
	return s.Text * uint64(os.Getpagesize())
}

// DataBytes returns the data + stack size of the process in bytes.
func (s ProcStatm) DataBytes() uint64 {
	return s.Data * uint64(os.Getpagesize())
}

View File

@ -5,9 +5,7 @@ import (
"github.com/vektah/gqlparser/v2/gqlerror"
"github.com/vektah/gqlparser/v2/parser"
"github.com/vektah/gqlparser/v2/validator"
// Blank import is used to load up the validator rules.
_ "github.com/vektah/gqlparser/v2/validator/rules"
"github.com/vektah/gqlparser/v2/validator/rules"
)
func LoadSchema(str ...*ast.Source) (*ast.Schema, error) {
@ -30,6 +28,7 @@ func MustLoadSchema(str ...*ast.Source) *ast.Schema {
return s
}
// Deprecated: use LoadQueryWithRules instead.
func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.List) {
query, err := parser.ParseQuery(&ast.Source{Input: str})
if err != nil {
@ -47,6 +46,24 @@ func LoadQuery(schema *ast.Schema, str string) (*ast.QueryDocument, gqlerror.Lis
return query, nil
}
// LoadQueryWithRules parses the query string and validates it against the
// schema using the supplied rule set, returning the parsed document or the
// accumulated errors.
func LoadQueryWithRules(schema *ast.Schema, str string, rules *rules.Rules) (*ast.QueryDocument, gqlerror.List) {
	query, err := parser.ParseQuery(&ast.Source{Input: str})
	if err != nil {
		if gqlErr, ok := err.(*gqlerror.Error); ok {
			return nil, gqlerror.List{gqlErr}
		}
		return nil, gqlerror.List{gqlerror.Wrap(err)}
	}

	if errs := validator.ValidateWithRules(schema, query, rules); len(errs) > 0 {
		return nil, errs
	}
	return query, nil
}
// Deprecated: use MustLoadQueryWithRules instead.
func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument {
q, err := LoadQuery(schema, str)
if err != nil {
@ -54,3 +71,11 @@ func MustLoadQuery(schema *ast.Schema, str string) *ast.QueryDocument {
}
return q
}
// MustLoadQueryWithRules is like LoadQueryWithRules but panics on any
// parse or validation error.
func MustLoadQueryWithRules(schema *ast.Schema, str string, rules *rules.Rules) *ast.QueryDocument {
	q, err := LoadQueryWithRules(schema, str, rules)
	if err == nil {
		return q
	}
	panic(err)
}

View File

@ -0,0 +1,24 @@
package core
import (
"github.com/vektah/gqlparser/v2/gqlerror"
)
// AddErrFunc reports a validation error assembled from the given options
// (message text, source position, suggestions, ...).
type AddErrFunc func(options ...ErrorOption)

// RuleFunc implements a single validation rule: it registers callbacks on
// the walker's events and reports violations through addError.
type RuleFunc func(observers *Events, addError AddErrFunc)

// Rule pairs a validation rule's name with its implementation.
type Rule struct {
	Name     string
	RuleFunc RuleFunc
}

// NameSorter sorts Rules by name.
// usage: sort.Sort(core.NameSorter(specifiedRules))
type NameSorter []Rule

func (a NameSorter) Len() int           { return len(a) }
func (a NameSorter) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a NameSorter) Less(i, j int) bool { return a[i].Name < a[j].Name }

// ErrorOption mutates a gqlerror.Error under construction, e.g. appending
// message text or a source location.
type ErrorOption func(err *gqlerror.Error)

View File

@ -0,0 +1,154 @@
package core
import (
"bytes"
"fmt"
"math"
"sort"
"strings"
"github.com/agnivade/levenshtein"
"github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/gqlerror"
)
// Message appends the formatted text to the error's message.
func Message(msg string, args ...interface{}) ErrorOption {
	return func(err *gqlerror.Error) {
		formatted := fmt.Sprintf(msg, args...)
		err.Message = err.Message + formatted
	}
}
// At appends the given source position to the error's locations and, when
// the position carries a named source, records that name as the error's
// file. A nil position is ignored.
func At(position *ast.Position) ErrorOption {
	return func(err *gqlerror.Error) {
		if position == nil {
			return
		}
		err.Locations = append(err.Locations, gqlerror.Location{
			Line:   position.Line,
			Column: position.Column,
		})
		// Guard Src as well: a hand-constructed position may have no source,
		// and dereferencing it unconditionally would panic.
		if position.Src != nil && position.Src.Name != "" {
			err.SetFile(position.Src.Name)
		}
	}
}
// SuggestListQuoted appends a quoted "did you mean" list of the closest
// matches for typed to the error message, prefixed by prefix.
func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
	matches := SuggestionList(typed, suggestions)
	return func(err *gqlerror.Error) {
		if len(matches) == 0 {
			return
		}
		err.Message += " " + prefix + " " + QuotedOrList(matches...) + "?"
	}
}
// SuggestListUnquoted appends an unquoted "did you mean" list of the closest
// matches for typed to the error message, prefixed by prefix.
func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
	matches := SuggestionList(typed, suggestions)
	return func(err *gqlerror.Error) {
		if len(matches) == 0 {
			return
		}
		err.Message += " " + prefix + " " + OrList(matches...) + "?"
	}
}
// Suggestf appends a formatted "Did you mean …?" hint to the error message.
func Suggestf(suggestion string, args ...interface{}) ErrorOption {
	return func(err *gqlerror.Error) {
		hint := fmt.Sprintf(suggestion, args...)
		err.Message += " Did you mean " + hint + "?"
	}
}
// QuotedOrList renders items as a quoted or-list:
// given [ A, B, C ] it returns '"A", "B", or "C"'.
func QuotedOrList(items ...string) string {
	quoted := make([]string, 0, len(items))
	for _, item := range items {
		quoted = append(quoted, `"`+item+`"`)
	}
	return OrList(quoted...)
}

// OrList renders items as an or-list, capped at five entries:
// given [ A, B, C ] it returns 'A, B, or C'.
func OrList(items ...string) string {
	if len(items) > 5 {
		items = items[:5]
	}
	switch len(items) {
	case 0:
		return ""
	case 1:
		return items[0]
	case 2:
		// Two items take no comma.
		return items[0] + " or " + items[1]
	}
	last := len(items) - 1
	return strings.Join(items[:last], ", ") + ", or " + items[last]
}
// Given an invalid input string and a list of valid options, returns a filtered
// list of valid options sorted based on their similarity with the input.
func SuggestionList(input string, options []string) []string {
var results []string
optionsByDistance := map[string]int{}
for _, option := range options {
distance := lexicalDistance(input, option)
threshold := calcThreshold(input)
if distance <= threshold {
results = append(results, option)
optionsByDistance[option] = distance
}
}
sort.Slice(results, func(i, j int) bool {
return optionsByDistance[results[i]] < optionsByDistance[results[j]]
})
return results
}
// calcThreshold returns the maximum lexical distance at which an option is
// still considered a plausible suggestion for input a (at least 1).
// The logic is copied from
// https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/jsutils/suggestionList.ts#L14
func calcThreshold(a string) (threshold int) {
	threshold = int(math.Floor(float64(len(a))*0.4)) + 1
	if threshold < 1 {
		threshold = 1
	}
	return threshold
}
// Computes the lexical distance between strings A and B.
//
// The "distance" between two strings is given by counting the minimum number
// of edits needed to transform string A into string B. An edit can be an
// insertion, deletion, or substitution of a single character, or a swap of two
// adjacent characters.
//
// Includes a custom alteration from Damerau-Levenshtein to treat case changes
// as a single edit which helps identify mis-cased values with an edit distance
// of 1.
//
// This distance can be useful for detecting typos in input or sorting.
func lexicalDistance(a, b string) int {
	if a == b {
		return 0
	}

	la := strings.ToLower(a)
	lb := strings.ToLower(b)
	// Any case change counts as a single edit.
	if la == lb {
		return 1
	}
	return levenshtein.ComputeDistance(la, lb)
}

View File

@ -1,4 +1,4 @@
package validator
package core
import (
"context"

View File

@ -1,55 +0,0 @@
package validator
import (
"fmt"
"github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/gqlerror"
)
type ErrorOption func(err *gqlerror.Error)
func Message(msg string, args ...interface{}) ErrorOption {
return func(err *gqlerror.Error) {
err.Message += fmt.Sprintf(msg, args...)
}
}
func At(position *ast.Position) ErrorOption {
return func(err *gqlerror.Error) {
if position == nil {
return
}
err.Locations = append(err.Locations, gqlerror.Location{
Line: position.Line,
Column: position.Column,
})
if position.Src.Name != "" {
err.SetFile(position.Src.Name)
}
}
}
func SuggestListQuoted(prefix string, typed string, suggestions []string) ErrorOption {
suggested := SuggestionList(typed, suggestions)
return func(err *gqlerror.Error) {
if len(suggested) > 0 {
err.Message += " " + prefix + " " + QuotedOrList(suggested...) + "?"
}
}
}
func SuggestListUnquoted(prefix string, typed string, suggestions []string) ErrorOption {
suggested := SuggestionList(typed, suggestions)
return func(err *gqlerror.Error) {
if len(suggested) > 0 {
err.Message += " " + prefix + " " + OrList(suggested...) + "?"
}
}
}
func Suggestf(suggestion string, args ...interface{}) ErrorOption {
return func(err *gqlerror.Error) {
err.Message += " Did you mean " + fmt.Sprintf(suggestion, args...) + "?"
}
}

View File

@ -1,39 +0,0 @@
package validator
import "bytes"
// Given [ A, B, C ] return '"A", "B", or "C"'.
func QuotedOrList(items ...string) string {
itemsQuoted := make([]string, len(items))
for i, item := range items {
itemsQuoted[i] = `"` + item + `"`
}
return OrList(itemsQuoted...)
}
// Given [ A, B, C ] return 'A, B, or C'.
func OrList(items ...string) string {
var buf bytes.Buffer
if len(items) > 5 {
items = items[:5]
}
if len(items) == 2 {
buf.WriteString(items[0])
buf.WriteString(" or ")
buf.WriteString(items[1])
return buf.String()
}
for i, item := range items {
if i != 0 {
if i == len(items)-1 {
buf.WriteString(", or ")
} else {
buf.WriteString(", ")
}
}
buf.WriteString(item)
}
return buf.String()
}

View File

@ -8,7 +8,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
func ruleFuncFieldsOnCorrectType(observers *Events, addError AddErrFunc, disableSuggestion bool) {
@ -48,10 +48,6 @@ var FieldsOnCorrectTypeRuleWithoutSuggestions = Rule{
},
}
func init() {
AddRule(FieldsOnCorrectTypeRule.Name, FieldsOnCorrectTypeRule.RuleFunc)
}
// Go through all the implementations of type, as well as the interfaces
// that they implement. If any of those types include the provided field,
// suggest them, sorted by how often the type is referenced, starting

View File

@ -6,7 +6,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var FragmentsOnCompositeTypesRule = Rule{
@ -40,7 +40,3 @@ var FragmentsOnCompositeTypesRule = Rule{
})
},
}
func init() {
AddRule(FragmentsOnCompositeTypesRule.Name, FragmentsOnCompositeTypesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
func ruleFuncKnownArgumentNames(observers *Events, addError AddErrFunc, disableSuggestion bool) {
@ -82,7 +82,3 @@ var KnownArgumentNamesRuleWithoutSuggestions = Rule{
ruleFuncKnownArgumentNames(observers, addError, true)
},
}
func init() {
AddRule(KnownArgumentNamesRule.Name, KnownArgumentNamesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var KnownDirectivesRule = Rule{
@ -48,7 +48,3 @@ var KnownDirectivesRule = Rule{
})
},
}
func init() {
AddRule(KnownDirectivesRule.Name, KnownDirectivesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var KnownFragmentNamesRule = Rule{
@ -20,7 +20,3 @@ var KnownFragmentNamesRule = Rule{
})
},
}
func init() {
AddRule(KnownFragmentNamesRule.Name, KnownFragmentNamesRule.RuleFunc)
}

View File

@ -6,7 +6,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var KnownRootTypeRule = Rule{
@ -36,7 +36,3 @@ var KnownRootTypeRule = Rule{
})
},
}
func init() {
AddRule(KnownRootTypeRule.Name, KnownRootTypeRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
func ruleFuncKnownTypeNames(observers *Events, addError AddErrFunc, disableSuggestion bool) {
@ -78,7 +78,3 @@ var KnownTypeNamesRuleWithoutSuggestions = Rule{
ruleFuncKnownTypeNames(observers, addError, true)
},
}
func init() {
AddRule(KnownTypeNamesRule.Name, KnownTypeNamesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var LoneAnonymousOperationRule = Rule{
@ -20,7 +20,3 @@ var LoneAnonymousOperationRule = Rule{
})
},
}
func init() {
AddRule(LoneAnonymousOperationRule.Name, LoneAnonymousOperationRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
const maxListsDepth = 3
@ -84,7 +84,3 @@ func checkDepthFragmentSpread(fragmentSpread *ast.FragmentSpread, visitedFragmen
defer delete(visitedFragments, fragmentName)
return checkDepthSelectionSet(fragment.SelectionSet, visitedFragments, depth)
}
func init() {
AddRule(MaxIntrospectionDepth.Name, MaxIntrospectionDepth.RuleFunc)
}

View File

@ -7,7 +7,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var NoFragmentCyclesRule = Rule{
@ -71,10 +71,6 @@ var NoFragmentCyclesRule = Rule{
},
}
func init() {
AddRule(NoFragmentCyclesRule.Name, NoFragmentCyclesRule.RuleFunc)
}
func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread {
var spreads []*ast.FragmentSpread

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var NoUndefinedVariablesRule = Rule{
@ -29,7 +29,3 @@ var NoUndefinedVariablesRule = Rule{
})
},
}
func init() {
AddRule(NoUndefinedVariablesRule.Name, NoUndefinedVariablesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var NoUnusedFragmentsRule = Rule{
@ -30,7 +30,3 @@ var NoUnusedFragmentsRule = Rule{
})
},
}
func init() {
AddRule(NoUnusedFragmentsRule.Name, NoUnusedFragmentsRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var NoUnusedVariablesRule = Rule{
@ -31,7 +31,3 @@ var NoUnusedVariablesRule = Rule{
})
},
}
func init() {
AddRule(NoUnusedVariablesRule.Name, NoUnusedVariablesRule.RuleFunc)
}

View File

@ -8,7 +8,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var OverlappingFieldsCanBeMergedRule = Rule{
@ -108,10 +108,6 @@ var OverlappingFieldsCanBeMergedRule = Rule{
},
}
func init() {
AddRule(OverlappingFieldsCanBeMergedRule.Name, OverlappingFieldsCanBeMergedRule.RuleFunc)
}
type pairSet struct {
data map[string]map[string]bool
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var PossibleFragmentSpreadsRule = Rule{
@ -68,7 +68,3 @@ var PossibleFragmentSpreadsRule = Rule{
})
},
}
func init() {
AddRule(PossibleFragmentSpreadsRule.Name, PossibleFragmentSpreadsRule.RuleFunc)
}

View File

@ -3,7 +3,7 @@ package rules
import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var ProvidedRequiredArgumentsRule = Rule{
@ -62,7 +62,3 @@ var ProvidedRequiredArgumentsRule = Rule{
})
},
}
func init() {
AddRule(ProvidedRequiredArgumentsRule.Name, ProvidedRequiredArgumentsRule.RuleFunc)
}

View File

@ -0,0 +1,119 @@
package rules
import (
"slices"
"github.com/vektah/gqlparser/v2/validator/core"
)
// Rules manages GraphQL validation rules.
type Rules struct {
	// rules maps each rule name to its implementation.
	rules map[string]core.RuleFunc
	ruleNameKeys []string // insertion-ordered rule names, for deterministic order
}
// NewRules creates a Rules instance with the specified rules.
func NewRules(rs ...core.Rule) *Rules {
	r := &Rules{rules: map[string]core.RuleFunc{}}
	for _, rule := range rs {
		r.AddRule(rule.Name, rule.RuleFunc)
	}
	return r
}
// NewDefaultRules creates a Rules instance containing the default GraphQL validation rule set.
func NewDefaultRules() *Rules {
	return NewRules(
		FieldsOnCorrectTypeRule,
		FragmentsOnCompositeTypesRule,
		KnownArgumentNamesRule,
		KnownDirectivesRule,
		KnownFragmentNamesRule,
		KnownRootTypeRule,
		KnownTypeNamesRule,
		LoneAnonymousOperationRule,
		MaxIntrospectionDepth,
		NoFragmentCyclesRule,
		NoUndefinedVariablesRule,
		NoUnusedFragmentsRule,
		NoUnusedVariablesRule,
		OverlappingFieldsCanBeMergedRule,
		PossibleFragmentSpreadsRule,
		ProvidedRequiredArgumentsRule,
		ScalarLeafsRule,
		SingleFieldSubscriptionsRule,
		UniqueArgumentNamesRule,
		UniqueDirectivesPerLocationRule,
		UniqueFragmentNamesRule,
		UniqueInputFieldNamesRule,
		UniqueOperationNamesRule,
		UniqueVariableNamesRule,
		ValuesOfCorrectTypeRule,
		VariablesAreInputTypesRule,
		VariablesInAllowedPositionRule,
	)
}
// AddRule adds a rule with the specified name and rule function to the rule set.
// If a rule with the same name already exists, it will not be added.
func (r *Rules) AddRule(name string, ruleFunc core.RuleFunc) {
	if r.rules == nil {
		r.rules = map[string]core.RuleFunc{}
	}
	if _, exists := r.rules[name]; exists {
		return
	}
	r.rules[name] = ruleFunc
	r.ruleNameKeys = append(r.ruleNameKeys, name)
}
// GetInner returns the internal rule map.
// A nil receiver yields nil; an uninitialized map yields a fresh empty map.
func (r *Rules) GetInner() map[string]core.RuleFunc {
	if r == nil {
		return nil // impossible nonsense, hopefully
	}
	if r.rules != nil {
		return r.rules
	}
	return map[string]core.RuleFunc{}
}
// RemoveRule removes a rule with the specified name from the rule set.
// If no rule with the specified name exists, it does nothing.
func (r *Rules) RemoveRule(name string) {
	if r == nil {
		return // impossible nonsense, hopefully
	}
	// delete on a nil map is a no-op, so no nil check is needed here.
	delete(r.rules, name)
	if len(r.ruleNameKeys) == 0 {
		return
	}
	r.ruleNameKeys = slices.DeleteFunc(r.ruleNameKeys, func(s string) bool {
		return s == name // drop the matching name key
	})
}
// ReplaceRule replaces a rule with the specified name with a new rule function.
// If no rule with the specified name exists, it does nothing.
func (r *Rules) ReplaceRule(name string, ruleFunc core.RuleFunc) {
	if r == nil {
		return // impossible nonsense, hopefully
	}
	if r.rules == nil {
		// No rules registered yet, so there is nothing to replace; leave an
		// initialized (empty) map behind, matching the original behavior.
		r.rules = map[string]core.RuleFunc{}
		return
	}
	if _, exists := r.rules[name]; exists {
		r.rules[name] = ruleFunc
	}
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var ScalarLeafsRule = Rule{
@ -37,7 +37,3 @@ var ScalarLeafsRule = Rule{
})
},
}
func init() {
AddRule(ScalarLeafsRule.Name, ScalarLeafsRule.RuleFunc)
}

View File

@ -7,7 +7,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var SingleFieldSubscriptionsRule = Rule{
@ -44,10 +44,6 @@ var SingleFieldSubscriptionsRule = Rule{
},
}
func init() {
AddRule(SingleFieldSubscriptionsRule.Name, SingleFieldSubscriptionsRule.RuleFunc)
}
type topField struct {
name string
position *ast.Position

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueArgumentNamesRule = Rule{
@ -20,10 +20,6 @@ var UniqueArgumentNamesRule = Rule{
},
}
func init() {
AddRule(UniqueArgumentNamesRule.Name, UniqueArgumentNamesRule.RuleFunc)
}
func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) {
knownArgNames := map[string]int{}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueDirectivesPerLocationRule = Rule{
@ -25,7 +25,3 @@ var UniqueDirectivesPerLocationRule = Rule{
})
},
}
func init() {
AddRule(UniqueDirectivesPerLocationRule.Name, UniqueDirectivesPerLocationRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueFragmentNamesRule = Rule{
@ -23,7 +23,3 @@ var UniqueFragmentNamesRule = Rule{
})
},
}
func init() {
AddRule(UniqueFragmentNamesRule.Name, UniqueFragmentNamesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueInputFieldNamesRule = Rule{
@ -28,7 +28,3 @@ var UniqueInputFieldNamesRule = Rule{
})
},
}
func init() {
AddRule(UniqueInputFieldNamesRule.Name, UniqueInputFieldNamesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueOperationNamesRule = Rule{
@ -23,7 +23,3 @@ var UniqueOperationNamesRule = Rule{
})
},
}
func init() {
AddRule(UniqueOperationNamesRule.Name, UniqueOperationNamesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var UniqueVariableNamesRule = Rule{
@ -25,7 +25,3 @@ var UniqueVariableNamesRule = Rule{
})
},
}
func init() {
AddRule(UniqueVariableNamesRule.Name, UniqueVariableNamesRule.RuleFunc)
}

View File

@ -8,7 +8,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
func ruleFuncValuesOfCorrectType(observers *Events, addError AddErrFunc, disableSuggestion bool) {
@ -213,10 +213,6 @@ var ValuesOfCorrectTypeRuleWithoutSuggestions = Rule{
},
}
func init() {
AddRule(ValuesOfCorrectTypeRule.Name, ValuesOfCorrectTypeRule.RuleFunc)
}
func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) {
addError(
unexpectedTypeMessageOnly(v),

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var VariablesAreInputTypesRule = Rule{
@ -29,7 +29,3 @@ var VariablesAreInputTypesRule = Rule{
})
},
}
func init() {
AddRule(VariablesAreInputTypesRule.Name, VariablesAreInputTypesRule.RuleFunc)
}

View File

@ -4,7 +4,7 @@ import (
"github.com/vektah/gqlparser/v2/ast"
//nolint:staticcheck // Validator rules each use dot imports for convenience.
. "github.com/vektah/gqlparser/v2/validator"
. "github.com/vektah/gqlparser/v2/validator/core"
)
var VariablesInAllowedPositionRule = Rule{
@ -39,7 +39,3 @@ var VariablesInAllowedPositionRule = Rule{
})
},
}
func init() {
AddRule(VariablesInAllowedPositionRule.Name, VariablesInAllowedPositionRule.RuleFunc)
}

View File

@ -1,69 +0,0 @@
package validator
import (
"math"
"sort"
"strings"
"github.com/agnivade/levenshtein"
)
// Given an invalid input string and a list of valid options, returns a filtered
// list of valid options sorted based on their similarity with the input.
func SuggestionList(input string, options []string) []string {
var results []string
optionsByDistance := map[string]int{}
for _, option := range options {
distance := lexicalDistance(input, option)
threshold := calcThreshold(input)
if distance <= threshold {
results = append(results, option)
optionsByDistance[option] = distance
}
}
sort.Slice(results, func(i, j int) bool {
return optionsByDistance[results[i]] < optionsByDistance[results[j]]
})
return results
}
func calcThreshold(a string) (threshold int) {
// the logic is copied from here
// https://github.com/graphql/graphql-js/blob/47bd8c8897c72d3efc17ecb1599a95cee6bac5e8/src/jsutils/suggestionList.ts#L14
threshold = int(math.Floor(float64(len(a))*0.4) + 1)
if threshold < 1 {
threshold = 1
}
return
}
// Computes the lexical distance between strings A and B.
//
// The "distance" between two strings is given by counting the minimum number
// of edits needed to transform string A into string B. An edit can be an
// insertion, deletion, or substitution of a single character, or a swap of two
// adjacent characters.
//
// Includes a custom alteration from Damerau-Levenshtein to treat case changes
// as a single edit which helps identify mis-cased values with an edit distance
// of 1.
//
// This distance can be useful for detecting typos in input or sorting
func lexicalDistance(a, b string) int {
if a == b {
return 0
}
a = strings.ToLower(a)
b = strings.ToLower(b)
// Any case change counts as a single edit
if a == b {
return 1
}
return levenshtein.ComputeDistance(a, b)
}

View File

@ -1,22 +1,46 @@
package validator
import (
"sort"
//nolint:staticcheck // bad, yeah
. "github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/gqlerror"
"github.com/vektah/gqlparser/v2/validator/core"
validatorrules "github.com/vektah/gqlparser/v2/validator/rules"
)
type AddErrFunc func(options ...ErrorOption)
type (
AddErrFunc = core.AddErrFunc
RuleFunc = core.RuleFunc
Rule = core.Rule
Events = core.Events
ErrorOption = core.ErrorOption
Walker = core.Walker
)
type RuleFunc func(observers *Events, addError AddErrFunc)
var (
Message = core.Message
QuotedOrList = core.QuotedOrList
OrList = core.OrList
)
type Rule struct {
Name string
RuleFunc RuleFunc
// Walk is an alias for core.Walk
func Walk(schema *Schema, document *QueryDocument, observers *Events) {
core.Walk(schema, document, observers)
}
var specifiedRules []Rule
func init() {
// Initialize specifiedRules with default rules
defaultRules := validatorrules.NewDefaultRules()
for name, ruleFunc := range defaultRules.GetInner() {
specifiedRules = append(specifiedRules, Rule{Name: name, RuleFunc: ruleFunc})
// ensure initial default is in deterministic order
sort.Sort(core.NameSorter(specifiedRules))
}
}
// AddRule adds a rule to the rule set.
// ruleFunc is called once each time `Validate` is executed.
func AddRule(name string, ruleFunc RuleFunc) {
@ -59,6 +83,7 @@ func ReplaceRule(name string, ruleFunc RuleFunc) {
specifiedRules = result
}
// Deprecated: use ValidateWithRules instead.
func Validate(schema *Schema, doc *QueryDocument, rules ...Rule) gqlerror.List {
if rules == nil {
rules = specifiedRules
@ -74,7 +99,7 @@ func Validate(schema *Schema, doc *QueryDocument, rules ...Rule) gqlerror.List {
if len(errs) > 0 {
return errs
}
observers := &Events{}
observers := &core.Events{}
for i := range rules {
rule := rules[i]
rule.RuleFunc(observers, func(options ...ErrorOption) {
@ -91,3 +116,43 @@ func Validate(schema *Schema, doc *QueryDocument, rules ...Rule) gqlerror.List {
Walk(schema, doc, observers)
return errs
}
func ValidateWithRules(schema *Schema, doc *QueryDocument, rules *validatorrules.Rules) gqlerror.List {
if rules == nil {
rules = validatorrules.NewDefaultRules()
}
var errs gqlerror.List
if schema == nil {
errs = append(errs, gqlerror.Errorf("cannot validate as Schema is nil"))
}
if doc == nil {
errs = append(errs, gqlerror.Errorf("cannot validate as QueryDocument is nil"))
}
if len(errs) > 0 {
return errs
}
observers := &core.Events{}
var currentRules []Rule // nolint:prealloc // would require extra local refs for len
for name, ruleFunc := range rules.GetInner() {
currentRules = append(currentRules, Rule{Name: name, RuleFunc: ruleFunc})
// ensure deterministic order evaluation
sort.Sort(core.NameSorter(currentRules))
}
for _, currentRule := range currentRules {
currentRule.RuleFunc(observers, func(options ...ErrorOption) {
err := &gqlerror.Error{
Rule: currentRule.Name,
}
for _, o := range options {
o(err)
}
errs = append(errs, err)
})
}
Walk(schema, doc, observers)
return errs
}

View File

@ -1 +1 @@
1.23.9
1.23.10

View File

@ -9,6 +9,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
"go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@ -69,7 +71,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}

View File

@ -7,6 +7,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
"go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@ -69,7 +71,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}

View File

@ -7,6 +7,8 @@ import (
"unsafe"
"golang.org/x/sys/unix"
"go.etcd.io/bbolt/internal/common"
)
// flock acquires an advisory lock on a file descriptor.
@ -67,7 +69,7 @@ func mmap(db *DB, sz int) error {
// Save the original byte slice and convert to a byte array pointer.
db.dataref = b
db.data = (*[maxMapSize]byte)(unsafe.Pointer(&b[0]))
db.data = (*[common.MaxMapSize]byte)(unsafe.Pointer(&b[0]))
db.datasz = sz
return nil
}

3
vendor/go.etcd.io/bbolt/tx.go generated vendored
View File

@ -561,10 +561,13 @@ func (tx *Tx) writeMeta() error {
tx.meta.Write(p)
// Write the meta page to file.
tx.db.metalock.Lock()
if _, err := tx.db.ops.writeAt(buf, int64(p.Id())*int64(tx.db.pageSize)); err != nil {
tx.db.metalock.Unlock()
lg.Errorf("writeAt failed, pgid: %d, pageSize: %d, error: %v", p.Id(), tx.db.pageSize, err)
return err
}
tx.db.metalock.Unlock()
if !tx.db.NoSync || common.IgnoreNoSync {
// gofail: var beforeSyncMetaPage struct{}
if err := fdatasync(tx.db); err != nil {

23
vendor/modules.txt vendored
View File

@ -1,4 +1,4 @@
# github.com/99designs/gqlgen v0.17.75
# github.com/99designs/gqlgen v0.17.76
## explicit; go 1.23.0
github.com/99designs/gqlgen
github.com/99designs/gqlgen/api
@ -27,7 +27,7 @@ github.com/99designs/gqlgen/plugin/servergen
# github.com/KyleBanks/depth v1.2.1
## explicit
github.com/KyleBanks/depth
# github.com/Masterminds/semver/v3 v3.3.1
# github.com/Masterminds/semver/v3 v3.4.0
## explicit; go 1.21
github.com/Masterminds/semver/v3
# github.com/adhocore/gronx v1.19.6
@ -36,8 +36,8 @@ github.com/adhocore/gronx
# github.com/agnivade/levenshtein v1.2.1
## explicit; go 1.21
github.com/agnivade/levenshtein
# github.com/andybalholm/brotli v1.1.1
## explicit; go 1.13
# github.com/andybalholm/brotli v1.2.0
## explicit; go 1.22
github.com/andybalholm/brotli
github.com/andybalholm/brotli/matchfinder
# github.com/armon/go-metrics v0.4.1
@ -159,7 +159,7 @@ github.com/go-playground/locales/currency
# github.com/go-playground/universal-translator v0.18.1
## explicit; go 1.18
github.com/go-playground/universal-translator
# github.com/go-playground/validator/v10 v10.26.0
# github.com/go-playground/validator/v10 v10.27.0
## explicit; go 1.20
github.com/go-playground/validator/v10
# github.com/go-viper/mapstructure/v2 v2.3.0
@ -255,7 +255,7 @@ github.com/klauspost/compress/internal/snapref
github.com/klauspost/compress/s2
github.com/klauspost/compress/zstd
github.com/klauspost/compress/zstd/internal/xxhash
# github.com/klauspost/cpuid/v2 v2.2.10
# github.com/klauspost/cpuid/v2 v2.2.11
## explicit; go 1.22
github.com/klauspost/cpuid/v2
# github.com/labstack/echo/v4 v4.13.4
@ -328,7 +328,7 @@ github.com/minio/minio-go/v7/pkg/utils
# github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822
## explicit
github.com/munnerz/goautoneg
# github.com/philhofer/fwd v1.1.3-0.20240916144458-20a13a1f6b7c
# github.com/philhofer/fwd v1.2.0
## explicit; go 1.20
github.com/philhofer/fwd
# github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
@ -348,11 +348,11 @@ github.com/prometheus/client_golang/prometheus/promhttp/internal
# github.com/prometheus/client_model v0.6.2
## explicit; go 1.22.0
github.com/prometheus/client_model/go
# github.com/prometheus/common v0.64.0
# github.com/prometheus/common v0.65.0
## explicit; go 1.23.0
github.com/prometheus/common/expfmt
github.com/prometheus/common/model
# github.com/prometheus/procfs v0.16.1
# github.com/prometheus/procfs v0.17.0
## explicit; go 1.23.0
github.com/prometheus/procfs
github.com/prometheus/procfs/internal/fs
@ -416,7 +416,7 @@ github.com/valyala/bytebufferpool
# github.com/valyala/fasttemplate v1.2.2
## explicit; go 1.12
github.com/valyala/fasttemplate
# github.com/vektah/gqlparser/v2 v2.5.28
# github.com/vektah/gqlparser/v2 v2.5.30
## explicit; go 1.22
github.com/vektah/gqlparser/v2
github.com/vektah/gqlparser/v2/ast
@ -424,6 +424,7 @@ github.com/vektah/gqlparser/v2/gqlerror
github.com/vektah/gqlparser/v2/lexer
github.com/vektah/gqlparser/v2/parser
github.com/vektah/gqlparser/v2/validator
github.com/vektah/gqlparser/v2/validator/core
github.com/vektah/gqlparser/v2/validator/rules
# github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb
## explicit
@ -452,7 +453,7 @@ github.com/zeebo/blake3/internal/alg/hash/hash_avx2
github.com/zeebo/blake3/internal/alg/hash/hash_pure
github.com/zeebo/blake3/internal/consts
github.com/zeebo/blake3/internal/utils
# go.etcd.io/bbolt v1.4.1
# go.etcd.io/bbolt v1.4.2
## explicit; go 1.23
go.etcd.io/bbolt
go.etcd.io/bbolt/errors