Compare commits

..

117 Commits

Author SHA1 Message Date
Ingo Oppermann
8b66753a27
Upgrade to CUDA 12.9.1 2026-02-25 14:03:16 +01:00
Ingo Oppermann
d93ab0e92b
Upgrade alpine to 3.23, golang to 1.26 2026-02-25 13:52:36 +01:00
Ingo Oppermann
74cd623377
Upgrade to ffmpeg 7.1.1 2025-12-29 08:59:28 +02:00
Ingo Oppermann
41c505ad43
Bump golang to 1.25 2025-12-09 15:10:55 +01:00
Ingo Oppermann
17a73c9f95
Build latest vod 2025-07-17 22:11:45 +02:00
Ingo Oppermann
5a533022f2
Build specific commit 2025-07-17 17:14:56 +02:00
Ingo Oppermann
a5c2e79253
Build specific commit 2025-07-17 16:46:30 +02:00
Ingo Oppermann
9a7f357e30
Update go version 2025-06-19 16:19:37 +02:00
Ingo Oppermann
07221a2f0b
Upgrade to alpine3.21 2025-01-29 12:20:56 +01:00
Ingo Oppermann
37b99e7752
Remove ubuntu22 build, remove cuda11 builds, bundle with ffmpeg7.1 2024-11-29 11:45:20 +01:00
Ingo Oppermann
6f3e2caf0f
Build with go1.23 2024-10-01 15:19:58 +02:00
Ingo Oppermann
2a8b01feac
Make cuda12 image latest 2024-09-26 15:03:59 +02:00
Ingo Oppermann
65beedd281
Upgrade to ubuntu noble, add cuda 12 bundle 2024-09-26 14:13:59 +02:00
Ingo Oppermann
ca6dba7259
Add ubuntu build for vod branch 2024-09-24 11:47:25 +02:00
Ingo Oppermann
ddba7bbf74
Upgrade base image to alpine3.20 2024-07-11 12:22:50 +02:00
Ingo Oppermann
6872ba0498
Merge branch 'main' into dev 2024-06-07 11:47:03 +02:00
Ingo Oppermann
69d3155176
Remove alpine3.16 builds for vod branch 2024-06-07 11:45:06 +02:00
Ingo Oppermann
18fc8abe62
Update changelog, bump version to 16.16.0 2024-06-07 11:37:25 +02:00
Ingo Oppermann
32b5a83fa9
Fix datarhei/restreamer#759 2024-06-04 17:51:49 +02:00
Ingo Oppermann
8bc84adc2b
Upgrade RTMP dependency 2024-05-30 22:53:20 +02:00
Ingo Oppermann
609bce569b
Don't report EOF as error in RTMP server, update dependency 2024-05-29 16:33:32 +02:00
Ingo Oppermann
d6a80c28e5
Add ConnectionIdleTimeout to RTMP server 2024-05-29 16:16:10 +02:00
Ingo Oppermann
ad8d214805
Add WithLevel() to Logger interface 2024-05-29 15:51:51 +02:00
Ingo Oppermann
8ae7ba8dfe
Upgrade dependencies 2024-05-28 14:26:54 +02:00
Ingo Oppermann
e271a0257b
Update dependencies 2024-05-15 14:59:04 +02:00
Ingo Oppermann
4739958e12
Fix wrong log output when receiving a RTMP stream 2024-04-19 11:36:02 +02:00
Ingo Oppermann
e35c3dead3
Update dependencies 2024-04-15 16:22:39 +02:00
Ingo Oppermann
28d9ae78c7
Cosmetics 2024-04-05 16:01:30 +02:00
Ingo Oppermann
8370b5da8d
Create empty session registry if none is provided 2024-04-05 16:01:19 +02:00
Ingo Oppermann
793414b4e3
Skip handling if session collectors are nil 2024-04-05 15:59:47 +02:00
Ingo Oppermann
781c98f2be
Remove cache layer from workflow 2024-04-05 12:12:23 +02:00
Ingo Oppermann
7ed99f1ae7
Merge branch 'main' into dev 2024-04-05 11:46:04 +02:00
Ingo Oppermann
cd71514b48
Upgrade versions in workflow 2024-04-05 11:44:53 +02:00
Ingo Oppermann
0747de2d98
Add test if import and migrate script exist 2024-04-04 11:10:10 +02:00
Ingo Oppermann
cfff53bab4
Bump version to 16.15.0, update changelog 2024-04-03 14:27:58 +02:00
Ingo Oppermann
a057573dbf
Fix cross-compile build 2024-04-02 16:14:54 +02:00
Ingo Oppermann
9b09d17083
Merge branch 'main' into dev 2024-04-02 14:56:04 +02:00
Ingo Oppermann
263504574f
Use cross-compilation instead of emulation 2024-04-02 14:32:47 +02:00
Ingo Oppermann
755b03581e
Merge branch 'main' into dev 2024-04-02 14:20:03 +02:00
Ingo Oppermann
c78a9cb6d9
Fix memfs concurrent read and write performance 2024-03-15 15:25:25 +01:00
Ingo Oppermann
a4b906e855
Add tests 2024-03-15 14:26:10 +01:00
Ingo Oppermann
45e5f4cb0a
Add tesr for memfs 2024-03-15 14:18:39 +01:00
Ingo Oppermann
931ff7c91f
Add benchmark 2024-03-15 11:46:35 +01:00
Ingo Oppermann
9d8d59c4a2
Fix concurrent accesses 2024-03-15 11:45:50 +01:00
Ingo Oppermann
f987d5d577
Fix typo in test name 2024-03-14 12:03:38 +01:00
Ingo Oppermann
bfe53a2461
Fix placeholder parsing 2024-03-14 12:03:04 +01:00
Ingo Oppermann
e3138d6e6d
Remove cache layers 2024-03-13 15:14:15 +01:00
Ingo Oppermann
9173f7dcc1
Add armv7 build for vod branch 2024-03-13 11:10:05 +01:00
Ingo Oppermann
ccd9a5fbc1
Fix maintaining the metadata on process config update (datarhei/restreamer#698) 2024-03-08 16:33:25 +01:00
Ingo Oppermann
40a98ca70e
Merge branch 'main' into dev 2024-02-29 15:15:29 +01:00
Ingo Oppermann
06b959ca83
Upgrade actions, go version 2024-02-29 15:12:37 +01:00
Ingo Oppermann
e8ca91d214
Update dependencies 2024-02-29 14:50:38 +01:00
Ingo Oppermann
bcf9efcac6
Autodetect version for bundle 2024-02-26 10:20:31 +01:00
Ingo Oppermann
6a62248b12
Remove schedule from main, add schedule to dev 2024-02-26 10:13:23 +01:00
Ingo Oppermann
72c31af0d4
Automatically detect version 2024-02-26 10:10:01 +01:00
Ingo Oppermann
e6726dd537
Require order of jobs 2024-02-26 10:09:37 +01:00
Ingo Oppermann
32a7916359
Merge branch 'main' into dev 2024-02-21 21:48:50 +01:00
Ingo Oppermann
f41ce2820c
Fix bundle step in dev base 2024-02-21 21:48:40 +01:00
Ingo Oppermann
4c584a2a0b
Merge branch 'main' into dev 2024-02-21 21:44:14 +01:00
Ingo Oppermann
9edd24b38a
Update action versions 2024-02-21 21:27:19 +01:00
Ingo Oppermann
819aafda29
Reset layer cache experiments 2024-02-21 21:22:51 +01:00
Ingo Oppermann
4bc4b6be1d
Enable ubuntu bundles 2024-02-21 21:22:28 +01:00
Ingo Oppermann
9381952175
Rename workflow files 2024-02-21 21:19:10 +01:00
Ingo Oppermann
e487d5e095
Remove deprecated workflows 2024-02-21 21:18:00 +01:00
Ingo Oppermann
f8ce102cf0
Fix matrix build 2024-02-21 21:14:06 +01:00
Ingo Oppermann
4c7b3a6e7c
Enable legacy tags, remove ffmpeg6 bundles 2024-02-21 21:12:03 +01:00
Ingo Oppermann
c4fb342cf3
Add build matrix workflow for bundles 2024-02-21 20:46:30 +01:00
Ingo Oppermann
06e8dc55ed
Upgrade actions 2024-02-21 12:56:34 +01:00
Ingo Oppermann
5c33864ff9
Fix buildx caching 2024-02-21 11:30:02 +01:00
Ingo Oppermann
3d81776720
Fix buildx caching 2024-02-21 11:15:29 +01:00
Ingo Oppermann
b5fad743b5
Add matrix entry for older alpine version 2024-02-20 20:21:32 +01:00
Ingo Oppermann
7f438fa7a8
Rename workflows 2024-02-20 16:41:57 +01:00
Ingo Oppermann
d20aadc8b6
Remove deprecated workflows 2024-02-20 16:40:51 +01:00
Ingo Oppermann
456cca720d
Enable legacy tags, rename workflow 2024-02-20 16:37:37 +01:00
Ingo Oppermann
c40b9b3b08
Add build matrix for main branch 2024-02-20 16:13:55 +01:00
Ingo Oppermann
6dff92f701
Remove deprecated workflow 2024-02-20 16:13:12 +01:00
Ingo Oppermann
c3cd48c67a
Remove unneeded build 2024-02-20 15:39:53 +01:00
Ingo Oppermann
4caaa1d110
Add base matrix build for vod branch 2024-02-20 15:32:45 +01:00
Ingo Oppermann
ea4c660177
Remove deprecated ubuntu dev workflow 2024-02-20 15:32:03 +01:00
Ingo Oppermann
b4904eabe7
Remove deprecated workflows 2024-02-20 15:26:53 +01:00
Ingo Oppermann
51afa34ed6
Update dev workflows 2024-02-20 15:26:11 +01:00
Ingo Oppermann
647f625a55
Update action versions 2024-02-20 15:20:32 +01:00
Ingo Oppermann
96d7100e19
Update bundle matrix build for dev 2024-02-20 15:11:37 +01:00
Ingo Oppermann
72b3df2e1d
Fix missing workflow_call event 2024-02-20 14:47:40 +01:00
Ingo Oppermann
ad29691201
Add matrix build for dev 2024-02-20 14:40:49 +01:00
Ingo Oppermann
4917deb187
Update dev bundles 2024-02-19 15:15:34 +01:00
Ingo Oppermann
17cd1a6dab
Update base alpine dev workflow 2024-02-19 15:01:11 +01:00
Ingo Oppermann
3d0479dacb
Update base alpine workflow 2024-02-19 14:42:45 +01:00
Ingo Oppermann
85b2c9b53d
Provide filesystem for JSON store 2024-02-12 09:56:40 +01:00
Ingo Oppermann
00c5ad3883
Add migrating to ffmpeg 6 2024-02-09 17:15:41 +01:00
Ingo Oppermann
5a4d00f969
Add env files for dev workflows 2024-02-09 14:05:13 +01:00
Ingo Oppermann
2bf2e4b27a
Merge branch 'main' into dev 2024-02-08 14:25:17 +01:00
Ingo Oppermann
d8e81e4f2d
Fix missing process data if process has been deleted meanwhile 2024-02-08 12:16:55 +01:00
Jan Stabenow
07cd18a95a Fix alpine version build 2024-02-02 17:16:06 +01:00
Jan Stabenow
64010da7a4 Mod updates go-build 2024-02-02 14:45:13 +01:00
Ingo Oppermann
f7921a034d
Merge branch 'main' into dev 2024-02-02 13:52:50 +01:00
Ingo Oppermann
9b40b30057
Update changelog 2024-02-02 10:39:10 +01:00
Ingo Oppermann
a3156d3176
Bump version to 16.14.0 2024-01-26 13:12:11 +01:00
Ingo Oppermann
0bd118807b
Fix race condition 2024-01-15 10:42:08 +01:00
Ingo Oppermann
4bffbea48e
Add support for SRTv4 clients
Older clients (e.g. Makito encoders) don't support streamid and
couldn't send a stream. This adds support for v4 clients. Such
clients will always be publishing to the resource equal to the
client address, e.g. 192.168.1.42:63793

Clients that want to play this stream are required to send a
streamid with the respective resource name.
2024-01-12 15:38:32 +01:00
Ingo Oppermann
0ad1ad34d8
Fix require positive persist interval 2024-01-12 15:37:51 +01:00
Ingo Oppermann
574ebdf277
Update dependencies
This update includes a newer version of the RTMP server that supports
the enhances RTMP specification, i.e. HEVC, VP9, and AV1.
2024-01-12 12:35:07 +01:00
Jan Stabenow
477e8c6c32 Mod bump docker tag 2023-12-01 17:21:00 +01:00
Jan Stabenow
d1f3538217 Fix alpine dep. 2023-12-01 16:53:17 +01:00
Jan Stabenow
f7dc0969e5 Mod docker images 2023-12-01 14:33:37 +01:00
Ingo Oppermann
7f2008ae20
Merge branch 'main' into dev 2023-12-01 14:10:52 +01:00
Ingo Oppermann
e3d206d613
Bump version to 16.13.1 2023-12-01 12:24:26 +01:00
Ingo Oppermann
46a44e1a59
Update datarhei/gosrt dependency 2023-11-27 14:11:10 +01:00
Ingo Oppermann
ff698ff50c
Update CHANGELOG 2023-09-22 15:39:35 +02:00
Ingo Oppermann
c3b63c4480
Fix sized filesystem
If purging is enabled, overwriting a file with a file of the same
or smaller size will not result in an error.

It is now possible to change the purging mode on an existing sized
filesystem.
2023-09-22 14:16:46 +02:00
Jan Stabenow
a2e457787d
Create build_base_alpine_vod.yaml 2023-09-11 22:38:10 +02:00
Ingo Oppermann
0f4c88be39
Fix default search paths for config file 2023-09-07 16:23:15 +02:00
Ingo Oppermann
f6d5064211
Update dependencies 2023-09-04 16:36:53 +02:00
Ingo Oppermann
7fa68778b7
Update datarhei/gosrt dependency 2023-07-20 21:18:04 +02:00
Ingo Oppermann
496722c88a
Fix 509 return code if non-existing stream is requested 2023-05-26 21:24:42 +02:00
Ingo Oppermann
4b2b6a57a1
Fix calling Wait after process has been read 2023-05-24 16:27:55 +02:00
Ingo Oppermann
8c3b570ecc
Fix log transfer 2023-05-15 21:31:58 +02:00
2086 changed files with 206649 additions and 99058 deletions

97
.github/workflows/build_base.yaml vendored Normal file
View File

@ -0,0 +1,97 @@
name: "Build main base"
on:
workflow_dispatch:
workflow_call:
push:
branches-ignore:
- "**"
jobs:
versions:
runs-on: ubuntu-latest
outputs:
coreversion: ${{ steps.core.outputs.version }}
steps:
- name: Checkout core repo
uses: actions/checkout@v4
with:
repository: datarhei/core
path: ./core
- name: Get latest version from core
id: core
run: |
echo "version=$(cat ./core/app/version.go | grep -E -o '(Major|Minor|Patch): [0-9]+,' | sed -E 's/^.*: ([0-9]+),.*$/\1/g' | paste -sd '.' - )" >> "$GITHUB_OUTPUT"
- name: Show versions
run: |
echo "core: ${{ steps.core.outputs.version }}"
docker:
needs: versions
runs-on: [self-hosted]
strategy:
matrix:
include:
- core: ${{ needs.versions.outputs.coreversion }}
os: alpine
os_version: "3.20"
golang: golang:1.23-alpine3.20
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
branch: main
latest: yes
- core: ${{ needs.versions.outputs.coreversion }}
os: ubuntu
os_version: "22.04"
golang: golang:1.23-alpine3.20
platforms: linux/amd64
branch: main
latest: yes
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
datarhei/base
tags: |
type=raw,value=core${{ matrix.core }}-${{ matrix.os }}${{ matrix.os_version }}
type=raw,value=${{ matrix.os }}-core-${{ matrix.os_version }}-${{ matrix.core}},enable=${{ matrix.latest == 'yes' }}
type=raw,value=${{ matrix.os }}-core-latest,enable=${{ matrix.latest == 'yes' }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v5
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ matrix.os }}:${{ matrix.os_version }}
GOLANG_IMAGE=${{ matrix.golang }}
platforms: ${{ matrix.platforms }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@ -1,62 +0,0 @@
name: "Build base:alpine-core"
on:
workflow_dispatch:
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.alpine.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
push: true
tags: |
datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
datarhei/base:${{ env.OS_NAME }}-core-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,89 +0,0 @@
name: 'Build base:alpine-core:dev'
on:
workflow_dispatch:
workflow_call:
push:
branches:
- dev
jobs:
docker:
runs-on: [self-hosted]
strategy:
matrix:
branch:
- dev
steps:
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{ matrix.branch }}
- uses: actions-ecosystem/action-get-latest-tag@v1
id: get-latest-tag
with:
semver_only: true
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
datarhei/base
tags: |
type=raw,value=${{ env.OS_NAME }}-core-dev,enable=${{ matrix.branch == 'dev' }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
dockerBundle:
uses: ./.github/workflows/build_bundle_dev.yaml
secrets: inherit
dockerBundleRpi:
uses: ./.github/workflows/build_bundle-rpi_dev.yaml
secrets: inherit

77
.github/workflows/build_base_dev.yaml vendored Normal file
View File

@ -0,0 +1,77 @@
name: "Build dev base"
on:
workflow_dispatch:
workflow_call:
schedule:
- cron: "7 4 * * *"
push:
branches:
- dev
jobs:
docker:
runs-on: [self-hosted]
strategy:
matrix:
include:
- os: alpine
os_version: "3.23"
golang: golang:1.26-alpine3.23
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
branch: dev
- os: ubuntu
os_version: "24.04"
golang: golang:1.26-alpine3.23
platforms: linux/amd64
branch: dev
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
datarhei/base
tags: |
type=raw,value=core-${{ matrix.branch }}-${{ matrix.os }}${{ matrix.os_version }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v5
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ matrix.os }}:${{ matrix.os_version }}
GOLANG_IMAGE=${{ matrix.golang }}
platforms: ${{ matrix.platforms }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
bundle:
needs: docker
uses: ./.github/workflows/build_bundle_dev.yaml
secrets: inherit

View File

@ -1,62 +0,0 @@
name: 'Build base:ubuntu-core'
on:
workflow_dispatch:
push:
branches-ignore:
- '**'
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.ubuntu.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: |
datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
datarhei/base:${{ env.OS_NAME }}-core-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,83 +0,0 @@
name: 'Build datarhei/base:ubuntu-core-dev'
on:
workflow_dispatch:
workflow_call:
push:
branches:
- dev
jobs:
docker:
runs-on: [self-hosted]
strategy:
matrix:
branch:
- dev
steps:
- name: Checkout
uses: actions/checkout@v2
with:
ref: ${{ matrix.branch }}
- uses: actions-ecosystem/action-get-latest-tag@v1
id: get-latest-tag
with:
semver_only: true
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.ubuntu.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: |
datarhei/base
tags: |
type=raw,value=${{ env.OS_NAME }}-core-dev,enable=${{ matrix.branch == 'dev' }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

70
.github/workflows/build_base_vod.yaml vendored Normal file
View File

@ -0,0 +1,70 @@
name: "Build vod base"
on:
workflow_dispatch:
workflow_call:
push:
branches:
- vod
jobs:
docker:
runs-on: [self-hosted]
strategy:
matrix:
include:
- os: alpine
os_version: "3.23"
golang: golang:1.26-alpine3.23
platforms: linux/amd64,linux/arm64,linux/arm/v7
branch: vod
- os: ubuntu
os_version: "24.04"
golang: golang:1.26-alpine3.23
platforms: linux/amd64
branch: vod
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
datarhei/base
tags: |
type=raw,value=core-${{ matrix.branch }}-${{ matrix.os }}${{ matrix.os_version }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v5
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ matrix.os }}:${{ matrix.os_version }}
GOLANG_IMAGE=${{ matrix.golang }}
platforms: ${{ matrix.platforms }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@ -1,71 +0,0 @@
name: 'Build core:cuda'
on:
workflow_dispatch:
schedule:
- cron: '7 5 * * *'
push:
branches-ignore:
- '**'
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.ubuntu.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.cuda.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-cuda-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}-${{ env.CUDA_VERSION }}
platforms: linux/amd64
push: true
tags: |
datarhei/core:cuda-${{ env.CORE_VERSION }}
datarhei/core:cuda-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,71 +0,0 @@
name: "Build core:rpi"
on:
workflow_dispatch:
schedule:
- cron: "7 5 * * *"
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.alpine.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.bundle.rpi.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-rpi-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/arm/v7,linux/arm64
push: true
tags: |
datarhei/core:rpi-${{ env.CORE_VERSION }}
datarhei/core:rpi-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,69 +0,0 @@
name: 'Build datarhei/core:rpi-dev'
on:
workflow_dispatch:
workflow_call:
push:
branches-ignore:
- '**'
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.rpi.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-dev
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-rpi-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/arm64,linux/arm/v7
push: true
tags: |
datarhei/core:rpi-dev
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,71 +0,0 @@
name: 'Build core:vaapi'
on:
workflow_dispatch:
schedule:
- cron: '7 5 * * *'
push:
branches-ignore:
- '**'
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.ubuntu.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.vaapi.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-vaapi-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/amd64
push: true
tags: |
datarhei/core:vaapi-${{ env.CORE_VERSION }}
datarhei/core:vaapi-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,71 +1,116 @@
name: 'Build core'
name: "Build main bundles"
on:
workflow_dispatch:
schedule:
- cron: '7 5 * * *'
push:
branches-ignore:
- '**'
workflow_dispatch:
workflow_call:
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
versions:
runs-on: ubuntu-latest
outputs:
coreversion: ${{ steps.core.outputs.version }}
steps:
- name: Checkout core repo
uses: actions/checkout@v4
with:
repository: datarhei/core
path: ./core
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Get latest version from core
id: core
run: |
echo "version=$(cat ./core/app/version.go | grep -E -o '(Major|Minor|Patch): [0-9]+,' | sed -E 's/^.*: ([0-9]+),.*$/\1/g' | paste -sd '.' - )" >> "$GITHUB_OUTPUT"
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Show versions
run: |
echo "core: ${{ steps.core.outputs.version }}"
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
docker:
needs: versions
runs-on: [self-hosted]
strategy:
matrix:
include:
- core: ${{ needs.versions.outputs.coreversion }}
core_os: alpine3.20
ffmpeg: "6.1.1"
ffmpeg_os: alpine3.20
platforms: linux/amd64,linux/arm64,linux/arm/v7
branch: main
prefix:
latest: yes
- core: ${{ needs.versions.outputs.coreversion }}
core_os: alpine3.20
ffmpeg: "6.1.1-rpi"
ffmpeg_os: alpine3.20
platforms: linux/arm64,linux/arm/v7
branch: main
prefix: rpi-
latest: yes
- core: ${{ needs.versions.outputs.coreversion }}
core_os: ubuntu22.04
ffmpeg: "6.1.1-vaapi"
ffmpeg_os: ubuntu22.04
platforms: linux/amd64
branch: main
prefix: vaapi-
latest: yes
- core: ${{ needs.versions.outputs.coreversion }}
core_os: ubuntu22.04
ffmpeg: "6.1.1-cuda"
ffmpeg_os: ubuntu22.04
ffmpeg_tags: "-cuda11.7.1"
platforms: linux/amd64
branch: main
prefix: cuda-
latest: yes
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
datarhei/core
tags: |
type=raw,value=${{ matrix.core }}-ffmpeg${{ matrix.ffmpeg }}${{ matrix.ffmpeg_tags }}-${{ matrix.core_os }}
type=raw,value=${{ matrix.prefix }}${{ matrix.core }},enable=${{ matrix.latest == 'yes' }}
type=raw,value=${{ matrix.prefix }}latest,enable=${{ matrix.latest == 'yes' }}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: |
datarhei/core:${{ env.CORE_VERSION }}
datarhei/core:latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v5
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:core${{ matrix.core }}-${{ matrix.core_os }}
FFMPEG_IMAGE=datarhei/base:ffmpeg${{ matrix.ffmpeg }}-${{ matrix.ffmpeg_os}}${{ matrix.ffmpeg_tags }}
platforms: ${{ matrix.platforms }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@ -1,69 +1,90 @@
name: 'Build datarhei/core:dev'
name: "Build dev bundles"
on:
workflow_dispatch:
workflow_call:
push:
branches-ignore:
- '**'
workflow_dispatch:
workflow_call:
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
docker:
runs-on: [self-hosted]
strategy:
matrix:
include:
- core_os: alpine3.23
ffmpeg: "7.1.1"
ffmpeg_os: alpine3.23
platforms: linux/amd64,linux/arm64,linux/arm/v7
branch: dev
prefix:
latest: yes
- core_os: alpine3.23
ffmpeg: "7.1.1-rpi"
ffmpeg_os: alpine3.23
platforms: linux/arm64,linux/arm/v7
branch: dev
prefix: rpi-
latest: yes
- core_os: ubuntu24.04
ffmpeg: "7.1.1-vaapi"
ffmpeg_os: ubuntu24.04
platforms: linux/amd64
branch: dev
prefix: vaapi-
latest: yes
- core_os: ubuntu24.04
ffmpeg: "7.1.1-cuda"
ffmpeg_os: ubuntu24.04
ffmpeg_tags: "-cuda12.9.1"
platforms: linux/amd64
branch: dev
prefix: cuda-
latest: yes
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
steps:
- name: Checkout
uses: actions/checkout@v4
with:
ref: ${{ matrix.branch }}
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
datarhei/core
tags: |
type=raw,value=${{ matrix.branch }}-ffmpeg${{ matrix.ffmpeg }}${{ matrix.ffmpeg_tags }}-${{ matrix.core_os }}
type=raw,value=${{ matrix.prefix }}${{ matrix.branch }},enable=${{ matrix.latest == 'yes' }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-dev
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true
tags: |
datarhei/core:dev
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
- name: Build Multi-Arch
uses: docker/build-push-action@v5
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:core-${{ matrix.branch }}-${{ matrix.core_os }}
FFMPEG_IMAGE=datarhei/base:ffmpeg${{ matrix.ffmpeg }}-${{ matrix.ffmpeg_os}}${{ matrix.ffmpeg_tags }}
platforms: ${{ matrix.platforms }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

View File

@ -6,16 +6,16 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
fetch-depth: 2
- uses: actions/setup-go@v2
- uses: actions/setup-go@v5
with:
go-version: "1.19"
go-version: "1.22"
- name: Run coverage
run: go test -coverprofile=coverage.out -covermode=atomic -v ./...
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v2
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
files: coverage.out

View File

@ -1,5 +0,0 @@
# CORE ALPINE BASE IMAGE
OS_NAME=alpine
OS_VERSION=3.16
GOLANG_IMAGE=golang:1.20-alpine3.16
CORE_VERSION=16.13.0

View File

@ -1,3 +0,0 @@
# CORE NVIDIA CUDA BUNDLE
FFMPEG_VERSION=5.1.3
CUDA_VERSION=11.7.1

View File

@ -1,2 +0,0 @@
# CORE BUNDLE
FFMPEG_VERSION=5.1.3

View File

@ -1,2 +0,0 @@
# CORE RASPBERRY-PI BUNDLE
FFMPEG_VERSION=5.1.3

View File

@ -1,2 +0,0 @@
# CORE BUNDLE
FFMPEG_VERSION=5.1.3

View File

@ -1,5 +0,0 @@
# CORE UBUNTU BASE IMAGE
OS_NAME=ubuntu
OS_VERSION=20.04
GOLANG_IMAGE=golang:1.20-alpine3.16
CORE_VERSION=16.13.0

View File

@ -1,5 +1,41 @@
# Core
### Core v16.15.0 > v16.16.0
- Add ConnectionIdleTimeout to RTMP server
- Add WithLevel() to Logger interface
- Fix datarhei/restreamer#759
- Fix various RTMP bugs
- Fix wrong log output when receiving a RTMP stream
- Fix skipping session handling if collectors are nil
- Update dependencies
### Core v16.14.0 > v16.15.0
- Add migrating to ffmpeg 6
- Fix missing process data if process has been deleted meanwhile
- Fix maintaining the metadata on process config update (datarhei/restreamer#698)
- Fix placeholder parsing
- Fix concurrent memfs accesses
- Fix memfs concurrent read and write performance
### Core v16.13.1 > v16.14.0
- Add support for SRTv4 clients
- Add support for Enhanced RTMP in internal RTMP server
- Fix require positive persist interval (session)
- Fix race condition (process)
- Update dependencies
### Core v16.13.0 > v16.13.1
- Fix transfer of reports to updated process
- Fix calling Wait after process has been read
- Fix 509 return code if non-existing stream is requested
- Fix default search paths for config file
- Fix sized filesystem
- Update dependencies
### Core v16.12.0 > v16.13.0
- Add updated_at field in process infos

View File

@ -1,19 +1,23 @@
ARG GOLANG_IMAGE=golang:1.20-alpine3.16
ARG GOLANG_IMAGE=golang:1.22-alpine3.19
ARG BUILD_IMAGE=alpine:3.19
ARG BUILD_IMAGE=alpine:3.16
# Cross-Compilation
# https://www.docker.com/blog/faster-multi-platform-builds-dockerfile-cross-compilation-guide/
FROM --platform=$BUILDPLATFORM $GOLANG_IMAGE AS builder
FROM $GOLANG_IMAGE as builder
ARG TARGETOS TARGETARCH TARGETVARIANT
ENV GOOS=$TARGETOS GOARCH=$TARGETARCH GOARM=$TARGETVARIANT
COPY . /dist/core
RUN apk add \
git \
make && \
cd /dist/core && \
go version && \
make release_linux && \
make import_linux && \
make ffmigrate_linux
make
RUN cd /dist/core && \
make release && \
make import && \
make ffmigrate
FROM $BUILD_IMAGE

View File

@ -3,6 +3,7 @@ SHORTCOMMIT := $(shell echo $(COMMIT) | head -c 7)
BRANCH := $(shell if [ -d .git ]; then git rev-parse --abbrev-ref HEAD; else echo "master"; fi)
BUILD := $(shell date -u "+%Y-%m-%dT%H:%M:%SZ")
BINSUFFIX := $(shell if [ "${GOOS}" -a "${GOARCH}" ]; then echo "-${GOOS}-${GOARCH}"; else echo ""; fi)
GOARM := $(subst v,$e,$(GOARM))
all: build
@ -15,11 +16,7 @@ init:
## build: Build core (default)
build:
CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o core${BINSUFFIX} -trimpath
# github workflow workaround
build_linux:
CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o core -trimpath
CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} GOARM=${GOARM} go build -o core$(BINSUFFIX) -trimpath
## swagger: Update swagger API documentation (requires github.com/swaggo/swag)
swagger:
@ -69,19 +66,11 @@ lint:
## import: Build import binary
import:
cd app/import && CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o ../../import -trimpath -ldflags="-s -w"
# github workflow workaround
import_linux:
cd app/import && CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o ../../import -trimpath -ldflags="-s -w"
cd app/import && CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} GOARM=$(GOARM) go build -o ../../import -trimpath -ldflags="-s -w"
## ffmigrate: Build ffmpeg migration binary
ffmigrate:
cd app/ffmigrate && CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o ../../ffmigrate -trimpath -ldflags="-s -w"
# github workflow workaround
ffmigrate_linux:
cd app/ffmigrate && CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o ../../ffmigrate -trimpath -ldflags="-s -w"
cd app/ffmigrate && CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} GOARM=$(GOARM) go build -o ../../ffmigrate -trimpath -ldflags="-s -w"
## coverage: Generate code coverage analysis
coverage:
@ -94,11 +83,7 @@ commit: vet fmt lint test build
## release: Build a release binary of core
release:
CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o core -trimpath -ldflags="-s -w -X github.com/datarhei/core/v16/app.Commit=$(COMMIT) -X github.com/datarhei/core/v16/app.Branch=$(BRANCH) -X github.com/datarhei/core/v16/app.Build=$(BUILD)"
# github workflow workaround
release_linux:
CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o core -trimpath -ldflags="-s -w -X github.com/datarhei/core/v16/app.Commit=$(COMMIT) -X github.com/datarhei/core/v16/app.Branch=$(BRANCH) -X github.com/datarhei/core/v16/app.Build=$(BUILD)"
CGO_ENABLED=0 GOOS=${GOOS} GOARCH=${GOARCH} GOARM=$(GOARM) go build -o core -trimpath -ldflags="-s -w -X github.com/datarhei/core/v16/app.Commit=$(COMMIT) -X github.com/datarhei/core/v16/app.Branch=$(BRANCH) -X github.com/datarhei/core/v16/app.Build=$(BUILD)"
## docker: Build standard Docker image
docker:

View File

@ -426,7 +426,7 @@ func (a *api) start() error {
} else {
a.memfs.SetMetadata("base", baseMemFS.String())
if sizedfs, ok := a.memfs.(fs.SizedFilesystem); ok {
sizedfs.Resize(cfg.Storage.Memory.Size * 1024 * 1024)
sizedfs.Resize(cfg.Storage.Memory.Size*1024*1024, cfg.Storage.Memory.Purge)
}
}
@ -874,12 +874,13 @@ func (a *api) start() error {
a.log.logger.rtmp = a.log.logger.core.WithComponent("RTMP").WithField("address", cfg.RTMP.Address)
config := rtmp.Config{
Addr: cfg.RTMP.Address,
TLSAddr: cfg.RTMP.AddressTLS,
App: cfg.RTMP.App,
Token: cfg.RTMP.Token,
Logger: a.log.logger.rtmp,
Collector: a.sessions.Collector("rtmp"),
Addr: cfg.RTMP.Address,
TLSAddr: cfg.RTMP.AddressTLS,
App: cfg.RTMP.App,
Token: cfg.RTMP.Token,
Logger: a.log.logger.rtmp,
ConnectionIdleTimeout: 10 * time.Second,
Collector: a.sessions.Collector("rtmp"),
}
if cfg.RTMP.EnableTLS {

View File

@ -4,6 +4,9 @@ import (
"fmt"
"os"
"regexp"
"strconv"
"strings"
"time"
cfgstore "github.com/datarhei/core/v16/config/store"
cfgvars "github.com/datarhei/core/v16/config/vars"
@ -11,6 +14,7 @@ import (
"github.com/datarhei/core/v16/io/file"
"github.com/datarhei/core/v16/io/fs"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/restream/app"
"github.com/datarhei/core/v16/restream/store"
"github.com/Masterminds/semver/v3"
@ -18,10 +22,7 @@ import (
)
func main() {
logger := log.New("Migration").WithOutput(log.NewConsoleWriter(os.Stderr, log.Linfo, true)).WithFields(log.Fields{
"from": "ffmpeg4",
"to": "ffmpeg5",
})
logger := log.New("Migration").WithOutput(log.NewConsoleWriter(os.Stderr, log.Linfo, true))
configfile := cfgstore.Location(os.Getenv("CORE_CONFIGFILE"))
@ -33,12 +34,13 @@ func main() {
os.Exit(1)
}
if err := doMigration(logger, configstore); err != nil {
if err := doMigration(logger, diskfs, configstore); err != nil {
logger.Error().WithError(err).Log("Migration failed")
os.Exit(1)
}
}
func doMigration(logger log.Logger, configstore cfgstore.Store) error {
func doMigration(logger log.Logger, fs fs.Filesystem, configstore cfgstore.Store) error {
if logger == nil {
logger = log.New("")
}
@ -82,17 +84,6 @@ func doMigration(logger log.Logger, configstore cfgstore.Store) error {
return fmt.Errorf("parsing FFmpeg version failed: %w", err)
}
// The current FFmpeg version is 4. Nothing to do.
if version.Major() == 4 {
return nil
}
if version.Major() != 5 {
err := fmt.Errorf("unknown FFmpeg version found: %d", version.Major())
logger.Error().WithError(err).Log("Unsupported FFmpeg version found")
return fmt.Errorf("unsupported FFmpeg version found: %w", err)
}
// Check if there's a DB file
dbFilepath := cfg.DB.Dir + "/db.json"
@ -102,29 +93,14 @@ func doMigration(logger log.Logger, configstore cfgstore.Store) error {
return nil
}
// Check if we already have a backup
backupFilepath := cfg.DB.Dir + "/db_ff4.json"
if _, err = os.Stat(backupFilepath); err == nil {
// Yes, we have a backup. The migration already happened
logger.Info().WithField("backup", backupFilepath).Log("Migration already done")
return nil
}
// Create a backup
if err := file.Copy(dbFilepath, backupFilepath); err != nil {
logger.Error().WithError(err).Log("Creating backup file failed")
return fmt.Errorf("creating backup file failed: %w", err)
}
logger.Info().WithField("backup", backupFilepath).Log("Backup created")
// Load the existing DB
datastore, err := store.NewJSON(store.JSONConfig{
Filepath: cfg.DB.Dir + "/db.json",
Filesystem: fs,
Filepath: cfg.DB.Dir + "/db.json",
})
if err != nil {
return err
logger.Error().WithField("db", dbFilepath).WithError(err).Log("Creating JSON store failed")
return fmt.Errorf("creating JSON store failed: %w", err)
}
data, err := datastore.Load()
@ -133,17 +109,102 @@ func doMigration(logger log.Logger, configstore cfgstore.Store) error {
return fmt.Errorf("loading database failed: %w", err)
}
// Migrate processes
logger.Info().Log("Migrating processes ...")
// Migrate the processes to version 5
// Only this happens:
// - for RTSP inputs, replace -stimeout with -timeout
migrated := false
reRTSP := regexp.MustCompile(`^rtsps?://`)
for id, p := range data.Process {
logger.Info().WithField("processid", p.ID).Log("")
ok, err := migrateProcessConfig(logger.WithField("processid", p.ID), p.Config, version.String())
if err != nil {
logger.Info().WithField("processid", p.ID).WithError(err).Log("Migrating process failed")
return fmt.Errorf("migrating process failed: %w", err)
}
for index, input := range p.Config.Input {
data.Process[id] = p
if ok {
migrated = true
}
}
logger.Info().Log("Migrating processes done")
if migrated {
// Create backup if something has been changed.
backupFilepath := cfg.DB.Dir + "/db." + strconv.FormatInt(time.Now().UnixMilli(), 10) + ".json"
if _, err = os.Stat(backupFilepath); err == nil {
// Yes, we have a backup. The migration already happened
logger.Info().WithField("backup", backupFilepath).Log("Migration already done")
return nil
}
// Create a backup
if err := file.Copy(dbFilepath, backupFilepath); err != nil {
logger.Error().WithError(err).Log("Creating backup file failed")
return fmt.Errorf("creating backup file failed: %w", err)
}
logger.Info().WithField("backup", backupFilepath).Log("Backup created")
// Store the modified DB
if err := datastore.Store(data); err != nil {
logger.Error().WithError(err).Log("Storing database failed")
return fmt.Errorf("storing database failed: %w", err)
}
}
logger.Info().Log("Completed")
return nil
}
func migrateProcessConfig(logger log.Logger, config *app.Config, version string) (bool, error) {
migrated := false
vtarget, err := semver.NewVersion(version)
if err != nil {
logger.Error().WithError(err).Log("Parsing target FFmpeg version failed")
return false, fmt.Errorf("parsing target FFmpeg version failed: %w", err)
}
targetmajor := vtarget.Major()
currentmajor := uint64(4)
if len(config.FFVersion) != 0 {
vcurrent, err := semver.NewVersion(strings.TrimPrefix(config.FFVersion, "^"))
if err != nil {
logger.Error().WithError(err).Log("Parsing current FFmpeg version failed")
return false, fmt.Errorf("parsing current FFmpeg version failed: %w", err)
}
currentmajor = vcurrent.Major()
}
if currentmajor < 4 {
err := fmt.Errorf("unknown FFmpeg version found: %d", currentmajor)
logger.Error().WithError(err).Log("Unsupported FFmpeg version")
return false, fmt.Errorf("unsupported FFmpeg version: %w", err)
}
if targetmajor > 6 {
err := fmt.Errorf("unknown FFmpeg version found: %d", targetmajor)
logger.Error().WithError(err).Log("Unsupported FFmpeg version")
return false, fmt.Errorf("unsupported FFmpeg version: %w", err)
}
if currentmajor != targetmajor {
migrated = true
}
if currentmajor == 4 && targetmajor > 4 {
// Migration from version 4 to version 5
// Only this happens:
// - for RTSP inputs, replace -stimeout with -timeout
reRTSP := regexp.MustCompile(`^rtsps?://`)
for index, input := range config.Input {
if !reRTSP.MatchString(input.Address) {
continue
}
@ -156,21 +217,27 @@ func doMigration(logger log.Logger, configstore cfgstore.Store) error {
input.Options[i] = "-timeout"
}
p.Config.Input[index] = input
config.Input[index] = input
}
p.Config.FFVersion = version.String()
data.Process[id] = p
currentmajor = 5
}
logger.Info().Log("Migrating processes done")
if currentmajor == 5 && targetmajor > 5 {
// Migration from version 5 to version 6
// Nothing happens
// Store the modified DB
if err := datastore.Store(data); err != nil {
logger.Error().WithError(err).Log("Storing database failed")
return fmt.Errorf("storing database failed: %w", err)
currentmajor = 6
}
logger.Info().Log("Completed")
if migrated {
logger.Info().WithFields(log.Fields{
"from": config.FFVersion,
"to": "^" + version,
}).Log("Migrated")
}
return nil
config.FFVersion = "^" + version
return migrated, nil
}

View File

@ -29,7 +29,7 @@ func (v versionInfo) MinorString() string {
// Version of the app
var Version = versionInfo{
Major: 16,
Minor: 13,
Minor: 16,
Patch: 0,
}

View File

@ -7,11 +7,11 @@ import (
// Location returns the path to the config file. If no path is provided,
// different standard location will be probed:
// - os.UserConfigDir() + /datarhei-core/config.js
// - os.UserHomeDir() + /.config/datarhei-core/config.js
// - ./config/config.js
// - os.UserConfigDir() + /datarhei-core/config.json
// - os.UserHomeDir() + /.config/datarhei-core/config.json
// - ./config/config.json
// If the config doesn't exist in any of these locations, it will be assumed
// at ./config/config.js
// at ./config/config.json
func Location(filepath string) string {
configfile := filepath
if len(configfile) != 0 {
@ -22,13 +22,16 @@ func Location(filepath string) string {
if dir, err := os.UserConfigDir(); err == nil {
locations = append(locations, dir+"/datarhei-core/config.js")
locations = append(locations, dir+"/datarhei-core/config.json")
}
if dir, err := os.UserHomeDir(); err == nil {
locations = append(locations, dir+"/.config/datarhei-core/config.js")
locations = append(locations, dir+"/.config/datarhei-core/config.json")
}
locations = append(locations, "./config/config.js")
locations = append(locations, "./config/config.json")
for _, path := range locations {
info, err := os.Stat(path)
@ -44,7 +47,7 @@ func Location(filepath string) string {
}
if len(configfile) == 0 {
configfile = "./config/config.js"
configfile = "./config/config.json"
}
os.MkdirAll(path.Dir(configfile), 0740)

View File

@ -4,6 +4,27 @@ import (
"github.com/gobwas/glob"
)
type Glob interface {
Match(name string) bool
}
type globber struct {
glob glob.Glob
}
func Compile(pattern string, separators ...rune) (Glob, error) {
g, err := glob.Compile(pattern, separators...)
if err != nil {
return nil, err
}
return &globber{glob: g}, nil
}
func (g *globber) Match(name string) bool {
return g.glob.Match(name)
}
// Match returns whether the name matches the glob pattern, also considering
// one or several optional separators. An error is only returned if the pattern
// is invalid.

24
glob/glob_test.go Normal file
View File

@ -0,0 +1,24 @@
package glob
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestPatterns(t *testing.T) {
ok, err := Match("**/a/b/**", "/s3/a/b/test.m3u8", '/')
require.NoError(t, err)
require.True(t, ok)
ok, err = Match("**/a/b/**", "/a/b/test.m3u8", '/')
require.NoError(t, err)
require.True(t, ok)
ok, err = Match("{/memfs,}/a/b/**", "/a/b/test.m3u8", '/')
require.NoError(t, err)
require.True(t, ok)
}

144
go.mod
View File

@ -1,34 +1,38 @@
module github.com/datarhei/core/v16
go 1.18
go 1.21.0
toolchain go1.22.1
require (
github.com/99designs/gqlgen v0.17.20
github.com/Masterminds/semver/v3 v3.1.1
github.com/99designs/gqlgen v0.17.47
github.com/Masterminds/semver/v3 v3.2.1
github.com/atrox/haikunatorgo/v2 v2.0.1
github.com/caddyserver/certmagic v0.17.2
github.com/datarhei/gosrt v0.3.1
github.com/datarhei/joy4 v0.0.0-20230505074825-fde05957445a
github.com/go-playground/validator/v10 v10.11.1
github.com/caddyserver/certmagic v0.21.2
github.com/datarhei/gosrt v0.6.0
github.com/datarhei/joy4 v0.0.0-20240603190808-b1407345907e
github.com/go-playground/validator/v10 v10.20.0
github.com/gobwas/glob v0.2.3
github.com/golang-jwt/jwt/v4 v4.4.3
github.com/google/uuid v1.3.0
github.com/golang-jwt/jwt/v5 v5.2.1
github.com/google/uuid v1.6.0
github.com/invopop/jsonschema v0.4.0
github.com/joho/godotenv v1.4.0
github.com/labstack/echo/v4 v4.9.1
github.com/joho/godotenv v1.5.1
github.com/labstack/echo-jwt v0.0.0-20221127215225-c84d41a71003
github.com/labstack/echo/v4 v4.12.0
github.com/lithammer/shortuuid/v4 v4.0.0
github.com/mattn/go-isatty v0.0.17
github.com/minio/minio-go/v7 v7.0.47
github.com/mattn/go-isatty v0.0.20
github.com/minio/minio-go/v7 v7.0.70
github.com/prep/average v0.0.0-20200506183628-d26c465f48c3
github.com/prometheus/client_golang v1.14.0
github.com/shirou/gopsutil/v3 v3.23.3
github.com/stretchr/testify v1.8.2
github.com/swaggo/echo-swagger v1.3.5
github.com/swaggo/swag v1.8.7
github.com/vektah/gqlparser/v2 v2.5.1
github.com/prometheus/client_golang v1.19.1
github.com/puzpuzpuz/xsync/v3 v3.1.0
github.com/shirou/gopsutil/v3 v3.24.4
github.com/stretchr/testify v1.9.0
github.com/swaggo/echo-swagger v1.4.1
github.com/swaggo/swag v1.16.3
github.com/vektah/gqlparser/v2 v2.5.12
github.com/xeipuuv/gojsonschema v1.2.0
go.uber.org/zap v1.24.0
golang.org/x/mod v0.7.0
go.uber.org/zap v1.27.0
golang.org/x/mod v0.17.0
)
require (
@ -36,71 +40,69 @@ require (
github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
github.com/caddyserver/zerossl v0.1.3 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.20.0 // indirect
github.com/go-openapi/spec v0.20.8 // indirect
github.com/go-openapi/swag v0.22.3 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.4 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/goccy/go-json v0.10.3 // indirect
github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/gorilla/websocket v1.5.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/iancoleman/orderedmap v0.2.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.15.15 // indirect
github.com/klauspost/cpuid/v2 v2.2.3 // indirect
github.com/labstack/gommon v0.4.0 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/libdns/libdns v0.2.1 // indirect
github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c // indirect
github.com/klauspost/compress v1.17.8 // indirect
github.com/klauspost/cpuid/v2 v2.2.7 // indirect
github.com/labstack/gommon v0.4.2 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/libdns/libdns v0.2.2 // indirect
github.com/lufia/plan9stats v0.0.0-20240513124658-fba389f38bae // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mholt/acmez v1.0.4 // indirect
github.com/miekg/dns v1.1.50 // indirect
github.com/mholt/acmez/v2 v2.0.1 // indirect
github.com/miekg/dns v1.1.59 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/minio/sha256-simd v1.0.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.39.0 // indirect
github.com/prometheus/procfs v0.9.0 // indirect
github.com/rogpeppe/go-internal v1.8.1 // indirect
github.com/rs/xid v1.4.0 // indirect
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/prometheus/client_model v0.6.1 // indirect
github.com/prometheus/common v0.53.0 // indirect
github.com/prometheus/procfs v0.15.0 // indirect
github.com/rs/xid v1.5.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/shoenig/go-m1cpu v0.1.4 // indirect
github.com/sirupsen/logrus v1.9.0 // indirect
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a // indirect
github.com/tklauser/go-sysconf v0.3.11 // indirect
github.com/tklauser/numcpus v0.6.0 // indirect
github.com/urfave/cli/v2 v2.8.1 // indirect
github.com/shoenig/go-m1cpu v0.1.6 // indirect
github.com/sosodev/duration v1.3.1 // indirect
github.com/swaggo/files/v2 v2.0.0 // indirect
github.com/tklauser/go-sysconf v0.3.14 // indirect
github.com/tklauser/numcpus v0.8.0 // indirect
github.com/urfave/cli/v2 v2.27.2 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasttemplate v1.2.2 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/goleak v1.1.12 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.5.0 // indirect
golang.org/x/net v0.7.0 // indirect
golang.org/x/sys v0.6.0 // indirect
golang.org/x/text v0.7.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/tools v0.4.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zeebo/blake3 v0.2.3 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.23.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sync v0.7.0 // indirect
golang.org/x/sys v0.20.0 // indirect
golang.org/x/text v0.15.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.21.0 // indirect
google.golang.org/protobuf v1.34.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

468
go.sum
View File

@ -1,41 +1,37 @@
github.com/99designs/gqlgen v0.17.20 h1:O7WzccIhKB1dm+7g6dhQcULINftfiLSBg2l/mwbpJMw=
github.com/99designs/gqlgen v0.17.20/go.mod h1:Mja2HI23kWT1VRH09hvWshFgOzKswpO20o4ScpJIES4=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/99designs/gqlgen v0.17.47 h1:M9DTK8X3+3ATNBfZlHBwMwNngn4hhZWDxNmTiuQU5tQ=
github.com/99designs/gqlgen v0.17.47/go.mod h1:ejVkldSdtmuudqmtfaiqjwlGXWAhIv0DKXGXFY25F04=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc=
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/agiledragon/gomonkey/v2 v2.3.1/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY=
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE=
github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk=
github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/atrox/haikunatorgo/v2 v2.0.1 h1:FCVx2KL2YvZtI1rI9WeEHxeLRrKGr0Dd4wfCJiUXupc=
github.com/atrox/haikunatorgo/v2 v2.0.1/go.mod h1:BBQmx2o+1Z5poziaHRgddAZKOpijwfKdAmMnSYlFK70=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c h1:8XZeJrs4+ZYhJeJ2aZxADI2tGADS15AzIF8MQ8XAhT4=
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c/go.mod h1:x1vxHcL/9AVzuk5HOloOEPrtJY0MaalYr78afXZ+pWI=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/caddyserver/certmagic v0.17.2 h1:o30seC1T/dBqBCNNGNHWwj2i5/I/FMjBbTAhjADP3nE=
github.com/caddyserver/certmagic v0.17.2/go.mod h1:ouWUuC490GOLJzkyN35eXfV8bSbwMwSf4bdhkIxtdQE=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/datarhei/gosrt v0.3.1 h1:9A75hIvnY74IUFyeguqYXh1lsGF8Qt8fjxJS2Ewr12Q=
github.com/datarhei/gosrt v0.3.1/go.mod h1:M2nl2WPrawncUc1FtUBK6gZX4tpZRC7FqL8NjOdBZV0=
github.com/datarhei/joy4 v0.0.0-20230505074825-fde05957445a h1:Tf4DSHY1xruBglr+yYP5Wct7czM86GKMYgbXH8a7OFo=
github.com/datarhei/joy4 v0.0.0-20230505074825-fde05957445a/go.mod h1:Jcw/6jZDQQmPx8A7INEkXmuEF7E9jjBbSTfVSLwmiQw=
github.com/caddyserver/certmagic v0.21.2 h1:O18LtaYBGDooyy257cYePnhp4lPfz6TaJELil6Q1fDg=
github.com/caddyserver/certmagic v0.21.2/go.mod h1:Zq6pklO9nVRl3DIFUw9gVUfXKdpc/0qwTUAQMBlfgtI=
github.com/caddyserver/zerossl v0.1.3 h1:onS+pxp3M8HnHpN5MMbOMyNjmTheJyWRaZYwn+YTAyA=
github.com/caddyserver/zerossl v0.1.3/go.mod h1:CxA0acn7oEGO6//4rtrRjYgEoa4MFw/XofZnrYwGqG4=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/datarhei/gosrt v0.6.0 h1:HrrXAw90V78ok4WMIhX6se1aTHPCn82Sg2hj+PhdmGc=
github.com/datarhei/gosrt v0.6.0/go.mod h1:fsOWdLSHUHShHjgi/46h6wjtdQrtnSdAQFnlas8ONxs=
github.com/datarhei/joy4 v0.0.0-20240603190808-b1407345907e h1:Qc/0D4xvXrazFkoi/4UGqO15yQ1JN5I8h7RwdzCLgTY=
github.com/datarhei/joy4 v0.0.0-20240603190808-b1407345907e/go.mod h1:Jcw/6jZDQQmPx8A7INEkXmuEF7E9jjBbSTfVSLwmiQw=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -43,212 +39,166 @@ github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48 h1:fRzb/w+pyskVMQ+
github.com/dgryski/trifles v0.0.0-20200323201526-dd97f9abfb48/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/gabriel-vasile/mimetype v1.4.4 h1:QjV6pZ7/XZ7ryI2KuyeEDE8wnh7fHP9YnQy+R0LnH8I=
github.com/gabriel-vasile/mimetype v1.4.4/go.mod h1:JwLei5XPtWdGiMFB5Pjle1oEeoSeEuJfJE+TtfvdB/s=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA=
github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo=
github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
github.com/go-openapi/spec v0.20.8 h1:ubHmXNY3FCIOinT8RNrrPfGc9t7I1qhPtdOGoG2AxRU=
github.com/go-openapi/spec v0.20.8/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA=
github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ=
github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/golang-jwt/jwt/v4 v4.4.3 h1:Hxl6lhQFj4AnOX6MLrsCb/+7tCj7DxP7VA+2rDIq5AU=
github.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA=
github.com/iancoleman/orderedmap v0.2.0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/invopop/jsonschema v0.4.0 h1:Yuy/unfgCnfV5Wl7H0HgFufp/rlurqPOOuacqyByrws=
github.com/invopop/jsonschema v0.4.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg=
github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/kevinmbeaulieu/eq-go v1.0.0/go.mod h1:G3S8ajA56gKBZm4UB9AOyoOS37JO3roToPzKNM8dtdM=
github.com/klauspost/compress v1.15.15 h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw=
github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4=
github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU=
github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/klauspost/cpuid/v2 v2.0.12/go.mod h1:g2LTdtYhdyuGPqyWyv7qRAmj1WBqxuObKfj5c0PQa7c=
github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/labstack/echo/v4 v4.9.0/go.mod h1:xkCDAdFCIf8jsFQ5NnbK7oqaF/yU1A1X20Ltm0OvSks=
github.com/labstack/echo/v4 v4.9.1 h1:GliPYSpzGKlyOhqIbG8nmHBo3i1saKWFOgh41AN3b+Y=
github.com/labstack/echo/v4 v4.9.1/go.mod h1:Pop5HLc+xoc4qhTZ1ip6C0RtP7Z+4VzRLWZZFKqbbjo=
github.com/labstack/gommon v0.3.1/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8=
github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/libdns/libdns v0.2.1 h1:Wu59T7wSHRgtA0cfxC+n1c/e+O3upJGWytknkmFEDis=
github.com/libdns/libdns v0.2.1/go.mod h1:yQCXzk1lEZmmCPa857bnk4TsOiqYasqpyOEeSObbb40=
github.com/labstack/echo-jwt v0.0.0-20221127215225-c84d41a71003 h1:FyalHKl9hnJvhNbrABJXXjC2hG7gvIF0ioW9i0xHNQU=
github.com/labstack/echo-jwt v0.0.0-20221127215225-c84d41a71003/go.mod h1:ovRFgyKvi73jQIFCWz9ByQwzhIyohkzY0MFAlPGyr8Q=
github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0=
github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM=
github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
github.com/libdns/libdns v0.2.2 h1:O6ws7bAfRPaBsgAYt8MDe2HcNBGC29hkZ9MX2eUSX3s=
github.com/libdns/libdns v0.2.2/go.mod h1:4Bj9+5CQiNMVGf87wjX4CY3HQJypUHRuLvlsfsZqLWQ=
github.com/lithammer/shortuuid/v4 v4.0.0 h1:QRbbVkfgNippHOS8PXDkti4NaWeyYfcBTHtw7k08o4c=
github.com/lithammer/shortuuid/v4 v4.0.0/go.mod h1:Zs8puNcrvf2rV9rTH51ZLLcj7ZXqQI3lv67aw4KiB1Y=
github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c h1:VtwQ41oftZwlMnOEbMWQtSEUgU64U4s+GHk7hZK+jtY=
github.com/lufia/plan9stats v0.0.0-20220913051719-115f729f3c8c/go.mod h1:JKx41uQRwqlTZabZc+kILPrO/3jlKnQ2Z8b7YiVw5cE=
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/lufia/plan9stats v0.0.0-20240513124658-fba389f38bae h1:dIZY4ULFcto4tAFlj1FYZl8ztUZ13bdq+PLY+NOfbyI=
github.com/lufia/plan9stats v0.0.0-20240513124658-fba389f38bae/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk=
github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mholt/acmez v1.0.4 h1:N3cE4Pek+dSolbsofIkAYz6H1d3pE+2G0os7QHslf80=
github.com/mholt/acmez v1.0.4/go.mod h1:qFGLZ4u+ehWINeJZjzPlsnjJBCPAADWTcIqE/7DAYQY=
github.com/miekg/dns v1.1.50 h1:DQUfb9uc6smULcREF09Uc+/Gd46YWqJd5DbpPE9xkcA=
github.com/miekg/dns v1.1.50/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mholt/acmez/v2 v2.0.1 h1:3/3N0u1pLjMK4sNEAFSI+bcvzbPhRpY383sy1kLHJ6k=
github.com/mholt/acmez/v2 v2.0.1/go.mod h1:fX4c9r5jYwMyMsC+7tkYRxHibkOTgta5DIFGoe67e1U=
github.com/miekg/dns v1.1.59 h1:C9EXc/UToRwKLhK5wKU/I4QVsBUc8kE6MkHBkeypWZs=
github.com/miekg/dns v1.1.59/go.mod h1:nZpewl5p6IvctfgrckopVx2OlSEHPRO/U4SYkRklrEk=
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.47 h1:sLiuCKGSIcn/MI6lREmTzX91DX/oRau4ia0j6e6eOSs=
github.com/minio/minio-go/v7 v7.0.47/go.mod h1:nCrRzjoSUQh8hgKKtu3Y708OLvRLtuASMg2/nvmbarw=
github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/minio/minio-go/v7 v7.0.70 h1:1u9NtMgfK1U42kUxcsl5v0yj6TEOPR497OAQxpJnn2g=
github.com/minio/minio-go/v7 v7.0.70/go.mod h1:4yBA8v80xGA30cfM3fz0DKYMXunWl/AV/6tWEs9ryzo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/otiai10/copy v1.7.0/go.mod h1:rmRl6QPdJj6EiUqXQ/4Nn2lLXoNQjFCQbbNrxgc/t3U=
github.com/otiai10/curr v0.0.0-20150429015615-9b4961190c95/go.mod h1:9qAhocn7zKJG+0mI8eUu6xqkFDYS2kb2saOteoSB3cE=
github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6j4vs=
github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo=
github.com/otiai10/mint v1.3.3/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b h1:0LFwY6Q3gMACTjAbMZBjXAqTOzOwFaj2Ld6cjeQ7Rig=
github.com/power-devops/perfstat v0.0.0-20221212215047-62379fc7944b/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU=
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/prep/average v0.0.0-20200506183628-d26c465f48c3 h1:Y7qCvg282QmlyrVQuL2fgGwebuw7zvfnRym09r+dUGc=
github.com/prep/average v0.0.0-20200506183628-d26c465f48c3/go.mod h1:0ZE5gcyWKS151WBDIpmLshHY0l+3edpuKnBUWVVbWKk=
github.com/prometheus/client_golang v1.14.0 h1:nJdhIvne2eSX/XRAFV9PcvFFRbrjbcTUj0VP62TMhnw=
github.com/prometheus/client_golang v1.14.0/go.mod h1:8vpkKitgIVNcqrRBWh1C4TIUQgYNtG/XQE4E/Zae36Y=
github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4=
github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
github.com/prometheus/common v0.39.0 h1:oOyhkDq05hPZKItWVBkJ6g6AtGxi+fy7F4JvUV8uhsI=
github.com/prometheus/common v0.39.0/go.mod h1:6XBZ7lYdLCbkAVhwRsWTZn+IN5AB9F/NXd5w0BbEX0Y=
github.com/prometheus/procfs v0.9.0 h1:wzCHvIvM5SxWqYvwgVL7yJY8Lz3PKn49KQtpgMYJfhI=
github.com/prometheus/procfs v0.9.0/go.mod h1:+pB4zwohETzFnmlpe6yd2lSc+0/46IYZRB/chUwxUZY=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
github.com/rs/xid v1.4.0 h1:qd7wPTDkN6KQx2VmMBLrpHkiyQwgFXRnkOLacUiaSNY=
github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE=
github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho=
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
github.com/prometheus/common v0.53.0 h1:U2pL9w9nmJwJDa4qqLQ3ZaePJ6ZTwt7cMD3AG3+aLCE=
github.com/prometheus/common v0.53.0/go.mod h1:BrxBKv3FWBIGXw89Mg1AeBq7FSyRzXWI3l3e7W3RN5U=
github.com/prometheus/procfs v0.15.0 h1:A82kmvXJq2jTu5YUhSGNlYoxh85zLnKgPz4bMZgI5Ek=
github.com/prometheus/procfs v0.15.0/go.mod h1:Y0RJ/Y5g5wJpkTisOtqwDSo4HwhGmLB4VQSw2sQJLHk=
github.com/puzpuzpuz/xsync/v3 v3.1.0 h1:EewKT7/LNac5SLiEblJeUu8z5eERHrmRLnMQL2d7qX4=
github.com/puzpuzpuz/xsync/v3 v3.1.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA=
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/rs/xid v1.5.0 h1:mKX4bl4iPYJtEIxp6CYiUuLQ/8DYMoz0PUdtGgMFRVc=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shirou/gopsutil/v3 v3.23.3 h1:Syt5vVZXUDXPEXpIBt5ziWsJ4LdSAAxF4l/xZeQgSEE=
github.com/shirou/gopsutil/v3 v3.23.3/go.mod h1:lSBNN6t3+D6W5e5nXTxc8KIMMVxAcS+6IJlffjRRlMU=
github.com/shoenig/go-m1cpu v0.1.4 h1:SZPIgRM2sEF9NJy50mRHu9PKGwxyyTTJIWvCtgVbozs=
github.com/shoenig/go-m1cpu v0.1.4/go.mod h1:Wwvst4LR89UxjeFtLRMrpgRiyY4xPsejnVZym39dbAQ=
github.com/shoenig/test v0.6.3 h1:GVXWJFk9PiOjN0KoJ7VrJGH6uLPnqxR7/fe3HUPfE0c=
github.com/shoenig/test v0.6.3/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/shirou/gopsutil/v3 v3.24.4 h1:dEHgzZXt4LMNm+oYELpzl9YCqV65Yr/6SfrvgRBtXeU=
github.com/shirou/gopsutil/v3 v3.24.4/go.mod h1:lTd2mdiOspcqLgAnr9/nGi71NkeMpWKdmhuxm9GusH8=
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4=
github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/swaggo/echo-swagger v1.3.5 h1:kCx1wvX5AKhjI6Ykt48l3PTsfL9UD40ZROOx/tYzWyY=
github.com/swaggo/echo-swagger v1.3.5/go.mod h1:3IMHd2Z8KftdWFEEjGmv6QpWj370LwMCOfovuh7vF34=
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a h1:kAe4YSu0O0UFn1DowNo2MY5p6xzqtJ/wQ7LZynSvGaY=
github.com/swaggo/files v0.0.0-20220728132757-551d4a08d97a/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w=
github.com/swaggo/swag v1.8.1/go.mod h1:ugemnJsPZm/kRwFUnzBlbHRd0JY9zE1M4F+uy2pAaPQ=
github.com/swaggo/swag v1.8.7 h1:2K9ivTD3teEO+2fXV6zrZKDqk5IuU2aJtBDo8U7omWU=
github.com/swaggo/swag v1.8.7/go.mod h1:ezQVUUhly8dludpVk+/PuwJWvLLanB13ygV5Pr9enSk=
github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM=
github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI=
github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms=
github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/urfave/cli/v2 v2.8.1 h1:CGuYNZF9IKZY/rfBe3lJpccSoIY1ytfvmgQT90cNOl4=
github.com/urfave/cli/v2 v2.8.1/go.mod h1:Z41J9TPoffeoqP0Iza0YbAhGvymRdZAd2uPmZ5JxRdY=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/swaggo/echo-swagger v1.4.1 h1:Yf0uPaJWp1uRtDloZALyLnvdBeoEL5Kc7DtnjzO/TUk=
github.com/swaggo/echo-swagger v1.4.1/go.mod h1:C8bSi+9yH2FLZsnhqMZLIZddpUxZdBYuNHbtaS1Hljc=
github.com/swaggo/files/v2 v2.0.0 h1:hmAt8Dkynw7Ssz46F6pn8ok6YmGZqHSVLZ+HQM7i0kw=
github.com/swaggo/files/v2 v2.0.0/go.mod h1:24kk2Y9NYEJ5lHuCra6iVwkMjIekMCaFq/0JQj66kyM=
github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg=
github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk=
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZb78yU=
github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY=
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/tklauser/numcpus v0.8.0 h1:Mx4Wwe/FjZLeQsK/6kt2EOepwwSl7SmJrK5bV/dXYgY=
github.com/tklauser/numcpus v0.8.0/go.mod h1:ZJZlAY+dmR4eut8epnzf0u/VwodKmryxR8txiloSqBE=
github.com/urfave/cli/v2 v2.27.2 h1:6e0H+AkS+zDckwPCUrZkKX38mRaau4nL2uipkJpbkcI=
github.com/urfave/cli/v2 v2.27.2/go.mod h1:g0+79LmHHATl7DAcHO99smiR/T7uGLw84w8Y42x+4eM=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.5.1 h1:ZGu+bquAY23jsxDRcYpWjttRZrUz07LbiY77gUOHcr4=
github.com/vektah/gqlparser/v2 v2.5.1/go.mod h1:mPgqFBu/woKTVYWyNk8cO3kh4S/f4aRFZrvOnp3hmCs=
github.com/vektah/gqlparser/v2 v2.5.12 h1:COMhVVnql6RoaF7+aTBWiTADdpLGyZWU3K/NwW0ph98=
github.com/vektah/gqlparser/v2 v2.5.12/go.mod h1:WQQjFc+I1YIzoPvZBhUQX7waZgg3pMLi0r8KymvAE2w=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
@ -256,139 +206,57 @@ github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHo
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74=
github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU=
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.2 h1:KBNDSne4vP5mbSWnJbO+51IMOXJB67QiYCSBrubbPRg=
github.com/yusufpapurcu/wmi v1.2.2/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ=
go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/goleak v1.1.12 h1:gZAh5/EyT/HQwlpkCy6wTpqfH9H8Lz8zbm3dZh+OyzA=
go.uber.org/goleak v1.1.12/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=
go.uber.org/zap v1.24.0 h1:FiJd5l1UOLj0wCgbSE0rwwXHzEdAZS6hiiSnxJN/D60=
go.uber.org/zap v1.24.0/go.mod h1:2kMP+WWQ8aoFoedH3T2sq6iJ2yDWpHbP0f6MQbS9Gkg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
golang.org/x/crypto v0.5.0 h1:U/0M97KRkSFvyD/3FSmdP5W5swImpNgle/EHFhOsQPE=
golang.org/x/crypto v0.5.0/go.mod h1:NK/OQwhpMQP3MwtdjgLlYHnH9ebylxKWv3e0fK+mkQU=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220630215102-69896b714898/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 h1:+qGGcbkzsfDQNPPe9UDgpxAWQrhbbBXOYJFQDq/dtJw=
github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913/go.mod h1:4aEEwZQutDLsQv2Deui4iYQ6DWTxR14g6m8Wv88+Xqk=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
github.com/zeebo/assert v1.1.0 h1:hU1L1vLTHsnO8x8c9KAR5GmM5QscxHg5RNU5z5qbUWY=
github.com/zeebo/assert v1.1.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
github.com/zeebo/blake3 v0.2.3 h1:TFoLXsjeXqRNFxSbk35Dk4YtszE/MQQGK10BH4ptoTg=
github.com/zeebo/blake3 v0.2.3/go.mod h1:mjJjZpnsyIVtVgTOSpJ9vmRE4wgDeyt2HU3qXvvKCaQ=
github.com/zeebo/pcg v1.0.1 h1:lyqfGeWiv4ahac6ttHs+I5hwtH/+1mrhlCtVNQM2kHo=
github.com/zeebo/pcg v1.0.1/go.mod h1:09F0S9iiKrwn9rlI5yjLkmrug154/YRW6KnnXVDM/l4=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.4.0 h1:7mTAgkunk3fr4GAloyyCasadO6h9zSsQZbwvcaIciV4=
golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw=
golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

File diff suppressed because it is too large Load Diff

View File

@ -13,6 +13,9 @@ import (
type IProcessReportHistoryEntry interface {
IsIProcessReportHistoryEntry()
GetCreatedAt() time.Time
GetPrelude() []string
GetLog() []*ProcessReportLogEntry
}
type AVStream struct {
@ -55,24 +58,24 @@ type AboutVersion struct {
type Metric struct {
Name string `json:"name"`
Labels map[string]interface{} `json:"labels"`
Labels map[string]interface{} `json:"labels,omitempty"`
Values []*scalars.MetricsResponseValue `json:"values"`
}
type MetricInput struct {
Name string `json:"name"`
Labels map[string]interface{} `json:"labels"`
Labels map[string]interface{} `json:"labels,omitempty"`
}
type Metrics struct {
TimerangeSeconds *int `json:"timerange_seconds"`
IntervalSeconds *int `json:"interval_seconds"`
TimerangeSeconds *int `json:"timerange_seconds,omitempty"`
IntervalSeconds *int `json:"interval_seconds,omitempty"`
Metrics []*Metric `json:"metrics"`
}
type MetricsInput struct {
TimerangeSeconds *int `json:"timerange_seconds"`
IntervalSeconds *int `json:"interval_seconds"`
TimerangeSeconds *int `json:"timerange_seconds,omitempty"`
IntervalSeconds *int `json:"interval_seconds,omitempty"`
Metrics []*MetricInput `json:"metrics"`
}
@ -108,7 +111,7 @@ type Process struct {
Config *ProcessConfig `json:"config"`
State *ProcessState `json:"state"`
Report *ProcessReport `json:"report"`
Metadata map[string]interface{} `json:"metadata"`
Metadata map[string]interface{} `json:"metadata,omitempty"`
}
type ProcessConfig struct {
@ -145,6 +148,27 @@ type ProcessReport struct {
}
func (ProcessReport) IsIProcessReportHistoryEntry() {}
func (this ProcessReport) GetCreatedAt() time.Time { return this.CreatedAt }
func (this ProcessReport) GetPrelude() []string {
if this.Prelude == nil {
return nil
}
interfaceSlice := make([]string, 0, len(this.Prelude))
for _, concrete := range this.Prelude {
interfaceSlice = append(interfaceSlice, concrete)
}
return interfaceSlice
}
func (this ProcessReport) GetLog() []*ProcessReportLogEntry {
if this.Log == nil {
return nil
}
interfaceSlice := make([]*ProcessReportLogEntry, 0, len(this.Log))
for _, concrete := range this.Log {
interfaceSlice = append(interfaceSlice, concrete)
}
return interfaceSlice
}
type ProcessReportHistoryEntry struct {
CreatedAt time.Time `json:"created_at"`
@ -153,6 +177,27 @@ type ProcessReportHistoryEntry struct {
}
func (ProcessReportHistoryEntry) IsIProcessReportHistoryEntry() {}
func (this ProcessReportHistoryEntry) GetCreatedAt() time.Time { return this.CreatedAt }
func (this ProcessReportHistoryEntry) GetPrelude() []string {
if this.Prelude == nil {
return nil
}
interfaceSlice := make([]string, 0, len(this.Prelude))
for _, concrete := range this.Prelude {
interfaceSlice = append(interfaceSlice, concrete)
}
return interfaceSlice
}
func (this ProcessReportHistoryEntry) GetLog() []*ProcessReportLogEntry {
if this.Log == nil {
return nil
}
interfaceSlice := make([]*ProcessReportLogEntry, 0, len(this.Log))
for _, concrete := range this.Log {
interfaceSlice = append(interfaceSlice, concrete)
}
return interfaceSlice
}
type ProcessReportLogEntry struct {
Timestamp time.Time `json:"timestamp"`
@ -208,7 +253,7 @@ type ProgressIo struct {
Sampling scalars.Uint64 `json:"sampling"`
Layout string `json:"layout"`
Channels scalars.Uint64 `json:"channels"`
Avstream *AVStream `json:"avstream"`
Avstream *AVStream `json:"avstream,omitempty"`
}
type RawAVstream struct {
@ -223,7 +268,7 @@ type RawAVstream struct {
Looping bool `json:"looping"`
Duplicating bool `json:"duplicating"`
Gop string `json:"gop"`
Debug interface{} `json:"debug"`
Debug interface{} `json:"debug,omitempty"`
Input *RawAVstreamIo `json:"input"`
Output *RawAVstreamIo `json:"output"`
Swap *RawAVstreamSwap `json:"swap"`
@ -292,17 +337,17 @@ type State string
const (
StateRunning State = "RUNNING"
StateIDLe State = "IDLE"
StateIdle State = "IDLE"
)
var AllState = []State{
StateRunning,
StateIDLe,
StateIdle,
}
func (e State) IsValid() bool {
switch e {
case StateRunning, StateIDLe:
case StateRunning, StateIdle:
return true
}
return false

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -12,6 +13,7 @@ import (
"github.com/datarhei/core/v16/http/graph/scalars"
)
// About is the resolver for the about field.
func (r *queryResolver) About(ctx context.Context) (*models.About, error) {
createdAt := r.Restream.CreatedAt()

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -10,6 +11,7 @@ import (
"github.com/datarhei/core/v16/log"
)
// Log is the resolver for the log field.
func (r *queryResolver) Log(ctx context.Context) ([]string, error) {
if r.LogBuffer == nil {
r.LogBuffer = log.NewBufferWriter(log.Lsilent, 1)

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -12,6 +13,7 @@ import (
"github.com/datarhei/core/v16/monitor/metric"
)
// Metrics is the resolver for the metrics field.
func (r *queryResolver) Metrics(ctx context.Context, query models.MetricsInput) (*models.Metrics, error) {
patterns := []metric.Pattern{}

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -13,6 +14,7 @@ import (
"github.com/datarhei/core/v16/playout"
)
// PlayoutStatus is the resolver for the playoutStatus field.
func (r *queryResolver) PlayoutStatus(ctx context.Context, id string, input string) (*models.RawAVstream, error) {
addr, err := r.Restream.GetPlayout(id, input)
if err != nil {

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -9,6 +10,7 @@ import (
"github.com/datarhei/core/v16/http/graph/models"
)
// Processes is the resolver for the processes field.
func (r *queryResolver) Processes(ctx context.Context) ([]*models.Process, error) {
ids := r.Restream.GetProcessIDs("", "")
@ -26,10 +28,12 @@ func (r *queryResolver) Processes(ctx context.Context) ([]*models.Process, error
return procs, nil
}
// Process is the resolver for the process field.
func (r *queryResolver) Process(ctx context.Context, id string) (*models.Process, error) {
return r.getProcess(id)
}
// Probe is the resolver for the probe field.
func (r *queryResolver) Probe(ctx context.Context, id string) (*models.Probe, error) {
probe := r.Restream.Probe(id)

View File

@ -2,6 +2,7 @@ package resolver
// This file will be automatically regenerated based on the schema, any resolver implementations
// will be copied through when generating and any unknown code will be moved to the end.
// Code generated by github.com/99designs/gqlgen version v0.17.36
import (
"context"
@ -9,10 +10,12 @@ import (
"github.com/datarhei/core/v16/http/graph/graph"
)
// Ping is the resolver for the ping field.
func (r *mutationResolver) Ping(ctx context.Context) (string, error) {
return "pong", nil
}
// Ping is the resolver for the ping field.
func (r *queryResolver) Ping(ctx context.Context) (string, error) {
return "pong", nil
}

View File

@ -553,23 +553,34 @@ func (h *RestreamHandler) getProcess(id, filterString string) (api.Process, erro
}
if wants["state"] {
if state, err := h.restream.GetProcessState(id); err == nil {
info.State = &api.ProcessState{}
info.State.Unmarshal(state)
state, err := h.restream.GetProcessState(id)
if err != nil {
return api.Process{}, err
}
info.State = &api.ProcessState{}
info.State.Unmarshal(state)
}
if wants["report"] {
if log, err := h.restream.GetProcessLog(id); err == nil {
info.Report = &api.ProcessReport{}
info.Report.Unmarshal(log)
log, err := h.restream.GetProcessLog(id)
if err != nil {
return api.Process{}, err
}
info.Report = &api.ProcessReport{}
info.Report.Unmarshal(log)
}
if wants["metadata"] {
if data, err := h.restream.GetProcessMetadata(id, ""); err == nil {
info.Metadata = api.NewMetadata(data)
data, err := h.restream.GetProcessMetadata(id, "")
if err != nil {
return api.Process{}, err
}
info.Metadata = api.NewMetadata(data)
}
return info, nil

View File

@ -25,6 +25,10 @@ type WidgetHandler struct {
// NewWidget return a new Widget type
func NewWidget(config WidgetConfig) *WidgetHandler {
if config.Registry == nil {
config.Registry, _ = session.New(session.Config{})
}
return &WidgetHandler{
restream: config.Restream,
registry: config.Registry,

View File

@ -10,8 +10,9 @@ import (
"github.com/datarhei/core/v16/app"
"github.com/datarhei/core/v16/http/api"
jwtgo "github.com/golang-jwt/jwt/v4"
jwtgo "github.com/golang-jwt/jwt/v5"
"github.com/google/uuid"
echojwt "github.com/labstack/echo-jwt"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
)
@ -47,10 +48,10 @@ type jwt struct {
skipLocalhost bool
secret []byte
accessValidFor time.Duration
accessConfig middleware.JWTConfig
accessConfig echojwt.Config
accessMiddleware echo.MiddlewareFunc
refreshValidFor time.Duration
refreshConfig middleware.JWTConfig
refreshConfig echojwt.Config
refreshMiddleware echo.MiddlewareFunc
// Validators is a map of all recognized issuers to their specific validators. The key is the value of
// the "iss" field in the claims. Somewhat required because otherwise the token cannot be verified.
@ -84,35 +85,31 @@ func New(config Config) (JWT, error) {
return false
}
j.accessConfig = middleware.JWTConfig{
Skipper: skipperFunc,
SigningMethod: middleware.AlgorithmHS256,
ContextKey: "user",
TokenLookup: "header:" + echo.HeaderAuthorization,
AuthScheme: "Bearer",
Claims: jwtgo.MapClaims{},
ErrorHandlerWithContext: j.ErrorHandler,
ParseTokenFunc: j.parseToken("access"),
j.accessConfig = echojwt.Config{
Skipper: skipperFunc,
SigningMethod: middleware.AlgorithmHS256,
ContextKey: "user",
TokenLookup: "header:Authorization:Bearer ",
ErrorHandler: j.ErrorHandler,
ParseTokenFunc: j.parseToken("access"),
}
j.refreshConfig = middleware.JWTConfig{
Skipper: skipperFunc,
SigningMethod: middleware.AlgorithmHS256,
ContextKey: "user",
TokenLookup: "header:" + echo.HeaderAuthorization,
AuthScheme: "Bearer",
Claims: jwtgo.MapClaims{},
ErrorHandlerWithContext: j.ErrorHandler,
ParseTokenFunc: j.parseToken("refresh"),
j.refreshConfig = echojwt.Config{
Skipper: skipperFunc,
SigningMethod: middleware.AlgorithmHS256,
ContextKey: "user",
TokenLookup: "header:Authorization:Bearer ",
ErrorHandler: j.ErrorHandler,
ParseTokenFunc: j.parseToken("refresh"),
}
return j, nil
}
func (j *jwt) parseToken(use string) func(auth string, c echo.Context) (interface{}, error) {
func (j *jwt) parseToken(use string) func(c echo.Context, auth string) (interface{}, error) {
keyFunc := func(*jwtgo.Token) (interface{}, error) { return j.secret, nil }
return func(auth string, c echo.Context) (interface{}, error) {
return func(c echo.Context, auth string) (interface{}, error) {
var token *jwtgo.Token
var err error
@ -184,7 +181,7 @@ func (j *jwt) ClearValidators() {
j.validators = nil
}
func (j *jwt) ErrorHandler(err error, c echo.Context) error {
func (j *jwt) ErrorHandler(c echo.Context, err error) error {
if c.Request().URL.Path == "/api" {
return c.JSON(http.StatusOK, api.MinimalAbout{
App: app.Name,
@ -195,12 +192,12 @@ func (j *jwt) ErrorHandler(err error, c echo.Context) error {
})
}
return api.Err(http.StatusUnauthorized, "Missing or invalid JWT token")
return api.Err(http.StatusUnauthorized, "", "Missing or invalid JWT token")
}
func (j *jwt) AccessMiddleware() echo.MiddlewareFunc {
if j.accessMiddleware == nil {
j.accessMiddleware = middleware.JWTWithConfig(j.accessConfig)
j.accessMiddleware = echojwt.WithConfig(j.accessConfig)
}
return j.accessMiddleware
@ -208,7 +205,7 @@ func (j *jwt) AccessMiddleware() echo.MiddlewareFunc {
func (j *jwt) RefreshMiddleware() echo.MiddlewareFunc {
if j.refreshMiddleware == nil {
j.refreshMiddleware = middleware.JWTWithConfig(j.refreshConfig)
j.refreshMiddleware = echojwt.WithConfig(j.refreshConfig)
}
return j.refreshMiddleware
@ -243,16 +240,16 @@ func (j *jwt) LoginHandler(c echo.Context) error {
if ok {
if err != nil {
time.Sleep(5 * time.Second)
return api.Err(http.StatusUnauthorized, "Invalid authorization credentials", "%s", err)
return api.Err(http.StatusUnauthorized, "", "Invalid authorization credentials: %s", err.Error())
}
} else {
time.Sleep(5 * time.Second)
return api.Err(http.StatusBadRequest, "Missing authorization credentials")
return api.Err(http.StatusBadRequest, "", "Missing authorization credentials")
}
at, rt, err := j.createToken(subject)
if err != nil {
return api.Err(http.StatusInternalServerError, "Failed to create JWT", "%s", err)
return api.Err(http.StatusInternalServerError, "", "Failed to create JWT: %s", err.Error())
}
return c.JSON(http.StatusOK, api.JWT{
@ -273,14 +270,17 @@ func (j *jwt) LoginHandler(c echo.Context) error {
func (j *jwt) RefreshHandler(c echo.Context) error {
token, ok := c.Get("user").(*jwtgo.Token)
if !ok {
return api.Err(http.StatusForbidden, "Invalid token")
return api.Err(http.StatusForbidden, "", "Invalid token")
}
subject := token.Claims.(jwtgo.MapClaims)["sub"].(string)
subject, err := token.Claims.GetSubject()
if err != nil {
return api.Err(http.StatusForbidden, "", "Invalid subject: %s", err.Error())
}
at, _, err := j.createToken(subject)
if err != nil {
return api.Err(http.StatusInternalServerError, "Failed to create JWT", "%s", err)
return api.Err(http.StatusInternalServerError, "", "Failed to create JWT: %s", err.Error())
}
return c.JSON(http.StatusOK, api.JWTRefresh{

View File

@ -8,7 +8,7 @@ import (
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/http/jwt/jwks"
jwtgo "github.com/golang-jwt/jwt/v4"
jwtgo "github.com/golang-jwt/jwt/v5"
"github.com/labstack/echo/v4"
)
@ -148,23 +148,21 @@ func (v *auth0Validator) Validate(c echo.Context) (bool, string, error) {
func (v *auth0Validator) keyFunc(token *jwtgo.Token) (interface{}, error) {
// Verify 'aud' claim
checkAud := token.Claims.(jwtgo.MapClaims).VerifyAudience(v.audience, false)
if !checkAud {
return nil, fmt.Errorf("invalid audience")
if _, err := token.Claims.GetAudience(); err != nil {
return nil, fmt.Errorf("invalid audience: %w", err)
}
// Verify 'iss' claim
checkIss := token.Claims.(jwtgo.MapClaims).VerifyIssuer(v.issuer, false)
if !checkIss {
return nil, fmt.Errorf("invalid issuer")
if _, err := token.Claims.GetIssuer(); err != nil {
return nil, fmt.Errorf("invalid issuer: %w", err)
}
// Verify 'sub' claim
if _, ok := token.Claims.(jwtgo.MapClaims)["sub"]; !ok {
return nil, fmt.Errorf("sub claim is required")
sub, err := token.Claims.GetSubject()
if err != nil {
return nil, fmt.Errorf("invalid subject: %w", err)
}
sub := token.Claims.(jwtgo.MapClaims)["sub"].(string)
found := false
for _, u := range v.users {
if sub == u {

View File

@ -54,6 +54,10 @@ func NewHLSWithConfig(config HLSConfig) echo.MiddlewareFunc {
config.Skipper = DefaultHLSConfig.Skipper
}
if config.EgressCollector == nil && config.IngressCollector == nil {
config.Skipper = func(c echo.Context) bool { return true }
}
if config.EgressCollector == nil {
config.EgressCollector = DefaultHLSConfig.EgressCollector
}
@ -186,7 +190,7 @@ func (h *hls) handleEgress(c echo.Context, next echo.HandlerFunc) error {
// Add the new session's top bitrate to the ingress top bitrate
resultingBitrate := currentBitrate + streamBitrate
if resultingBitrate <= 0.5 || resultingBitrate >= maxBitrate {
if resultingBitrate >= maxBitrate {
return echo.NewHTTPError(509, "Bitrate limit exceeded")
}
}

View File

@ -35,6 +35,7 @@ func NewHTTPWithConfig(config HTTPConfig) echo.MiddlewareFunc {
if config.Collector == nil {
config.Collector = DefaultHTTPConfig.Collector
config.Skipper = func(c echo.Context) bool { return true }
}
return func(next echo.HandlerFunc) echo.HandlerFunc {

View File

@ -287,6 +287,7 @@ func NewServer(config Config) (Server, error) {
s.v3handler.session = api.NewSession(
config.Sessions,
)
s.middleware.session = mwsession.NewHLSWithConfig(mwsession.HLSConfig{
EgressCollector: config.Sessions.Collector("hls"),
IngressCollector: config.Sessions.Collector("hlsingress"),

Binary file not shown.

View File

@ -108,16 +108,11 @@ type memFilesystem struct {
metaLock sync.RWMutex
// Mapping of path to file
files map[string]*memFile
files *memStorage
// Mutex for the files map
filesLock sync.RWMutex
// Pool for the storage of the contents of files
dataPool sync.Pool
// Current size of the filesystem in bytes
// Current size of the filesystem in bytes and its mutex
currentSize int64
sizeLock sync.RWMutex
// Logger from the config
logger log.Logger
@ -137,13 +132,7 @@ func NewMemFilesystem(config MemConfig) (Filesystem, error) {
fs.logger = fs.logger.WithField("type", "mem")
fs.files = make(map[string]*memFile)
fs.dataPool = sync.Pool{
New: func() interface{} {
return new(bytes.Buffer)
},
}
fs.files = newMemStorage()
fs.logger.Debug().Log("Created")
@ -218,25 +207,24 @@ func (fs *memFilesystem) SetMetadata(key, data string) {
}
func (fs *memFilesystem) Size() (int64, int64) {
fs.filesLock.RLock()
defer fs.filesLock.RUnlock()
fs.sizeLock.RLock()
defer fs.sizeLock.RUnlock()
return fs.currentSize, -1
}
func (fs *memFilesystem) Files() int64 {
fs.filesLock.RLock()
defer fs.filesLock.RUnlock()
nfiles := int64(0)
for _, f := range fs.files {
fs.files.Range(func(key string, f *memFile) bool {
if f.dir {
continue
return true
}
nfiles++
}
return true
})
return nfiles
}
@ -244,9 +232,7 @@ func (fs *memFilesystem) Files() int64 {
func (fs *memFilesystem) Open(path string) File {
path = fs.cleanPath(path)
fs.filesLock.RLock()
file, ok := fs.files[path]
fs.filesLock.RUnlock()
file, ok := fs.files.LoadAndCopy(path)
if !ok {
return nil
@ -262,7 +248,9 @@ func (fs *memFilesystem) Open(path string) File {
}
if len(file.linkTo) != 0 {
file, ok = fs.files[file.linkTo]
file.Close()
file, ok = fs.files.LoadAndCopy(file.linkTo)
if !ok {
return nil
}
@ -280,21 +268,23 @@ func (fs *memFilesystem) Open(path string) File {
func (fs *memFilesystem) ReadFile(path string) ([]byte, error) {
path = fs.cleanPath(path)
fs.filesLock.RLock()
file, ok := fs.files[path]
fs.filesLock.RUnlock()
file, ok := fs.files.LoadAndCopy(path)
if !ok {
return nil, os.ErrNotExist
}
if len(file.linkTo) != 0 {
file, ok = fs.files[file.linkTo]
file.Close()
file, ok = fs.files.LoadAndCopy(file.linkTo)
if !ok {
return nil, os.ErrNotExist
}
}
defer file.Close()
if file.data != nil {
return file.data.Bytes(), nil
}
@ -306,21 +296,17 @@ func (fs *memFilesystem) Symlink(oldname, newname string) error {
oldname = fs.cleanPath(oldname)
newname = fs.cleanPath(newname)
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
if _, ok := fs.files[oldname]; !ok {
return os.ErrNotExist
}
if _, ok := fs.files[newname]; ok {
if fs.files.Has(newname) {
return os.ErrExist
}
if file, ok := fs.files[oldname]; ok {
if len(file.linkTo) != 0 {
return fmt.Errorf("%s can't link to another link (%s)", newname, oldname)
}
oldFile, ok := fs.files.Load(oldname)
if !ok {
return os.ErrNotExist
}
if len(oldFile.linkTo) != 0 {
return fmt.Errorf("%s can't link to another link (%s)", newname, oldname)
}
newFile := &memFile{
@ -334,7 +320,17 @@ func (fs *memFilesystem) Symlink(oldname, newname string) error {
data: nil,
}
fs.files[newname] = newFile
oldFile, loaded := fs.files.Store(newname, newFile)
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
if loaded {
oldFile.Close()
fs.currentSize -= oldFile.size
}
fs.currentSize += newFile.size
return nil
}
@ -349,10 +345,9 @@ func (fs *memFilesystem) WriteFileReader(path string, r io.Reader) (int64, bool,
size: 0,
lastMod: time.Now(),
},
data: fs.dataPool.Get().(*bytes.Buffer),
data: &bytes.Buffer{},
}
newFile.data.Reset()
size, err := newFile.data.ReadFrom(r)
if err != nil {
fs.logger.WithFields(log.Fields{
@ -364,21 +359,17 @@ func (fs *memFilesystem) WriteFileReader(path string, r io.Reader) (int64, bool,
newFile.size = size
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
oldFile, replace := fs.files.Store(path, newFile)
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
file, replace := fs.files[path]
if replace {
delete(fs.files, path)
oldFile.Close()
fs.currentSize -= file.size
fs.dataPool.Put(file.data)
file.data = nil
fs.currentSize -= oldFile.size
}
fs.files[path] = newFile
fs.currentSize += newFile.size
logger := fs.logger.WithFields(log.Fields{
@ -405,14 +396,12 @@ func (fs *memFilesystem) WriteFileSafe(path string, data []byte) (int64, bool, e
}
func (fs *memFilesystem) Purge(size int64) int64 {
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
files := []*memFile{}
for _, f := range fs.files {
files = append(files, f)
}
fs.files.Range(func(_ string, file *memFile) bool {
files = append(files, file)
return true
})
sort.Slice(files, func(i, j int) bool {
return files[i].lastMod.Before(files[j].lastMod)
@ -421,13 +410,15 @@ func (fs *memFilesystem) Purge(size int64) int64 {
var freed int64 = 0
for _, f := range files {
delete(fs.files, f.name)
fs.files.Delete(f.name)
size -= f.size
freed += f.size
fs.currentSize -= f.size
fs.dataPool.Put(f.data)
f.data = nil
fs.sizeLock.Lock()
fs.currentSize -= f.size
fs.sizeLock.Unlock()
f.Close()
fs.logger.WithFields(log.Fields{
"path": f.name,
@ -448,8 +439,8 @@ func (fs *memFilesystem) Purge(size int64) int64 {
func (fs *memFilesystem) MkdirAll(path string, perm os.FileMode) error {
path = fs.cleanPath(path)
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
info, err := fs.stat(path)
if err == nil {
@ -470,7 +461,7 @@ func (fs *memFilesystem) MkdirAll(path string, perm os.FileMode) error {
data: nil,
}
fs.files[path] = f
fs.files.Store(path, f)
return nil
}
@ -483,25 +474,23 @@ func (fs *memFilesystem) Rename(src, dst string) error {
return nil
}
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
srcFile, ok := fs.files[src]
srcFile, ok := fs.files.Load(src)
if !ok {
return os.ErrNotExist
}
dstFile, ok := fs.files[dst]
if ok {
dstFile, replace := fs.files.Store(dst, srcFile)
fs.files.Delete(src)
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
if replace {
dstFile.Close()
fs.currentSize -= dstFile.size
fs.dataPool.Put(dstFile.data)
dstFile.data = nil
}
fs.files[dst] = srcFile
delete(fs.files, src)
return nil
}
@ -513,58 +502,53 @@ func (fs *memFilesystem) Copy(src, dst string) error {
return nil
}
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
if fs.isDir(dst) {
return os.ErrInvalid
}
srcFile, ok := fs.files[src]
srcFile, ok := fs.files.LoadAndCopy(src)
if !ok {
return os.ErrNotExist
}
if srcFile.dir {
srcFile.Close()
return os.ErrNotExist
}
if fs.isDir(dst) {
return os.ErrInvalid
dstFile := &memFile{
memFileInfo: memFileInfo{
name: dst,
dir: false,
size: srcFile.size,
lastMod: time.Now(),
},
data: srcFile.data,
}
dstFile, ok := fs.files[dst]
if ok {
fs.currentSize -= dstFile.size
} else {
dstFile = &memFile{
memFileInfo: memFileInfo{
name: dst,
dir: false,
size: srcFile.size,
lastMod: time.Now(),
},
data: fs.dataPool.Get().(*bytes.Buffer),
}
}
f, replace := fs.files.Store(dst, dstFile)
dstFile.data.Reset()
dstFile.data.Write(srcFile.data.Bytes())
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
if replace {
f.Close()
fs.currentSize -= f.size
}
fs.currentSize += dstFile.size
fs.files[dst] = dstFile
return nil
}
func (fs *memFilesystem) Stat(path string) (FileInfo, error) {
path = fs.cleanPath(path)
fs.filesLock.RLock()
defer fs.filesLock.RUnlock()
return fs.stat(path)
}
func (fs *memFilesystem) stat(path string) (FileInfo, error) {
file, ok := fs.files[path]
file, ok := fs.files.Load(path)
if ok {
f := &memFileInfo{
name: file.name,
@ -575,7 +559,7 @@ func (fs *memFilesystem) stat(path string) (FileInfo, error) {
}
if len(f.linkTo) != 0 {
file, ok := fs.files[f.linkTo]
file, ok := fs.files.Load(f.linkTo)
if !ok {
return nil, os.ErrNotExist
}
@ -604,7 +588,7 @@ func (fs *memFilesystem) stat(path string) (FileInfo, error) {
}
func (fs *memFilesystem) isDir(path string) bool {
file, ok := fs.files[path]
file, ok := fs.files.Load(path)
if ok {
return file.dir
}
@ -617,28 +601,29 @@ func (fs *memFilesystem) isDir(path string) bool {
return true
}
for k := range fs.files {
if strings.HasPrefix(k, path) {
return true
}
}
found := false
return false
fs.files.Range(func(k string, _ *memFile) bool {
if strings.HasPrefix(k, path) {
found = true
return false
}
return true
})
return found
}
func (fs *memFilesystem) Remove(path string) int64 {
path = fs.cleanPath(path)
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
file, ok := fs.files[path]
file, ok := fs.files.Delete(path)
if ok {
delete(fs.files, path)
fs.currentSize -= file.size
file.Close()
fs.dataPool.Put(file.data)
file.data = nil
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
fs.currentSize -= file.size
} else {
return -1
}
@ -653,12 +638,12 @@ func (fs *memFilesystem) Remove(path string) int64 {
}
func (fs *memFilesystem) RemoveAll() int64 {
fs.filesLock.Lock()
defer fs.filesLock.Unlock()
fs.sizeLock.Lock()
defer fs.sizeLock.Unlock()
size := fs.currentSize
fs.files = make(map[string]*memFile)
fs.files = newMemStorage()
fs.currentSize = 0
return size
@ -668,22 +653,29 @@ func (fs *memFilesystem) List(path, pattern string) []FileInfo {
path = fs.cleanPath(path)
files := []FileInfo{}
fs.filesLock.RLock()
defer fs.filesLock.RUnlock()
var compiledPattern glob.Glob
var err error
for _, file := range fs.files {
if !strings.HasPrefix(file.name, path) {
continue
}
if len(pattern) != 0 {
if ok, _ := glob.Match(pattern, file.name, '/'); !ok {
continue
}
if len(pattern) != 0 {
compiledPattern, err = glob.Compile(pattern, '/')
if err != nil {
return nil
}
}
fs.files.Range(func(key string, file *memFile) bool {
if file.dir {
continue
return true
}
if !strings.HasPrefix(file.name, path) {
return true
}
if compiledPattern != nil {
if !compiledPattern.Match(file.name) {
return true
}
}
files = append(files, &memFileInfo{
@ -692,7 +684,9 @@ func (fs *memFilesystem) List(path, pattern string) []FileInfo {
lastMod: file.lastMod,
linkTo: file.linkTo,
})
}
return true
})
return files
}

84
io/fs/mem_storage.go Normal file
View File

@ -0,0 +1,84 @@
package fs
import (
"bytes"
"github.com/puzpuzpuz/xsync/v3"
)
// memStorage is a concurrency-safe mapping of file paths to their
// in-memory file entries.
type memStorage struct {
	// lock serializes compound operations against readers. RBMutex is a
	// reader-biased mutex; readers acquire it via per-reader tokens.
	lock *xsync.RBMutex
	// files maps a file path to its in-memory entry.
	files *xsync.MapOf[string, *memFile]
}
// newMemStorage returns an empty, ready-to-use storage for memFile
// entries, guarded by a reader-biased mutex.
func newMemStorage() *memStorage {
	return &memStorage{
		lock:  xsync.NewRBMutex(),
		files: xsync.NewMapOf[string, *memFile](),
	}
}
// Delete removes the entry stored under key. It returns the removed
// entry and whether the key was actually present.
func (m *memStorage) Delete(key string) (*memFile, bool) {
	m.lock.Lock()
	file, ok := m.files.LoadAndDelete(key)
	m.lock.Unlock()

	return file, ok
}
// Store puts value under key. It returns the previously stored entry
// for that key and whether such an entry existed.
func (m *memStorage) Store(key string, value *memFile) (*memFile, bool) {
	m.lock.Lock()
	previous, loaded := m.files.LoadAndStore(key, value)
	m.lock.Unlock()

	return previous, loaded
}
// Load returns the entry stored under key, or (nil, false) if there is
// none. The returned file is the stored instance itself, not a copy.
func (m *memStorage) Load(key string) (*memFile, bool) {
	rt := m.lock.RLock()
	file, ok := m.files.Load(key)
	m.lock.RUnlock(rt)

	return file, ok
}
// LoadAndCopy returns a snapshot of the entry stored under key, or
// (nil, false) if there is none. The caller owns the returned copy and
// may Close it independently of the stored original.
func (m *memStorage) LoadAndCopy(key string) (*memFile, bool) {
	rt := m.lock.RLock()
	defer m.lock.RUnlock(rt)

	src, ok := m.files.Load(key)
	if !ok {
		return nil, false
	}

	snapshot := &memFile{
		memFileInfo: memFileInfo{
			name:    src.name,
			size:    src.size,
			dir:     src.dir,
			lastMod: src.lastMod,
			linkTo:  src.linkTo,
		},
	}

	if src.data != nil {
		// NOTE: bytes.NewBuffer wraps the existing backing array rather
		// than copying it — this is a zero-copy snapshot. It stays valid
		// because writers replace whole memFile entries via Store instead
		// of mutating a stored buffer in place.
		snapshot.data = bytes.NewBuffer(src.data.Bytes())
	}

	return snapshot, true
}
// Has reports whether an entry is stored under key.
func (m *memStorage) Has(key string) bool {
	rt := m.lock.RLock()
	_, found := m.files.Load(key)
	m.lock.RUnlock(rt)

	return found
}
// Range calls f for each stored entry until f returns false. The read
// lock is held for the whole iteration, so f must not call back into
// methods that take the write lock (Store, Delete).
func (m *memStorage) Range(f func(key string, value *memFile) bool) {
	rt := m.lock.RLock()
	defer m.lock.RUnlock(rt)

	m.files.Range(f)
}

View File

@ -1,8 +1,16 @@
package fs
import (
"context"
"fmt"
"io"
gorand "math/rand"
"strconv"
"sync"
"testing"
"time"
"github.com/datarhei/core/v16/math/rand"
"github.com/stretchr/testify/require"
)
@ -19,6 +27,7 @@ func TestMemFromDir(t *testing.T) {
"/disk.go",
"/fs_test.go",
"/fs.go",
"/mem_storage.go",
"/mem_test.go",
"/mem.go",
"/readonly_test.go",
@ -28,3 +37,125 @@ func TestMemFromDir(t *testing.T) {
"/sized.go",
}, names)
}
// BenchmarkMemList measures listing a subtree of a populated in-memory
// filesystem with a glob pattern.
func BenchmarkMemList(b *testing.B) {
	fs, err := NewMemFilesystem(MemConfig{})
	require.NoError(b, err)

	const nFiles = 1000
	payload := []byte("foobar")

	// One small file per directory, with a random basename.
	for dir := 0; dir < nFiles; dir++ {
		name := fmt.Sprintf("/%d/%s.dat", dir, rand.StringAlphanumeric(8))
		fs.WriteFile(name, payload)
	}

	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		fs.List("/", "/5/**")
	}
}
// BenchmarkMemReadFile measures opening (and immediately closing)
// random files on a pre-populated in-memory filesystem.
func BenchmarkMemReadFile(b *testing.B) {
	fs, err := NewMemFilesystem(MemConfig{})
	require.NoError(b, err)

	const nFiles = 1000

	// Populate with 2 KiB files named /0.dat ... /999.dat.
	for i := 0; i < nFiles; i++ {
		fs.WriteFile(fmt.Sprintf("/%d.dat", i), []byte(rand.StringAlphanumeric(2*1024)))
	}

	// Deterministic sequence of file indices.
	rng := gorand.New(gorand.NewSource(42))

	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		file := fs.Open("/" + strconv.Itoa(rng.Intn(nFiles)) + ".dat")
		file.Close()
	}
}
// TestWriteWhileRead verifies that an already opened file keeps
// delivering its original content even when the same path is
// overwritten afterwards (snapshot semantics of Open).
func TestWriteWhileRead(t *testing.T) {
	mem, err := NewMemFilesystem(MemConfig{})
	require.NoError(t, err)

	original := []byte("xxxxx")

	_, _, err = mem.WriteFile("/foobar", original)
	require.NoError(t, err)

	// Open a handle before the file is replaced.
	handle := mem.Open("/foobar")
	require.NotNil(t, handle)

	// Overwrite the same path with different content.
	_, _, err = mem.WriteFile("/foobar", []byte("yyyyy"))
	require.NoError(t, err)

	// The handle must still see the content from before the overwrite.
	content, err := io.ReadAll(handle)
	require.NoError(t, err)
	require.Equal(t, original, content)
}
// BenchmarkMemReadFileWhileWriting measures concurrent Open/Close of
// random files while a large number of background writer goroutines
// keep overwriting files.
func BenchmarkMemReadFileWhileWriting(b *testing.B) {
	mem, err := NewMemFilesystem(MemConfig{})
	require.NoError(b, err)

	nReaders := 500  // concurrent reader goroutines
	nWriters := 1000 // background writer goroutines
	nFiles := 30     // files owned by each writer

	// Context to stop the background writers when the benchmark returns.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	writerWg := sync.WaitGroup{}

	// All writers share the same 2 KiB payload.
	data := []byte(rand.StringAlphanumeric(2 * 1024))

	for i := 0; i < nWriters; i++ {
		writerWg.Add(1)

		go func(ctx context.Context, from int) {
			// First create this writer's file range [from, from+nFiles).
			for i := 0; i < nFiles; i++ {
				path := fmt.Sprintf("/%d.dat", i+from)
				mem.WriteFile(path, data)
			}

			ticker := time.NewTicker(40 * time.Millisecond)
			defer ticker.Stop()

			// Signal readiness only after all files of this range exist,
			// so readers started after writerWg.Wait() never hit a
			// missing path.
			writerWg.Done()

			// Keep rewriting a random file of this writer's range until
			// the benchmark is done.
			for {
				select {
				case <-ctx.Done():
					return
				case <-ticker.C:
					num := gorand.Intn(nFiles) + from
					path := fmt.Sprintf("/%d.dat", num)
					mem.WriteFile(path, data)
				}
			}
		}(ctx, i*nFiles)
	}

	// Wait for all writers to be started
	writerWg.Wait()

	b.ResetTimer()

	readerWg := sync.WaitGroup{}

	for i := 0; i < nReaders; i++ {
		readerWg.Add(1)

		go func() {
			defer readerWg.Done()

			// Each reader opens b.N random files across all writers.
			for i := 0; i < b.N; i++ {
				num := gorand.Intn(nWriters * nFiles)
				f := mem.Open("/" + strconv.Itoa(num) + ".dat")
				f.Close()
			}
		}()
	}

	readerWg.Wait()
}

View File

@ -49,6 +49,6 @@ func (r *readOnlyFilesystem) Purge(size int64) int64 {
return 0
}
func (r *readOnlyFilesystem) Resize(size int64) error {
func (r *readOnlyFilesystem) Resize(size int64, purge bool) error {
return os.ErrPermission
}

View File

@ -45,6 +45,6 @@ func TestReadOnly(t *testing.T) {
ros, ok := ro.(SizedFilesystem)
require.True(t, ok, "must implement SizedFilesystem")
err = ros.Resize(100)
err = ros.Resize(100, false)
require.Error(t, err)
}

View File

@ -10,7 +10,7 @@ type SizedFilesystem interface {
Filesystem
// Resize resizes the filesystem to the new size. Files may need to be deleted.
Resize(size int64) error
Resize(size int64, purge bool) error
}
type PurgeFilesystem interface {
@ -48,7 +48,9 @@ func (r *sizedFilesystem) Size() (int64, int64) {
return currentSize, r.maxSize
}
func (r *sizedFilesystem) Resize(size int64) error {
func (r *sizedFilesystem) Resize(size int64, purge bool) error {
r.purge = purge
currentSize, _ := r.Size()
if size >= currentSize {
// If the new size is the same or larger than the current size,
@ -82,9 +84,15 @@ func (r *sizedFilesystem) WriteFileReader(path string, rd io.Reader) (int64, boo
return -1, false, fmt.Errorf("File is too big")
}
// Calculate the new size of the filesystem
newSize := currentSize + size
// Calculate the new size of the filesystem
finfo, err := r.Filesystem.Stat(path)
if err == nil {
// If the file already exists, take its size into account
newSize -= finfo.Size()
}
// If the new size is larger than the allowed size, we have to free
// some space.
if newSize > maxSize {
@ -117,9 +125,15 @@ func (r *sizedFilesystem) WriteFileSafe(path string, data []byte) (int64, bool,
return -1, false, fmt.Errorf("File is too big")
}
// Calculate the new size of the filesystem
newSize := currentSize + size
// Calculate the new size of the filesystem
finfo, err := r.Filesystem.Stat(path)
if err == nil {
// If the file already exists, take its size into account
newSize -= finfo.Size()
}
// If the new size is larger than the allowed size, we have to free
// some space.
if newSize > maxSize {

View File

@ -35,7 +35,7 @@ func TestSizedResize(t *testing.T) {
require.Equal(t, int64(0), cur)
require.Equal(t, int64(10), max)
err := fs.Resize(20)
err := fs.Resize(20, false)
require.NoError(t, err)
cur, max = fs.Size()
@ -44,6 +44,26 @@ func TestSizedResize(t *testing.T) {
require.Equal(t, int64(20), max)
}
// TestSizedResizeSetPurge verifies that Resize(size, true) enables the
// purge-on-write behavior: after enabling it, a write that would exceed
// the size limit succeeds by evicting older files.
func TestSizedResizeSetPurge(t *testing.T) {
	fs, _ := NewSizedFilesystem(newMemFS(), 10, false)

	_, _, err := fs.WriteFileReader("/foobar1", strings.NewReader("xxxxx"))
	require.NoError(t, err)

	_, _, err = fs.WriteFileReader("/foobar2", strings.NewReader("xxxxx"))
	require.NoError(t, err)

	// The filesystem is full (10 bytes) and purging is disabled, so a
	// third write must fail.
	_, _, err = fs.WriteFileReader("/foobar3", strings.NewReader("xxxxx"))
	require.Error(t, err)

	// Keep the same size limit but enable purging. The original test
	// ignored this error return; check it.
	err = fs.Resize(10, true)
	require.NoError(t, err)

	// With purging enabled, the write succeeds by evicting an old file.
	_, _, err = fs.WriteFileReader("/foobar3", strings.NewReader("xxxxx"))
	require.NoError(t, err)

	require.Equal(t, int64(2), fs.Files())
}
func TestSizedResizePurge(t *testing.T) {
fs, _ := NewSizedFilesystem(newMemFS(), 10, false)
@ -59,7 +79,7 @@ func TestSizedResizePurge(t *testing.T) {
require.Equal(t, int64(10), cur)
require.Equal(t, int64(10), max)
err := fs.Resize(5)
err := fs.Resize(5, false)
require.NoError(t, err)
cur, max = fs.Size()
@ -132,6 +152,31 @@ func TestSizedReplaceNoPurge(t *testing.T) {
cur = fs.Files()
require.Equal(t, int64(1), cur)
data = strings.NewReader("zzzzzzz")
size, created, err = fs.WriteFileReader("/foobar", data)
require.Nil(t, err)
require.Equal(t, int64(7), size)
require.Equal(t, false, created)
cur, max = fs.Size()
require.Equal(t, int64(7), cur)
require.Equal(t, int64(10), max)
cur = fs.Files()
require.Equal(t, int64(1), cur)
data = strings.NewReader("zzzzzzzz")
size, created, err = fs.WriteFileReader("/foobar", data)
require.Nil(t, err)
require.Equal(t, int64(8), size)
require.Equal(t, false, created)
}
func TestSizedReplacePurge(t *testing.T) {

View File

@ -95,6 +95,11 @@ type Logger interface {
// be reset to nil.
Error() Logger
// WithLevel writes a message with the given level to all registered outputs.
// The message will be written according to fmt.Printf(). The detail field will
// be reset to nil.
WithLevel(level Level) Logger
// Write implements the io.Writer interface such that it can be used in e.g. the
// the log/Logger facility. Messages will be printed with debug level.
Write(p []byte) (int, error)
@ -178,6 +183,10 @@ func (l *logger) Error() Logger {
return newEvent(l).Error()
}
// WithLevel returns a new logging event whose messages are written
// with the given level.
func (l *logger) WithLevel(level Level) Logger {
	return newEvent(l).WithLevel(level)
}
func (l *logger) Write(p []byte) (int, error) {
return newEvent(l).Write(p)
}
@ -318,29 +327,24 @@ func (e *Event) WithError(err error) Logger {
}
func (e *Event) Debug() Logger {
clone := e.clone()
clone.Level = Ldebug
return clone
return e.WithLevel(Ldebug)
}
func (e *Event) Info() Logger {
clone := e.clone()
clone.Level = Linfo
return clone
return e.WithLevel(Linfo)
}
func (e *Event) Warn() Logger {
clone := e.clone()
clone.Level = Lwarn
return clone
return e.WithLevel(Lwarn)
}
func (e *Event) Error() Logger {
return e.WithLevel(Lerror)
}
func (e *Event) WithLevel(level Level) Logger {
clone := e.clone()
clone.Level = Lerror
clone.Level = level
return clone
}

View File

@ -4,6 +4,7 @@ package rand
import (
"math/rand"
"sync"
"time"
)
@ -12,12 +13,17 @@ const (
CharsetNumbers = "1234567890"
CharsetSymbols = "#@+*%&/<>[]()=?!$.,:;-_"
CharsetAll = CharsetLetters + CharsetNumbers + CharsetSymbols
CharsetAlphanumeric = CharsetLetters + CharsetNumbers
CharsetAll = CharsetLetters + CharsetNumbers + CharsetSymbols
)
var seededRand *rand.Rand = rand.New(rand.NewSource(time.Now().UnixNano()))
var lock sync.Mutex
func StringWithCharset(length int, charset string) string {
lock.Lock()
defer lock.Unlock()
b := make([]byte, length)
for i := range b {
b[i] = charset[seededRand.Intn(len(charset))]
@ -35,7 +41,7 @@ func StringNumbers(length int) string {
}
func StringAlphanumeric(length int) string {
return StringWithCharset(length, CharsetLetters+CharsetNumbers)
return StringWithCharset(length, CharsetAlphanumeric)
}
func String(length int) string {

View File

@ -91,7 +91,7 @@ func TestDescription(t *testing.T) {
require.Equal(t, "name: blabla (label)", d.String())
}
func TestMetri(t *testing.T) {
func TestMetrics(t *testing.T) {
m := NewMetrics()
require.Equal(t, "", m.String())

View File

@ -504,9 +504,6 @@ func (p *process) start() error {
// Start the reader
go p.reader()
// Wait for the process to finish
go p.waiter()
// Start the stale timeout if enabled
if p.stale.timeout != 0 {
var ctx context.Context
@ -592,7 +589,9 @@ func (p *process) stop(wait bool) error {
p.callbacks.onExit = func() {
wg.Done()
p.callbacks.lock.Lock()
p.callbacks.onExit = nil
p.callbacks.lock.Unlock()
}
} else {
cb := p.callbacks.onExit
@ -600,7 +599,9 @@ func (p *process) stop(wait bool) error {
cb()
wg.Done()
p.callbacks.lock.Lock()
p.callbacks.onExit = cb
p.callbacks.lock.Unlock()
}
}
p.callbacks.lock.Unlock()
@ -750,15 +751,18 @@ func (p *process) reader() {
p.stale.lock.Unlock()
}
}
if err := scanner.Err(); err != nil {
p.logger.Debug().WithError(err).Log("")
}
// Wait for the process to finish
p.waiter()
}
// waiter waits for the process to finish. If enabled, the process will
// be scheduled for a restart.
func (p *process) waiter() {
if p.getState() == stateFinishing {
p.stop(false)
}
if err := p.cmd.Wait(); err != nil {
// The process exited abnormally, i.e. the return code is non-zero or a signal
// has been raised.

View File

@ -51,7 +51,7 @@ type replacer struct {
func New() Replacer {
r := &replacer{
templates: make(map[string]template),
re: regexp.MustCompile(`{([a-z:]+)(?:\^(.))?(?:,(.*?))?}`),
re: regexp.MustCompile(`{([a-z]+(?::[0-9A-Za-z]+)?)(?:\^(.))?(?:,(.*?))?}`),
templateRe: regexp.MustCompile(`{([a-z:]+)}`),
}

View File

@ -138,7 +138,8 @@ func TestReplaceGlob(t *testing.T) {
r := New()
r.RegisterTemplate("foo:bar", "Hello foobar", nil)
r.RegisterTemplate("foo:baz", "Hello foobaz", nil)
r.RegisterTemplate("foo:123", "Hello 456", nil)
replaced := r.Replace("{foo:baz}, {foo:bar}", "foo:*", "", nil, nil, "")
require.Equal(t, "Hello foobaz, Hello foobar", replaced)
replaced := r.Replace("{foo:baz}, {foo:bar}, {foo:123}", "foo:*", "", nil, nil, "")
require.Equal(t, "Hello foobaz, Hello foobar, Hello 456", replaced)
}

View File

@ -875,12 +875,6 @@ func (r *restream) UpdateProcess(id string, config *app.Config) error {
return ErrUnknownProcess
}
// This would require a major version jump
//t.process.CreatedAt = task.process.CreatedAt
t.process.UpdatedAt = time.Now().Unix()
task.parser.TransferReportHistory(t.parser)
t.process.Order = task.process.Order
if id != t.id {
_, ok := r.tasks[t.id]
if ok {
@ -888,6 +882,8 @@ func (r *restream) UpdateProcess(id string, config *app.Config) error {
}
}
t.process.Order = task.process.Order
if err := r.stopProcess(id); err != nil {
return err
}
@ -896,6 +892,14 @@ func (r *restream) UpdateProcess(id string, config *app.Config) error {
return err
}
// This would require a major version jump
//t.process.CreatedAt = task.process.CreatedAt
t.process.UpdatedAt = time.Now().Unix()
task.parser.TransferReportHistory(t.parser)
// Transfer the metadata to the new process
t.metadata = task.metadata
r.tasks[t.id] = t
// set filesystem cleanup rules

View File

@ -4,7 +4,9 @@ package rtmp
import (
"context"
"crypto/tls"
"errors"
"fmt"
"io"
"net"
"net/url"
"path/filepath"
@ -195,6 +197,10 @@ type Config struct {
// ListenAndServe, so it's not possible to modify the configuration
// with methods like tls.Config.SetSessionTicketKeys.
TLSConfig *tls.Config
// ConnectionIdleTimeout is the duration after which an idle
// connection will be closed. Default is no timeout.
ConnectionIdleTimeout time.Duration
}
// Server represents a RTMP server
@ -252,17 +258,19 @@ func New(config Config) (Server, error) {
}
s.server = &rtmp.Server{
Addr: config.Addr,
HandlePlay: s.handlePlay,
HandlePublish: s.handlePublish,
Addr: config.Addr,
HandlePlay: s.handlePlay,
HandlePublish: s.handlePublish,
ConnectionIdleTimeout: config.ConnectionIdleTimeout,
}
if len(config.TLSAddr) != 0 {
s.tlsServer = &rtmp.Server{
Addr: config.TLSAddr,
TLSConfig: config.TLSConfig.Clone(),
HandlePlay: s.handlePlay,
HandlePublish: s.handlePublish,
Addr: config.TLSAddr,
TLSConfig: config.TLSConfig.Clone(),
HandlePlay: s.handlePlay,
HandlePublish: s.handlePublish,
ConnectionIdleTimeout: config.ConnectionIdleTimeout,
}
}
@ -318,8 +326,8 @@ func (s *server) Channels() []string {
return channels
}
func (s *server) log(who, action, path, message string, client net.Addr) {
s.logger.Info().WithFields(log.Fields{
func (s *server) log(level log.Level, who, action, path, message string, client net.Addr) {
s.logger.WithLevel(level).WithFields(log.Fields{
"who": who,
"action": action,
"path": path,
@ -364,12 +372,12 @@ func (s *server) handlePlay(conn *rtmp.Conn) {
path, token := getToken(conn.URL)
if len(token) == 0 {
s.log("PLAY", "FORBIDDEN", path, "no streamkey provided", client)
s.log(log.Lwarn, "PLAY", "FORBIDDEN", path, "no streamkey provided", client)
return
}
if s.token != token {
s.log("PLAY", "FORBIDDEN", path, "invalid streamkey ("+token+")", client)
s.log(log.Lwarn, "PLAY", "FORBIDDEN", path, "invalid streamkey ("+token+")", client)
return
}
@ -404,7 +412,7 @@ func (s *server) handlePlay(conn *rtmp.Conn) {
// Set the metadata for the client
conn.SetMetaData(ch.metadata)
s.log("PLAY", "START", playPath, "", client)
s.log(log.Linfo, "PLAY", "START", playPath, "", client)
// Get a cursor and apply filters
cursor := ch.queue.Oldest()
@ -427,13 +435,18 @@ func (s *server) handlePlay(conn *rtmp.Conn) {
id := ch.AddSubscriber(conn)
// Transfer the data
avutil.CopyFile(conn, demuxer)
err := avutil.CopyFile(conn, demuxer)
if err != nil {
if !errors.Is(err, io.EOF) {
s.log(log.Lerror, "PLAY", "ERROR", playPath, err.Error(), client)
}
}
ch.RemoveSubscriber(id)
s.log("PLAY", "STOP", playPath, "", client)
s.log(log.Linfo, "PLAY", "STOP", playPath, "", client)
} else {
s.log("PLAY", "NOTFOUND", playPath, "", client)
s.log(log.Lwarn, "PLAY", "NOTFOUND", playPath, "", client)
}
}
@ -449,12 +462,12 @@ func (s *server) handlePublish(conn *rtmp.Conn) {
path, token := getToken(conn.URL)
if len(token) == 0 {
s.log("PLAY", "FORBIDDEN", path, "no streamkey provided", client)
s.log(log.Lwarn, "PUBLISH", "FORBIDDEN", path, "no streamkey provided", client)
return
}
if s.token != token {
s.log("PLAY", "FORBIDDEN", path, "invalid streamkey ("+token+")", client)
s.log(log.Lwarn, "PUBLISH", "FORBIDDEN", path, "invalid streamkey ("+token+")", client)
return
}
@ -463,7 +476,7 @@ func (s *server) handlePublish(conn *rtmp.Conn) {
// Check the app path
if !strings.HasPrefix(playPath, s.app) {
s.log("PUBLISH", "FORBIDDEN", conn.URL.Path, "invalid app", client)
s.log(log.Lwarn, "PUBLISH", "FORBIDDEN", conn.URL.Path, "invalid app", client)
return
}
@ -471,7 +484,7 @@ func (s *server) handlePublish(conn *rtmp.Conn) {
streams, _ := conn.Streams()
if len(streams) == 0 {
s.log("PUBLISH", "INVALID", playPath, "no streams available", client)
s.log(log.Lwarn, "PUBLISH", "INVALID", playPath, "no streams available", client)
return
}
@ -506,18 +519,23 @@ func (s *server) handlePublish(conn *rtmp.Conn) {
s.lock.Unlock()
if ch == nil {
s.log("PUBLISH", "CONFLICT", playPath, "already publishing", client)
s.log(log.Lwarn, "PUBLISH", "CONFLICT", playPath, "already publishing", client)
return
}
s.log("PUBLISH", "START", playPath, "", client)
s.log(log.Linfo, "PUBLISH", "START", playPath, "", client)
for _, stream := range streams {
s.log("PUBLISH", "STREAM", playPath, stream.Type().String(), client)
s.log(log.Linfo, "PUBLISH", "STREAM", playPath, stream.Type().String(), client)
}
// Ingest the data
avutil.CopyPackets(ch.queue, conn)
err := avutil.CopyPackets(ch.queue, conn)
if err != nil {
if !errors.Is(err, io.EOF) {
s.log(log.Lerror, "PUBLISH", "ERROR", playPath, err.Error(), client)
}
}
s.lock.Lock()
delete(s.channels, playPath)
@ -525,5 +543,5 @@ func (s *server) handlePublish(conn *rtmp.Conn) {
ch.Close()
s.log("PUBLISH", "STOP", playPath, "", client)
s.log(log.Linfo, "PUBLISH", "STOP", playPath, "", client)
}

18
run.sh
View File

@ -3,20 +3,24 @@
# First run the import program. It will read the db.dir from the config file in order to
# find an old v1.json. This will be converted to the new db format.
./bin/import
if [ $? -ne 0 ]; then
exit 1
if [ -x ./bin/import ]; then
./bin/import
if [ $? -ne 0 ]; then
exit 1
fi
fi
# Run the FFmpeg migration program. In case a FFmpeg 5 binary is present, it will create a
# backup of the current DB and modify the FFmpeg parameter such that they are compatible
# with FFmpeg 5.
./bin/ffmigrate
if [ $? -ne 0 ]; then
exit 1
if [ -x ./bin/ffmigrate ]; then
./bin/ffmigrate
if [ $? -ne 0 ]; then
exit 1
fi
fi
# Now run the core with the possibly converted configuration.
./bin/core
exec ./bin/core

View File

@ -394,7 +394,7 @@ func newCollector(id string, persistFS fs.Filesystem, logger log.Logger, config
func (c *collector) start() {
c.startOnce.Do(func() {
if c.persist.enable && c.persist.interval != 0 {
if c.persist.enable && c.persist.interval > 0 {
ctx, cancel := context.WithCancel(context.Background())
c.persist.done = cancel
go c.persister(ctx, c.persist.interval)
@ -409,7 +409,7 @@ func (c *collector) start() {
func (c *collector) Stop() {
c.stopOnce.Do(func() {
if c.persist.enable && c.persist.interval != 0 {
if c.persist.enable && c.persist.interval > 0 {
c.persist.done()
}

View File

@ -468,15 +468,44 @@ func (s *server) handleConnect(req srt.ConnRequest) srt.ConnType {
client := req.RemoteAddr()
streamId := req.StreamId()
si, err := parseStreamId(streamId)
if err != nil {
s.log("CONNECT", "INVALID", "", err.Error(), client)
return srt.REJECT
}
var si streamInfo
var err error
if len(si.resource) == 0 {
s.log("CONNECT", "INVALID", "", "stream resource not provided", client)
return srt.REJECT
if req.Version() == 4 {
si.mode = "publish"
si.resource = client.String()
if len(s.passphrase) != 0 {
req.SetPassphrase(s.passphrase)
}
} else if req.Version() == 5 {
si, err = parseStreamId(streamId)
if err != nil {
s.log("CONNECT", "INVALID", "", err.Error(), client)
return srt.REJECT
}
if len(si.resource) == 0 {
s.log("CONNECT", "INVALID", "", "stream resource not provided", client)
return srt.REJECT
}
if len(s.passphrase) != 0 {
if !req.IsEncrypted() {
s.log("CONNECT", "FORBIDDEN", si.resource, "connection has to be encrypted", client)
return srt.REJECT
}
if err := req.SetPassphrase(s.passphrase); err != nil {
s.log("CONNECT", "FORBIDDEN", si.resource, err.Error(), client)
return srt.REJECT
}
} else {
if req.IsEncrypted() {
s.log("CONNECT", "INVALID", si.resource, "connection must not be encrypted", client)
return srt.REJECT
}
}
}
if si.mode == "publish" {
@ -488,26 +517,13 @@ func (s *server) handleConnect(req srt.ConnRequest) srt.ConnType {
return srt.REJECT
}
if len(s.passphrase) != 0 {
if !req.IsEncrypted() {
s.log("CONNECT", "FORBIDDEN", si.resource, "connection has to be encrypted", client)
return srt.REJECT
}
if err := req.SetPassphrase(s.passphrase); err != nil {
s.log("CONNECT", "FORBIDDEN", si.resource, err.Error(), client)
return srt.REJECT
}
} else {
if req.IsEncrypted() {
s.log("CONNECT", "INVALID", si.resource, "connection must not be encrypted", client)
return srt.REJECT
}
}
// Check the token
if len(s.token) != 0 && s.token != si.token {
s.log("CONNECT", "FORBIDDEN", si.resource, "invalid token ("+si.token+")", client)
if len(si.token) == 0 {
s.log("CONNECT", "FORBIDDEN", si.resource, "token required", client)
} else {
s.log("CONNECT", "FORBIDDEN", si.resource, "invalid token ("+si.token+")", client)
}
return srt.REJECT
}
@ -532,7 +548,13 @@ func (s *server) handlePublish(conn srt.Conn) {
streamId := conn.StreamId()
client := conn.RemoteAddr()
si, _ := parseStreamId(streamId)
var si streamInfo
if len(streamId) == 0 {
si.resource = client.String()
} else {
si, _ = parseStreamId(streamId)
}
// Look for the stream
s.lock.Lock()

View File

@ -16,3 +16,10 @@
*.out
gqlgen
*.exe
node_modules
# generated files
/api/testdata/default/graph/generated.go
/api/testdata/federation2/graph/federation.go
/api/testdata/federation2/graph/generated.go

View File

@ -1,18 +1,30 @@
run:
tests: true
skip-dirs:
- bin
linters-settings:
errcheck:
ignore: fmt:.*,[rR]ead|[wW]rite|[cC]lose,io:Copy
exclude-functions:
- (io.Writer).Write
- io.Copy
- io.WriteString
revive:
enable-all-rules: false
rules:
- name: empty-lines
testifylint:
disable-all: true
enable:
- bool-compare
- compares
- error-is-as
- error-nil
- expected-actual
- nil-compare
linters:
disable-all: true
enable:
- bodyclose
- deadcode
- depguard
- dupl
- errcheck
- gocritic
@ -24,16 +36,19 @@ linters:
- misspell
- nakedret
- prealloc
- revive
- staticcheck
- structcheck
- testifylint
- typecheck
- unconvert
- unused
- varcheck
issues:
exclude-dirs:
- bin
exclude-rules:
# Exclude some linters from running on tests files.
- path: _test\.go
linters:
- dupl
- errcheck

File diff suppressed because it is too large Load Diff

View File

@ -20,15 +20,18 @@ Still not convinced enough to use **gqlgen**? Compare **gqlgen** with other Go g
cd example
go mod init example
2. Add `github.com/99designs/gqlgen` to your [project's tools.go](https://github.com/golang/go/wiki/Modules#how-can-i-track-tool-dependencies-for-a-module)
2. Add `github.com/99designs/gqlgen` to your [project's tools.go](https://go.dev/wiki/Modules#how-can-i-track-tool-dependencies-for-a-module)
printf '//go:build tools\npackage tools\nimport (_ "github.com/99designs/gqlgen"\n _ "github.com/99designs/gqlgen/graphql/introspection")' | gofmt > tools.go
printf '// +build tools\npackage tools\nimport _ "github.com/99designs/gqlgen"' | gofmt > tools.go
go mod tidy
3. Initialise gqlgen config and generate models
go run github.com/99designs/gqlgen init
go mod tidy
4. Start the graphql server
go run server.go
@ -83,7 +86,7 @@ models:
- #### Using Explicit Resolvers
If you want to Keep using the generated model, mark the field as requiring a resolver explicitly in `gqlgen.yml` like this:
If you want to keep using the generated model, mark the field as requiring a resolver explicitly in `gqlgen.yml` like this:
```yaml
# gqlgen.yml
@ -113,7 +116,7 @@ directive @goModel(model: String, models: [String!]) on OBJECT
| INTERFACE
| UNION
directive @goField(forceResolver: Boolean, name: String) on INPUT_FIELD_DEFINITION
directive @goField(forceResolver: Boolean, name: String, omittable: Boolean) on INPUT_FIELD_DEFINITION
| FIELD_DEFINITION
type User @goModel(model: "github.com/you/pkg/model.User") {
@ -132,6 +135,7 @@ models:
model:
- github.com/99designs/gqlgen/graphql.IntID # a go integer
- github.com/99designs/gqlgen/graphql.ID # or a go string
- github.com/99designs/gqlgen/graphql.UintID # or a go uint
```
This means gqlgen will be able to automatically bind to strings or ints for models you have written yourself, but the

View File

@ -6,9 +6,9 @@ Assuming the next version is $NEW_VERSION=v0.16.0 or something like that.
./bin/release $NEW_VERSION
```
2. git-chglog -o CHANGELOG.md
3. git commit and push the CHANGELOG.md
4. Go to https://github.com/99designs/gqlgen/releases and draft new release, autogenerate the release notes, and Create a discussion for this release
5. Comment on the release discussion with any really important notes (breaking changes)
3. go generate ./...
4. git commit and push the CHANGELOG.md
5. Go to https://github.com/99designs/gqlgen/releases and draft new release, autogenerate the release notes, and Create a discussion for this release
6. Comment on the release discussion with any really important notes (breaking changes)
I used https://github.com/git-chglog/git-chglog to automate the changelog maintenance process for now. We could just as easily use go releaser to make the whole thing automated.

View File

@ -25,7 +25,7 @@ Setting up the integration environment is a little tricky:
```bash
cd integration
go generate ./...
go run ./server/server.go
go run ./server/cmd/integration/server.go
```
in another terminal
```bash
@ -36,5 +36,4 @@ npm install
will write the schema to `integration/schema-fetched.graphql`, compare that with `schema-expected.graphql`
CI will run this and fail the build if the two files dont match.
CI will run this and fail the build if the two files don't match.

View File

@ -13,6 +13,11 @@ import (
"github.com/99designs/gqlgen/plugin/resolvergen"
)
var (
urlRegex = regexp.MustCompile(`(?s)@link.*\(.*url:.*?"(.*?)"[^)]+\)`) // regex to grab the url of a link directive, should it exist
versionRegex = regexp.MustCompile(`v(\d+).(\d+)$`) // regex to grab the version number from a url
)
func Generate(cfg *config.Config, option ...Option) error {
_ = syscall.Unlink(cfg.Exec.Filename)
if cfg.Model.IsDefined() {
@ -26,15 +31,17 @@ func Generate(cfg *config.Config, option ...Option) error {
plugins = append(plugins, resolvergen.New())
if cfg.Federation.IsDefined() {
if cfg.Federation.Version == 0 { // default to using the user's choice of version, but if unset, try to sort out which federation version to use
urlRegex := regexp.MustCompile(`(?s)@link.*\(.*url:.*?"(.*?)"[^)]+\)`) // regex to grab the url of a link directive, should it exist
// check the sources, and if one is marked as federation v2, we mark the entirety to be generated using that format
for _, v := range cfg.Sources {
cfg.Federation.Version = 1
urlString := urlRegex.FindStringSubmatch(v.Input)
if urlString != nil && urlString[1] == "https://specs.apollo.dev/federation/v2.0" {
cfg.Federation.Version = 2
break
// e.g. urlString[1] == "https://specs.apollo.dev/federation/v2.7"
if urlString != nil {
matches := versionRegex.FindStringSubmatch(urlString[1])
if matches[1] == "2" {
cfg.Federation.Version = 2
break
}
}
}
}
@ -83,7 +90,11 @@ func Generate(cfg *config.Config, option ...Option) error {
}
}
// Merge again now that the generated models have been injected into the typemap
data, err := codegen.BuildData(cfg)
data_plugins := make([]interface{}, len(plugins))
for index := range plugins {
data_plugins[index] = plugins[index]
}
data, err := codegen.BuildData(cfg, data_plugins...)
if err != nil {
return fmt.Errorf("merging type systems failed: %w", err)
}

View File

@ -5,9 +5,10 @@ import (
"go/types"
"strings"
"github.com/vektah/gqlparser/v2/ast"
"github.com/99designs/gqlgen/codegen/config"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/vektah/gqlparser/v2/ast"
)
type ArgSet struct {
@ -103,9 +104,9 @@ nextArg:
return newArgs, nil
}
func (a *Data) Args() map[string][]*FieldArgument {
func (d *Data) Args() map[string][]*FieldArgument {
ret := map[string][]*FieldArgument{}
for _, o := range a.Objects {
for _, o := range d.Objects {
for _, f := range o.Fields {
if len(f.Args) > 0 {
ret[f.ArgsFunc()] = f.Args
@ -113,9 +114,9 @@ func (a *Data) Args() map[string][]*FieldArgument {
}
}
for _, d := range a.Directives() {
if len(d.Args) > 0 {
ret[d.ArgsFunc()] = d.Args
for _, directive := range d.Directives() {
if len(directive.Args) > 0 {
ret[directive.ArgsFunc()] = directive.Args
}
}
return ret

View File

@ -5,12 +5,13 @@ import (
"fmt"
"go/token"
"go/types"
"strings"
"github.com/vektah/gqlparser/v2/ast"
"golang.org/x/tools/go/packages"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/99designs/gqlgen/internal/code"
"github.com/vektah/gqlparser/v2/ast"
)
var ErrTypeNotFound = errors.New("unable to find type")
@ -20,6 +21,7 @@ type Binder struct {
pkgs *code.Packages
schema *ast.Schema
cfg *Config
tctx *types.Context
References []*TypeReference
SawInvalid bool
objectCache map[string]map[string]types.Object
@ -81,6 +83,14 @@ func (b *Binder) FindType(pkgName string, typeName string) (types.Type, error) {
return obj.Type(), nil
}
func (b *Binder) InstantiateType(orig types.Type, targs []types.Type) (types.Type, error) {
if b.tctx == nil {
b.tctx = types.NewContext()
}
return types.Instantiate(b.tctx, orig, targs, false)
}
var (
MapType = types.NewMap(types.Typ[types.String], types.NewInterfaceType(nil, nil).Complete())
InterfaceType = types.NewInterfaceType(nil, nil)
@ -183,15 +193,19 @@ func (b *Binder) PointerTo(ref *TypeReference) *TypeReference {
// TypeReference is used by args and field types. The Definition can refer to both input and output types.
type TypeReference struct {
Definition *ast.Definition
GQL *ast.Type
GO types.Type // Type of the field being bound. Could be a pointer or a value type of Target.
Target types.Type // The actual type that we know how to bind to. May require pointer juggling when traversing to fields.
CastType types.Type // Before calling marshalling functions cast from/to this base type
Marshaler *types.Func // When using external marshalling functions this will point to the Marshal function
Unmarshaler *types.Func // When using external marshalling functions this will point to the Unmarshal function
IsMarshaler bool // Does the type implement graphql.Marshaler and graphql.Unmarshaler
IsContext bool // Is the Marshaler/Unmarshaller the context version; applies to either the method or interface variety.
Definition *ast.Definition
GQL *ast.Type
GO types.Type // Type of the field being bound. Could be a pointer or a value type of Target.
Target types.Type // The actual type that we know how to bind to. May require pointer juggling when traversing to fields.
CastType types.Type // Before calling marshalling functions cast from/to this base type
Marshaler *types.Func // When using external marshalling functions this will point to the Marshal function
Unmarshaler *types.Func // When using external marshalling functions this will point to the Unmarshal function
IsMarshaler bool // Does the type implement graphql.Marshaler and graphql.Unmarshaler
IsOmittable bool // Is the type wrapped with Omittable
IsContext bool // Is the Marshaler/Unmarshaller the context version; applies to either the method or interface variety.
PointersInUmarshalInput bool // Inverse values and pointers in return.
IsRoot bool // Is the type a root level definition such as Query, Mutation or Subscription
EnumValues []EnumValueReference
}
func (ref *TypeReference) Elem() *TypeReference {
@ -210,91 +224,107 @@ func (ref *TypeReference) Elem() *TypeReference {
return nil
}
func (t *TypeReference) IsPtr() bool {
_, isPtr := t.GO.(*types.Pointer)
func (ref *TypeReference) IsPtr() bool {
_, isPtr := ref.GO.(*types.Pointer)
return isPtr
}
// fix for https://github.com/golang/go/issues/31103 may make it possible to remove this (may still be useful)
func (t *TypeReference) IsPtrToPtr() bool {
if p, isPtr := t.GO.(*types.Pointer); isPtr {
func (ref *TypeReference) IsPtrToPtr() bool {
if p, isPtr := ref.GO.(*types.Pointer); isPtr {
_, isPtr := p.Elem().(*types.Pointer)
return isPtr
}
return false
}
func (t *TypeReference) IsNilable() bool {
return IsNilable(t.GO)
func (ref *TypeReference) IsNilable() bool {
return IsNilable(ref.GO)
}
func (t *TypeReference) IsSlice() bool {
_, isSlice := t.GO.(*types.Slice)
return t.GQL.Elem != nil && isSlice
func (ref *TypeReference) IsSlice() bool {
_, isSlice := ref.GO.(*types.Slice)
return ref.GQL.Elem != nil && isSlice
}
func (t *TypeReference) IsPtrToSlice() bool {
if t.IsPtr() {
_, isPointerToSlice := t.GO.(*types.Pointer).Elem().(*types.Slice)
func (ref *TypeReference) IsPtrToSlice() bool {
if ref.IsPtr() {
_, isPointerToSlice := ref.GO.(*types.Pointer).Elem().(*types.Slice)
return isPointerToSlice
}
return false
}
func (t *TypeReference) IsNamed() bool {
_, isSlice := t.GO.(*types.Named)
func (ref *TypeReference) IsPtrToIntf() bool {
if ref.IsPtr() {
_, isPointerToInterface := ref.GO.(*types.Pointer).Elem().(*types.Interface)
return isPointerToInterface
}
return false
}
func (ref *TypeReference) IsNamed() bool {
_, isSlice := ref.GO.(*types.Named)
return isSlice
}
func (t *TypeReference) IsStruct() bool {
_, isStruct := t.GO.Underlying().(*types.Struct)
func (ref *TypeReference) IsStruct() bool {
_, isStruct := ref.GO.Underlying().(*types.Struct)
return isStruct
}
func (t *TypeReference) IsScalar() bool {
return t.Definition.Kind == ast.Scalar
func (ref *TypeReference) IsScalar() bool {
return ref.Definition.Kind == ast.Scalar
}
func (t *TypeReference) UniquenessKey() string {
func (ref *TypeReference) IsMap() bool {
return ref.GO == MapType
}
func (ref *TypeReference) UniquenessKey() string {
nullability := "O"
if t.GQL.NonNull {
if ref.GQL.NonNull {
nullability = "N"
}
elemNullability := ""
if t.GQL.Elem != nil && t.GQL.Elem.NonNull {
if ref.GQL.Elem != nil && ref.GQL.Elem.NonNull {
// Fix for #896
elemNullability = "ᚄ"
}
return nullability + t.Definition.Name + "2" + templates.TypeIdentifier(t.GO) + elemNullability
return nullability + ref.Definition.Name + "2" + templates.TypeIdentifier(ref.GO) + elemNullability
}
func (t *TypeReference) MarshalFunc() string {
if t.Definition == nil {
panic(errors.New("Definition missing for " + t.GQL.Name()))
func (ref *TypeReference) MarshalFunc() string {
if ref.Definition == nil {
panic(errors.New("Definition missing for " + ref.GQL.Name()))
}
if t.Definition.Kind == ast.InputObject {
if ref.Definition.Kind == ast.InputObject {
return ""
}
return "marshal" + t.UniquenessKey()
return "marshal" + ref.UniquenessKey()
}
func (t *TypeReference) UnmarshalFunc() string {
if t.Definition == nil {
panic(errors.New("Definition missing for " + t.GQL.Name()))
func (ref *TypeReference) UnmarshalFunc() string {
if ref.Definition == nil {
panic(errors.New("Definition missing for " + ref.GQL.Name()))
}
if !t.Definition.IsInputType() {
if !ref.Definition.IsInputType() {
return ""
}
return "unmarshal" + t.UniquenessKey()
return "unmarshal" + ref.UniquenessKey()
}
func (t *TypeReference) IsTargetNilable() bool {
return IsNilable(t.Target)
func (ref *TypeReference) IsTargetNilable() bool {
return IsNilable(ref.Target)
}
func (ref *TypeReference) HasEnumValues() bool {
return len(ref.EnumValues) > 0
}
func (b *Binder) PushRef(ret *TypeReference) {
@ -317,7 +347,35 @@ func isIntf(t types.Type) bool {
return ok
}
func unwrapOmittable(t types.Type) (types.Type, bool) {
if t == nil {
return t, false
}
named, ok := t.(*types.Named)
if !ok {
return t, false
}
if named.Origin().String() != "github.com/99designs/gqlgen/graphql.Omittable[T any]" {
return t, false
}
return named.TypeArgs().At(0), true
}
func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret *TypeReference, err error) {
if innerType, ok := unwrapOmittable(bindTarget); ok {
if schemaType.NonNull {
return nil, fmt.Errorf("%s is wrapped with Omittable but non-null", schemaType.Name())
}
ref, err := b.TypeReference(schemaType, innerType)
if err != nil {
return nil, err
}
ref.IsOmittable = true
return ref, err
}
if !isValid(bindTarget) {
b.SawInvalid = true
return nil, fmt.Errorf("%s has an invalid type", schemaType.Name())
@ -344,6 +402,7 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
Definition: def,
GQL: schemaType,
GO: MapType,
IsRoot: b.cfg.IsRoot(def),
}, nil
}
@ -355,6 +414,7 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
Definition: def,
GQL: schemaType,
GO: InterfaceType,
IsRoot: b.cfg.IsRoot(def),
}, nil
}
@ -366,6 +426,7 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
ref := &TypeReference{
Definition: def,
GQL: schemaType,
IsRoot: b.cfg.IsRoot(def),
}
obj, err := b.FindObject(pkgName, typeName)
@ -373,7 +434,12 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
return nil, err
}
if fun, isFunc := obj.(*types.Func); isFunc {
if values := b.enumValues(def); len(values) > 0 {
err = b.enumReference(ref, obj, values)
if err != nil {
return nil, err
}
} else if fun, isFunc := obj.(*types.Func); isFunc {
ref.GO = fun.Type().(*types.Signature).Params().At(0).Type()
ref.IsContext = fun.Type().(*types.Signature).Results().At(0).Type().String() == "github.com/99designs/gqlgen/graphql.ContextMarshaler"
ref.Marshaler = fun
@ -412,6 +478,8 @@ func (b *Binder) TypeReference(schemaType *ast.Type, bindTarget types.Type) (ret
ref.GO = bindTarget
}
ref.PointersInUmarshalInput = b.cfg.ReturnPointersInUmarshalInput
return ref, nil
}
@ -491,3 +559,81 @@ func basicUnderlying(it types.Type) *types.Basic {
return nil
}
type EnumValueReference struct {
Definition *ast.EnumValueDefinition
Object types.Object
}
func (b *Binder) enumValues(def *ast.Definition) map[string]EnumValue {
if def.Kind != ast.Enum {
return nil
}
if strings.HasPrefix(def.Name, "__") {
return nil
}
model, ok := b.cfg.Models[def.Name]
if !ok {
return nil
}
return model.EnumValues
}
func (b *Binder) enumReference(ref *TypeReference, obj types.Object, values map[string]EnumValue) error {
if len(ref.Definition.EnumValues) != len(values) {
return fmt.Errorf("not all enum values are binded for %v", ref.Definition.Name)
}
if fn, ok := obj.Type().(*types.Signature); ok {
ref.GO = fn.Params().At(0).Type()
} else {
ref.GO = obj.Type()
}
str, err := b.TypeReference(&ast.Type{NamedType: "String"}, nil)
if err != nil {
return err
}
ref.Marshaler = str.Marshaler
ref.Unmarshaler = str.Unmarshaler
ref.EnumValues = make([]EnumValueReference, 0, len(values))
for _, value := range ref.Definition.EnumValues {
v, ok := values[value.Name]
if !ok {
return fmt.Errorf("enum value not found for: %v, of enum: %v", value.Name, ref.Definition.Name)
}
pkgName, typeName := code.PkgAndType(v.Value)
if pkgName == "" {
return fmt.Errorf("missing package name for %v", value.Name)
}
valueObj, err := b.FindObject(pkgName, typeName)
if err != nil {
return err
}
if !types.AssignableTo(valueObj.Type(), ref.GO) {
return fmt.Errorf("wrong type: %v, for enum value: %v, expected type: %v, of enum: %v",
valueObj.Type(), value.Name, ref.GO, ref.Definition.Name)
}
switch valueObj.(type) {
case *types.Const, *types.Var:
ref.EnumValues = append(ref.EnumValues, EnumValueReference{
Definition: value,
Object: valueObj,
})
default:
return fmt.Errorf("unsupported enum value for: %v, of enum: %v, only const and var allowed",
value.Name, ref.Definition.Name)
}
}
return nil
}

View File

@ -3,16 +3,21 @@ package config
import (
"bytes"
"fmt"
"go/types"
"io"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"github.com/99designs/gqlgen/internal/code"
"github.com/vektah/gqlparser/v2"
"github.com/vektah/gqlparser/v2/ast"
"golang.org/x/tools/go/packages"
"gopkg.in/yaml.v3"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/99designs/gqlgen/internal/code"
)
type Config struct {
@ -25,10 +30,21 @@ type Config struct {
Models TypeMap `yaml:"models,omitempty"`
StructTag string `yaml:"struct_tag,omitempty"`
Directives map[string]DirectiveConfig `yaml:"directives,omitempty"`
GoBuildTags StringList `yaml:"go_build_tags,omitempty"`
GoInitialisms GoInitialismsConfig `yaml:"go_initialisms,omitempty"`
OmitSliceElementPointers bool `yaml:"omit_slice_element_pointers,omitempty"`
OmitGetters bool `yaml:"omit_getters,omitempty"`
OmitInterfaceChecks bool `yaml:"omit_interface_checks,omitempty"`
OmitComplexity bool `yaml:"omit_complexity,omitempty"`
OmitGQLGenFileNotice bool `yaml:"omit_gqlgen_file_notice,omitempty"`
OmitGQLGenVersionInFileNotice bool `yaml:"omit_gqlgen_version_in_file_notice,omitempty"`
OmitRootModels bool `yaml:"omit_root_models,omitempty"`
OmitResolverFields bool `yaml:"omit_resolver_fields,omitempty"`
StructFieldsAlwaysPointers bool `yaml:"struct_fields_always_pointers,omitempty"`
ReturnPointersInUmarshalInput bool `yaml:"return_pointers_in_unmarshalinput,omitempty"`
ResolversAlwaysReturnPointers bool `yaml:"resolvers_always_return_pointers,omitempty"`
NullableInputOmittable bool `yaml:"nullable_input_omittable,omitempty"`
EnableModelJsonOmitemptyTag *bool `yaml:"enable_model_json_omitempty_tag,omitempty"`
SkipValidation bool `yaml:"skip_validation,omitempty"`
SkipModTidy bool `yaml:"skip_mod_tidy,omitempty"`
Sources []*ast.Source `yaml:"-"`
@ -50,7 +66,9 @@ func DefaultConfig() *Config {
Directives: map[string]DirectiveConfig{},
Models: TypeMap{},
StructFieldsAlwaysPointers: true,
ReturnPointersInUmarshalInput: false,
ResolversAlwaysReturnPointers: true,
NullableInputOmittable: false,
}
}
@ -97,14 +115,18 @@ var path2regex = strings.NewReplacer(
// LoadConfig reads the gqlgen.yml config file
func LoadConfig(filename string) (*Config, error) {
config := DefaultConfig()
b, err := os.ReadFile(filename)
if err != nil {
return nil, fmt.Errorf("unable to read config: %w", err)
}
dec := yaml.NewDecoder(bytes.NewReader(b))
return ReadConfig(bytes.NewReader(b))
}
func ReadConfig(cfgFile io.Reader) (*Config, error) {
config := DefaultConfig()
dec := yaml.NewDecoder(cfgFile)
dec.KnownFields(true)
if err := dec.Decode(config); err != nil {
@ -188,12 +210,17 @@ func CompleteConfig(config *Config) error {
config.Sources = append(config.Sources, &ast.Source{Name: filename, Input: string(schemaRaw)})
}
config.GoInitialisms.setInitialisms()
return nil
}
func (c *Config) Init() error {
if c.Packages == nil {
c.Packages = &code.Packages{}
c.Packages = code.NewPackages(
code.WithBuildTags(c.GoBuildTags...),
)
}
if c.Schema == nil {
@ -239,11 +266,19 @@ func (c *Config) ReloadAllPackages() {
c.Packages.ReloadAll(c.packageList()...)
}
func (c *Config) IsRoot(def *ast.Definition) bool {
return def == c.Schema.Query || def == c.Schema.Mutation || def == c.Schema.Subscription
}
func (c *Config) injectTypesFromSchema() error {
c.Directives["goModel"] = DirectiveConfig{
SkipRuntime: true,
}
c.Directives["goExtraField"] = DirectiveConfig{
SkipRuntime: true,
}
c.Directives["goField"] = DirectiveConfig{
SkipRuntime: true,
}
@ -252,8 +287,12 @@ func (c *Config) injectTypesFromSchema() error {
SkipRuntime: true,
}
c.Directives["goEnum"] = DirectiveConfig{
SkipRuntime: true,
}
for _, schemaType := range c.Schema.Types {
if schemaType == c.Schema.Query || schemaType == c.Schema.Mutation || schemaType == c.Schema.Subscription {
if c.IsRoot(schemaType) {
continue
}
@ -263,6 +302,7 @@ func (c *Config) injectTypesFromSchema() error {
c.Models.Add(schemaType.Name, mv.(string))
}
}
if ma := bd.Arguments.ForName("models"); ma != nil {
if mvs, err := ma.Value.Value(nil); err == nil {
for _, mv := range mvs.([]interface{}) {
@ -270,6 +310,12 @@ func (c *Config) injectTypesFromSchema() error {
}
}
}
if fg := bd.Arguments.ForName("forceGenerate"); fg != nil {
if mv, err := fg.Value.Value(nil); err == nil {
c.Models.ForceGenerate(schemaType.Name, mv.(bool))
}
}
}
if schemaType.Kind == ast.Object || schemaType.Kind == ast.InputObject {
@ -292,8 +338,9 @@ func (c *Config) injectTypesFromSchema() error {
if c.Models[schemaType.Name].Fields == nil {
c.Models[schemaType.Name] = TypeMapEntry{
Model: c.Models[schemaType.Name].Model,
Fields: map[string]TypeMapField{},
Model: c.Models[schemaType.Name].Model,
ExtraFields: c.Models[schemaType.Name].ExtraFields,
Fields: map[string]TypeMapField{},
}
}
@ -303,6 +350,82 @@ func (c *Config) injectTypesFromSchema() error {
}
}
}
if efds := schemaType.Directives.ForNames("goExtraField"); len(efds) != 0 {
for _, efd := range efds {
if fn := efd.Arguments.ForName("name"); fn != nil {
extraFieldName := ""
if fnv, err := fn.Value.Value(nil); err == nil {
extraFieldName = fnv.(string)
}
if extraFieldName == "" {
return fmt.Errorf(
"argument 'name' for directive @goExtraField (src: %s, line: %d) cannot by empty",
efd.Position.Src.Name,
efd.Position.Line,
)
}
extraField := ModelExtraField{}
if t := efd.Arguments.ForName("type"); t != nil {
if tv, err := t.Value.Value(nil); err == nil {
extraField.Type = tv.(string)
}
}
if extraField.Type == "" {
return fmt.Errorf(
"argument 'type' for directive @goExtraField (src: %s, line: %d) cannot by empty",
efd.Position.Src.Name,
efd.Position.Line,
)
}
if ot := efd.Arguments.ForName("overrideTags"); ot != nil {
if otv, err := ot.Value.Value(nil); err == nil {
extraField.OverrideTags = otv.(string)
}
}
if d := efd.Arguments.ForName("description"); d != nil {
if dv, err := d.Value.Value(nil); err == nil {
extraField.Description = dv.(string)
}
}
typeMapEntry := c.Models[schemaType.Name]
if typeMapEntry.ExtraFields == nil {
typeMapEntry.ExtraFields = make(map[string]ModelExtraField)
}
c.Models[schemaType.Name] = typeMapEntry
c.Models[schemaType.Name].ExtraFields[extraFieldName] = extraField
}
}
}
}
if schemaType.Kind == ast.Enum && !strings.HasPrefix(schemaType.Name, "__") {
values := make(map[string]EnumValue)
for _, value := range schemaType.EnumValues {
if directive := value.Directives.ForName("goEnum"); directive != nil {
if arg := directive.Arguments.ForName("value"); arg != nil {
if v, err := arg.Value.Value(nil); err == nil {
values[value.Name] = EnumValue{
Value: v.(string),
}
}
}
}
}
if len(values) > 0 {
model := c.Models[schemaType.Name]
model.EnumValues = values
c.Models[schemaType.Name] = model
}
}
}
@ -310,8 +433,13 @@ func (c *Config) injectTypesFromSchema() error {
}
type TypeMapEntry struct {
Model StringList `yaml:"model"`
Fields map[string]TypeMapField `yaml:"fields,omitempty"`
Model StringList `yaml:"model,omitempty"`
ForceGenerate bool `yaml:"forceGenerate,omitempty"`
Fields map[string]TypeMapField `yaml:"fields,omitempty"`
EnumValues map[string]EnumValue `yaml:"enum_values,omitempty"`
// Key is the Go name of the field.
ExtraFields map[string]ModelExtraField `yaml:"extraFields,omitempty"`
}
type TypeMapField struct {
@ -320,6 +448,36 @@ type TypeMapField struct {
GeneratedMethod string `yaml:"-"`
}
type EnumValue struct {
Value string
}
type ModelExtraField struct {
// Type is the Go type of the field.
//
// It supports the builtin basic types (like string or int64), named types
// (qualified by the full package path), pointers to those types (prefixed
// with `*`), and slices of those types (prefixed with `[]`).
//
// For example, the following are valid types:
// string
// *github.com/author/package.Type
// []string
// []*github.com/author/package.Type
//
// Note that the type will be referenced from the generated/graphql, which
// means the package it lives in must not reference the generated/graphql
// package to avoid circular imports.
// restrictions.
Type string `yaml:"type"`
// OverrideTags is an optional override of the Go field tag.
OverrideTags string `yaml:"overrideTags"`
// Description is an optional the Go field doc-comment.
Description string `yaml:"description"`
}
type StringList []string
func (a *StringList) UnmarshalYAML(unmarshal func(interface{}) error) error {
@ -449,6 +607,14 @@ func (tm TypeMap) Check() error {
return fmt.Errorf("model %s: invalid type specifier \"%s\" - you need to specify a struct to map to", typeName, entry.Model)
}
}
if len(entry.Model) == 0 {
for enum, v := range entry.EnumValues {
if v.Value != "" {
return fmt.Errorf("model is empty for: %v, but enum value is specified for %v", typeName, enum)
}
}
}
}
return nil
}
@ -481,6 +647,12 @@ func (tm TypeMap) Add(name string, goType string) {
tm[name] = modelCfg
}
func (tm TypeMap) ForceGenerate(name string, forceGenerate bool) {
modelCfg := tm[name]
modelCfg.ForceGenerate = forceGenerate
tm[name] = modelCfg
}
type DirectiveConfig struct {
SkipRuntime bool `yaml:"skip_runtime"`
}
@ -535,7 +707,7 @@ func (c *Config) autobind() error {
ps := c.Packages.LoadAll(c.AutoBind...)
for _, t := range c.Schema.Types {
if c.Models.UserDefined(t.Name) {
if c.Models.UserDefined(t.Name) || c.Models[t.Name].ForceGenerate {
continue
}
@ -543,14 +715,20 @@ func (c *Config) autobind() error {
if p == nil || p.Module == nil {
return fmt.Errorf("unable to load %s - make sure you're using an import path to a package that exists", c.AutoBind[i])
}
if t := p.Types.Scope().Lookup(t.Name); t != nil {
c.Models.Add(t.Name(), t.Pkg().Path()+"."+t.Name())
autobindType := c.lookupAutobindType(p, t)
if autobindType != nil {
c.Models.Add(t.Name, autobindType.Pkg().Path()+"."+autobindType.Name())
break
}
}
}
for i, t := range c.Models {
if t.ForceGenerate {
continue
}
for j, m := range t.Model {
pkg, typename := code.PkgAndType(m)
@ -574,6 +752,17 @@ func (c *Config) autobind() error {
return nil
}
func (c *Config) lookupAutobindType(p *packages.Package, schemaType *ast.Definition) types.Object {
// Try binding to either the original schema type name, or the normalized go type name
for _, lookupName := range []string{schemaType.Name, templates.ToGo(schemaType.Name)} {
if t := p.Types.Scope().Lookup(lookupName); t != nil {
return t
}
}
return nil
}
func (c *Config) injectBuiltins() {
builtins := TypeMap{
"__Directive": {Model: StringList{"github.com/99designs/gqlgen/graphql/introspection.Directive"}},
@ -623,7 +812,9 @@ func (c *Config) injectBuiltins() {
func (c *Config) LoadSchema() error {
if c.Packages != nil {
c.Packages = &code.Packages{}
c.Packages = code.NewPackages(
code.WithBuildTags(c.GoBuildTags...),
)
}
if err := c.check(); err != nil {

View File

@ -0,0 +1,42 @@
package config
import (
"strings"
"github.com/99designs/gqlgen/codegen/templates"
)
// GoInitialismsConfig allows to modify the default behavior of naming Go methods, types and properties
type GoInitialismsConfig struct {
// If true, the Initialisms won't get appended to the default ones but replace them
ReplaceDefaults bool `yaml:"replace_defaults"`
// Custom initialisms to be added or to replace the default ones
Initialisms []string `yaml:"initialisms"`
}
// setInitialisms adjustes GetInitialisms based on its settings.
func (i GoInitialismsConfig) setInitialisms() {
toUse := i.determineGoInitialisms()
templates.GetInitialisms = func() map[string]bool {
return toUse
}
}
// determineGoInitialisms returns the Go initialims to be used, based on its settings.
func (i GoInitialismsConfig) determineGoInitialisms() (initialismsToUse map[string]bool) {
if i.ReplaceDefaults {
initialismsToUse = make(map[string]bool, len(i.Initialisms))
for _, initialism := range i.Initialisms {
initialismsToUse[strings.ToUpper(initialism)] = true
}
} else {
initialismsToUse = make(map[string]bool, len(templates.CommonInitialisms)+len(i.Initialisms))
for initialism, value := range templates.CommonInitialisms {
initialismsToUse[strings.ToUpper(initialism)] = value
}
for _, initialism := range i.Initialisms {
initialismsToUse[strings.ToUpper(initialism)] = true
}
}
return initialismsToUse
}

View File

@ -10,9 +10,11 @@ import (
)
type PackageConfig struct {
Filename string `yaml:"filename,omitempty"`
Package string `yaml:"package,omitempty"`
Version int `yaml:"version,omitempty"`
Filename string `yaml:"filename,omitempty"`
Package string `yaml:"package,omitempty"`
Version int `yaml:"version,omitempty"`
ModelTemplate string `yaml:"model_template,omitempty"`
Options map[string]bool `yaml:"options,omitempty"`
}
func (c *PackageConfig) ImportPath() string {

View File

@ -10,12 +10,14 @@ import (
)
type ResolverConfig struct {
Filename string `yaml:"filename,omitempty"`
FilenameTemplate string `yaml:"filename_template,omitempty"`
Package string `yaml:"package,omitempty"`
Type string `yaml:"type,omitempty"`
Layout ResolverLayout `yaml:"layout,omitempty"`
DirName string `yaml:"dir"`
Filename string `yaml:"filename,omitempty"`
FilenameTemplate string `yaml:"filename_template,omitempty"`
Package string `yaml:"package,omitempty"`
Type string `yaml:"type,omitempty"`
Layout ResolverLayout `yaml:"layout,omitempty"`
DirName string `yaml:"dir"`
OmitTemplateComment bool `yaml:"omit_template_comment,omitempty"`
ResolverTemplate string `yaml:"resolver_template,omitempty"`
}
type ResolverLayout string

View File

@ -34,6 +34,7 @@ type Data struct {
MutationRoot *Object
SubscriptionRoot *Object
AugmentedSources []AugmentedSource
Plugins []interface{}
}
func (d *Data) HasEmbeddableSources() bool {
@ -76,7 +77,7 @@ func (d *Data) Directives() DirectiveList {
return res
}
func BuildData(cfg *config.Config) (*Data, error) {
func BuildData(cfg *config.Config, plugins ...interface{}) (*Data, error) {
// We reload all packages to allow packages to be compared correctly.
cfg.ReloadAllPackages()
@ -105,6 +106,7 @@ func BuildData(cfg *config.Config) (*Data, error) {
AllDirectives: dataDirectives,
Schema: b.Schema,
Interfaces: map[string]*Interface{},
Plugins: plugins,
}
for _, schemaType := range b.Schema.Types {

View File

@ -5,8 +5,9 @@ import (
"strconv"
"strings"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/vektah/gqlparser/v2/ast"
"github.com/99designs/gqlgen/codegen/templates"
)
type DirectiveList map[string]*Directive
@ -123,7 +124,6 @@ func (b *builder) getDirectives(list ast.DirectiveList) ([]*Directive, error) {
DirectiveDefinition: list[i].Definition,
Builtin: b.Config.Directives[d.Name].SkipRuntime,
}
}
return dirs, nil

View File

@ -3,17 +3,19 @@ package codegen
import (
"errors"
"fmt"
goast "go/ast"
"go/types"
"log"
"reflect"
"strconv"
"strings"
"github.com/99designs/gqlgen/codegen/config"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/vektah/gqlparser/v2/ast"
"golang.org/x/text/cases"
"golang.org/x/text/language"
"github.com/99designs/gqlgen/codegen/config"
"github.com/99designs/gqlgen/codegen/templates"
)
type Field struct {
@ -151,6 +153,11 @@ func (b *builder) bindField(obj *Object, f *Field) (errret error) {
switch target := target.(type) {
case nil:
// Skips creating a resolver for any root types
if b.Config.IsRoot(b.Schema.Types[f.Type.Name()]) {
return nil
}
objPos := b.Binder.TypePosition(obj.Type)
return fmt.Errorf(
"%s:%d adding resolver method for %s.%s, nothing matched",
@ -280,7 +287,7 @@ func (b *builder) findBindStructTagTarget(in types.Type, name string) (types.Obj
tags := reflect.StructTag(t.Tag(i))
if val, ok := tags.Lookup(b.Config.StructTag); ok && equalFieldName(val, name) {
if found != nil {
return nil, fmt.Errorf("tag %s is ambigious; multiple fields have the same tag value of %s", b.Config.StructTag, val)
return nil, fmt.Errorf("tag %s is ambiguous; multiple fields have the same tag value of %s", b.Config.StructTag, val)
}
found = field
@ -502,7 +509,21 @@ func (f *Field) ResolverType() string {
return fmt.Sprintf("%s().%s(%s)", f.Object.Definition.Name, f.GoFieldName, f.CallArgs())
}
// IsInputObject reports whether the field's parent object is a GraphQL input object.
func (f *Field) IsInputObject() bool {
return f.Object.Kind == ast.InputObject
}
// IsRoot reports whether the field's parent object is a root operation type
// (the Root flag is set from config.IsRoot when the object is built).
func (f *Field) IsRoot() bool {
return f.Object.Root
}
// ShortResolverDeclaration returns the short resolver signature with default
// (unnamed) return values; it delegates to ShortResolverSignature with no
// pre-existing function type to mimic.
func (f *Field) ShortResolverDeclaration() string {
return f.ShortResolverSignature(nil)
}
// ShortResolverSignature is identical to ShortResolverDeclaration,
// but respects previous naming (return) conventions, if any.
func (f *Field) ShortResolverSignature(ft *goast.FuncType) string {
if f.Object.Kind == ast.InputObject {
return fmt.Sprintf("(ctx context.Context, obj %s, data %s) error",
templates.CurrentImports.LookupType(f.Object.Reference()),
@ -523,11 +544,27 @@ func (f *Field) ShortResolverDeclaration() string {
if f.Object.Stream {
result = "<-chan " + result
}
res += fmt.Sprintf(") (%s, error)", result)
// Named return.
var namedV, namedE string
if ft != nil {
if ft.Results != nil && len(ft.Results.List) > 0 && len(ft.Results.List[0].Names) > 0 {
namedV = ft.Results.List[0].Names[0].Name
}
if ft.Results != nil && len(ft.Results.List) > 1 && len(ft.Results.List[1].Names) > 0 {
namedE = ft.Results.List[1].Names[0].Name
}
}
res += fmt.Sprintf(") (%s %s, %s error)", namedV, result, namedE)
return res
}
// GoResultName returns the last path element of the field's Go type string,
// plus whether that type string denotes a slice (has a "[]" prefix).
func (f *Field) GoResultName() (string, bool) {
	fullName := fmt.Sprintf("%v", f.TypeReference.GO)
	// Everything after the final '/' (the whole string when none is present).
	idx := strings.LastIndex(fullName, "/")
	return fullName[idx+1:], strings.HasPrefix(fullName, "[]")
}
func (f *Field) ComplexitySignature() string {
res := "func(childComplexity int"
for _, arg := range f.Args {

View File

@ -16,53 +16,63 @@ func (ec *executionContext) _{{$object.Name}}_{{$field.Name}}(ctx context.Contex
ret = {{ $null }}
}
}()
{{- if $.AllDirectives.LocationDirectives "FIELD" }}
resTmp := ec._fieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (interface{}, error) {
{{ template "field" $field }}
})
{{ else }}
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
{{ template "field" $field }}
})
if err != nil {
ec.Error(ctx, err)
return {{ $null }}
}
{{- end }}
if resTmp == nil {
{{- if $field.TypeReference.GQL.NonNull }}
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
{{- if $field.TypeReference.IsRoot }}
{{- if $field.TypeReference.IsPtr }}
res := &{{ $field.TypeReference.Elem.GO | ref }}{}
{{- else }}
res := {{ $field.TypeReference.GO | ref }}{}
{{- end }}
return {{ $null }}
}
{{- if $object.Stream }}
return func(ctx context.Context) graphql.Marshaler {
select {
case res, ok := <-resTmp.(<-chan {{$field.TypeReference.GO | ref}}):
if !ok {
return nil
}
return graphql.WriterFunc(func(w io.Writer) {
w.Write([]byte{'{'})
graphql.MarshalString(field.Alias).MarshalGQL(w)
w.Write([]byte{':'})
ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res).MarshalGQL(w)
w.Write([]byte{'}'})
})
case <-ctx.Done():
return nil
}
}
{{- else }}
res := resTmp.({{$field.TypeReference.GO | ref}})
fc.Result = res
return ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res)
{{- else}}
{{- if $.AllDirectives.LocationDirectives "FIELD" }}
resTmp := ec._fieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(rctx context.Context) (interface{}, error) {
{{ template "field" $field }}
})
{{ else }}
resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
{{ template "field" $field }}
})
if err != nil {
ec.Error(ctx, err)
return {{ $null }}
}
{{- end }}
if resTmp == nil {
{{- if $field.TypeReference.GQL.NonNull }}
if !graphql.HasFieldError(ctx, fc) {
ec.Errorf(ctx, "must not be null")
}
{{- end }}
return {{ $null }}
}
{{- if $object.Stream }}
return func(ctx context.Context) graphql.Marshaler {
select {
case res, ok := <-resTmp.(<-chan {{$field.TypeReference.GO | ref}}):
if !ok {
return nil
}
return graphql.WriterFunc(func(w io.Writer) {
w.Write([]byte{'{'})
graphql.MarshalString(field.Alias).MarshalGQL(w)
w.Write([]byte{':'})
ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res).MarshalGQL(w)
w.Write([]byte{'}'})
})
case <-ctx.Done():
return nil
}
}
{{- else }}
res := resTmp.({{$field.TypeReference.GO | ref}})
fc.Result = res
return ec.{{ $field.TypeReference.MarshalFunc }}(ctx, field.Selections, res)
{{- end }}
{{- end }}
}
func (ec *executionContext) {{ $field.FieldContextFunc }}(ctx context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
func (ec *executionContext) {{ $field.FieldContextFunc }}({{ if not $field.Args }}_{{ else }}ctx{{ end }} context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
fc = &graphql.FieldContext{
Object: {{quote $field.Object.Name}},
Field: field,
@ -94,7 +104,7 @@ func (ec *executionContext) {{ $field.FieldContextFunc }}(ctx context.Context, f
ctx = graphql.WithFieldContext(ctx, fc)
if fc.Args, err = ec.{{ $field.ArgsFunc }}(ctx, field.ArgumentMap(ec.Variables)); err != nil {
ec.Error(ctx, err)
return
return fc, err
}
{{- end }}
return fc, nil

View File

@ -9,9 +9,10 @@ import (
"runtime"
"strings"
"github.com/vektah/gqlparser/v2/ast"
"github.com/99designs/gqlgen/codegen/config"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/vektah/gqlparser/v2/ast"
)
//go:embed *.gotpl

View File

@ -14,11 +14,11 @@
{{ reserveImport "github.com/99designs/gqlgen/graphql" }}
{{ reserveImport "github.com/99designs/gqlgen/graphql/introspection" }}
{{ if eq .Config.Exec.Layout "single-file" }}
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
return &executableSchema{
schema: cfg.Schema,
resolvers: cfg.Resolvers,
directives: cfg.Directives,
complexity: cfg.Complexity,
@ -26,6 +26,7 @@
}
type Config struct {
Schema *ast.Schema
Resolvers ResolverRoot
Directives DirectiveRoot
Complexity ComplexityRoot
@ -51,6 +52,7 @@
}
type ComplexityRoot struct {
{{- if not .Config.OmitComplexity }}
{{ range $object := .Objects }}
{{ if not $object.IsReserved -}}
{{ ucFirst $object.Name }} struct {
@ -63,6 +65,7 @@
}
{{- end }}
{{ end }}
{{- end }}
}
{{ end }}
@ -92,18 +95,23 @@
{{ if eq .Config.Exec.Layout "single-file" }}
type executableSchema struct {
schema *ast.Schema
resolvers ResolverRoot
directives DirectiveRoot
complexity ComplexityRoot
}
func (e *executableSchema) Schema() *ast.Schema {
if e.schema != nil {
return e.schema
}
return parsedSchema
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
ec := executionContext{nil, e}
ec := executionContext{nil, e, 0, 0, nil}
_ = ec
{{ if not .Config.OmitComplexity -}}
switch typeName + "." + field {
{{ range $object := .Objects }}
{{ if not $object.IsReserved }}
@ -130,12 +138,13 @@
{{ end }}
{{ end }}
}
{{- end }}
return 0, false
}
func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler {
rc := graphql.GetOperationContext(ctx)
ec := executionContext{rc, e}
ec := executionContext{rc, e, 0, 0, make(chan graphql.DeferredResult)}
inputUnmarshalMap := graphql.BuildUnmarshalerMap(
{{- range $input := .Inputs -}}
{{ if not $input.HasUnmarshal }}
@ -148,22 +157,39 @@
switch rc.Operation.Operation {
{{- if .QueryRoot }} case ast.Query:
return func(ctx context.Context) *graphql.Response {
if !first { return nil }
first = false
ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap)
{{ if .Directives.LocationDirectives "QUERY" -}}
data := ec._queryMiddleware(ctx, rc.Operation, func(ctx context.Context) (interface{}, error){
return ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet), nil
})
{{- else -}}
data := ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet)
{{- end }}
var response graphql.Response
var data graphql.Marshaler
if first {
first = false
ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap)
{{ if .Directives.LocationDirectives "QUERY" -}}
data = ec._queryMiddleware(ctx, rc.Operation, func(ctx context.Context) (interface{}, error){
return ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet), nil
})
{{- else -}}
data = ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet)
{{- end }}
} else {
if atomic.LoadInt32(&ec.pendingDeferred) > 0 {
result := <-ec.deferredResults
atomic.AddInt32(&ec.pendingDeferred, -1)
data = result.Result
response.Path = result.Path
response.Label = result.Label
response.Errors = result.Errors
} else {
return nil
}
}
var buf bytes.Buffer
data.MarshalGQL(&buf)
return &graphql.Response{
Data: buf.Bytes(),
response.Data = buf.Bytes()
if atomic.LoadInt32(&ec.deferred) > 0 {
hasNext := atomic.LoadInt32(&ec.pendingDeferred) > 0
response.HasNext = &hasNext
}
return &response
}
{{ end }}
@ -220,20 +246,42 @@
type executionContext struct {
*graphql.OperationContext
*executableSchema
deferred int32
pendingDeferred int32
deferredResults chan graphql.DeferredResult
}
func (ec *executionContext) processDeferredGroup(dg graphql.DeferredGroup) {
atomic.AddInt32(&ec.pendingDeferred, 1)
go func () {
ctx := graphql.WithFreshResponseContext(dg.Context)
dg.FieldSet.Dispatch(ctx)
ds := graphql.DeferredResult{
Path: dg.Path,
Label: dg.Label,
Result: dg.FieldSet,
Errors: graphql.GetErrors(ctx),
}
// null fields should bubble up
if dg.FieldSet.Invalids > 0 {
ds.Result = graphql.Null
}
ec.deferredResults <- ds
}()
}
func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapSchema(parsedSchema), nil
return introspection.WrapSchema(ec.Schema()), nil
}
func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
return introspection.WrapTypeFromDef(ec.Schema(), ec.Schema().Types[name]), nil
}
{{if .HasEmbeddableSources }}

View File

@ -1,7 +1,15 @@
{{- range $input := .Inputs }}
{{- if not .HasUnmarshal }}
func (ec *executionContext) unmarshalInput{{ .Name }}(ctx context.Context, obj interface{}) ({{.Type | ref}}, error) {
var it {{.Type | ref}}
{{- $it := "it" }}
{{- if .PointersInUmarshalInput }}
{{- $it = "&it" }}
{{- end }}
func (ec *executionContext) unmarshalInput{{ .Name }}(ctx context.Context, obj interface{}) ({{ if .PointersInUmarshalInput }}*{{ end }}{{.Type | ref}}, error) {
{{- if $input.IsMap }}
it := make(map[string]interface{}, len(obj.(map[string]interface{})))
{{- else }}
var it {{.Type | ref}}
{{- end }}
asMap := map[string]interface{}{}
for k, v := range obj.(map[string]interface{}) {
asMap[k] = v
@ -23,55 +31,70 @@
switch k {
{{- range $field := .Fields }}
case {{$field.Name|quote}}:
var err error
{{- $lhs := (printf "it.%s" $field.GoFieldName) }}
{{- if $input.IsMap }}
{{- $lhs = (printf "it[%q]" $field.Name) }}
{{- end }}
ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField({{$field.Name|quote}}))
{{- if $field.ImplDirectives }}
directive0 := func(ctx context.Context) (interface{}, error) { return ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v) }
{{ template "implDirectives" $field }}
tmp, err := directive{{$field.ImplDirectives|len}}(ctx)
if err != nil {
return it, graphql.ErrorOnPath(ctx, err)
return {{$it}}, graphql.ErrorOnPath(ctx, err)
}
if data, ok := tmp.({{ $field.TypeReference.GO | ref }}) ; ok {
{{- if $field.IsResolver }}
if err = ec.resolvers.{{ $field.ShortInvocation }}; err != nil {
return it, err
return {{$it}}, err
}
{{- else }}
it.{{$field.GoFieldName}} = data
{{- if $field.TypeReference.IsOmittable }}
{{ $lhs }} = graphql.OmittableOf(data)
{{- else }}
{{ $lhs }} = data
{{- end }}
{{- end }}
{{- if $field.TypeReference.IsNilable }}
{{- if not $field.IsResolver }}
} else if tmp == nil {
it.{{$field.GoFieldName}} = nil
{{- if $field.TypeReference.IsOmittable }}
{{ $lhs }} = graphql.OmittableOf[{{ $field.TypeReference.GO | ref }}](nil)
{{- else }}
{{ $lhs }} = nil
{{- end }}
{{- end }}
{{- end }}
} else {
err := fmt.Errorf(`unexpected type %T from directive, should be {{ $field.TypeReference.GO }}`, tmp)
return it, graphql.ErrorOnPath(ctx, err)
return {{$it}}, graphql.ErrorOnPath(ctx, err)
}
{{- else }}
{{- if $field.IsResolver }}
data, err := ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v)
if err != nil {
return it, err
return {{$it}}, err
}
if err = ec.resolvers.{{ $field.ShortInvocation }}; err != nil {
return it, err
return {{$it}}, err
}
{{- else }}
it.{{$field.GoFieldName}}, err = ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v)
data, err := ec.{{ $field.TypeReference.UnmarshalFunc }}(ctx, v)
if err != nil {
return it, err
return {{$it}}, err
}
{{- if $field.TypeReference.IsOmittable }}
{{ $lhs }} = graphql.OmittableOf(data)
{{- else }}
{{ $lhs }} = data
{{- end }}
{{- end }}
{{- end }}
{{- end }}
}
}
return it, nil
return {{$it}}, nil
}
{{- end }}
{{ end }}

View File

@ -3,6 +3,7 @@ package codegen
import (
"fmt"
"go/types"
"sort"
"github.com/vektah/gqlparser/v2/ast"
@ -40,7 +41,13 @@ func (b *builder) buildInterface(typ *ast.Definition) (*Interface, error) {
return nil, fmt.Errorf("%s is not an interface", i.Type)
}
for _, implementor := range b.Schema.GetPossibleTypes(typ) {
// Sort so that more specific types are evaluated first.
implementors := b.Schema.GetPossibleTypes(typ)
sort.Slice(implementors, func(i, j int) bool {
return len(implementors[i].Interfaces) > len(implementors[j].Interfaces)
})
for _, implementor := range implementors {
obj, err := b.Binder.DefaultUserObject(implementor.Name)
if err != nil {
return nil, fmt.Errorf("%s has no backing go type", implementor.Name)

View File

@ -7,10 +7,11 @@ import (
"strings"
"unicode"
"github.com/99designs/gqlgen/codegen/config"
"github.com/vektah/gqlparser/v2/ast"
"golang.org/x/text/cases"
"golang.org/x/text/language"
"github.com/99designs/gqlgen/codegen/config"
)
type GoFieldType int
@ -25,14 +26,15 @@ const (
type Object struct {
*ast.Definition
Type types.Type
ResolverInterface types.Type
Root bool
Fields []*Field
Implements []*ast.Definition
DisableConcurrency bool
Stream bool
Directives []*Directive
Type types.Type
ResolverInterface types.Type
Root bool
Fields []*Field
Implements []*ast.Definition
DisableConcurrency bool
Stream bool
Directives []*Directive
PointersInUmarshalInput bool
}
func (b *builder) buildObject(typ *ast.Definition) (*Object, error) {
@ -42,11 +44,12 @@ func (b *builder) buildObject(typ *ast.Definition) (*Object, error) {
}
caser := cases.Title(language.English, cases.NoLower)
obj := &Object{
Definition: typ,
Root: b.Schema.Query == typ || b.Schema.Mutation == typ || b.Schema.Subscription == typ,
DisableConcurrency: typ == b.Schema.Mutation,
Stream: typ == b.Schema.Subscription,
Directives: dirs,
Definition: typ,
Root: b.Config.IsRoot(typ),
DisableConcurrency: typ == b.Schema.Mutation,
Stream: typ == b.Schema.Subscription,
Directives: dirs,
PointersInUmarshalInput: b.Config.ReturnPointersInUmarshalInput,
ResolverInterface: types.NewNamed(
types.NewTypeName(0, b.Config.Exec.Pkg(), caser.String(typ.Name)+"Resolver", nil),
nil,
@ -110,8 +113,8 @@ func (o *Object) HasResolvers() bool {
}
func (o *Object) HasUnmarshal() bool {
if o.Type == config.MapType {
return true
if o.IsMap() {
return false
}
for i := 0; i < o.Type.(*types.Named).NumMethods(); i++ {
if o.Type.(*types.Named).Method(i).Name() == "UnmarshalGQL" {
@ -147,10 +150,24 @@ func (o *Object) IsReserved() bool {
return strings.HasPrefix(o.Definition.Name, "__")
}
// IsMap reports whether the object is backed by the built-in map type (config.MapType).
func (o *Object) IsMap() bool {
return o.Type == config.MapType
}
// Description returns the GraphQL description of the object's underlying schema definition.
func (o *Object) Description() string {
return o.Definition.Description
}
// HasField reports whether the object declares a field with the given
// GraphQL name.
func (o *Object) HasField(name string) bool {
	for _, field := range o.Fields {
		if field.Name == name {
			return true
		}
	}
	return false
}
func (os Objects) ByName(name string) *Object {
for i, o := range os {
if strings.EqualFold(o.Definition.Name, name) {

View File

@ -25,86 +25,121 @@ func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.Selec
{{- else }}
func (ec *executionContext) _{{$object.Name}}(ctx context.Context, sel ast.SelectionSet{{ if not $object.Root }},obj {{$object.Reference | ref }}{{ end }}) graphql.Marshaler {
fields := graphql.CollectFields(ec.OperationContext, sel, {{$object.Name|lcFirst}}Implementors)
{{- if $object.Root }}
ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{
Object: {{$object.Name|quote}},
})
{{end}}
{{- if $object.Root }}
ctx = graphql.WithFieldContext(ctx, &graphql.FieldContext{
Object: {{$object.Name|quote}},
})
{{end}}
out := graphql.NewFieldSet(fields)
var invalids uint32
deferred := make(map[string]*graphql.FieldSet)
for i, field := range fields {
{{- if $object.Root }}
innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{
Object: field.Name,
Field: field,
})
{{end}}
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString({{$object.Name|quote}})
{{- range $field := $object.Fields }}
case "{{$field.Name}}":
{{- if $field.IsConcurrent }}
field := field
{{- if $object.Root }}
innerCtx := graphql.WithRootFieldContext(ctx, &graphql.RootFieldContext{
Object: field.Name,
Field: field,
})
{{end}}
switch field.Name {
case "__typename":
out.Values[i] = graphql.MarshalString({{$object.Name|quote}})
{{- range $field := $object.Fields }}
case "{{$field.Name}}":
{{- if $field.IsConcurrent }}
field := field
innerFunc := func(ctx context.Context) (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
{{- if $field.TypeReference.GQL.NonNull }}
if res == graphql.Null {
{{- if $object.IsConcurrent }}
atomic.AddUint32(&invalids, 1)
{{- else }}
invalids++
{{- end }}
}
{{- end }}
return res
}
innerFunc := func(ctx context.Context, {{ if $field.TypeReference.GQL.NonNull }}fs{{ else }}_{{ end }} *graphql.FieldSet) (res graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
}
}()
res = ec._{{$object.Name}}_{{$field.Name}}(ctx, field{{if not $object.Root}}, obj{{end}})
{{- if $field.TypeReference.GQL.NonNull }}
if res == graphql.Null {
{{- if $object.IsConcurrent }}
atomic.AddUint32(&fs.Invalids, 1)
{{- else }}
fs.Invalids++
{{- end }}
}
{{- end }}
return res
}
{{if $object.Root}}
rrm := func(ctx context.Context) graphql.Marshaler {
return ec.OperationContext.RootResolverMiddleware(ctx, innerFunc)
}
{{end}}
{{if $object.Root}}
rrm := func(ctx context.Context) graphql.Marshaler {
return ec.OperationContext.RootResolverMiddleware(ctx,
func(ctx context.Context) graphql.Marshaler { return innerFunc(ctx, out) })
}
{{end}}
out.Concurrently(i, func() graphql.Marshaler {
{{- if $object.Root -}}
return rrm(innerCtx)
{{- else -}}
return innerFunc(ctx)
{{end}}
})
{{- else }}
{{if $object.Root}}
out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) {
return ec._{{$object.Name}}_{{$field.Name}}(ctx, field)
})
{{else}}
out.Values[i] = ec._{{$object.Name}}_{{$field.Name}}(ctx, field, obj)
{{end}}
{{if not $object.Root}}
if field.Deferrable != nil {
dfs, ok := deferred[field.Deferrable.Label]
di := 0
if ok {
dfs.AddField(field)
di = len(dfs.Values) - 1
} else {
dfs = graphql.NewFieldSet([]graphql.CollectedField{field})
deferred[field.Deferrable.Label] = dfs
}
dfs.Concurrently(di, func(ctx context.Context) graphql.Marshaler {
return innerFunc(ctx, dfs)
})
{{- if $field.TypeReference.GQL.NonNull }}
if out.Values[i] == graphql.Null {
{{- if $object.IsConcurrent }}
atomic.AddUint32(&invalids, 1)
{{- else }}
invalids++
{{- end }}
}
{{- end }}
{{- end }}
{{- end }}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
// don't run the out.Concurrently() call below
out.Values[i] = graphql.Null
continue
}
{{end}}
out.Concurrently(i, func(ctx context.Context) graphql.Marshaler {
{{- if $object.Root -}}
return rrm(innerCtx)
{{- else -}}
return innerFunc(ctx, out)
{{- end -}}
})
{{- else }}
{{- if $object.Root -}}
out.Values[i] = ec.OperationContext.RootResolverMiddleware(innerCtx, func(ctx context.Context) (res graphql.Marshaler) {
return ec._{{$object.Name}}_{{$field.Name}}(ctx, field)
})
{{- else -}}
out.Values[i] = ec._{{$object.Name}}_{{$field.Name}}(ctx, field, obj)
{{- end -}}
{{- if $field.TypeReference.GQL.NonNull }}
if out.Values[i] == graphql.Null {
{{- if $object.IsConcurrent }}
atomic.AddUint32(&out.Invalids, 1)
{{- else }}
out.Invalids++
{{- end }}
}
{{- end }}
{{- end }}
{{- end }}
default:
panic("unknown field " + strconv.Quote(field.Name))
}
}
out.Dispatch()
if invalids > 0 { return graphql.Null }
out.Dispatch(ctx)
if out.Invalids > 0 { return graphql.Null }
atomic.AddInt32(&ec.deferred, int32(len(deferred)))
for label, dfs := range deferred {
ec.processDeferredGroup(graphql.DeferredGroup{
Label: label,
Path: graphql.GetPath(ctx),
FieldSet: dfs,
Context: ctx,
})
}
return out
}
{{- end }}

View File

@ -17,6 +17,7 @@
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
return &executableSchema{
schema: cfg.Schema,
resolvers: cfg.Resolvers,
directives: cfg.Directives,
complexity: cfg.Complexity,
@ -24,6 +25,7 @@ func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
}
type Config struct {
Schema *ast.Schema
Resolvers ResolverRoot
Directives DirectiveRoot
Complexity ComplexityRoot
@ -49,6 +51,7 @@ type DirectiveRoot struct {
}
type ComplexityRoot struct {
{{- if not .Config.OmitComplexity }}
{{ range $object := .Objects }}
{{ if not $object.IsReserved -}}
{{ ucFirst $object.Name }} struct {
@ -61,21 +64,27 @@ type ComplexityRoot struct {
}
{{- end }}
{{ end }}
{{- end }}
}
type executableSchema struct {
schema *ast.Schema
resolvers ResolverRoot
directives DirectiveRoot
complexity ComplexityRoot
}
func (e *executableSchema) Schema() *ast.Schema {
if e.schema != nil {
return e.schema
}
return parsedSchema
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
ec := executionContext{nil, e}
ec := executionContext{nil, e, 0, 0, nil}
_ = ec
{{- if not .Config.OmitComplexity }}
switch typeName + "." + field {
{{ range $object := .Objects }}
{{ if not $object.IsReserved }}
@ -102,12 +111,13 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
{{ end }}
{{ end }}
}
{{- end }}
return 0, false
}
func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler {
rc := graphql.GetOperationContext(ctx)
ec := executionContext{rc, e}
ec := executionContext{rc, e, 0, 0, make(chan graphql.DeferredResult)}
inputUnmarshalMap := graphql.BuildUnmarshalerMap(
{{- range $input := .Inputs -}}
{{ if not $input.HasUnmarshal }}
@ -120,22 +130,39 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler {
switch rc.Operation.Operation {
{{- if .QueryRoot }} case ast.Query:
return func(ctx context.Context) *graphql.Response {
if !first { return nil }
first = false
ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap)
{{ if .Directives.LocationDirectives "QUERY" -}}
data := ec._queryMiddleware(ctx, rc.Operation, func(ctx context.Context) (interface{}, error){
return ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet), nil
})
{{- else -}}
data := ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet)
{{- end }}
var response graphql.Response
var data graphql.Marshaler
if first {
first = false
ctx = graphql.WithUnmarshalerMap(ctx, inputUnmarshalMap)
{{ if .Directives.LocationDirectives "QUERY" -}}
data = ec._queryMiddleware(ctx, rc.Operation, func(ctx context.Context) (interface{}, error){
return ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet), nil
})
{{- else -}}
data = ec._{{.QueryRoot.Name}}(ctx, rc.Operation.SelectionSet)
{{- end }}
} else {
if atomic.LoadInt32(&ec.pendingDeferred) > 0 {
result := <-ec.deferredResults
atomic.AddInt32(&ec.pendingDeferred, -1)
data = result.Result
response.Path = result.Path
response.Label = result.Label
response.Errors = result.Errors
} else {
return nil
}
}
var buf bytes.Buffer
data.MarshalGQL(&buf)
return &graphql.Response{
Data: buf.Bytes(),
response.Data = buf.Bytes()
if atomic.LoadInt32(&ec.deferred) > 0 {
hasNext := atomic.LoadInt32(&ec.pendingDeferred) > 0
response.HasNext = &hasNext
}
return &response
}
{{ end }}
@ -192,20 +219,42 @@ func (e *executableSchema) Exec(ctx context.Context) graphql.ResponseHandler {
type executionContext struct {
*graphql.OperationContext
*executableSchema
deferred int32
pendingDeferred int32
deferredResults chan graphql.DeferredResult
}
func (ec *executionContext) processDeferredGroup(dg graphql.DeferredGroup) {
atomic.AddInt32(&ec.pendingDeferred, 1)
go func () {
ctx := graphql.WithFreshResponseContext(dg.Context)
dg.FieldSet.Dispatch(ctx)
ds := graphql.DeferredResult{
Path: dg.Path,
Label: dg.Label,
Result: dg.FieldSet,
Errors: graphql.GetErrors(ctx),
}
// null fields should bubble up
if dg.FieldSet.Invalids > 0 {
ds.Result = graphql.Null
}
ec.deferredResults <- ds
}()
}
func (ec *executionContext) introspectSchema() (*introspection.Schema, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapSchema(parsedSchema), nil
return introspection.WrapSchema(ec.Schema()), nil
}
func (ec *executionContext) introspectType(name string) (*introspection.Type, error) {
if ec.DisableIntrospection {
return nil, errors.New("introspection disabled")
}
return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name]), nil
return introspection.WrapTypeFromDef(ec.Schema(), ec.Schema().Types[name]), nil
}

View File

@ -45,7 +45,7 @@ func (s *Imports) Reserve(path string, aliases ...string) (string, error) {
panic("empty ambient import")
}
// if we are referencing our own package we dont need an import
// if we are referencing our own package we don't need an import
if code.ImportPathForDir(s.destDir) == path {
return "", nil
}
@ -85,7 +85,7 @@ func (s *Imports) Lookup(path string) string {
path = code.NormalizeVendor(path)
// if we are referencing our own package we dont need an import
// if we are referencing our own package we don't need an import
if code.ImportPathForDir(s.destDir) == path {
return ""
}

View File

@ -18,7 +18,6 @@ import (
"unicode"
"github.com/99designs/gqlgen/internal/code"
"github.com/99designs/gqlgen/internal/imports"
)
@ -172,7 +171,7 @@ func parseTemplates(cfg Options, t *template.Template) (*template.Template, erro
fileSystem = cfg.TemplateFS
} else {
// load path relative to calling source file
_, callerFile, _, _ := runtime.Caller(1)
_, callerFile, _, _ := runtime.Caller(2)
rootDir := filepath.Dir(callerFile)
fileSystem = os.DirFS(rootDir)
}
@ -202,6 +201,7 @@ func Funcs() template.FuncMap {
"rawQuote": rawQuote,
"dump": Dump,
"ref": ref,
"obj": obj,
"ts": TypeIdentifier,
"call": Call,
"prefixLines": prefixLines,
@ -248,42 +248,13 @@ func ref(p types.Type) string {
return CurrentImports.LookupType(p)
}
var pkgReplacer = strings.NewReplacer(
"/", "ᚋ",
".", "ᚗ",
"-", "ᚑ",
"~", "א",
)
func TypeIdentifier(t types.Type) string {
res := ""
for {
switch it := t.(type) {
case *types.Pointer:
t.Underlying()
res += "ᚖ"
t = it.Elem()
case *types.Slice:
res += "ᚕ"
t = it.Elem()
case *types.Named:
res += pkgReplacer.Replace(it.Obj().Pkg().Path())
res += "ᚐ"
res += it.Obj().Name()
return res
case *types.Basic:
res += it.Name()
return res
case *types.Map:
res += "map"
return res
case *types.Interface:
res += "interface"
return res
default:
panic(fmt.Errorf("unexpected type %T", it))
}
// obj renders a types.Object as an identifier, qualified with the package
// alias from CurrentImports unless the object lives in the destination
// package (empty alias).
func obj(o types.Object) string {
	alias := CurrentImports.Lookup(o.Pkg().Path())
	if alias == "" {
		return o.Name()
	}
	return alias + "." + o.Name()
}
func Call(p *types.Func) string {
@ -503,21 +474,40 @@ func wordWalker(str string, f func(*wordInfo)) {
}
i++
initialisms := GetInitialisms()
// [w,i) is a word.
word := string(runes[w:i])
if !eow && commonInitialisms[word] && !unicode.IsLower(runes[i]) {
if !eow && initialisms[word] && !unicode.IsLower(runes[i]) {
// through
// split IDFoo → ID, Foo
// but URLs → URLs
} else if !eow {
if commonInitialisms[word] {
if initialisms[word] {
hasCommonInitial = true
}
continue
}
matchCommonInitial := false
if commonInitialisms[strings.ToUpper(word)] {
upperWord := strings.ToUpper(word)
if initialisms[upperWord] {
// If the uppercase word (string(runes[w:i]) is "ID" or "IP"
// AND
// the word is the first two characters of the str
// AND
// that is not the end of the word
// AND
// the length of the string is greater than 3
// AND
// the third rune is an uppercase one
// THEN
// do NOT count this as an initialism.
switch upperWord {
case "ID", "IP":
if word == str[:2] && !eow && len(str) > 3 && unicode.IsUpper(runes[3]) {
continue
}
}
hasCommonInitial = true
matchCommonInitial = true
}
@ -573,57 +563,6 @@ func sanitizeKeywords(name string) string {
return name
}
// commonInitialisms is a set of common initialisms.
// Only add entries that are highly unlikely to be non-initialisms.
// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
var commonInitialisms = map[string]bool{
"ACL": true,
"API": true,
"ASCII": true,
"CPU": true,
"CSS": true,
"CSV": true,
"DNS": true,
"EOF": true,
"GUID": true,
"HTML": true,
"HTTP": true,
"HTTPS": true,
"ICMP": true,
"ID": true,
"IP": true,
"JSON": true,
"KVK": true,
"LHS": true,
"PDF": true,
"PGP": true,
"QPS": true,
"QR": true,
"RAM": true,
"RHS": true,
"RPC": true,
"SLA": true,
"SMTP": true,
"SQL": true,
"SSH": true,
"SVG": true,
"TCP": true,
"TLS": true,
"TTL": true,
"UDP": true,
"UI": true,
"UID": true,
"URI": true,
"URL": true,
"UTF8": true,
"UUID": true,
"VM": true,
"XML": true,
"XMPP": true,
"XSRF": true,
"XSS": true,
}
func rawQuote(s string) string {
return "`" + strings.ReplaceAll(s, "`", "`+\"`\"+`") + "`"
}
@ -738,3 +677,97 @@ func write(filename string, b []byte, packages *code.Packages) error {
return nil
}
var pkgReplacer = strings.NewReplacer(
"/", "ᚋ",
".", "ᚗ",
"-", "ᚑ",
"~", "א",
)
func TypeIdentifier(t types.Type) string {
res := ""
for {
switch it := t.(type) {
case *types.Pointer:
t.Underlying()
res += "ᚖ"
t = it.Elem()
case *types.Slice:
res += "ᚕ"
t = it.Elem()
case *types.Named:
res += pkgReplacer.Replace(it.Obj().Pkg().Path())
res += "ᚐ"
res += it.Obj().Name()
return res
case *types.Basic:
res += it.Name()
return res
case *types.Map:
res += "map"
return res
case *types.Interface:
res += "interface"
return res
default:
panic(fmt.Errorf("unexpected type %T", it))
}
}
}
// CommonInitialisms is a set of common initialisms.
// Only add entries that are highly unlikely to be non-initialisms.
// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
var CommonInitialisms = func() map[string]bool {
	words := []string{
		"ACL", "API", "ASCII", "CPU", "CSS", "CSV", "DNS", "EOF", "GUID",
		"HTML", "HTTP", "HTTPS", "ICMP", "ID", "IP", "JSON", "KVK", "LHS",
		"PDF", "PGP", "QPS", "QR", "RAM", "RHS", "RPC", "SLA", "SMTP",
		"SQL", "SSH", "SVG", "TCP", "TLS", "TTL", "UDP", "UI", "UID",
		"URI", "URL", "UTF8", "UUID", "VM", "XML", "XMPP", "XSRF", "XSS",
	}
	set := make(map[string]bool, len(words))
	for _, w := range words {
		set[w] = true
	}
	return set
}()

// GetInitialisms returns the initialisms to capitalize in Go names. If unchanged, default initialisms will be returned
var GetInitialisms = func() map[string]bool {
	return CommonInitialisms
}

View File

@ -26,7 +26,7 @@ func processType(ret map[string]*config.TypeReference, ref *config.TypeReference
}
ret[key] = ref
if ref.IsSlice() || ref.IsPtrToSlice() || ref.IsPtrToPtr() {
if ref.IsSlice() || ref.IsPtrToSlice() || ref.IsPtrToPtr() || ref.IsPtrToIntf() {
processType(ret, ref.Elem())
}
}

View File

@ -4,7 +4,7 @@
{{- if and $type.IsNilable (not $type.GQL.NonNull) (not $type.IsPtrToPtr) }}
if v == nil { return nil, nil }
{{- end }}
{{- if $type.IsPtrToSlice }}
{{- if or $type.IsPtrToSlice $type.IsPtrToIntf }}
res, err := ec.{{ $type.Elem.UnmarshalFunc }}(ctx, v)
return &res, graphql.ErrorOnPath(ctx, err)
{{- else if $type.IsSlice }}
@ -34,7 +34,10 @@
return &pres, nil
{{- else }}
{{- if $type.Unmarshaler }}
{{- if $type.CastType }}
{{- if $type.HasEnumValues }}
tmp, err := {{ $type.Unmarshaler | call }}(v)
res := {{ $type.UnmarshalFunc }}[tmp]
{{- else if $type.CastType }}
{{- if $type.IsContext }}
tmp, err := {{ $type.Unmarshaler | call }}(ctx, v)
{{- else }}
@ -59,8 +62,6 @@
{{- else}}
return res, graphql.ErrorOnPath(ctx, err)
{{- end }}
{{- else if eq ($type.GO | ref) "map[string]interface{}" }}
return v.(map[string]interface{}), nil
{{- else if $type.IsMarshaler }}
{{- if and $type.IsNilable $type.Elem }}
var res = new({{ $type.Elem.GO | ref }})
@ -75,9 +76,11 @@
return res, graphql.ErrorOnPath(ctx, err)
{{- else }}
res, err := ec.unmarshalInput{{ $type.GQL.Name }}(ctx, v)
{{- if $type.IsNilable }}
{{- if and $type.IsNilable (not $type.IsMap) (not $type.PointersInUmarshalInput) }}
return &res, graphql.ErrorOnPath(ctx, err)
{{- else}}
{{- else if and (not $type.IsNilable) $type.PointersInUmarshalInput }}
return *res, graphql.ErrorOnPath(ctx, err)
{{- else }}
return res, graphql.ErrorOnPath(ctx, err)
{{- end }}
{{- end }}
@ -87,7 +90,7 @@
{{ with $type.MarshalFunc }}
func (ec *executionContext) {{ . }}(ctx context.Context, sel ast.SelectionSet, v {{ $type.GO | ref }}) graphql.Marshaler {
{{- if $type.IsPtrToSlice }}
{{- if or $type.IsPtrToSlice $type.IsPtrToIntf }}
return ec.{{ $type.Elem.MarshalFunc }}(ctx, sel, *v)
{{- else if $type.IsSlice }}
{{- if not $type.GQL.NonNull }}
@ -170,7 +173,12 @@
{{- else if and (not $type.IsTargetNilable) $type.IsNilable }}
{{- $v = "*v" }}
{{- end }}
res := {{ $type.Marshaler | call }}({{- if $type.CastType }}{{ $type.CastType | ref }}({{ $v }}){{else}}{{ $v }}{{- end }})
{{- if $type.HasEnumValues }}
{{- $v = printf "%v[%v]" $type.MarshalFunc $v }}
{{- else if $type.CastType }}
{{- $v = printf "%v(%v)" ($type.CastType | ref) $v}}
{{- end }}
res := {{ $type.Marshaler | call }}({{ $v }})
{{- if $type.GQL.NonNull }}
if res == graphql.Null {
if !graphql.HasFieldError(ctx, graphql.GetFieldContext(ctx)) {
@ -183,10 +191,36 @@
{{- else }}
return res
{{- end }}
{{- else if $type.IsRoot }}
{{- if eq $type.Definition.Name "Subscription" }}
res := ec._{{$type.Definition.Name}}(ctx, sel)
return res(ctx)
{{- else }}
return ec._{{$type.Definition.Name}}(ctx, sel)
{{- end }}
{{- else }}
return ec._{{$type.Definition.Name}}(ctx, sel, {{ if not $type.IsNilable}}&{{end}} v)
{{- end }}
{{- end }}
}
{{- end }}
{{- if $type.HasEnumValues }}
{{- $enum := $type.GO }}
{{- if $type.IsNilable }}
{{- $enum = $type.GO.Elem }}
{{- end }}
var (
{{ $type.UnmarshalFunc }} = map[string]{{ $enum | ref }}{
{{- range $value := $type.EnumValues }}
"{{ $value.Definition.Name }}": {{ $value.Object | obj }},
{{- end }}
}
{{ $type.MarshalFunc }} = map[{{ $enum | ref }}]string{
{{- range $value := $type.EnumValues }}
{{ $value.Object | obj }}: "{{ $value.Definition.Name }}",
{{- end }}
}
)
{{- end }}
{{- end }}

View File

@ -41,6 +41,7 @@ func findGoInterface(def types.Type) (*types.Interface, error) {
// equalFieldName reports whether source and target refer to the same field
// name, comparing case-insensitively and ignoring underscores on both
// sides as well as a ",omitempty" JSON-tag suffix on source.
func equalFieldName(source, target string) bool {
	stripUnderscores := func(s string) string {
		return strings.ReplaceAll(s, "_", "")
	}
	src := strings.ReplaceAll(stripUnderscores(source), ",omitempty", "")
	return strings.EqualFold(src, stripUnderscores(target))
}

View File

@ -1,8 +1,9 @@
package complexity
import (
"github.com/99designs/gqlgen/graphql"
"github.com/vektah/gqlparser/v2/ast"
"github.com/99designs/gqlgen/graphql"
)
func Calculate(es graphql.ExecutableSchema, op *ast.OperationDefinition, vars map[string]interface{}) int {

View File

@ -15,15 +15,15 @@ type Cache interface {
type MapCache map[string]interface{}
// Get looks up a key's value from the cache.
func (m MapCache) Get(ctx context.Context, key string) (value interface{}, ok bool) {
func (m MapCache) Get(_ context.Context, key string) (value interface{}, ok bool) {
v, ok := m[key]
return v, ok
}
// Add adds a value to the cache.
func (m MapCache) Add(ctx context.Context, key string, value interface{}) { m[key] = value }
func (m MapCache) Add(_ context.Context, key string, value interface{}) { m[key] = value }
type NoCache struct{}
func (n NoCache) Get(ctx context.Context, key string) (value interface{}, ok bool) { return nil, false }
func (n NoCache) Add(ctx context.Context, key string, value interface{}) {}
func (n NoCache) Get(_ context.Context, _ string) (value interface{}, ok bool) { return nil, false }
func (n NoCache) Add(_ context.Context, _ string, _ interface{}) {}

View File

@ -6,6 +6,7 @@ import (
"net/http"
"github.com/vektah/gqlparser/v2/ast"
"github.com/vektah/gqlparser/v2/gqlerror"
)
// Deprecated: Please update all references to OperationContext instead
@ -72,8 +73,8 @@ func WithOperationContext(ctx context.Context, rc *OperationContext) context.Con
//
// Some errors can happen outside of an operation, eg json unmarshal errors.
func HasOperationContext(ctx context.Context) bool {
_, ok := ctx.Value(operationCtx).(*OperationContext)
return ok
val, ok := ctx.Value(operationCtx).(*OperationContext)
return ok && val != nil
}
// This is just a convenient wrapper method for CollectFields
@ -106,9 +107,16 @@ func (c *OperationContext) Errorf(ctx context.Context, format string, args ...in
AddErrorf(ctx, format, args...)
}
// Error adds an error — or, when the underlying type is gqlerror.List,
// each error it contains — onto the stack. The errors will then be sent
// to the client after passing through the formatter.
func (c *OperationContext) Error(ctx context.Context, err error) {
	switch typed := err.(type) {
	case gqlerror.List:
		for _, item := range typed {
			AddError(ctx, item)
		}
	default:
		AddError(ctx, err)
	}
}

View File

@ -34,7 +34,6 @@ func (fic *PathContext) Path() ast.Path {
if fic.ParentField != nil {
fieldPath := fic.ParentField.Path()
return append(fieldPath, path...)
}
return path

View File

@ -36,6 +36,14 @@ func WithResponseContext(ctx context.Context, presenterFunc ErrorPresenterFunc,
})
}
// WithFreshResponseContext derives a new response context from ctx,
// carrying over the existing error presenter and recover handler while
// leaving every other field of the new responseContext at its zero value.
func WithFreshResponseContext(ctx context.Context) context.Context {
	prev := getResponseContext(ctx)
	fresh := &responseContext{
		errorPresenter: prev.errorPresenter,
		recover:        prev.recover,
	}
	return context.WithValue(ctx, resultCtx, fresh)
}
// AddErrorf writes a formatted error to the client, first passing it through the error presenter.
func AddErrorf(ctx context.Context, format string, args ...interface{}) {
AddError(ctx, fmt.Errorf(format, args...))

Some files were not shown because too many files have changed in this diff Show More