Merge branch 'dev'

This commit is contained in:
Ingo Oppermann 2022-07-21 22:25:55 +02:00
commit 8c956111ba
No known key found for this signature in database
GPG Key ID: 2AB32426E9DD229E
507 changed files with 36917 additions and 15310 deletions

View File

@ -1,62 +1,62 @@
name: 'Build base:alpine-core'
name: "Build base:alpine-core"
on:
workflow_dispatch:
push:
branches-ignore:
- '**'
workflow_dispatch:
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.alpine.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
push: true
tags: |
datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
datarhei/base:${{ env.OS_NAME }}-core-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile
build-args: |
BUILD_IMAGE=${{ env.OS_NAME }}:${{ env.OS_VERSION }}
GOLANG_IMAGE=${{ env.GOLANG_IMAGE }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6
push: true
tags: |
datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
datarhei/base:${{ env.OS_NAME }}-core-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,4 +1,4 @@
name: 'Build datarhei/base:alpine-core-dev'
name: 'Build base:alpine-core:dev'
on:
workflow_dispatch:
@ -81,9 +81,12 @@ jobs:
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
dockerBundle:
uses: ./.github/workflows/build_bundle_dev.yaml
secrets: inherit
dockerBundleRpi:
uses: ./.github/workflows/build_bundle-rpi_dev.yaml
secrets: inherit

View File

@ -1,71 +1,71 @@
name: 'Build core:rpi'
name: "Build core:rpi"
on:
workflow_dispatch:
schedule:
- cron: '7 5 * * *'
push:
branches-ignore:
- '**'
workflow_dispatch:
schedule:
- cron: "7 5 * * *"
push:
branches-ignore:
- "**"
jobs:
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
docker:
runs-on: [self-hosted]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.alpine.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.alpine.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- uses: cardinalby/export-env-action@v1
with:
envFile: '.github_build/Build.bundle.rpi.env'
export: 'true'
expandWithJobEnv: 'true'
expand: 'true'
- uses: cardinalby/export-env-action@v1
with:
envFile: ".github_build/Build.bundle.rpi.env"
export: "true"
expandWithJobEnv: "true"
expand: "true"
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-rpi-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/arm/v7,linux/arm/v6,linux/arm64
push: true
tags: |
datarhei/core:rpi-${{ env.CORE_VERSION }}
datarhei/core:rpi-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
- name: Build Multi-Arch
uses: docker/build-push-action@v2
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: ./Dockerfile.bundle
build-args: |
CORE_IMAGE=datarhei/base:${{ env.OS_NAME }}-core-${{ env.OS_VERSION }}-${{ env.CORE_VERSION }}
FFMPEG_IMAGE=datarhei/base:${{ env.OS_NAME }}-ffmpeg-rpi-${{ env.OS_VERSION }}-${{ env.FFMPEG_VERSION }}
platforms: linux/arm/v7,linux/arm/v6,linux/arm64
push: true
tags: |
datarhei/core:rpi-${{ env.CORE_VERSION }}
datarhei/core:rpi-latest
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new

View File

@ -1,5 +1,5 @@
# CORE ALPINE BASE IMAGE
OS_NAME=alpine
OS_VERSION=3.15
GOLANG_IMAGE=golang:1.18.2-alpine3.15
CORE_VERSION=16.8.0
GOLANG_IMAGE=golang:1.18.4-alpine3.15
CORE_VERSION=16.9.0

View File

@ -1,5 +1,5 @@
# CORE UBUNTU BASE IMAGE
OS_NAME=ubuntu
OS_VERSION=20.04
GOLANG_IMAGE=golang:1.18.2-alpine3.15
CORE_VERSION=16.8.0
GOLANG_IMAGE=golang:1.18.4-alpine3.15
CORE_VERSION=16.9.0

View File

@ -1,5 +1,20 @@
# Core
#### Core v16.8.0 > v16.9.0
- Add new placeholders and parameters for placeholder
- Allow RTMP server if RTMPS server is enabled. In case you already had RTMPS enabled it will listen on the same port as before. An RTMP server will be started additionally listening on a lower port number. The RTMP app is required to start with a slash.
- Add optional escape character to process placeholder
- Fix output address validation for tee outputs
- Fix updating process config
- Add experimental SRT connection stats and logs API
- Hide /config/reload endpoint in read-only mode
- Add experimental SRT server (datarhei/gosrt)
- Create v16 in go.mod
- Fix data races, tests, lint, and update dependencies
- Add trailing slash for routed directories (datarhei/restreamer#340)
- Allow relative URLs in content in static routes
#### Core v16.7.2 > v16.8.0
- Add purge_on_delete function

View File

@ -1,4 +1,4 @@
ARG GOLANG_IMAGE=golang:1.17.6-alpine3.15
ARG GOLANG_IMAGE=golang:1.18.4-alpine3.15
ARG BUILD_IMAGE=alpine:3.15
@ -11,8 +11,8 @@ RUN apk add \
make && \
cd /dist/core && \
go version && \
make release && \
make import
make release_linux && \
make import_linux
FROM $BUILD_IMAGE

8
Dockerfile.test Normal file
View File

@ -0,0 +1,8 @@
FROM golang:1.18.3-alpine3.15
RUN apk add alpine-sdk
COPY . /dist/core
RUN cd /dist/core && \
go test -coverprofile=coverage.out -covermode=atomic -v ./...

View File

@ -2,13 +2,17 @@ COMMIT := $(shell if [ -d .git ]; then git rev-parse HEAD; else echo "unknown";
SHORTCOMMIT := $(shell echo $(COMMIT) | head -c 7)
BRANCH := $(shell if [ -d .git ]; then git rev-parse --abbrev-ref HEAD; else echo "master"; fi)
BUILD := $(shell date -u "+%Y-%m-%dT%H:%M:%SZ")
OSARCH := $(shell if [ "${GOOS}" -a "${GOARCH}" ]; then echo "-${GOOS}-${GOARCH}"; else echo ""; fi)
BINSUFFIX := $(shell if [ "${GOOS}" -a "${GOARCH}" ]; then echo "-${GOOS}-${GOARCH}"; else echo ""; fi)
all: build
## build: Build core (default)
build:
CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o core${OSARCH}
CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o core${BINSUFFIX}
# github workflow workaround
build_linux:
CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o core
## swagger: Update swagger API documentation (requires github.com/swaggo/swag)
swagger:
@ -20,7 +24,7 @@ gqlgen:
## test: Run all tests
test:
go test -coverprofile=/dev/null ./...
go test -race -coverprofile=/dev/null -v ./...
## vet: Analyze code for potential errors
vet:
@ -54,11 +58,15 @@ lint:
## import: Build import binary
import:
cd app/import && CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o ../../import -ldflags="-s -w"
# github workflow workaround
import_linux:
cd app/import && CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o ../../import -ldflags="-s -w"
## coverage: Generate code coverage analysis
coverage:
go test -coverprofile test/cover.out ./...
go test -race -coverprofile test/cover.out ./...
go tool cover -html=test/cover.out -o test/cover.html
## commit: Prepare code for commit (vet, fmt, test)
@ -67,6 +75,10 @@ commit: vet fmt lint test build
## release: Build a release binary of core
release:
CGO_ENABLED=${CGO_ENABLED} GOOS=${GOOS} GOARCH=${GOARCH} go build -o core -ldflags="-s -w -X github.com/datarhei/core/app.Commit=$(COMMIT) -X github.com/datarhei/core/app.Branch=$(BRANCH) -X github.com/datarhei/core/app.Build=$(BUILD)"
# github workflow workaround
release_linux:
CGO_ENABLED=0 GOOS=linux GOARCH=${OSARCH} go build -o core -ldflags="-s -w -X github.com/datarhei/core/app.Commit=$(COMMIT) -X github.com/datarhei/core/app.Branch=$(BRANCH) -X github.com/datarhei/core/app.Build=$(BUILD)"
## docker: Build standard Docker image

View File

@ -1,5 +1,8 @@
# Core
[![CodeQL](https://github.com/datarhei/core/workflows/CodeQL/badge.svg)](https://github.com/datarhei/core/actions?query=workflow%3ACodeQL)
![Docker Pulls](https://img.shields.io/docker/pulls/datarhei/core.svg?maxAge=604800&label=Docker%20Pulls)
The cloud-native audio/video processing API.
[![License: Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-brightgreen.svg)](https://www.apache.org/licenses/LICENSE-2.0)
@ -49,8 +52,8 @@ docker run --name core -d \
Native (linux/amd64,linux/arm64,linux/arm/v7)
- datarhei/base:alpine-core-latest
- datarhei/base:ubuntu-core-latest
- datarhei/base:core-alpine-latest
- datarhei/base:core-ubuntu-latest
Bundle with FFmpeg (linux/amd64,linux/arm64,linux/arm/v7)
@ -129,8 +132,15 @@ The currently known environment variables (but not all will be respected) are:
| CORE_RTMP_ENABLE | `false` | Enable RTMP server. |
| CORE_RTMP_ENABLE_TLS | `false` | Enable RTMP over TLS (RTMPS). Requires `CORE_TLS_ENABLE` to be `true`. |
| CORE_RTMP_ADDRESS | `:1935` | RTMP server listen address. |
| CORE_RTMP_ADDRESS_TLS | `:1936` | RTMPS server listen address. |
| CORE_RTMP_APP | `/` | RTMP app for publishing. |
| CORE_RTMP_TOKEN | (not set) | RTMP token for publishing and playing. The token is the value of the URL query parameter `token`. |
| CORE_SRT_ENABLE | `false` | Enable SRT server. |
| CORE_SRT_ADDRESS | `:6000` | SRT server listen address. |
| CORE_SRT_PASSPHRASE | (not set) | SRT passphrase. |
| CORE_SRT_TOKEN | (not set) | SRT token for publishing and playing. The token is the value of the URL query parameter `token`. |
| CORE_SRT_LOG_ENABLE | `false` | Enable SRT server logging. |
| CORE_SRT_LOG_TOPICS | (not set) | List topics to log from SRT server. See https://github.com/datarhei/gosrt#logging. |
| CORE_FFMPEG_BINARY | `ffmpeg` | Path to FFmpeg binary. |
| CORE_FFMPEG_MAXPROCESSES | `0` | Max. allowed simultaneously running FFmpeg instances. Any value <= 0 means unlimited. |
| CORE_FFMPEG_ACCESS_INPUT_ALLOW | (not set) | List of pattern for allowed input URI (space-separated), leave empty to allow any. |
@ -253,9 +263,20 @@ All other values will be filled with default values and persisted on disk. The e
"enable": false,
"enable_tls": false,
"address": ":1935",
"address_tls": ":1936",
"app": "/",
"token": ""
},
"srt": {
"enable": false,
"address": ":6000",
"passphrase": "",
"token": "",
"log": {
"enable": false,
"topics": [],
}
},
"ffmpeg": {
"binary": "ffmpeg",
"max_processes": 0,
@ -370,12 +391,36 @@ If you set a value for `CORE_STORAGE_DISK_CACHE_MAXSIZEMBYTES`, which is larger
## RTMP
The datarhei Core includes a simple RTMP server for publishing and playing streams. Set the environment variable `CORE_RTMP_ENABLE` to `true` to enable the RTMP server. It is listening on `CORE_RTMP_ADDRESS.` Use `CORE_RTMP_APP` to limit the app a stream can be published on, e.g. `/live` to require URLs to start with `/live`. To prevent anybody can publish streams, set `CORE_RTMP_TOKEN` to a secret only known to the publishers. The token has to be put in the query of the stream URL, e.g. `/live/stream?token=...`.
The datarhei Core includes a simple RTMP server for publishing and playing streams. Set the environment variable `CORE_RTMP_ENABLE` to `true` to enable the RTMP server. It is listening on `CORE_RTMP_ADDRESS`. Use `CORE_RTMP_APP` to limit the app a stream can be published on, e.g. `/live` to require URLs to start with `/live`. To prevent anyone from publishing streams, set `CORE_RTMP_TOKEN` to a secret only known to the publishers and subscribers. The token has to be put in the query of the stream URL, e.g. `/live/stream?token=...`.
For additionally enabling the RTMPS server, set the config variable `rtmp.enable_tls` or environment variable `CORE_RTMP_ENABLE_TLS` to `true`. This requires `tls.enable` or `CORE_TLS_ENABLE` to be set to `true`. Use `rtmp.address_tls` or `CORE_RTMP_ADDRESS_TLS` to set the listen address for the RTMPS server.
| Method | Path | Description |
| ------ | ------------ | ------------------------------------- |
| GET | /api/v3/rtmp | List all currently published streams. |
## SRT
The datarhei Core includes a simple SRT server for publishing and playing streams. Set the environment variable `CORE_SRT_ENABLE` to `true` to enable the SRT server. It is listening on `CORE_SRT_ADDRESS`.
The `streamid` is formatted according to Appendix B of the [SRT specs](https://datatracker.ietf.org/doc/html/draft-sharabayko-srt#appendix-B). The following keys are supported:
| Key | Descriptions |
| ------- | ----------------------------------------------------------------------------------------------------------------- |
| `m` | The connection mode, either `publish` for publishing a stream or `request` for subscribing to a published stream. |
| `r` | Name of the resource. |
| `token` | A token to prevent unauthorized clients from publishing or subscribing to a stream. This is set with `CORE_SRT_TOKEN`. |
An example publishing streamid: `#!:m=publish,r=12345,token=foobar`.
With your SRT client, connect to the SRT server always in `caller` mode, e.g. `srt://127.0.0.1:6000?mode=caller&streamid=#!:m=publish,r=12345,token=foobar&passphrase=foobarfoobar&transmode=live`.
Via the API you can gather statistics of the currently connected SRT clients.
| Method | Path | Description |
| ------ | ----------- | ------------------------------------- |
| GET | /api/v3/srt | List all currently published streams. |
## Playout
FFmpeg processes with a `avstream:` (or `playout:`) input stream can expose an HTTP API to control the playout of that stream. With
@ -604,17 +649,24 @@ A command is defined as:
Currently supported placeholders are:
| Placeholder | Description | Location |
| ------------- | --------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------- |
| `{diskfs}` | Will be replaced by the provided `CORE_STORAGE_DISK_DIR`. | `options`, `input.address`, `input.options`, `output.address`, `output.options` |
| `{memfs}` | Will be replaced by the base URL of the MemFS. | `input.address`, `input.options`, `output.address`, `output.options` |
| `{processid}` | Will be replaced by the ID of the process. | `input.id`, `input.address`, `input.options`, `output.id`, `output.address`, `output.options`, `output.cleanup.pattern` |
| `{reference}` | Will be replaced by the reference of the process | `input.id`, `input.address`, `input.options`, `output.id`, `output.address`, `output.options`, `output.cleanup.pattern` |
| `{inputid}` | Will be replaced by the ID of the input. | `input.address`, `input.options` |
| `{outputid}` | Will be replaced by the ID of the output. | `output.address`, `output.options`, `output.cleanup.pattern` |
| Placeholder | Description | Location |
| ------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------------------------------------------------- |
| `{diskfs}` | Will be replaced by the provided `CORE_STORAGE_DISK_DIR`. | `options`, `input.address`, `input.options`, `output.address`, `output.options` |
| `{memfs}` | Will be replaced by the base URL of the MemFS. | `input.address`, `input.options`, `output.address`, `output.options` |
| `{processid}` | Will be replaced by the ID of the process. | `input.id`, `input.address`, `input.options`, `output.id`, `output.address`, `output.options`, `output.cleanup.pattern` |
| `{reference}` | Will be replaced by the reference of the process | `input.id`, `input.address`, `input.options`, `output.id`, `output.address`, `output.options`, `output.cleanup.pattern` |
| `{inputid}` | Will be replaced by the ID of the input. | `input.address`, `input.options` |
| `{outputid}` | Will be replaced by the ID of the output. | `output.address`, `output.options`, `output.cleanup.pattern` |
| `{rtmp}` | Will be replaced by the internal address of the RTMP server. Requires parameter `name` (name of the stream). | `input.address`, `output.address` |
| `{srt}` | Will be replaced by the internal address of the SRT server. Requires parameter `name` (name of the stream) and `mode` (either `publish` or `request`). | `input.address`, `output.address` |
Before replacing the placeholder in the process, all references will be resolved, i.e., you can put the placeholder also in the params for an
option.
Before replacing the placeholders in the process config, all references (see below) will be resolved.
If the value that gets filled in on the place of the placeholder needs escaping, you can define the character to be escaped in the placeholder by adding it to the placeholder name and prefix it with a `^`.
E.g. escape all `:` in the value (`http://example.com:8080`) for `{memfs}` placeholder, write `{memfs^:}`. It will then be replaced by `http\://example.com\:8080`. The escape character is always `\`. In
case there are `\` in the value, they will also get escaped. If the placeholder doesn't imply escaping, the value will be used as-is.
Add parameters to a placeholder by appending a comma separated list of key/values, e.g. `{placeholder,key1=value1,key2=value2}`. This can be combined with escaping.
### References
@ -923,7 +975,7 @@ available. If authentication is enabled, you have to provide the token in the he
### Requirement
- Go v1.16+ ([Download here](https://golang.org/dl/))
- Go v1.18+ ([Download here](https://golang.org/dl/))
### Build

View File

@ -15,25 +15,27 @@ import (
"sync"
"time"
"github.com/datarhei/core/app"
"github.com/datarhei/core/config"
"github.com/datarhei/core/ffmpeg"
"github.com/datarhei/core/http"
"github.com/datarhei/core/http/cache"
"github.com/datarhei/core/http/jwt"
"github.com/datarhei/core/http/router"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/log"
"github.com/datarhei/core/math/rand"
"github.com/datarhei/core/monitor"
"github.com/datarhei/core/net"
"github.com/datarhei/core/prometheus"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/restream/store"
"github.com/datarhei/core/rtmp"
"github.com/datarhei/core/service"
"github.com/datarhei/core/session"
"github.com/datarhei/core/update"
"github.com/datarhei/core/v16/app"
"github.com/datarhei/core/v16/config"
"github.com/datarhei/core/v16/ffmpeg"
"github.com/datarhei/core/v16/http"
"github.com/datarhei/core/v16/http/cache"
"github.com/datarhei/core/v16/http/jwt"
"github.com/datarhei/core/v16/http/router"
"github.com/datarhei/core/v16/io/fs"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/math/rand"
"github.com/datarhei/core/v16/monitor"
"github.com/datarhei/core/v16/net"
"github.com/datarhei/core/v16/prometheus"
"github.com/datarhei/core/v16/restream"
"github.com/datarhei/core/v16/restream/replace"
"github.com/datarhei/core/v16/restream/store"
"github.com/datarhei/core/v16/rtmp"
"github.com/datarhei/core/v16/service"
"github.com/datarhei/core/v16/session"
"github.com/datarhei/core/v16/srt"
"github.com/datarhei/core/v16/update"
"golang.org/x/crypto/acme/autocert"
)
@ -64,6 +66,7 @@ type api struct {
diskfs fs.Filesystem
memfs fs.Filesystem
rtmpserver rtmp.Server
srtserver srt.Server
metrics monitor.HistoryMonitor
prom prometheus.Metrics
service service.Service
@ -73,12 +76,10 @@ type api struct {
sidecarserver *gohttp.Server
httpjwt jwt.JWT
update update.Checker
replacer replace.Replacer
errorChan chan error
startOnce sync.Once
stopOnce sync.Once
gcTickerStop context.CancelFunc
log struct {
@ -89,6 +90,8 @@ type api struct {
main log.Logger
sidecar log.Logger
rtmp log.Logger
rtmps log.Logger
srt log.Logger
}
}
@ -151,6 +154,12 @@ func (a *api) Reload() error {
}
cfg := store.Get()
if err := cfg.Migrate(); err == nil {
store.Set(cfg)
} else {
return err
}
cfg.Merge()
if len(cfg.Host.Name) == 0 && cfg.Host.Auto {
@ -319,6 +328,11 @@ func (a *api) start() error {
return fmt.Errorf("unable to register session collector: %w", err)
}
srt, err := sessions.Register("srt", config)
if err != nil {
return fmt.Errorf("unable to register session collector: %w", err)
}
if _, err := sessions.Register("http", config); err != nil {
return fmt.Errorf("unable to register session collector: %w", err)
}
@ -333,13 +347,20 @@ func (a *api) start() error {
}
hls.AddCompanion(rtmp)
hls.AddCompanion(srt)
hls.AddCompanion(ffmpeg)
rtmp.AddCompanion(hls)
rtmp.AddCompanion(ffmpeg)
rtmp.AddCompanion(srt)
srt.AddCompanion(hls)
srt.AddCompanion(ffmpeg)
srt.AddCompanion(rtmp)
ffmpeg.AddCompanion(hls)
ffmpeg.AddCompanion(rtmp)
ffmpeg.AddCompanion(srt)
} else {
sessions, _ := session.New(session.Config{})
a.sessions = sessions
@ -426,12 +447,46 @@ func (a *api) start() error {
a.ffmpeg = ffmpeg
a.replacer = replace.New()
{
a.replacer.RegisterTemplate("diskfs", a.diskfs.Base())
a.replacer.RegisterTemplate("memfs", a.memfs.Base())
host, port, _ := gonet.SplitHostPort(cfg.RTMP.Address)
if len(host) == 0 {
host = "localhost"
}
template := "rtmp://" + host + ":" + port + cfg.RTMP.App + "/{name}"
if len(cfg.RTMP.Token) != 0 {
template += "?token=" + cfg.RTMP.Token
}
a.replacer.RegisterTemplate("rtmp", template)
host, port, _ = gonet.SplitHostPort(cfg.SRT.Address)
if len(host) == 0 {
host = "localhost"
}
template = "srt://" + host + ":" + port + "?mode=caller&transtype=live&streamid=#!:m={mode},r={name}"
if len(cfg.SRT.Token) != 0 {
template += ",token=" + cfg.SRT.Token
}
if len(cfg.SRT.Passphrase) != 0 {
template += "&passphrase=" + cfg.SRT.Passphrase
}
a.replacer.RegisterTemplate("srt", template)
}
restream, err := restream.New(restream.Config{
ID: cfg.ID,
Name: cfg.Name,
Store: store,
DiskFS: a.diskfs,
MemFS: a.memfs,
Replace: a.replacer,
FFmpeg: a.ffmpeg,
MaxProcesses: cfg.FFmpeg.MaxProcesses,
Logger: a.log.logger.core.WithComponent("Process"),
@ -657,11 +712,14 @@ func (a *api) start() error {
}
if cfg.RTMP.Enable {
a.log.logger.rtmp = a.log.logger.core.WithComponent("RTMP").WithField("address", cfg.RTMP.Address)
config := rtmp.Config{
Addr: cfg.RTMP.Address,
TLSAddr: cfg.RTMP.AddressTLS,
App: cfg.RTMP.App,
Token: cfg.RTMP.Token,
Logger: a.log.logger.core.WithComponent("RTMP").WithField("address", cfg.RTMP.Address),
Logger: a.log.logger.rtmp,
Collector: a.sessions.Collector("rtmp"),
}
@ -670,19 +728,41 @@ func (a *api) start() error {
GetCertificate: autocertManager.GetCertificate,
}
config.Logger = config.Logger.WithComponent("RTMPS")
config.Logger = config.Logger.WithComponent("RTMP/S")
a.log.logger.rtmps = a.log.logger.core.WithComponent("RTMPS").WithField("address", cfg.RTMP.AddressTLS)
}
rtmpserver, err := rtmp.New(config)
if err != nil {
return fmt.Errorf("unable to create RMTP server: %w", err)
}
a.log.logger.rtmp = config.Logger
a.rtmpserver = rtmpserver
}
if cfg.SRT.Enable {
config := srt.Config{
Addr: cfg.SRT.Address,
Passphrase: cfg.SRT.Passphrase,
Token: cfg.SRT.Token,
Logger: a.log.logger.core.WithComponent("SRT").WithField("address", cfg.SRT.Address),
Collector: a.sessions.Collector("srt"),
}
if cfg.SRT.Log.Enable {
config.SRTLogTopics = cfg.SRT.Log.Topics
}
srtserver, err := srt.New(config)
if err != nil {
return fmt.Errorf("unable to create SRT server: %w", err)
}
a.log.logger.srt = config.Logger
a.srtserver = srtserver
}
logcontext := "HTTP"
if cfg.TLS.Enable {
logcontext = "HTTPS"
@ -733,6 +813,7 @@ func (a *api) start() error {
Origins: cfg.Storage.CORS.Origins,
},
RTMP: a.rtmpserver,
SRT: a.srtserver,
JWT: a.httpjwt,
Config: a.config.store,
Cache: a.cache,
@ -794,6 +875,7 @@ func (a *api) start() error {
Origins: cfg.Storage.CORS.Origins,
},
RTMP: a.rtmpserver,
SRT: a.srtserver,
JWT: a.httpjwt,
Config: a.config.store,
Cache: a.cache,
@ -866,7 +948,33 @@ func (a *api) start() error {
var err error
if cfg.TLS.Enable && cfg.RTMP.EnableTLS {
logger.Info().Log("Server started")
err = a.rtmpserver.ListenAndServe()
if err != nil && err != rtmp.ErrServerClosed {
err = fmt.Errorf("RTMP server: %w", err)
} else {
err = nil
}
sendError(err)
}()
if cfg.TLS.Enable && cfg.RTMP.EnableTLS {
wgStart.Add(1)
a.wgStop.Add(1)
go func() {
logger := a.log.logger.rtmps
defer func() {
logger.Info().Log("Server exited")
a.wgStop.Done()
}()
wgStart.Done()
var err error
logger.Info().Log("Server started")
err = a.rtmpserver.ListenAndServeTLS(cfg.TLS.CertFile, cfg.TLS.KeyFile)
if err != nil && err != rtmp.ErrServerClosed {
@ -874,14 +982,34 @@ func (a *api) start() error {
} else {
err = nil
}
sendError(err)
}()
}
}
if a.srtserver != nil {
wgStart.Add(1)
a.wgStop.Add(1)
go func() {
logger := a.log.logger.srt
defer func() {
logger.Info().Log("Server exited")
a.wgStop.Done()
}()
wgStart.Done()
var err error
logger.Info().Log("Server started")
err = a.srtserver.ListenAndServe()
if err != nil && err != srt.ErrServerClosed {
err = fmt.Errorf("SRT server: %w", err)
} else {
logger.Info().Log("Server started")
err = a.rtmpserver.ListenAndServe()
if err != nil && err != rtmp.ErrServerClosed {
err = fmt.Errorf("RTMP server: %w", err)
} else {
err = nil
}
err = nil
}
sendError(err)
@ -1031,10 +1159,22 @@ func (a *api) stop() {
a.cache = nil
}
// Stop the SRT server
if a.srtserver != nil {
a.log.logger.srt.Info().Log("Stopping ...")
a.srtserver.Close()
a.srtserver = nil
}
// Stop the RTMP server
if a.rtmpserver != nil {
a.log.logger.rtmp.Info().Log("Stopping ...")
if a.log.logger.rtmps != nil {
a.log.logger.rtmps.Info().Log("Stopping ...")
}
a.rtmpserver.Close()
a.rtmpserver = nil
}

View File

@ -15,12 +15,12 @@ import (
"strings"
"time"
"github.com/datarhei/core/encoding/json"
"github.com/datarhei/core/ffmpeg"
"github.com/datarhei/core/ffmpeg/skills"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/restream/store"
"github.com/datarhei/core/v16/encoding/json"
"github.com/datarhei/core/v16/ffmpeg"
"github.com/datarhei/core/v16/ffmpeg/skills"
"github.com/datarhei/core/v16/restream"
"github.com/datarhei/core/v16/restream/app"
"github.com/datarhei/core/v16/restream/store"
"github.com/google/uuid"
)

View File

@ -6,8 +6,8 @@ import (
"os"
"testing"
"github.com/datarhei/core/encoding/json"
"github.com/datarhei/core/restream/store"
"github.com/datarhei/core/v16/encoding/json"
"github.com/datarhei/core/v16/restream/store"
"github.com/stretchr/testify/require"
)

View File

@ -3,9 +3,9 @@ package main
import (
"os"
"github.com/datarhei/core/config"
"github.com/datarhei/core/log"
"github.com/datarhei/core/restream/store"
"github.com/datarhei/core/v16/config"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/restream/store"
_ "github.com/joho/godotenv/autoload"
)

View File

@ -29,7 +29,7 @@ func (v versionInfo) MinorString() string {
// Version of the app
var Version = versionInfo{
Major: 16,
Minor: 8,
Minor: 9,
Patch: 0,
}

View File

@ -6,14 +6,18 @@ import (
"fmt"
"net"
"os"
"strconv"
"strings"
"time"
"github.com/datarhei/core/math/rand"
"github.com/datarhei/core/v16/math/rand"
haikunator "github.com/atrox/haikunatorgo/v2"
"github.com/google/uuid"
)
const version int64 = 2
type variable struct {
value value // The actual value
defVal string // The default value in string representation
@ -129,12 +133,23 @@ type Data struct {
MimeTypes string `json:"mimetypes_file"`
} `json:"storage"`
RTMP struct {
Enable bool `json:"enable"`
EnableTLS bool `json:"enable_tls"`
Address string `json:"address"`
App string `json:"app"`
Token string `json:"token"`
Enable bool `json:"enable"`
EnableTLS bool `json:"enable_tls"`
Address string `json:"address"`
AddressTLS string `json:"address_tls"`
App string `json:"app"`
Token string `json:"token"`
} `json:"rtmp"`
SRT struct {
Enable bool `json:"enable"`
Address string `json:"address"`
Passphrase string `json:"passphrase"`
Token string `json:"token"`
Log struct {
Enable bool `json:"enable"`
Topics []string `json:"topics"`
} `json:"log"`
} `json:"srt"`
FFmpeg struct {
Binary string `json:"binary"`
MaxProcesses int64 `json:"max_processes"`
@ -227,6 +242,7 @@ func NewConfigFrom(d *Config) *Config {
data.TLS = d.TLS
data.Storage = d.Storage
data.RTMP = d.RTMP
data.SRT = d.SRT
data.FFmpeg = d.FFmpeg
data.Playout = d.Playout
data.Debug = d.Debug
@ -255,6 +271,8 @@ func NewConfigFrom(d *Config) *Config {
data.Sessions.IPIgnoreList = copyStringSlice(d.Sessions.IPIgnoreList)
data.SRT.Log.Topics = copyStringSlice(d.SRT.Log.Topics)
data.Router.BlockedPrefixes = copyStringSlice(d.Router.BlockedPrefixes)
data.Router.Routes = copyStringMap(d.Router.Routes)
@ -266,7 +284,7 @@ func NewConfigFrom(d *Config) *Config {
}
func (d *Config) init() {
d.val(newInt64Value(&d.Version, 1), "version", "", nil, "Configuration file layout version", true, false)
d.val(newInt64Value(&d.Version, version), "version", "", nil, "Configuration file layout version", true, false)
d.val(newTimeValue(&d.CreatedAt, time.Now()), "created_at", "", nil, "Configuration file creation time", false, false)
d.val(newStringValue(&d.ID, uuid.New().String()), "id", "CORE_ID", nil, "ID for this instance", true, false)
d.val(newStringValue(&d.Name, haikunator.New().Haikunate()), "name", "CORE_NAME", nil, "A human readable name for this instance", false, false)
@ -336,9 +354,18 @@ func (d *Config) init() {
d.val(newBoolValue(&d.RTMP.Enable, false), "rtmp.enable", "CORE_RTMP_ENABLE", nil, "Enable RTMP server", false, false)
d.val(newBoolValue(&d.RTMP.EnableTLS, false), "rtmp.enable_tls", "CORE_RTMP_ENABLE_TLS", nil, "Enable RTMPS server instead of RTMP", false, false)
d.val(newAddressValue(&d.RTMP.Address, ":1935"), "rtmp.address", "CORE_RTMP_ADDRESS", nil, "RTMP server listen address", false, false)
d.val(newStringValue(&d.RTMP.App, "/"), "rtmp.app", "CORE_RTMP_APP", nil, "RTMP app for publishing", false, false)
d.val(newAddressValue(&d.RTMP.AddressTLS, ":1936"), "rtmp.address_tls", "CORE_RTMP_ADDRESS_TLS", nil, "RTMPS server listen address", false, false)
d.val(newAbsolutePathValue(&d.RTMP.App, "/"), "rtmp.app", "CORE_RTMP_APP", nil, "RTMP app for publishing", false, false)
d.val(newStringValue(&d.RTMP.Token, ""), "rtmp.token", "CORE_RTMP_TOKEN", nil, "RTMP token for publishing and playing", false, true)
// SRT
d.val(newBoolValue(&d.SRT.Enable, false), "srt.enable", "CORE_SRT_ENABLE", nil, "Enable SRT server", false, false)
d.val(newAddressValue(&d.SRT.Address, ":6000"), "srt.address", "CORE_SRT_ADDRESS", nil, "SRT server listen address", false, false)
d.val(newStringValue(&d.SRT.Passphrase, ""), "srt.passphrase", "CORE_SRT_PASSPHRASE", nil, "SRT encryption passphrase", false, true)
d.val(newStringValue(&d.SRT.Token, ""), "srt.token", "CORE_SRT_TOKEN", nil, "SRT token for publishing and playing", false, true)
d.val(newBoolValue(&d.SRT.Log.Enable, false), "srt.log.enable", "CORE_SRT_LOG_ENABLE", nil, "Enable SRT server logging", false, false)
d.val(newStringListValue(&d.SRT.Log.Topics, []string{}, ","), "srt.log.topics", "CORE_SRT_LOG_TOPICS", nil, "List of topics to log", false, false)
// FFmpeg
d.val(newExecValue(&d.FFmpeg.Binary, "ffmpeg"), "ffmpeg.binary", "CORE_FFMPEG_BINARY", nil, "Path to ffmpeg binary", true, false)
d.val(newInt64Value(&d.FFmpeg.MaxProcesses, 0), "ffmpeg.max_processes", "CORE_FFMPEG_MAXPROCESSES", nil, "Max. allowed simultaneously running ffmpeg instances, 0 for unlimited", false, false)
@ -456,6 +483,36 @@ func (d *Config) Merge() {
}
}
// Migrate will migrate some settings, depending on the version it finds. Migrations
// are only going upwards, i.e. from a lower version to a higher version.
func (d *Config) Migrate() error {
	if d.Version == 1 {
		// As of version 2 the RTMP app is an absolute path.
		if !strings.HasPrefix(d.RTMP.App, "/") {
			d.RTMP.App = "/" + d.RTMP.App
		}

		if d.RTMP.EnableTLS {
			// In version 1 a TLS-enabled RTMP server listened on rtmp.address.
			// Version 2 splits this into rtmp.address (plain) and rtmp.address_tls:
			// the old address becomes the TLS address and the plain server moves
			// to the next lower port. Parse the address before mutating any
			// fields so a failed migration doesn't leave the config half-migrated.
			host, sport, err := net.SplitHostPort(d.RTMP.Address)
			if err != nil {
				return fmt.Errorf("migrating rtmp.address to rtmp.address_tls failed: %w", err)
			}

			port, err := strconv.Atoi(sport)
			if err != nil {
				return fmt.Errorf("migrating rtmp.address to rtmp.address_tls failed: %w", err)
			}

			d.RTMP.Enable = true
			d.RTMP.AddressTLS = d.RTMP.Address
			d.RTMP.Address = net.JoinHostPort(host, strconv.Itoa(port-1))
		}

		d.Version = 2
	}

	return nil
}
// Validate validates the current state of the Config for completeness and sanity. Errors are
// written to the log. Use resetLogs to indicate to reset the logs prior validation.
func (d *Config) Validate(resetLogs bool) {
@ -463,8 +520,8 @@ func (d *Config) Validate(resetLogs bool) {
d.logs = nil
}
if d.Version != 1 {
d.log("error", d.findVariable("version"), "unknown configuration layout version")
if d.Version != version {
d.log("error", d.findVariable("version"), "unknown configuration layout version (found version %d, expecting version %d)", d.Version, version)
return
}
@ -542,9 +599,21 @@ func (d *Config) Validate(resetLogs bool) {
// If TLS for RTMP is enabled, TLS must be enabled
if d.RTMP.EnableTLS {
if !d.RTMP.Enable {
d.log("error", d.findVariable("rtmp.enable"), "RTMP server must be enabled if RTMPS server is enabled")
}
if !d.TLS.Enable {
d.log("error", d.findVariable("rtmp.enable_tls"), "RTMPS server can only be enabled if TLS is enabled")
}
if len(d.RTMP.AddressTLS) == 0 {
d.log("error", d.findVariable("rtmp.address_tls"), "RTMPS server address must be set")
}
if d.RTMP.Enable && d.RTMP.Address == d.RTMP.AddressTLS {
d.log("error", d.findVariable("rtmp.address"), "The RTMP and RTMPS server can't listen on the same address")
}
}
// If CORE_MEMFS_USERNAME and CORE_MEMFS_PASSWORD are set, automatically active/deactivate Basic-Auth for memfs

View File

@ -8,8 +8,8 @@ import (
"path/filepath"
"time"
"github.com/datarhei/core/encoding/json"
"github.com/datarhei/core/io/file"
"github.com/datarhei/core/v16/encoding/json"
"github.com/datarhei/core/v16/io/file"
)
type jsonStore struct {

View File

@ -8,12 +8,13 @@ import (
"net/url"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
"github.com/datarhei/core/http/cors"
"github.com/datarhei/core/v16/http/cors"
)
type value interface {
@ -772,3 +773,35 @@ func (u *urlValue) Validate() error {
func (u *urlValue) IsEmpty() bool {
return len(string(*u)) == 0
}
// absolute path

// absolutePathValue holds a string that is interpreted as a filesystem path
// and must be absolute in order to validate. The stored value is always the
// filepath.Clean'ed form of what was given.
type absolutePathValue string

// newAbsolutePathValue writes the cleaned default into p and returns a view
// on it.
func newAbsolutePathValue(p *string, val string) *absolutePathValue {
	*p = filepath.Clean(val)

	return (*absolutePathValue)(p)
}

// Set replaces the stored path with the cleaned form of val. It never fails.
func (a *absolutePathValue) Set(val string) error {
	*a = absolutePathValue(filepath.Clean(val))

	return nil
}

// String returns the stored path.
func (a *absolutePathValue) String() string {
	return string(*a)
}

// Validate returns an error if the stored path is not absolute.
func (a *absolutePathValue) Validate() error {
	if p := string(*a); !filepath.IsAbs(p) {
		return fmt.Errorf("%s is not an absolute path", p)
	}

	return nil
}

// IsEmpty reports whether no path is stored.
func (a *absolutePathValue) IsEmpty() bool {
	return len(*a) == 0
}

View File

@ -17,7 +17,7 @@ const docTemplate = `{
},
"license": {
"name": "Apache 2.0",
"url": "https://github.com/datarhei/core/blob/main/LICENSE"
"url": "https://github.com/datarhei/core/v16/blob/main/LICENSE"
},
"version": "{{.Version}}"
},
@ -1784,11 +1784,11 @@ const docTemplate = `{
"ApiKeyAuth": []
}
],
"description": "List all currently publishing streams",
"description": "List all currently publishing RTMP streams",
"produces": [
"application/json"
],
"summary": "List all publishing streams",
"summary": "List all publishing RTMP streams",
"operationId": "rtmp-3-list-channels",
"responses": {
"200": {
@ -1911,6 +1911,32 @@ const docTemplate = `{
}
}
},
"/api/v3/srt": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "List all currently publishing SRT streams. This endpoint is EXPERIMENTAL and may change in future.",
"produces": [
"application/json"
],
"summary": "List all publishing SRT streams",
"operationId": "srt-3-list-channels",
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTChannels"
}
}
}
}
}
},
"/api/v3/widget/process/{id}": {
"get": {
"description": "Fetch minimal statistics about a process, which is not protected by any auth.",
@ -2640,6 +2666,37 @@ const docTemplate = `{
}
}
},
"srt": {
"type": "object",
"properties": {
"address": {
"type": "string"
},
"enable": {
"type": "boolean"
},
"log": {
"type": "object",
"properties": {
"enable": {
"type": "boolean"
},
"topics": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"passphrase": {
"type": "string"
},
"token": {
"type": "string"
}
}
},
"storage": {
"type": "object",
"properties": {
@ -3447,6 +3504,265 @@ const docTemplate = `{
}
}
},
"api.SRTChannels": {
"type": "object",
"properties": {
"connections": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/api.SRTConnection"
}
},
"log": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTLog"
}
}
},
"publisher": {
"type": "object",
"additionalProperties": {
"type": "integer"
}
},
"subscriber": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "integer"
}
}
}
}
},
"api.SRTConnection": {
"type": "object",
"properties": {
"log": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTLog"
}
}
},
"stats": {
"$ref": "#/definitions/api.SRTStatistics"
}
}
},
"api.SRTLog": {
"type": "object",
"properties": {
"msg": {
"type": "array",
"items": {
"type": "string"
}
},
"ts": {
"type": "integer"
}
}
},
"api.SRTStatistics": {
"type": "object",
"properties": {
"avail_recv_buf_bytes": {
"description": "The available space in the receiver's buffer, in bytes",
"type": "integer"
},
"avail_send_buf_bytes": {
"description": "The available space in the sender's buffer, in bytes",
"type": "integer"
},
"bandwidth_mbit": {
"description": "Estimated bandwidth of the network link, in Mbps",
"type": "number"
},
"flight_size_pkt": {
"description": "The number of packets in flight",
"type": "integer"
},
"flow_window_pkt": {
"description": "The maximum number of packets that can be \"in flight\"",
"type": "integer"
},
"max_bandwidth_mbit": {
"description": "Transmission bandwidth limit, in Mbps",
"type": "number"
},
"mss_bytes": {
"description": "Maximum Segment Size (MSS), in bytes",
"type": "integer"
},
"pkt_recv_avg_belated_time_ms": {
"description": "Accumulated difference between the current time and the time-to-play of a packet that is received late",
"type": "integer"
},
"pkt_send_period_us": {
"description": "Current minimum time interval between which consecutive packets are sent, in microseconds",
"type": "number"
},
"recv_ack_pkt": {
"description": "The total number of received ACK (Acknowledgement) control packets",
"type": "integer"
},
"recv_buf_bytes": {
"description": "Instantaneous (current) value of pktRcvBuf, expressed in bytes, including payload and all headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_buf_ms": {
"description": "The timespan (msec) of acknowledged packets in the receiver's buffer",
"type": "integer"
},
"recv_buf_pkt": {
"description": "The number of acknowledged packets in receiver's buffer",
"type": "integer"
},
"recv_bytes": {
"description": "Same as pktRecv, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_drop_bytes": {
"description": "Same as pktRcvDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_drop_pkt": {
"description": "The total number of dropped by the SRT receiver and, as a result, not delivered to the upstream application DATA packets",
"type": "integer"
},
"recv_km_pkt": {
"description": "The total number of received KM (Key Material) control packets",
"type": "integer"
},
"recv_loss__bytes": {
"description": "Same as pktRcvLoss, but expressed in bytes, including payload and all the headers (IP, TCP, SRT), bytes for the presently missing (either reordered or lost) packets' payloads are estimated based on the average packet size",
"type": "integer"
},
"recv_loss_pkt": {
"description": "The total number of SRT DATA packets detected as presently missing (either reordered or lost) at the receiver side",
"type": "integer"
},
"recv_nak_pkt": {
"description": "The total number of received NAK (Negative Acknowledgement) control packets",
"type": "integer"
},
"recv_pkt": {
"description": "The total number of received DATA packets, including retransmitted packets",
"type": "integer"
},
"recv_retran_pkts": {
"description": "The total number of retransmitted packets registered at the receiver side",
"type": "integer"
},
"recv_tsbpd_delay_ms": {
"description": "Timestamp-based Packet Delivery Delay value set on the socket via SRTO_RCVLATENCY or SRTO_LATENCY",
"type": "integer"
},
"recv_undecrypt_bytes": {
"description": "Same as pktRcvUndecrypt, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_undecrypt_pkt": {
"description": "The total number of packets that failed to be decrypted at the receiver side",
"type": "integer"
},
"recv_unique_bytes": {
"description": "Same as pktRecvUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_unique_pkt": {
"description": "The total number of unique original, retransmitted or recovered by the packet filter DATA packets received in time, decrypted without errors and, as a result, scheduled for delivery to the upstream application by the SRT receiver.",
"type": "integer"
},
"reorder_tolerance_pkt": {
"description": "Instant value of the packet reorder tolerance",
"type": "integer"
},
"rtt_ms": {
"description": "Smoothed round-trip time (SRTT), an exponentially-weighted moving average (EWMA) of an endpoint's RTT samples, in milliseconds",
"type": "number"
},
"send_buf_bytes": {
"description": "Instantaneous (current) value of pktSndBuf, but expressed in bytes, including payload and all headers (IP, TCP, SRT)",
"type": "integer"
},
"send_buf_ms": {
"description": "The timespan (msec) of packets in the sender's buffer (unacknowledged packets)",
"type": "integer"
},
"send_buf_pkt": {
"description": "The number of packets in the sender's buffer that are already scheduled for sending or even possibly sent, but not yet acknowledged",
"type": "integer"
},
"send_drop_bytes": {
"description": "Same as pktSndDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"send_drop_pkt": {
"description": "The total number of dropped by the SRT sender DATA packets that have no chance to be delivered in time",
"type": "integer"
},
"send_duration_us": {
"description": "The total accumulated time in microseconds, during which the SRT sender has some data to transmit, including packets that have been sent, but not yet acknowledged",
"type": "integer"
},
"send_km_pkt": {
"description": "The total number of sent KM (Key Material) control packets",
"type": "integer"
},
"send_loss_pkt": {
"description": "The total number of data packets considered or reported as lost at the sender side. Does not correspond to the packets detected as lost at the receiver side.",
"type": "integer"
},
"send_tsbpd_delay_ms": {
"description": "Timestamp-based Packet Delivery Delay value of the peer",
"type": "integer"
},
"sent_ack_pkt": {
"description": "The total number of sent ACK (Acknowledgement) control packets",
"type": "integer"
},
"sent_bytes": {
"description": "Same as pktSent, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_nak_pkt": {
"description": "The total number of sent NAK (Negative Acknowledgement) control packets",
"type": "integer"
},
"sent_pkt": {
"description": "The total number of sent DATA packets, including retransmitted packets",
"type": "integer"
},
"sent_retrans_bytes": {
"description": "Same as pktRetrans, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_retrans_pkt": {
"description": "The total number of retransmitted packets sent by the SRT sender",
"type": "integer"
},
"sent_unique__bytes": {
"description": "Same as pktSentUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_unique_pkt": {
"description": "The total number of unique DATA packets sent by the SRT sender",
"type": "integer"
},
"timestamp_ms": {
"description": "The time elapsed, in milliseconds, since the SRT socket has been created",
"type": "integer"
}
}
},
"api.Session": {
"type": "object",
"properties": {
@ -3944,6 +4260,37 @@ const docTemplate = `{
}
}
},
"srt": {
"type": "object",
"properties": {
"address": {
"type": "string"
},
"enable": {
"type": "boolean"
},
"log": {
"type": "object",
"properties": {
"enable": {
"type": "boolean"
},
"topics": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"passphrase": {
"type": "string"
},
"token": {
"type": "string"
}
}
},
"storage": {
"type": "object",
"properties": {

View File

@ -10,7 +10,7 @@
},
"license": {
"name": "Apache 2.0",
"url": "https://github.com/datarhei/core/blob/main/LICENSE"
"url": "https://github.com/datarhei/core/v16/blob/main/LICENSE"
},
"version": "3.0"
},
@ -1776,11 +1776,11 @@
"ApiKeyAuth": []
}
],
"description": "List all currently publishing streams",
"description": "List all currently publishing RTMP streams",
"produces": [
"application/json"
],
"summary": "List all publishing streams",
"summary": "List all publishing RTMP streams",
"operationId": "rtmp-3-list-channels",
"responses": {
"200": {
@ -1903,6 +1903,32 @@
}
}
},
"/api/v3/srt": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "List all currently publishing SRT streams. This endpoint is EXPERIMENTAL and may change in future.",
"produces": [
"application/json"
],
"summary": "List all publishing SRT streams",
"operationId": "srt-3-list-channels",
"responses": {
"200": {
"description": "OK",
"schema": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTChannels"
}
}
}
}
}
},
"/api/v3/widget/process/{id}": {
"get": {
"description": "Fetch minimal statistics about a process, which is not protected by any auth.",
@ -2632,6 +2658,37 @@
}
}
},
"srt": {
"type": "object",
"properties": {
"address": {
"type": "string"
},
"enable": {
"type": "boolean"
},
"log": {
"type": "object",
"properties": {
"enable": {
"type": "boolean"
},
"topics": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"passphrase": {
"type": "string"
},
"token": {
"type": "string"
}
}
},
"storage": {
"type": "object",
"properties": {
@ -3439,6 +3496,265 @@
}
}
},
"api.SRTChannels": {
"type": "object",
"properties": {
"connections": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/api.SRTConnection"
}
},
"log": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTLog"
}
}
},
"publisher": {
"type": "object",
"additionalProperties": {
"type": "integer"
}
},
"subscriber": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"type": "integer"
}
}
}
}
},
"api.SRTConnection": {
"type": "object",
"properties": {
"log": {
"type": "object",
"additionalProperties": {
"type": "array",
"items": {
"$ref": "#/definitions/api.SRTLog"
}
}
},
"stats": {
"$ref": "#/definitions/api.SRTStatistics"
}
}
},
"api.SRTLog": {
"type": "object",
"properties": {
"msg": {
"type": "array",
"items": {
"type": "string"
}
},
"ts": {
"type": "integer"
}
}
},
"api.SRTStatistics": {
"type": "object",
"properties": {
"avail_recv_buf_bytes": {
"description": "The available space in the receiver's buffer, in bytes",
"type": "integer"
},
"avail_send_buf_bytes": {
"description": "The available space in the sender's buffer, in bytes",
"type": "integer"
},
"bandwidth_mbit": {
"description": "Estimated bandwidth of the network link, in Mbps",
"type": "number"
},
"flight_size_pkt": {
"description": "The number of packets in flight",
"type": "integer"
},
"flow_window_pkt": {
"description": "The maximum number of packets that can be \"in flight\"",
"type": "integer"
},
"max_bandwidth_mbit": {
"description": "Transmission bandwidth limit, in Mbps",
"type": "number"
},
"mss_bytes": {
"description": "Maximum Segment Size (MSS), in bytes",
"type": "integer"
},
"pkt_recv_avg_belated_time_ms": {
"description": "Accumulated difference between the current time and the time-to-play of a packet that is received late",
"type": "integer"
},
"pkt_send_period_us": {
"description": "Current minimum time interval between which consecutive packets are sent, in microseconds",
"type": "number"
},
"recv_ack_pkt": {
"description": "The total number of received ACK (Acknowledgement) control packets",
"type": "integer"
},
"recv_buf_bytes": {
"description": "Instantaneous (current) value of pktRcvBuf, expressed in bytes, including payload and all headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_buf_ms": {
"description": "The timespan (msec) of acknowledged packets in the receiver's buffer",
"type": "integer"
},
"recv_buf_pkt": {
"description": "The number of acknowledged packets in receiver's buffer",
"type": "integer"
},
"recv_bytes": {
"description": "Same as pktRecv, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_drop_bytes": {
"description": "Same as pktRcvDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_drop_pkt": {
"description": "The total number of dropped by the SRT receiver and, as a result, not delivered to the upstream application DATA packets",
"type": "integer"
},
"recv_km_pkt": {
"description": "The total number of received KM (Key Material) control packets",
"type": "integer"
},
"recv_loss__bytes": {
"description": "Same as pktRcvLoss, but expressed in bytes, including payload and all the headers (IP, TCP, SRT), bytes for the presently missing (either reordered or lost) packets' payloads are estimated based on the average packet size",
"type": "integer"
},
"recv_loss_pkt": {
"description": "The total number of SRT DATA packets detected as presently missing (either reordered or lost) at the receiver side",
"type": "integer"
},
"recv_nak_pkt": {
"description": "The total number of received NAK (Negative Acknowledgement) control packets",
"type": "integer"
},
"recv_pkt": {
"description": "The total number of received DATA packets, including retransmitted packets",
"type": "integer"
},
"recv_retran_pkts": {
"description": "The total number of retransmitted packets registered at the receiver side",
"type": "integer"
},
"recv_tsbpd_delay_ms": {
"description": "Timestamp-based Packet Delivery Delay value set on the socket via SRTO_RCVLATENCY or SRTO_LATENCY",
"type": "integer"
},
"recv_undecrypt_bytes": {
"description": "Same as pktRcvUndecrypt, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_undecrypt_pkt": {
"description": "The total number of packets that failed to be decrypted at the receiver side",
"type": "integer"
},
"recv_unique_bytes": {
"description": "Same as pktRecvUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"recv_unique_pkt": {
"description": "The total number of unique original, retransmitted or recovered by the packet filter DATA packets received in time, decrypted without errors and, as a result, scheduled for delivery to the upstream application by the SRT receiver.",
"type": "integer"
},
"reorder_tolerance_pkt": {
"description": "Instant value of the packet reorder tolerance",
"type": "integer"
},
"rtt_ms": {
"description": "Smoothed round-trip time (SRTT), an exponentially-weighted moving average (EWMA) of an endpoint's RTT samples, in milliseconds",
"type": "number"
},
"send_buf_bytes": {
"description": "Instantaneous (current) value of pktSndBuf, but expressed in bytes, including payload and all headers (IP, TCP, SRT)",
"type": "integer"
},
"send_buf_ms": {
"description": "The timespan (msec) of packets in the sender's buffer (unacknowledged packets)",
"type": "integer"
},
"send_buf_pkt": {
"description": "The number of packets in the sender's buffer that are already scheduled for sending or even possibly sent, but not yet acknowledged",
"type": "integer"
},
"send_drop_bytes": {
"description": "Same as pktSndDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"send_drop_pkt": {
"description": "The total number of dropped by the SRT sender DATA packets that have no chance to be delivered in time",
"type": "integer"
},
"send_duration_us": {
"description": "The total accumulated time in microseconds, during which the SRT sender has some data to transmit, including packets that have been sent, but not yet acknowledged",
"type": "integer"
},
"send_km_pkt": {
"description": "The total number of sent KM (Key Material) control packets",
"type": "integer"
},
"send_loss_pkt": {
"description": "The total number of data packets considered or reported as lost at the sender side. Does not correspond to the packets detected as lost at the receiver side.",
"type": "integer"
},
"send_tsbpd_delay_ms": {
"description": "Timestamp-based Packet Delivery Delay value of the peer",
"type": "integer"
},
"sent_ack_pkt": {
"description": "The total number of sent ACK (Acknowledgement) control packets",
"type": "integer"
},
"sent_bytes": {
"description": "Same as pktSent, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_nak_pkt": {
"description": "The total number of sent NAK (Negative Acknowledgement) control packets",
"type": "integer"
},
"sent_pkt": {
"description": "The total number of sent DATA packets, including retransmitted packets",
"type": "integer"
},
"sent_retrans_bytes": {
"description": "Same as pktRetrans, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_retrans_pkt": {
"description": "The total number of retransmitted packets sent by the SRT sender",
"type": "integer"
},
"sent_unique__bytes": {
"description": "Same as pktSentUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)",
"type": "integer"
},
"sent_unique_pkt": {
"description": "The total number of unique DATA packets sent by the SRT sender",
"type": "integer"
},
"timestamp_ms": {
"description": "The time elapsed, in milliseconds, since the SRT socket has been created",
"type": "integer"
}
}
},
"api.Session": {
"type": "object",
"properties": {
@ -3936,6 +4252,37 @@
}
}
},
"srt": {
"type": "object",
"properties": {
"address": {
"type": "string"
},
"enable": {
"type": "boolean"
},
"log": {
"type": "object",
"properties": {
"enable": {
"type": "boolean"
},
"topics": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"passphrase": {
"type": "string"
},
"token": {
"type": "string"
}
}
},
"storage": {
"type": "object",
"properties": {

View File

@ -301,6 +301,26 @@ definitions:
session_timeout_sec:
type: integer
type: object
srt:
properties:
address:
type: string
enable:
type: boolean
log:
properties:
enable:
type: boolean
topics:
items:
type: string
type: array
type: object
passphrase:
type: string
token:
type: string
type: object
storage:
properties:
cors:
@ -834,6 +854,230 @@ definitions:
name:
type: string
type: object
api.SRTChannels:
properties:
connections:
additionalProperties:
$ref: '#/definitions/api.SRTConnection'
type: object
log:
additionalProperties:
items:
$ref: '#/definitions/api.SRTLog'
type: array
type: object
publisher:
additionalProperties:
type: integer
type: object
subscriber:
additionalProperties:
items:
type: integer
type: array
type: object
type: object
api.SRTConnection:
properties:
log:
additionalProperties:
items:
$ref: '#/definitions/api.SRTLog'
type: array
type: object
stats:
$ref: '#/definitions/api.SRTStatistics'
type: object
api.SRTLog:
properties:
msg:
items:
type: string
type: array
ts:
type: integer
type: object
api.SRTStatistics:
properties:
avail_recv_buf_bytes:
description: The available space in the receiver's buffer, in bytes
type: integer
avail_send_buf_bytes:
description: The available space in the sender's buffer, in bytes
type: integer
bandwidth_mbit:
description: Estimated bandwidth of the network link, in Mbps
type: number
flight_size_pkt:
description: The number of packets in flight
type: integer
flow_window_pkt:
description: The maximum number of packets that can be "in flight"
type: integer
max_bandwidth_mbit:
description: Transmission bandwidth limit, in Mbps
type: number
mss_bytes:
description: Maximum Segment Size (MSS), in bytes
type: integer
pkt_recv_avg_belated_time_ms:
description: Accumulated difference between the current time and the time-to-play
of a packet that is received late
type: integer
pkt_send_period_us:
description: Current minimum time interval between which consecutive packets
are sent, in microseconds
type: number
recv_ack_pkt:
description: The total number of received ACK (Acknowledgement) control packets
type: integer
recv_buf_bytes:
description: Instantaneous (current) value of pktRcvBuf, expressed in bytes,
including payload and all headers (IP, TCP, SRT)
type: integer
recv_buf_ms:
description: The timespan (msec) of acknowledged packets in the receiver's
buffer
type: integer
recv_buf_pkt:
description: The number of acknowledged packets in receiver's buffer
type: integer
recv_bytes:
description: Same as pktRecv, but expressed in bytes, including payload and
all the headers (IP, TCP, SRT)
type: integer
recv_drop_bytes:
description: Same as pktRcvDrop, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
recv_drop_pkt:
description: The total number of dropped by the SRT receiver and, as a result,
not delivered to the upstream application DATA packets
type: integer
recv_km_pkt:
description: The total number of received KM (Key Material) control packets
type: integer
recv_loss__bytes:
description: Same as pktRcvLoss, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT), bytes for the presently missing (either
reordered or lost) packets' payloads are estimated based on the average
packet size
type: integer
recv_loss_pkt:
description: The total number of SRT DATA packets detected as presently missing
(either reordered or lost) at the receiver side
type: integer
recv_nak_pkt:
description: The total number of received NAK (Negative Acknowledgement) control
packets
type: integer
recv_pkt:
description: The total number of received DATA packets, including retransmitted
packets
type: integer
recv_retran_pkts:
description: The total number of retransmitted packets registered at the receiver
side
type: integer
recv_tsbpd_delay_ms:
description: Timestamp-based Packet Delivery Delay value set on the socket
via SRTO_RCVLATENCY or SRTO_LATENCY
type: integer
recv_undecrypt_bytes:
description: Same as pktRcvUndecrypt, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
recv_undecrypt_pkt:
description: The total number of packets that failed to be decrypted at the
receiver side
type: integer
recv_unique_bytes:
description: Same as pktRecvUnique, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
recv_unique_pkt:
description: The total number of unique original, retransmitted or recovered
by the packet filter DATA packets received in time, decrypted without errors
and, as a result, scheduled for delivery to the upstream application by
the SRT receiver.
type: integer
reorder_tolerance_pkt:
description: Instant value of the packet reorder tolerance
type: integer
rtt_ms:
description: Smoothed round-trip time (SRTT), an exponentially-weighted moving
average (EWMA) of an endpoint's RTT samples, in milliseconds
type: number
send_buf_bytes:
description: Instantaneous (current) value of pktSndBuf, but expressed in
bytes, including payload and all headers (IP, TCP, SRT)
type: integer
send_buf_ms:
description: The timespan (msec) of packets in the sender's buffer (unacknowledged
packets)
type: integer
send_buf_pkt:
description: The number of packets in the sender's buffer that are already
scheduled for sending or even possibly sent, but not yet acknowledged
type: integer
send_drop_bytes:
description: Same as pktSndDrop, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
send_drop_pkt:
description: The total number of dropped by the SRT sender DATA packets that
have no chance to be delivered in time
type: integer
send_duration_us:
description: The total accumulated time in microseconds, during which the
SRT sender has some data to transmit, including packets that have been sent,
but not yet acknowledged
type: integer
send_km_pkt:
description: The total number of sent KM (Key Material) control packets
type: integer
send_loss_pkt:
description: The total number of data packets considered or reported as lost
at the sender side. Does not correspond to the packets detected as lost
at the receiver side.
type: integer
send_tsbpd_delay_ms:
description: Timestamp-based Packet Delivery Delay value of the peer
type: integer
sent_ack_pkt:
description: The total number of sent ACK (Acknowledgement) control packets
type: integer
sent_bytes:
description: Same as pktSent, but expressed in bytes, including payload and
all the headers (IP, TCP, SRT)
type: integer
sent_nak_pkt:
description: The total number of sent NAK (Negative Acknowledgement) control
packets
type: integer
sent_pkt:
description: The total number of sent DATA packets, including retransmitted
packets
type: integer
sent_retrans_bytes:
description: Same as pktRetrans, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
sent_retrans_pkt:
description: The total number of retransmitted packets sent by the SRT sender
type: integer
sent_unique__bytes:
description: Same as pktSentUnique, but expressed in bytes, including payload
and all the headers (IP, TCP, SRT)
type: integer
sent_unique_pkt:
description: The total number of unique DATA packets sent by the SRT sender
type: integer
timestamp_ms:
description: The time elapsed, in milliseconds, since the SRT socket has been
created
type: integer
type: object
api.Session:
properties:
bandwidth_rx_kbit:
@ -1160,6 +1404,26 @@ definitions:
session_timeout_sec:
type: integer
type: object
srt:
properties:
address:
type: string
enable:
type: boolean
log:
properties:
enable:
type: boolean
topics:
items:
type: string
type: array
type: object
passphrase:
type: string
token:
type: string
type: object
storage:
properties:
cors:
@ -1419,7 +1683,7 @@ info:
description: Expose REST API for the datarhei Core
license:
name: Apache 2.0
url: https://github.com/datarhei/core/blob/main/LICENSE
url: https://github.com/datarhei/core/v16/blob/main/LICENSE
title: datarhei Core API
version: "3.0"
paths:
@ -2605,7 +2869,7 @@ paths:
summary: Get the state of a process
/api/v3/rtmp:
get:
description: List all currently publishing streams
description: List all currently publishing RTMP streams
operationId: rtmp-3-list-channels
produces:
- application/json
@ -2618,7 +2882,7 @@ paths:
type: array
security:
- ApiKeyAuth: []
summary: List all publishing streams
summary: List all publishing RTMP streams
/api/v3/session:
get:
description: Get a summary of all active and past sessions of the given collector
@ -2686,6 +2950,23 @@ paths:
security:
- ApiKeyAuth: []
summary: Refresh FFmpeg capabilities
/api/v3/srt:
get:
      description: List all currently publishing SRT streams. This endpoint is
        EXPERIMENTAL and may change in the future.
operationId: srt-3-list-channels
produces:
- application/json
responses:
"200":
description: OK
schema:
items:
$ref: '#/definitions/api.SRTChannels'
type: array
security:
- ApiKeyAuth: []
      summary: List all publishing SRT streams
/api/v3/widget/process/{id}:
get:
description: Fetch minimal statistics about a process, which is not protected

View File

@ -1,7 +1,7 @@
package ffmpeg
import (
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/session"
)
type wrappedCollector struct {

View File

@ -3,15 +3,16 @@ package ffmpeg
import (
"fmt"
"os/exec"
"sync"
"time"
"github.com/datarhei/core/ffmpeg/parse"
"github.com/datarhei/core/ffmpeg/probe"
"github.com/datarhei/core/ffmpeg/skills"
"github.com/datarhei/core/log"
"github.com/datarhei/core/net"
"github.com/datarhei/core/process"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/ffmpeg/parse"
"github.com/datarhei/core/v16/ffmpeg/probe"
"github.com/datarhei/core/v16/ffmpeg/skills"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/net"
"github.com/datarhei/core/v16/process"
"github.com/datarhei/core/v16/session"
)
type FFmpeg interface {
@ -64,7 +65,8 @@ type ffmpeg struct {
collector session.Collector
states process.States
states process.States
statesLock sync.RWMutex
}
func New(config Config) (FFmpeg, error) {
@ -120,6 +122,7 @@ func (f *ffmpeg) New(config ProcessConfig) (process.Process, error) {
OnStart: config.OnStart,
OnExit: config.OnExit,
OnStateChange: func(from, to string) {
f.statesLock.Lock()
switch to {
case "finished":
f.states.Finished++
@ -135,6 +138,7 @@ func (f *ffmpeg) New(config ProcessConfig) (process.Process, error) {
f.states.Killed++
default:
}
f.statesLock.Unlock()
if config.OnStateChange != nil {
config.OnStateChange(from, to)
@ -196,5 +200,8 @@ func (f *ffmpeg) PutPort(port int) {
}
func (f *ffmpeg) States() process.States {
f.statesLock.RLock()
defer f.statesLock.RUnlock()
return f.states
}

View File

@ -10,11 +10,12 @@ import (
"sync"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/net/url"
"github.com/datarhei/core/process"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/ffmpeg/prelude"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/net/url"
"github.com/datarhei/core/v16/process"
"github.com/datarhei/core/v16/restream/app"
"github.com/datarhei/core/v16/session"
)
// Parser is an extension to the process.Parser interface
@ -133,6 +134,7 @@ func New(config Config) Parser {
p.re.drop = regexp.MustCompile(`drop=\s*([0-9]+)`)
p.re.dup = regexp.MustCompile(`dup=\s*([0-9]+)`)
p.lock.prelude.Lock()
p.prelude.headLines = config.PreludeHeadLines
if p.prelude.headLines <= 0 {
p.prelude.headLines = 100
@ -142,7 +144,9 @@ func New(config Config) Parser {
p.prelude.tailLines = 50
}
p.prelude.tail = ring.New(p.prelude.tailLines)
p.lock.prelude.Unlock()
p.lock.log.Lock()
p.log = ring.New(config.LogLines)
if p.logHistoryLength > 0 {
@ -154,6 +158,7 @@ func New(config Config) Parser {
}
p.logStart = time.Now()
p.lock.log.Unlock()
p.ResetStats()
@ -168,7 +173,9 @@ func (p *parser) Parse(line string) uint64 {
isAVstreamProgress := strings.HasPrefix(line, "avstream.progress:")
if p.logStart.IsZero() {
p.lock.log.Lock()
p.logStart = time.Now()
p.lock.log.Unlock()
}
if !p.prelude.done {
@ -199,7 +206,10 @@ func (p *parser) Parse(line string) uint64 {
}).Error().Log("Failed parsing outputs")
} else {
p.logger.WithField("prelude", p.Prelude()).Debug().Log("")
p.lock.prelude.Lock()
p.prelude.done = true
p.lock.prelude.Unlock()
}
return 0
@ -211,7 +221,10 @@ func (p *parser) Parse(line string) uint64 {
}
p.logger.WithField("prelude", p.Prelude()).Debug().Log("")
p.lock.prelude.Lock()
p.prelude.done = true
p.lock.prelude.Unlock()
}
}
@ -219,17 +232,17 @@ func (p *parser) Parse(line string) uint64 {
// Write the current non-progress line to the log
p.addLog(line)
p.lock.prelude.Lock()
if !p.prelude.done {
if len(p.prelude.data) < p.prelude.headLines {
p.prelude.data = append(p.prelude.data, line)
} else {
p.lock.prelude.Lock()
p.prelude.tail.Value = line
p.prelude.tail = p.prelude.tail.Next()
p.lock.prelude.Unlock()
p.prelude.truncatedLines++
}
}
p.lock.prelude.Unlock()
return 0
}
@ -508,7 +521,9 @@ func (p *parser) Progress() app.Progress {
}
func (p *parser) Prelude() []string {
p.lock.prelude.RLock()
if p.prelude.data == nil {
p.lock.prelude.RUnlock()
return []string{}
}
@ -517,8 +532,6 @@ func (p *parser) Prelude() []string {
tail := []string{}
p.lock.prelude.RLock()
p.prelude.tail.Do(func(l interface{}) {
if l == nil {
return
@ -540,131 +553,60 @@ func (p *parser) Prelude() []string {
}
func (p *parser) parsePrelude() bool {
process := ffmpegProcess{}
p.lock.progress.Lock()
defer p.lock.progress.Unlock()
// Input #0, lavfi, from 'testsrc=size=1280x720:rate=25':
// Input #1, lavfi, from 'anullsrc=r=44100:cl=stereo':
// Output #0, hls, to './data/testsrc.m3u8':
reFormat := regexp.MustCompile(`^(Input|Output) #([0-9]+), (.*?), (from|to) '([^']+)`)
// Stream #0:0: Video: rawvideo (RGB[24] / 0x18424752), rgb24, 1280x720 [SAR 1:1 DAR 16:9], 25 tbr, 25 tbn, 25 tbc
// Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
// Stream #0:0: Video: h264 (libx264), yuv420p(progressive), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 25 fps, 90k tbn, 25 tbc
// Stream #0:1(eng): Audio: aac (LC), 44100 Hz, stereo, fltp, 64 kb/s
reStream := regexp.MustCompile(`Stream #([0-9]+):([0-9]+)(?:\(([a-z]+)\))?: (Video|Audio|Subtitle): (.*)`)
reStreamCodec := regexp.MustCompile(`^([^\s,]+)`)
reStreamVideoSize := regexp.MustCompile(`, ([0-9]+)x([0-9]+)`)
//reStreamVideoFPS := regexp.MustCompile(`, ([0-9]+) fps`)
reStreamAudio := regexp.MustCompile(`, ([0-9]+) Hz, ([^,]+)`)
//reStreamBitrate := regexp.MustCompile(`, ([0-9]+) kb/s`)
reStreamMapping := regexp.MustCompile(`^Stream mapping:`)
reStreamMap := regexp.MustCompile(`^[\s]+Stream #[0-9]+:[0-9]+`)
//format := InputOutput{}
formatType := ""
formatURL := ""
var noutputs int
streamMapping := false
data := p.Prelude()
for _, line := range data {
if reStreamMapping.MatchString(line) {
streamMapping = true
continue
}
inputs, outputs, noutputs := prelude.Parse(data)
if streamMapping {
if reStreamMap.MatchString(line) {
noutputs++
} else {
streamMapping = false
}
continue
}
if matches := reFormat.FindStringSubmatch(line); matches != nil {
formatType = strings.ToLower(matches[1])
formatURL = matches[5]
continue
}
if matches := reStream.FindStringSubmatch(line); matches != nil {
format := ffmpegProcessIO{}
format.Address = formatURL
if ip, _ := url.Lookup(format.Address); len(ip) != 0 {
format.IP = ip
}
if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
format.Index = x
}
if x, err := strconv.ParseUint(matches[2], 10, 64); err == nil {
format.Stream = x
}
format.Type = strings.ToLower(matches[4])
streamDetail := matches[5]
if matches = reStreamCodec.FindStringSubmatch(streamDetail); matches != nil {
format.Codec = matches[1]
}
/*
if matches = reStreamBitrate.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
format.Bitrate = x
}
}
*/
if format.Type == "video" {
if matches = reStreamVideoSize.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
format.Width = x
}
if x, err := strconv.ParseUint(matches[2], 10, 64); err == nil {
format.Height = x
}
}
/*
if matches = reStreamVideoFPS.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
format.FPS = x
}
}
*/
} else if format.Type == "audio" {
if matches = reStreamAudio.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
format.Sampling = x
}
format.Layout = matches[2]
}
}
if formatType == "input" {
process.input = append(process.input, format)
} else {
process.output = append(process.output, format)
}
}
}
if len(process.output) != noutputs {
if len(outputs) != noutputs {
return false
}
p.process.input = process.input
p.process.output = process.output
for _, in := range inputs {
io := ffmpegProcessIO{
Address: in.Address,
Format: in.Format,
Index: in.Index,
Stream: in.Stream,
Type: in.Type,
Codec: in.Codec,
Pixfmt: in.Pixfmt,
Width: in.Width,
Height: in.Height,
Sampling: in.Sampling,
Layout: in.Layout,
}
if ip, _ := url.Lookup(io.Address); len(ip) != 0 {
io.IP = ip
}
p.process.input = append(p.process.input, io)
}
for _, out := range outputs {
io := ffmpegProcessIO{
Address: out.Address,
Format: out.Format,
Index: out.Index,
Stream: out.Stream,
Type: out.Type,
Codec: out.Codec,
Pixfmt: out.Pixfmt,
Width: out.Width,
Height: out.Height,
Sampling: out.Sampling,
Layout: out.Layout,
}
if ip, _ := url.Lookup(io.Address); len(ip) != 0 {
io.IP = ip
}
p.process.output = append(p.process.output, io)
}
return true
}
@ -734,24 +676,25 @@ func (p *parser) ResetStats() {
p.progress.ffmpeg = ffmpegProgress{}
p.progress.avstream = make(map[string]ffmpegAVstream)
p.lock.prelude.Lock()
p.prelude.done = false
p.lock.prelude.Unlock()
}
func (p *parser) ResetLog() {
p.storeLogHistory()
p.prelude.data = []string{}
p.lock.prelude.Lock()
p.prelude.data = []string{}
p.prelude.tail = ring.New(p.prelude.tailLines)
p.lock.prelude.Unlock()
p.prelude.truncatedLines = 0
p.prelude.done = false
p.lock.prelude.Unlock()
p.lock.log.Lock()
p.log = ring.New(p.logLines)
p.lock.log.Unlock()
p.logStart = time.Now()
p.lock.log.Unlock()
}
// Report represents a log report, including the prelude and the last log lines
@ -777,11 +720,14 @@ func (p *parser) storeLogHistory() {
func (p *parser) Report() Report {
h := Report{
CreatedAt: p.logStart,
Prelude: p.Prelude(),
Log: p.Log(),
Prelude: p.Prelude(),
Log: p.Log(),
}
p.lock.log.RLock()
h.CreatedAt = p.logStart
p.lock.log.RUnlock()
return h
}

View File

@ -6,7 +6,7 @@ import (
"testing"
"time"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
"github.com/stretchr/testify/require"
)

View File

@ -5,7 +5,7 @@ import (
"errors"
"time"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
)
// Duration represents a time.Duration

191
ffmpeg/prelude/prelude.go Normal file
View File

@ -0,0 +1,191 @@
package prelude
import (
"regexp"
"strconv"
"strings"
)
// IO describes a single input or output stream as it appears in FFmpeg's
// default (non-JSON) startup output ("prelude").
type IO struct {
	// common
	Address  string  // URL or path the stream is read from / written to
	Format   string  // container or device format, e.g. "lavfi", "mpegts", "hls"
	Index    uint64  // input/output index (the X in "Stream #X:Y")
	Stream   uint64  // stream index within that input/output (the Y)
	Language string  // 3-letter language tag, "und" when FFmpeg prints none
	Type     string  // "video", "audio", or "subtitle" (lowercased)
	Codec    string  // codec name, e.g. "h264", "aac"
	Coder    string  // NOTE(review): not populated by Parse; presumably filled elsewhere
	Bitrate  float64 // kbps
	Duration float64 // sec

	// video
	FPS    float64
	Pixfmt string
	Width  uint64
	Height uint64

	// audio
	Sampling uint64 // Hz
	Layout   string // channel layout, e.g. "stereo"
	Channels uint64 // NOTE(review): not populated by Parse
}

// Patterns matching FFmpeg's default startup output. Compiled once at package
// level instead of on every Parse call.
var (
	// Input #0, lavfi, from 'testsrc=size=1280x720:rate=25':
	// Input #1, lavfi, from 'anullsrc=r=44100:cl=stereo':
	// Output #0, hls, to './data/testsrc.m3u8':
	reFormat = regexp.MustCompile(`^(Input|Output) #([0-9]+), (.*?), (from|to) '([^']+)`)

	// Duration: 00:01:02.28, start: 0.000000, bitrate: 5895 kb/s
	// Duration: N/A, start: 0.000000, bitrate: 5895 kb/s
	reDuration = regexp.MustCompile(`Duration: ([0-9]+):([0-9]+):([0-9]+)\.([0-9]+)`)

	// Stream #0:0: Video: rawvideo (RGB[24] / 0x18424752), rgb24, 1280x720 [SAR 1:1 DAR 16:9], 25 tbr, 25 tbn, 25 tbc
	// Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
	// Stream #0:1(eng): Audio: aac (LC), 44100 Hz, stereo, fltp, 64 kb/s
	// Stream #4:0[0x100]: Video: h264 (Main) ([27][0][0][0] / 0x001B), yuv420p(tv, smpte170m/bt709/bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 25 tbr, 90k tbn
	reStream                = regexp.MustCompile(`Stream #([0-9]+):([0-9]+)(?:\[[0-9a-fx]+\])?(?:\(([a-z]+)\))?: (Video|Audio|Subtitle): (.*)`)
	reStreamCodec           = regexp.MustCompile(`^([^\s,]+)`)
	reStreamVideoPixfmtSize = regexp.MustCompile(`, ([0-9A-Za-z]+)(\([^\)]+\))?, ([0-9]+)x([0-9]+)`)
	reStreamVideoFPS        = regexp.MustCompile(`, ([0-9]+(\.[0-9]+)?) fps`)
	reStreamAudio           = regexp.MustCompile(`, ([0-9]+) Hz, ([^,]+)`)
	reStreamBitrate         = regexp.MustCompile(`, ([0-9]+) kb/s`)

	// Stream mapping:
	//   Stream #0:0 -> #0:0 (rawvideo (native) -> h264 (libx264))
	reStreamMapping = regexp.MustCompile(`^Stream mapping:`)
	reStreamMap     = regexp.MustCompile(`^[\s]+Stream #[0-9]+:[0-9]+`)
)

// Parse parses the inputs and outputs from the default FFmpeg output. It
// returns a list of detected inputs and outputs as well as the number of
// output streams according to the "Stream mapping:" section.
func Parse(lines []string) (inputs, outputs []IO, noutputs int) {
	iotype := "" // "Input" or "Output": the section the current stream belongs to
	format := ""
	address := ""
	duration := 0.0
	streamMapping := false

	for _, line := range lines {
		if reStreamMapping.MatchString(line) {
			streamMapping = true
			continue
		}

		if streamMapping {
			// Each indented "Stream #X:Y ..." line maps one output
			// stream; the first non-matching line ends the section.
			if reStreamMap.MatchString(line) {
				noutputs++
			} else {
				streamMapping = false
			}

			continue
		}

		if matches := reFormat.FindStringSubmatch(line); matches != nil {
			// A new input/output header resets the per-section state.
			iotype = matches[1]
			format = matches[3]
			address = matches[5]
			duration = 0

			continue
		}

		if matches := reDuration.FindStringSubmatch(line); matches != nil {
			duration = parseDuration(matches)
			continue
		}

		if matches := reStream.FindStringSubmatch(line); matches != nil {
			st := parseStream(matches)

			// Attach the state of the enclosing input/output section.
			st.Address = address
			st.Format = format
			st.Duration = duration

			switch iotype {
			case "Input":
				inputs = append(inputs, st)
			case "Output":
				outputs = append(outputs, st)
			}
		}
	}

	return
}

// parseDuration converts the reDuration submatches (hours, minutes, seconds,
// fraction) into seconds.
func parseDuration(matches []string) float64 {
	d := 0.0

	if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
		d += x * 60 * 60 // hours
	}

	if x, err := strconv.ParseFloat(matches[2], 64); err == nil {
		d += x * 60 // minutes
	}

	if x, err := strconv.ParseFloat(matches[3], 64); err == nil {
		d += x // seconds
	}

	// The fraction may have any number of digits, so scale by its length.
	// FFmpeg normally prints centiseconds (two digits).
	if x, err := strconv.ParseFloat(matches[4], 64); err == nil {
		div := 1.0
		for range matches[4] {
			div *= 10
		}
		d += x / div
	}

	return d
}

// parseStream extracts the per-stream fields (index, language, type, codec,
// bitrate, and video/audio details) from the reStream submatches. Address,
// Format, and Duration are filled in by the caller.
func parseStream(matches []string) IO {
	st := IO{Language: "und"}

	if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
		st.Index = x
	}

	if x, err := strconv.ParseUint(matches[2], 10, 64); err == nil {
		st.Stream = x
	}

	if len(matches[3]) == 3 {
		st.Language = matches[3]
	}

	st.Type = strings.ToLower(matches[4])
	detail := matches[5]

	if m := reStreamCodec.FindStringSubmatch(detail); m != nil {
		st.Codec = m[1]
	}

	if m := reStreamBitrate.FindStringSubmatch(detail); m != nil {
		if x, err := strconv.ParseFloat(m[1], 64); err == nil {
			st.Bitrate = x
		}
	}

	switch st.Type {
	case "video":
		if m := reStreamVideoPixfmtSize.FindStringSubmatch(detail); m != nil {
			st.Pixfmt = m[1]
			if x, err := strconv.ParseUint(m[3], 10, 64); err == nil {
				st.Width = x
			}
			if x, err := strconv.ParseUint(m[4], 10, 64); err == nil {
				st.Height = x
			}
		}
		if m := reStreamVideoFPS.FindStringSubmatch(detail); m != nil {
			if x, err := strconv.ParseFloat(m[1], 64); err == nil {
				st.FPS = x
			}
		}
	case "audio":
		if m := reStreamAudio.FindStringSubmatch(detail); m != nil {
			if x, err := strconv.ParseUint(m[1], 10, 64); err == nil {
				st.Sampling = x
			}
			st.Layout = m[2]
		}
	}

	return st
}

View File

@ -0,0 +1,213 @@
package prelude
import (
"strings"
"testing"
"github.com/stretchr/testify/require"
)
// TestPrelude runs a captured FFmpeg startup log through Parse and verifies
// every detected input and output stream field by field.
func TestPrelude(t *testing.T) {
	// Raw FFmpeg stderr output covering lavfi, playout, mp4, and SRT/mpegts
	// inputs plus an HLS output and a two-line stream-mapping section.
	rawdata := `ffmpeg version 4.0.2 Copyright (c) 2000-2018 the FFmpeg developers
built with Apple LLVM version 9.1.0 (clang-902.0.39.2)
configuration: --prefix=/usr/local/Cellar/ffmpeg/4.0.2 --enable-shared --enable-pthreads --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-gpl --enable-libmp3lame --enable-libx264 --enable-libx265 --enable-libxvid --enable-opencl --enable-videotoolbox --disable-lzma
libavutil 56. 14.100 / 56. 14.100
libavcodec 58. 18.100 / 58. 18.100
libavformat 58. 12.100 / 58. 12.100
libavdevice 58. 3.100 / 58. 3.100
libavfilter 7. 16.100 / 7. 16.100
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 1.100 / 5. 1.100
libswresample 3. 1.100 / 3. 1.100
libpostproc 55. 1.100 / 55. 1.100
Input #0, lavfi, from 'testsrc=size=1280x720:rate=25':
  Duration: N/A, start: 0.000000, bitrate: N/A
    Stream #0:0: Video: rawvideo (RGB[24] / 0x18424752), rgb24, 1280x720 [SAR 1:1 DAR 16:9], 25 tbr, 25 tbn, 25 tbc
Input #1, lavfi, from 'anullsrc=r=44100:cl=stereo':
  Duration: N/A, start: 0.000000, bitrate: 705 kb/s
    Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
Input #2, playout, from 'playout:rtmp://l5gn74l5-vpu.livespotting.com/live/0chl6hu7_360?token=m5ZuiCQYRlIon8':
  Duration: N/A, start: 0.000000, bitrate: 265 kb/s
    Stream #2:0: Video: h264 (Constrained Baseline), yuvj420p(pc, progressive), 640x360 [SAR 1:1 DAR 16:9], 265 kb/s, 10 fps, 10 tbr, 1000k tbn, 20 tbc
Input #3, mov,mp4,m4a,3gp,3g2,mj2, from 'movie.mp4':
  Metadata:
    major_brand : isom
    minor_version : 512
    compatible_brands: isomiso2avc1mp41
    encoder : Lavf58.20.100
  Duration: 00:01:02.28, start: 0.000000, bitrate: 5895 kb/s
    Stream #3:0(eng): Video: h264 (Main) (avc1 / 0x31637661), yuvj420p(pc, bt709), 2560x1440 [SAR 1:1 DAR 16:9], 5894 kb/s, 23.98 fps, 25 tbr, 90k tbn, 50 tbc (default)
    Stream #3:1(por): Subtitle: subrip
Input #4, mpegts, from 'srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar':
  Duration: N/A, start: 71.786667, bitrate: N/A
  Program 1
    Metadata:
      service_name : Service01
      service_provider: FFmpeg
    Stream #4:0[0x100]: Video: h264 (Main) ([27][0][0][0] / 0x001B), yuv420p(tv, smpte170m/bt709/bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 25 tbr, 90k tbn
    Stream #4:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 162 kb/s
Stream mapping:
  Stream #0:0 -> #0:0 (rawvideo (native) -> h264 (libx264))
  Stream #1:0 -> #0:1 (pcm_u8 (native) -> aac (native))
Press [q] to stop, [?] for help
[libx264 @ 0x7fa96a800600] using SAR=1/1
[libx264 @ 0x7fa96a800600] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
[libx264 @ 0x7fa96a800600] profile Constrained Baseline, level 3.1
[libx264 @ 0x7fa96a800600] 264 - core 152 r2854 e9a5903 - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=50 keyint_min=5 scenecut=0 intra_refresh=0 rc=crf mbtree=0 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=0
[hls @ 0x7fa969803a00] Opening './data/testsrc5375.ts.tmp' for writing
Output #0, hls, to './data/testsrc.m3u8':
  Metadata:
    encoder : Lavf58.12.100
    Stream #0:0: Video: h264 (libx264), yuv420p(progressive), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 25 fps, 90k tbn, 25 tbc
    Metadata:
      encoder : Lavc58.18.100 libx264
    Side data:
      cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
    Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp, 64 kb/s
    Metadata:
      encoder : Lavc58.18.100 aac
[hls @ 0x7fa969803a00] Opening './data/testsrc5376.ts.tmp' for writing=0.872x
[hls @ 0x7fa969803a00] Opening './data/testsrc.m3u8.tmp' for writing
[hls @ 0x7fa969803a00] Opening './data/testsrc.m3u8.tmp' for writing
frame= 58 fps= 25 q=-1.0 Lsize=N/A time=00:00:02.32 bitrate=N/A speed=0.999x`

	data := strings.Split(rawdata, "\n")

	inputs, outputs, noutputs := Parse(data)

	// 7 input streams across 5 inputs, 2 output streams, and 2 output
	// streams according to the "Stream mapping:" section.
	require.Equal(t, 7, len(inputs))
	require.Equal(t, 2, len(outputs))
	require.Equal(t, 2, noutputs)

	// Input #0: lavfi test source, raw video, no duration or bitrate.
	i := inputs[0]
	require.Equal(t, "testsrc=size=1280x720:rate=25", i.Address)
	require.Equal(t, "lavfi", i.Format)
	require.Equal(t, uint64(0), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "video", i.Type)
	require.Equal(t, "rawvideo", i.Codec)
	require.Equal(t, 0.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, 0.0, i.FPS)
	require.Equal(t, "rgb24", i.Pixfmt)
	require.Equal(t, uint64(1280), i.Width)
	require.Equal(t, uint64(720), i.Height)

	// Input #1: lavfi null audio source with a per-stream bitrate.
	i = inputs[1]
	require.Equal(t, "anullsrc=r=44100:cl=stereo", i.Address)
	require.Equal(t, "lavfi", i.Format)
	require.Equal(t, uint64(1), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "audio", i.Type)
	require.Equal(t, "pcm_u8", i.Codec)
	require.Equal(t, 705.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, uint64(44100), i.Sampling)
	require.Equal(t, "stereo", i.Layout)

	// Input #2: playout/RTMP video with bitrate and fps.
	i = inputs[2]
	require.Equal(t, "playout:rtmp://l5gn74l5-vpu.livespotting.com/live/0chl6hu7_360?token=m5ZuiCQYRlIon8", i.Address)
	require.Equal(t, "playout", i.Format)
	require.Equal(t, uint64(2), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "video", i.Type)
	require.Equal(t, "h264", i.Codec)
	require.Equal(t, 265.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, 10.0, i.FPS)
	require.Equal(t, "yuvj420p", i.Pixfmt)
	require.Equal(t, uint64(640), i.Width)
	require.Equal(t, uint64(360), i.Height)

	// Input #3, stream 0: mp4 video with language tag and a finite duration.
	i = inputs[3]
	require.Equal(t, "movie.mp4", i.Address)
	require.Equal(t, "mov,mp4,m4a,3gp,3g2,mj2", i.Format)
	require.Equal(t, uint64(3), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "eng", i.Language)
	require.Equal(t, "video", i.Type)
	require.Equal(t, "h264", i.Codec)
	require.Equal(t, 5894.0, i.Bitrate)
	require.Equal(t, 62.28, i.Duration)
	require.Equal(t, 23.98, i.FPS)
	require.Equal(t, "yuvj420p", i.Pixfmt)
	require.Equal(t, uint64(2560), i.Width)
	require.Equal(t, uint64(1440), i.Height)

	// Input #3, stream 1: subtitle stream of the same mp4 input.
	i = inputs[4]
	require.Equal(t, "movie.mp4", i.Address)
	require.Equal(t, "mov,mp4,m4a,3gp,3g2,mj2", i.Format)
	require.Equal(t, uint64(3), i.Index)
	require.Equal(t, uint64(1), i.Stream)
	require.Equal(t, "por", i.Language)
	require.Equal(t, "subtitle", i.Type)
	require.Equal(t, "subrip", i.Codec)

	// Input #4, stream 0: SRT/mpegts video with a [0x100] PID suffix.
	i = inputs[5]
	require.Equal(t, "srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar", i.Address)
	require.Equal(t, "mpegts", i.Format)
	require.Equal(t, uint64(4), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "video", i.Type)
	require.Equal(t, "h264", i.Codec)
	require.Equal(t, 0.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, 0.0, i.FPS)
	require.Equal(t, "yuv420p", i.Pixfmt)
	require.Equal(t, uint64(1920), i.Width)
	require.Equal(t, uint64(1080), i.Height)

	// Input #4, stream 1: SRT/mpegts audio with a [0x101] PID suffix.
	i = inputs[6]
	require.Equal(t, "srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar", i.Address)
	require.Equal(t, "mpegts", i.Format)
	require.Equal(t, uint64(4), i.Index)
	require.Equal(t, uint64(1), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "audio", i.Type)
	require.Equal(t, "aac", i.Codec)
	require.Equal(t, 162.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, uint64(48000), i.Sampling)
	require.Equal(t, "stereo", i.Layout)

	// Output #0, stream 0: HLS video.
	i = outputs[0]
	require.Equal(t, "./data/testsrc.m3u8", i.Address)
	require.Equal(t, "hls", i.Format)
	require.Equal(t, uint64(0), i.Index)
	require.Equal(t, uint64(0), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "video", i.Type)
	require.Equal(t, "h264", i.Codec)
	require.Equal(t, 0.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, 25.0, i.FPS)
	require.Equal(t, "yuv420p", i.Pixfmt)
	require.Equal(t, uint64(1280), i.Width)
	require.Equal(t, uint64(720), i.Height)

	// Output #0, stream 1: HLS audio.
	i = outputs[1]
	require.Equal(t, "./data/testsrc.m3u8", i.Address)
	require.Equal(t, "hls", i.Format)
	require.Equal(t, uint64(0), i.Index)
	require.Equal(t, uint64(1), i.Stream)
	require.Equal(t, "und", i.Language)
	require.Equal(t, "audio", i.Type)
	require.Equal(t, "aac", i.Codec)
	require.Equal(t, 64.0, i.Bitrate)
	require.Equal(t, 0.0, i.Duration)
	require.Equal(t, uint64(44100), i.Sampling)
	require.Equal(t, "stereo", i.Layout)
}

View File

@ -2,14 +2,13 @@ package probe
import (
"encoding/json"
"regexp"
"strconv"
"strings"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/process"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/ffmpeg/prelude"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/process"
"github.com/datarhei/core/v16/restream/app"
)
type Parser interface {
@ -84,128 +83,32 @@ func (p *prober) parseJSON(line string) {
}
func (p *prober) parseDefault() {
// Input #0, lavfi, from 'testsrc=size=1280x720:rate=25':
// Input #1, lavfi, from 'anullsrc=r=44100:cl=stereo':
// Output #0, hls, to './data/testsrc.m3u8':
reFormat := regexp.MustCompile(`^Input #([0-9]+), (.*?), (from|to) '([^']+)`)
lines := make([]string, len(p.data))
// Duration: 00:01:02.28, start: 0.000000, bitrate: 5895 kb/s
// Duration: N/A, start: 0.000000, bitrate: 5895 kb/s
reDuration := regexp.MustCompile(`Duration: ([0-9]+):([0-9]+):([0-9]+)\.([0-9]+)`)
for i, line := range p.data {
lines[i] = line.Data
}
// Stream #0:0: Video: rawvideo (RGB[24] / 0x18424752), rgb24, 1280x720 [SAR 1:1 DAR 16:9], 25 tbr, 25 tbn, 25 tbc
// Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
// Stream #0:0: Video: h264 (libx264), yuv420p(progressive), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 25 fps, 90k tbn, 25 tbc
// Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp, 64 kb/s
reStream := regexp.MustCompile(`Stream #([0-9]+):([0-9]+)(?:\(([a-z]+)\))?: (Video|Audio|Subtitle): (.*)`)
reStreamCodec := regexp.MustCompile(`^([^\s,]+)`)
reStreamVideoPixfmtSize := regexp.MustCompile(`, ([0-9A-Za-z]+)(\([^\)]+\))?, ([0-9]+)x([0-9]+)`)
reStreamVideoFPS := regexp.MustCompile(`, ([0-9]+(\.[0-9]+)?) fps`)
reStreamAudio := regexp.MustCompile(`, ([0-9]+) Hz, ([^,]+)`)
reStreamBitrate := regexp.MustCompile(`, ([0-9]+) kb/s`)
inputs, _, _ := prelude.Parse(lines)
format := ""
address := ""
var duration float64 = 0.0
p.inputs = make([]probeIO, len(inputs))
for _, line := range p.data {
if matches := reFormat.FindStringSubmatch(line.Data); matches != nil {
format = matches[2]
address = matches[4]
continue
}
if matches := reDuration.FindStringSubmatch(line.Data); matches != nil {
duration = 0.0
// hours
if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
duration += x * 60 * 60
}
// minutes
if x, err := strconv.ParseFloat(matches[2], 64); err == nil {
duration += x * 60
}
// seconds
if x, err := strconv.ParseFloat(matches[3], 64); err == nil {
duration += x
}
// fractions
if x, err := strconv.ParseFloat(matches[4], 64); err == nil {
duration += x / 100
}
continue
}
if matches := reStream.FindStringSubmatch(line.Data); matches != nil {
io := probeIO{}
io.Address = address
io.Format = format
io.Duration = duration
if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
io.Index = x
}
if x, err := strconv.ParseUint(matches[2], 10, 64); err == nil {
io.Stream = x
}
io.Language = "und"
if len(matches[3]) == 3 {
io.Language = matches[3]
}
io.Type = strings.ToLower(matches[4])
streamDetail := matches[5]
if matches = reStreamCodec.FindStringSubmatch(streamDetail); matches != nil {
io.Codec = matches[1]
}
if matches = reStreamBitrate.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
io.Bitrate = x
}
}
if io.Type == "video" {
if matches = reStreamVideoPixfmtSize.FindStringSubmatch(streamDetail); matches != nil {
io.Pixfmt = matches[1]
if x, err := strconv.ParseUint(matches[3], 10, 64); err == nil {
io.Width = x
}
if x, err := strconv.ParseUint(matches[4], 10, 64); err == nil {
io.Height = x
}
}
if matches = reStreamVideoFPS.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseFloat(matches[1], 64); err == nil {
io.FPS = x
}
}
} else if io.Type == "audio" {
if matches = reStreamAudio.FindStringSubmatch(streamDetail); matches != nil {
if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
io.Sampling = x
}
io.Layout = matches[2]
}
}
p.inputs = append(p.inputs, io)
}
for i, input := range inputs {
p.inputs[i].Address = input.Address
p.inputs[i].Format = input.Format
p.inputs[i].Duration = input.Duration
p.inputs[i].Index = input.Index
p.inputs[i].Stream = input.Stream
p.inputs[i].Language = input.Language
p.inputs[i].Type = input.Type
p.inputs[i].Codec = input.Codec
p.inputs[i].Bitrate = input.Bitrate
p.inputs[i].Pixfmt = input.Pixfmt
p.inputs[i].Width = input.Width
p.inputs[i].Height = input.Height
p.inputs[i].FPS = input.FPS
p.inputs[i].Sampling = input.Sampling
p.inputs[i].Layout = input.Layout
}
}

View File

@ -3,6 +3,8 @@ package probe
import (
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestProber(t *testing.T) {
@ -38,6 +40,14 @@ Input #3, mov,mp4,m4a,3gp,3g2,mj2, from 'movie.mp4':
Duration: 00:01:02.28, start: 0.000000, bitrate: 5895 kb/s
Stream #3:0(eng): Video: h264 (Main) (avc1 / 0x31637661), yuvj420p(pc, bt709), 2560x1440 [SAR 1:1 DAR 16:9], 5894 kb/s, 23.98 fps, 25 tbr, 90k tbn, 50 tbc (default)
Stream #3:1(por): Subtitle: subrip
Input #4, mpegts, from 'srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar':
Duration: N/A, start: 71.786667, bitrate: N/A
Program 1
Metadata:
service_name : Service01
service_provider: FFmpeg
Stream #4:0[0x100]: Video: h264 (Main) ([27][0][0][0] / 0x001B), yuv420p(tv, smpte170m/bt709/bt709, progressive), 1920x1080 [SAR 1:1 DAR 16:9], 25 tbr, 90k tbn
Stream #4:1[0x101]: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 162 kb/s
Stream mapping:
Stream #0:0 -> #0:0 (rawvideo (native) -> h264 (libx264))
Stream #1:0 -> #0:1 (pcm_u8 (native) -> aac (native))
@ -51,230 +61,107 @@ Press [q] to stop, [?] for help`
prober.ResetStats()
if len(prober.inputs) != 5 {
t.Errorf("#inputs: want=5, have=%d\n", len(prober.inputs))
return
}
require.Equal(t, 7, len(prober.inputs))
i := prober.inputs[0]
if i.Address != "testsrc=size=1280x720:rate=25" {
t.Errorf("#input0.address: want=testsrc=size=1280x720:rate=25, have=%s\n", i.Address)
}
if i.Format != "lavfi" {
t.Errorf("#input0.format: want=lavfi, have=%s\n", i.Format)
}
if i.Index != 0 {
t.Errorf("#input0.index: want=0, have=%d\n", i.Index)
}
if i.Stream != 0 {
t.Errorf("#input0.stream: want=0, have=%d\n", i.Stream)
}
if i.Language != "und" {
t.Errorf("#input0.language: want=und, have=%s\n", i.Language)
}
if i.Type != "video" {
t.Errorf("#input0.type: want=video, have=%s\n", i.Type)
}
if i.Codec != "rawvideo" {
t.Errorf("#input0.codec: want=rawvideo, have=%s\n", i.Codec)
}
if i.Bitrate != 0 {
t.Errorf("#input0.bitrate: want=0, have=%f\n", i.Bitrate)
}
if i.Duration != 0 {
t.Errorf("#input0.duration: want=0, have=%f\n", i.Duration)
}
if i.FPS != 0 {
t.Errorf("#input0.fps: want=0, have=%f\n", i.FPS)
}
if i.Pixfmt != "rgb24" {
t.Errorf("#input0.pixfmt: want=rgb24, have=%s\n", i.Pixfmt)
}
if i.Width != 1280 {
t.Errorf("#input0.width: want=1280, have=%d\n", i.Width)
}
if i.Height != 720 {
t.Errorf("#input0.height: want=720, have=%d\n", i.Height)
}
require.Equal(t, "testsrc=size=1280x720:rate=25", i.Address)
require.Equal(t, "lavfi", i.Format)
require.Equal(t, uint64(0), i.Index)
require.Equal(t, uint64(0), i.Stream)
require.Equal(t, "und", i.Language)
require.Equal(t, "video", i.Type)
require.Equal(t, "rawvideo", i.Codec)
require.Equal(t, 0.0, i.Bitrate)
require.Equal(t, 0.0, i.Duration)
require.Equal(t, 0.0, i.FPS)
require.Equal(t, "rgb24", i.Pixfmt)
require.Equal(t, uint64(1280), i.Width)
require.Equal(t, uint64(720), i.Height)
i = prober.inputs[1]
if i.Address != "anullsrc=r=44100:cl=stereo" {
t.Errorf("#input1.address: want=anullsrc=r=44100:cl=stereo, have=%s\n", i.Address)
}
if i.Format != "lavfi" {
t.Errorf("#input1.format: want=lavfi, have=%s\n", i.Format)
}
if i.Index != 1 {
t.Errorf("#input1.index: want=1, have=%d\n", i.Index)
}
if i.Stream != 0 {
t.Errorf("#input1.stream: want=0, have=%d\n", i.Stream)
}
if i.Language != "und" {
t.Errorf("#input1.language: want=und, have=%s\n", i.Language)
}
if i.Type != "audio" {
t.Errorf("#input1.type: want=audio, have=%s\n", i.Type)
}
if i.Codec != "pcm_u8" {
t.Errorf("#input1.codec: want=pcm_u8, have=%s\n", i.Codec)
}
if i.Bitrate != 705 {
t.Errorf("#input1.bitrate: want=705, have=%f\n", i.Bitrate)
}
if i.Duration != 0 {
t.Errorf("#input1.duration: want=0, have=%f\n", i.Duration)
}
if i.Sampling != 44100 {
t.Errorf("#input1.sampling: want=44100, have=%d\n", i.Sampling)
}
if i.Layout != "stereo" {
t.Errorf("#input1.layout: want=stereo, have=%s\n", i.Layout)
}
require.Equal(t, "anullsrc=r=44100:cl=stereo", i.Address)
require.Equal(t, "lavfi", i.Format)
require.Equal(t, uint64(1), i.Index)
require.Equal(t, uint64(0), i.Stream)
require.Equal(t, "und", i.Language)
require.Equal(t, "audio", i.Type)
require.Equal(t, "pcm_u8", i.Codec)
require.Equal(t, 705.0, i.Bitrate)
require.Equal(t, 0.0, i.Duration)
require.Equal(t, uint64(44100), i.Sampling)
require.Equal(t, "stereo", i.Layout)
i = prober.inputs[2]
if i.Address != "playout:rtmp://l5gn74l5-vpu.livespotting.com/live/0chl6hu7_360?token=m5ZuiCQYRlIon8" {
t.Errorf("#input2.address: want=playout:rtmp://l5gn74l5-vpu.livespotting.com/live/0chl6hu7_360?token=m5ZuiCQYRlIon8, have=%s\n", i.Address)
}
if i.Format != "playout" {
t.Errorf("#input2.format: want=playout, have=%s\n", i.Format)
}
if i.Index != 2 {
t.Errorf("#input2.index: want=2, have=%d\n", i.Index)
}
if i.Stream != 0 {
t.Errorf("#input2.stream: want=0, have=%d\n", i.Stream)
}
if i.Language != "und" {
t.Errorf("#input2.language: want=und, have=%s\n", i.Language)
}
if i.Type != "video" {
t.Errorf("#input2.type: want=video, have=%s\n", i.Type)
}
if i.Codec != "h264" {
t.Errorf("#input2.codec: want=h264, have=%s\n", i.Codec)
}
if i.Bitrate != 265 {
t.Errorf("#input2.bitrate: want=265, have=%f\n", i.Bitrate)
}
if i.Duration != 0 {
t.Errorf("#input2.duration: want=0, have=%f\n", i.Duration)
}
if i.FPS != 10 {
t.Errorf("#input2.fps: want=10, have=%f\n", i.FPS)
}
if i.Pixfmt != "yuvj420p" {
t.Errorf("#input2.pixfmt: want=yuvj420p, have=%s\n", i.Pixfmt)
}
if i.Width != 640 {
t.Errorf("#input2.width: want=640, have=%d\n", i.Width)
}
if i.Height != 360 {
t.Errorf("#input2.height: want=360, have=%d\n", i.Height)
}
require.Equal(t, "playout:rtmp://l5gn74l5-vpu.livespotting.com/live/0chl6hu7_360?token=m5ZuiCQYRlIon8", i.Address)
require.Equal(t, "playout", i.Format)
require.Equal(t, uint64(2), i.Index)
require.Equal(t, uint64(0), i.Stream)
require.Equal(t, "und", i.Language)
require.Equal(t, "video", i.Type)
require.Equal(t, "h264", i.Codec)
require.Equal(t, 265.0, i.Bitrate)
require.Equal(t, 0.0, i.Duration)
require.Equal(t, 10.0, i.FPS)
require.Equal(t, "yuvj420p", i.Pixfmt)
require.Equal(t, uint64(640), i.Width)
require.Equal(t, uint64(360), i.Height)
i = prober.inputs[3]
if i.Address != "movie.mp4" {
t.Errorf("#input3.address: want=movie.mp4, have=%s\n", i.Address)
}
if i.Format != "mov,mp4,m4a,3gp,3g2,mj2" {
t.Errorf("#input3.format: want=mov,mp4,m4a,3gp,3g2,mj2, have=%s\n", i.Format)
}
if i.Index != 3 {
t.Errorf("#input3.index: want=3, have=%d\n", i.Index)
}
if i.Stream != 0 {
t.Errorf("#input3.stream: want=0, have=%d\n", i.Stream)
}
if i.Language != "eng" {
t.Errorf("#input3.language: want=eng, have=%s\n", i.Language)
}
if i.Type != "video" {
t.Errorf("#input3.type: want=video, have=%s\n", i.Type)
}
if i.Codec != "h264" {
t.Errorf("#input3.codec: want=h264, have=%s\n", i.Codec)
}
if i.Bitrate != 5894 {
t.Errorf("#input3.bitrate: want=5894, have=%f\n", i.Bitrate)
}
if i.Duration != 62.28 {
t.Errorf("#input3.duration: want=62.82, have=%f\n", i.Duration)
}
if i.FPS != 23.98 {
t.Errorf("#input3.fps: want=23.98, have=%f\n", i.FPS)
}
if i.Pixfmt != "yuvj420p" {
t.Errorf("#input3.pixfmt: want=yuvj420p, have=%s\n", i.Pixfmt)
}
if i.Width != 2560 {
t.Errorf("#input3.width: want=2560, have=%d\n", i.Width)
}
if i.Height != 1440 {
t.Errorf("#input3.height: want=1440, have=%d\n", i.Height)
}
require.Equal(t, "movie.mp4", i.Address)
require.Equal(t, "mov,mp4,m4a,3gp,3g2,mj2", i.Format)
require.Equal(t, uint64(3), i.Index)
require.Equal(t, uint64(0), i.Stream)
require.Equal(t, "eng", i.Language)
require.Equal(t, "video", i.Type)
require.Equal(t, "h264", i.Codec)
require.Equal(t, 5894.0, i.Bitrate)
require.Equal(t, 62.28, i.Duration)
require.Equal(t, 23.98, i.FPS)
require.Equal(t, "yuvj420p", i.Pixfmt)
require.Equal(t, uint64(2560), i.Width)
require.Equal(t, uint64(1440), i.Height)
i = prober.inputs[4]
if i.Language != "por" {
t.Errorf("#input4.language: want=por, have=%s\n", i.Language)
}
require.Equal(t, "movie.mp4", i.Address)
require.Equal(t, "mov,mp4,m4a,3gp,3g2,mj2", i.Format)
require.Equal(t, uint64(3), i.Index)
require.Equal(t, uint64(1), i.Stream)
require.Equal(t, "por", i.Language)
require.Equal(t, "subtitle", i.Type)
require.Equal(t, "subrip", i.Codec)
if i.Type != "subtitle" {
t.Errorf("#input4.type: want=subtitle, have=%s\n", i.Type)
}
i = prober.inputs[5]
if i.Codec != "subrip" {
t.Errorf("#input4.codec: want=subtip, have=%s\n", i.Codec)
}
require.Equal(t, "srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar", i.Address)
require.Equal(t, "mpegts", i.Format)
require.Equal(t, uint64(4), i.Index)
require.Equal(t, uint64(0), i.Stream)
require.Equal(t, "und", i.Language)
require.Equal(t, "video", i.Type)
require.Equal(t, "h264", i.Codec)
require.Equal(t, 0.0, i.Bitrate)
require.Equal(t, 0.0, i.Duration)
require.Equal(t, 0.0, i.FPS)
require.Equal(t, "yuv420p", i.Pixfmt)
require.Equal(t, uint64(1920), i.Width)
require.Equal(t, uint64(1080), i.Height)
i = prober.inputs[6]
require.Equal(t, "srt://localhost:6000?mode=caller&transtype=live&streamid=#!:m=request,r=ingest/ad045490-8233-4f31-a296-ea5771a340ac&passphrase=foobarfoobar", i.Address)
require.Equal(t, "mpegts", i.Format)
require.Equal(t, uint64(4), i.Index)
require.Equal(t, uint64(1), i.Stream)
require.Equal(t, "und", i.Language)
require.Equal(t, "audio", i.Type)
require.Equal(t, "aac", i.Codec)
require.Equal(t, 162.0, i.Bitrate)
require.Equal(t, 0.0, i.Duration)
require.Equal(t, uint64(48000), i.Sampling)
require.Equal(t, "stereo", i.Layout)
}

View File

@ -1,7 +1,7 @@
package probe
import (
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
)
type probeIO struct {

View File

@ -10,8 +10,6 @@ import (
// DevicesV4L returns a list of available V4L devices
func DevicesV4L() ([]HWDevice, error) {
devices := []HWDevice{}
buf := bytes.NewBuffer(nil)
cmd := exec.Command("v4l2-ctl", "--list-devices")
@ -19,7 +17,7 @@ func DevicesV4L() ([]HWDevice, error) {
cmd.Stdout = buf
cmd.Run()
devices = parseV4LDevices(buf)
devices := parseV4LDevices(buf)
return devices, nil
}

92
go.mod
View File

@ -1,39 +1,83 @@
module github.com/datarhei/core
module github.com/datarhei/core/v16
go 1.16
go 1.18
require (
github.com/99designs/gqlgen v0.17.9
github.com/alecthomas/jsonschema v0.0.0-20211228220459-151e3c21f49d
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751
github.com/99designs/gqlgen v0.17.12
github.com/atrox/haikunatorgo/v2 v2.0.1
github.com/datarhei/gosrt v0.1.2
github.com/datarhei/joy4 v0.0.0-20210125162555-2102a8289cce
github.com/go-openapi/spec v0.20.6 // indirect
github.com/go-openapi/swag v0.21.1 // indirect
github.com/go-playground/validator/v10 v10.11.0
github.com/golang-jwt/jwt/v4 v4.4.1
github.com/golang-jwt/jwt/v4 v4.4.2
github.com/google/uuid v1.3.0
github.com/iancoleman/orderedmap v0.2.0 // indirect
github.com/invopop/jsonschema v0.4.0
github.com/joho/godotenv v1.4.0
github.com/labstack/echo/v4 v4.7.2
github.com/lithammer/shortuuid/v4 v4.0.0
github.com/lufia/plan9stats v0.0.0-20220517141722-cf486979b281 // indirect
github.com/mattn/go-isatty v0.0.14
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
github.com/prep/average v0.0.0-20200506183628-d26c465f48c3
github.com/prometheus/client_golang v1.12.2
github.com/prometheus/common v0.34.0 // indirect
github.com/shirou/gopsutil/v3 v3.22.4
github.com/stretchr/testify v1.7.1
github.com/swaggo/echo-swagger v1.3.2
github.com/swaggo/swag v1.8.2
github.com/tklauser/numcpus v0.5.0 // indirect
github.com/vektah/gqlparser/v2 v2.4.4
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/shirou/gopsutil/v3 v3.22.6
github.com/stretchr/testify v1.7.5
github.com/swaggo/echo-swagger v1.3.3
github.com/swaggo/swag v1.8.3
github.com/vektah/gqlparser/v2 v2.4.6
github.com/xeipuuv/gojsonschema v1.2.0
golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3
golang.org/x/net v0.0.0-20220526153639-5463443f8c37 // indirect
golang.org/x/time v0.0.0-20220411224347-583f2d630306 // indirect
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4
)
require (
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/agnivade/levenshtein v1.1.1 // indirect
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.20.0 // indirect
github.com/go-openapi/spec v0.20.6 // indirect
github.com/go-openapi/swag v0.21.1 // indirect
github.com/go-playground/locales v0.14.0 // indirect
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/iancoleman/orderedmap v0.2.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/labstack/gommon v0.3.1 // indirect
github.com/leodido/go-urn v1.2.1 // indirect
github.com/lufia/plan9stats v0.0.0-20220517141722-cf486979b281 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/matryer/moq v0.2.7 // indirect
github.com/mattn/go-colorable v0.1.12 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/power-devops/perfstat v0.0.0-20220216144756-c35f1ee13d7c // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.35.0 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect
github.com/tklauser/go-sysconf v0.3.10 // indirect
github.com/tklauser/numcpus v0.5.0 // indirect
github.com/urfave/cli/v2 v2.8.1 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/fasttemplate v1.2.1 // indirect
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect
github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
github.com/yusufpapurcu/wmi v1.2.2 // indirect
golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 // indirect
golang.org/x/sys v0.0.0-20220708085239-5a0f0661e09d // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/time v0.0.0-20220609170525-579cf78fd858 // indirect
golang.org/x/tools v0.1.11 // indirect
google.golang.org/protobuf v1.28.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)

72
go.sum
View File

@ -31,8 +31,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/99designs/gqlgen v0.17.9 h1:0XvE3nMaTaLYq7XbBz1MY0t9BFcntydlt1zzNa4eY+4=
github.com/99designs/gqlgen v0.17.9/go.mod h1:PThAZAK9t2pAat7g8QdSI4dCBMOhBO+t2qj+0jvDqps=
github.com/99designs/gqlgen v0.17.12 h1:lH/H5dTYCY5eLNRKXeq22l0wFMavpOnN6v9GAIw+fxY=
github.com/99designs/gqlgen v0.17.12/go.mod h1:w1brbeOdqVyNJI553BGwtwdVcYu1LKeYE1opLWN9RgQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
@ -44,10 +44,7 @@ github.com/agiledragon/gomonkey/v2 v2.3.1/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaW
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8=
github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo=
github.com/alecthomas/jsonschema v0.0.0-20211228220459-151e3c21f49d h1:4BQNwS4T13UU3Yee4GfzZH3Q9SNpKeJvLigfw8fDjX0=
github.com/alecthomas/jsonschema v0.0.0-20211228220459-151e3c21f49d/go.mod h1:/n6+1/DWPltRLWL/VKyUxg6tzsl5kHUCcraimt4vr60=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@ -58,6 +55,8 @@ github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
github.com/atrox/haikunatorgo/v2 v2.0.1 h1:FCVx2KL2YvZtI1rI9WeEHxeLRrKGr0Dd4wfCJiUXupc=
github.com/atrox/haikunatorgo/v2 v2.0.1/go.mod h1:BBQmx2o+1Z5poziaHRgddAZKOpijwfKdAmMnSYlFK70=
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c h1:8XZeJrs4+ZYhJeJ2aZxADI2tGADS15AzIF8MQ8XAhT4=
github.com/benburkert/openpgp v0.0.0-20160410205803-c2471f86866c/go.mod h1:x1vxHcL/9AVzuk5HOloOEPrtJY0MaalYr78afXZ+pWI=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
@ -75,6 +74,8 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:ma
github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU=
github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/datarhei/gosrt v0.1.2 h1:rGOP2Xkbi52z4tLzBwCBw2TKt7BrfTO2LmEVY+yWf1M=
github.com/datarhei/gosrt v0.1.2/go.mod h1:IftDbZGIIC9OvQO5on5ZpU0iB/JX/PFOqGXORbwHYQM=
github.com/datarhei/joy4 v0.0.0-20210125162555-2102a8289cce h1:bg/OE9GfGK6d/XbqiMq8YaGQzw1Ul3Y3qiGMzU1G4HQ=
github.com/datarhei/joy4 v0.0.0-20210125162555-2102a8289cce/go.mod h1:Jcw/6jZDQQmPx8A7INEkXmuEF7E9jjBbSTfVSLwmiQw=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -125,8 +126,8 @@ github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/me
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
github.com/golang-jwt/jwt/v4 v4.4.1 h1:pC5DB52sCeK48Wlb9oPcdhnjkz1TKt1D/P7WKJ0kUcQ=
github.com/golang-jwt/jwt/v4 v4.4.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.4.2 h1:rcc4lwaZgFMCZ5jxF9ABolDcIHdBytAFgqFPbSJQAYs=
github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@ -167,7 +168,6 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@ -196,6 +196,8 @@ github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0
github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA=
github.com/iancoleman/orderedmap v0.2.0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/invopop/jsonschema v0.4.0 h1:Yuy/unfgCnfV5Wl7H0HgFufp/rlurqPOOuacqyByrws=
github.com/invopop/jsonschema v0.4.0/go.mod h1:O9uiLokuu0+MGFlyiaqtWxwqJm41/+8Nj0lD7A36YH0=
github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg=
github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
@ -269,6 +271,7 @@ github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsK
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
@ -292,8 +295,8 @@ github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
github.com/prometheus/common v0.34.0 h1:RBmGO9d/FVjqHT0yUGQwBJhkwKV+wPCn7KGpvfab0uE=
github.com/prometheus/common v0.34.0/go.mod h1:gB3sOl7P0TvJabZpLY5uQMpUqRCPPCyRLCZYc7JZTNE=
github.com/prometheus/common v0.35.0 h1:Eyr+Pw2VymWejHqCugNaQXkAi6KayVNxaHeu6khmFBE=
github.com/prometheus/common v0.35.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
@ -309,8 +312,8 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shirou/gopsutil/v3 v3.22.4 h1:srAQaiX6jX/cYL6q29aE0m8lOskT9CurZ9N61YR3yoI=
github.com/shirou/gopsutil/v3 v3.22.4/go.mod h1:D01hZJ4pVHPpCTZ3m3T2+wDF2YAGfd+H4ifUguaQzHM=
github.com/shirou/gopsutil/v3 v3.22.6 h1:FnHOFOh+cYAM0C30P+zysPISzlknLC5Z1G4EAElznfQ=
github.com/shirou/gopsutil/v3 v3.22.6/go.mod h1:EdIubSnZhbAvBS1yJ7Xi+AShB/hxwLHOMz4MCYz7yMs=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@ -319,21 +322,24 @@ github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/swaggo/echo-swagger v1.3.2 h1:D+3BNl8JMC6pKhA+egjh4LGI0jNesqlt77WahTHfTXQ=
github.com/swaggo/echo-swagger v1.3.2/go.mod h1:Sjj0O7Puf939HXhxhfZdR49MIrtcg3mLgdg3/qVcbyw=
github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 h1:+iNTcqQJy0OZ5jk6a5NLib47eqXK8uYcPX+O4+cBpEM=
github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.7.5 h1:s5PTfem8p8EbKQOctVV53k6jCJt3UX4IEJzwh+C324Q=
github.com/stretchr/testify v1.7.5/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/swaggo/echo-swagger v1.3.3 h1:Fx8kQ8IcIIEL3ZE20wzvcT8gFnPo/4U+fsnS3I1wvCw=
github.com/swaggo/echo-swagger v1.3.3/go.mod h1:vbKcEBeJgOexLuPcsdZhrRAV508fsE79xaKIqmvse98=
github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc=
github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w=
github.com/swaggo/swag v1.8.1/go.mod h1:ugemnJsPZm/kRwFUnzBlbHRd0JY9zE1M4F+uy2pAaPQ=
github.com/swaggo/swag v1.8.2 h1:D4aBiVS2a65zhyk3WFqOUz7Rz0sOaUcgeErcid5uGL4=
github.com/swaggo/swag v1.8.2/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg=
github.com/swaggo/swag v1.8.3 h1:3pZSSCQ//gAH88lfmxM3Cd1+JCsxV8Md6f36b9hrZ5s=
github.com/swaggo/swag v1.8.3/go.mod h1:jMLeXOOmYyjk8PvHTsXBdrubsNd9gUJTTCzL5iBnseg=
github.com/tklauser/go-sysconf v0.3.10 h1:IJ1AZGZRWbY8T5Vfk04D9WOA5WSejdflXxP03OUqALw=
github.com/tklauser/go-sysconf v0.3.10/go.mod h1:C8XykCvCb+Gn0oNCWPIlcb0RuglQTYaQ2hGm7jmxEFk=
github.com/tklauser/numcpus v0.4.0/go.mod h1:1+UI3pD8NW14VMwdgJNJ1ESk2UnwhAnz5hMwiKKqXCQ=
@ -346,8 +352,8 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.4.4 h1:rh9hwZ5Jx9cCq88zXz2YHKmuQBuwY1JErHU8GywFdwE=
github.com/vektah/gqlparser/v2 v2.4.4/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
github.com/vektah/gqlparser/v2 v2.4.6 h1:Yjzp66g6oVq93Jihbi0qhGnf/6zIWjcm8H6gA27zstE=
github.com/vektah/gqlparser/v2 v2.4.6/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo=
github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
@ -379,8 +385,9 @@ golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e h1:T8NU3HyQ8ClP4SEE+KbFlg6n0NhuTsN4MyznaarGsZM=
golang.org/x/crypto v0.0.0-20220525230936-793ad666bf5e/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -413,8 +420,9 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -452,8 +460,8 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220526153639-5463443f8c37 h1:lUkvobShwKsOesNfWWlCS5q7fnbG1MEliIzwu886fn8=
golang.org/x/net v0.0.0-20220526153639-5463443f8c37/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220706163947-c90051bbdb60 h1:8NSylCMxLW4JvserAndSgFL7aPli6A68yf0bYFTcWCM=
golang.org/x/net v0.0.0-20220706163947-c90051bbdb60/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -521,8 +529,9 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a h1:dGzPydgVsqGcTRVwiLJ1jVbufYwmzD3LfVPLKsKg+0k=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220708085239-5a0f0661e09d h1:/m5NbqQelATgoSPVC2Z23sR4kVNokFwDDyWh/3rGY+I=
golang.org/x/sys v0.0.0-20220708085239-5a0f0661e09d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -537,8 +546,8 @@ golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxb
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w=
golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220609170525-579cf78fd858 h1:Dpdu/EMxGMFgq0CeYMh4fazTD2vtlZRYE7wyynxJb9U=
golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -582,12 +591,12 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.11 h1:loJ25fNOEhSXfHrpoGj91eCUThwdNX6u24rO1xnNteY=
golang.org/x/tools v0.1.11/go.mod h1:SgwaegtQh8clINPpECJMqnxLv9I09HLqnW3RMqW0CA4=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
@ -685,8 +694,9 @@ gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View File

@ -1,7 +1,7 @@
package api
import (
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
)
type AVstreamIO struct {

View File

@ -3,7 +3,7 @@ package api
import (
"time"
"github.com/datarhei/core/config"
"github.com/datarhei/core/v16/config"
)
// ConfigData embeds config.Data
@ -41,6 +41,7 @@ func (rscfg *SetConfig) MergeTo(cfg *config.Config) {
cfg.TLS = rscfg.TLS
cfg.Storage = rscfg.Storage
cfg.RTMP = rscfg.RTMP
cfg.SRT = rscfg.SRT
cfg.FFmpeg = rscfg.FFmpeg
cfg.Playout = rscfg.Playout
cfg.Debug = rscfg.Debug

View File

@ -4,7 +4,7 @@ import (
"fmt"
"time"
"github.com/datarhei/core/monitor"
"github.com/datarhei/core/v16/monitor"
)
type MetricsQueryMetric struct {

View File

@ -1,6 +1,6 @@
package api
import "github.com/datarhei/core/playout"
import "github.com/datarhei/core/v16/playout"
type PlayoutStatusIO struct {
State string `json:"state" enums:"running,idle" jsonschema:"enum=running,enum=idle"`

View File

@ -3,7 +3,7 @@ package api
import (
"encoding/json"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
)
// ProbeIO represents a stream of a probed file
@ -17,11 +17,11 @@ type ProbeIO struct {
Type string `json:"type"`
Codec string `json:"codec"`
Coder string `json:"coder"`
Bitrate json.Number `json:"bitrate_kbps" swaggertype:"number"`
Duration json.Number `json:"duration_sec" swaggertype:"number"`
Bitrate json.Number `json:"bitrate_kbps" swaggertype:"number" jsonschema:"type=number"`
Duration json.Number `json:"duration_sec" swaggertype:"number" jsonschema:"type=number"`
// video
FPS json.Number `json:"fps" swaggertype:"number"`
FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
Pixfmt string `json:"pix_fmt"`
Width uint64 `json:"width"`
Height uint64 `json:"height"`

View File

@ -4,7 +4,7 @@ import (
"encoding/json"
"strconv"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
"github.com/lithammer/shortuuid/v4"
)
@ -239,7 +239,7 @@ type ProcessState struct {
LastLog string `json:"last_logline"`
Progress *Progress `json:"progress"`
Memory uint64 `json:"memory_bytes"`
CPU json.Number `json:"cpu_usage" swaggertype:"number"`
CPU json.Number `json:"cpu_usage" swaggertype:"number" jsonschema:"type=number"`
Command []string `json:"command"`
}

View File

@ -4,7 +4,7 @@ import (
"encoding/json"
"fmt"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/restream/app"
)
// ProgressIO represents the progress of an ffmpeg input or output
@ -20,15 +20,15 @@ type ProgressIO struct {
Codec string `json:"codec"`
Coder string `json:"coder"`
Frame uint64 `json:"frame"`
FPS json.Number `json:"fps" swaggertype:"number"`
FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
Packet uint64 `json:"packet"`
PPS json.Number `json:"pps" swaggertype:"number"`
Size uint64 `json:"size_kb"` // kbytes
Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number"` // kbit/s
PPS json.Number `json:"pps" swaggertype:"number" jsonschema:"type=number"`
Size uint64 `json:"size_kb"` // kbytes
Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
// Video
Pixfmt string `json:"pix_fmt,omitempty"`
Quantizer json.Number `json:"q,omitempty" swaggertype:"number"`
Quantizer json.Number `json:"q,omitempty" swaggertype:"number" jsonschema:"type=number"`
Width uint64 `json:"width,omitempty"`
Height uint64 `json:"height,omitempty"`
@ -81,12 +81,12 @@ type Progress struct {
Output []ProgressIO `json:"outputs"`
Frame uint64 `json:"frame"`
Packet uint64 `json:"packet"`
FPS json.Number `json:"fps" swaggertype:"number"`
Quantizer json.Number `json:"q" swaggertype:"number"`
FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
Quantizer json.Number `json:"q" swaggertype:"number" jsonschema:"type=number"`
Size uint64 `json:"size_kb"` // kbytes
Time json.Number `json:"time" swaggertype:"number"`
Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number"` // kbit/s
Speed json.Number `json:"speed" swaggertype:"number"`
Time json.Number `json:"time" swaggertype:"number" jsonschema:"type=number"`
Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
Speed json.Number `json:"speed" swaggertype:"number" jsonschema:"type=number"`
Drop uint64 `json:"drop"`
Dup uint64 `json:"dup"`
}

View File

@ -3,7 +3,7 @@ package api
import (
"encoding/json"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/session"
)
// SessionStats are the accumulated numbers for the session summary
@ -30,8 +30,8 @@ type Session struct {
Extra string `json:"extra"`
RxBytes uint64 `json:"bytes_rx"`
TxBytes uint64 `json:"bytes_tx"`
RxBitrate json.Number `json:"bandwidth_rx_kbit" swaggertype:"number"` // kbit/s
TxBitrate json.Number `json:"bandwidth_tx_kbit" swaggertype:"number"` // kbit/s
RxBitrate json.Number `json:"bandwidth_rx_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
TxBitrate json.Number `json:"bandwidth_tx_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
}
func (s *Session) Unmarshal(sess session.Session) {
@ -51,11 +51,11 @@ func (s *Session) Unmarshal(sess session.Session) {
type SessionSummaryActive struct {
SessionList []Session `json:"list"`
Sessions uint64 `json:"sessions"`
RxBitrate json.Number `json:"bandwidth_rx_mbit" swaggertype:"number"` // mbit/s
TxBitrate json.Number `json:"bandwidth_tx_mbit" swaggertype:"number"` // mbit/s
RxBitrate json.Number `json:"bandwidth_rx_mbit" swaggertype:"number" jsonschema:"type=number"` // mbit/s
TxBitrate json.Number `json:"bandwidth_tx_mbit" swaggertype:"number" jsonschema:"type=number"` // mbit/s
MaxSessions uint64 `json:"max_sessions"`
MaxRxBitrate json.Number `json:"max_bandwidth_rx_mbit" swaggertype:"number"` // mbit/s
MaxTxBitrate json.Number `json:"max_bandwidth_tx_mbit" swaggertype:"number"` // mbit/s
MaxRxBitrate json.Number `json:"max_bandwidth_rx_mbit" swaggertype:"number" jsonschema:"type=number"` // mbit/s
MaxTxBitrate json.Number `json:"max_bandwidth_tx_mbit" swaggertype:"number" jsonschema:"type=number"` // mbit/s
}
// SessionSummarySummary represents the summary (history) of all finished sessions

View File

@ -1,7 +1,7 @@
package api
import (
"github.com/datarhei/core/ffmpeg/skills"
"github.com/datarhei/core/v16/ffmpeg/skills"
)
// SkillsFilter represents an ffmpeg filter

164
http/api/srt.go Normal file
View File

@ -0,0 +1,164 @@
package api
import (
"github.com/datarhei/core/v16/srt"
gosrt "github.com/datarhei/gosrt"
)
// SRTStatistics represents the statistics of a SRT connection
type SRTStatistics struct {
	MsTimeStamp uint64 `json:"timestamp_ms"` // The time elapsed, in milliseconds, since the SRT socket has been created

	// Accumulated counters (monotonically increasing over the socket lifetime).
	PktSent       uint64 `json:"sent_pkt"`        // The total number of sent DATA packets, including retransmitted packets
	PktRecv       uint64 `json:"recv_pkt"`        // The total number of received DATA packets, including retransmitted packets
	PktSentUnique uint64 `json:"sent_unique_pkt"` // The total number of unique DATA packets sent by the SRT sender
	PktRecvUnique uint64 `json:"recv_unique_pkt"` // The total number of unique original, retransmitted or recovered by the packet filter DATA packets received in time, decrypted without errors and, as a result, scheduled for delivery to the upstream application by the SRT receiver.
	PktSndLoss    uint64 `json:"send_loss_pkt"`   // The total number of data packets considered or reported as lost at the sender side. Does not correspond to the packets detected as lost at the receiver side.
	PktRcvLoss    uint64 `json:"recv_loss_pkt"`   // The total number of SRT DATA packets detected as presently missing (either reordered or lost) at the receiver side
	PktRetrans    uint64 `json:"sent_retrans_pkt"` // The total number of retransmitted packets sent by the SRT sender
	// NOTE(review): tag "recv_retran_pkts" is inconsistent with the other tags
	// ("retran" vs "retrans", plural "pkts" vs singular "_pkt") — likely a typo,
	// but changing it would break the wire format; confirm before fixing.
	PktRcvRetrans   uint64 `json:"recv_retran_pkts"`   // The total number of retransmitted packets registered at the receiver side
	PktSentACK      uint64 `json:"sent_ack_pkt"`       // The total number of sent ACK (Acknowledgement) control packets
	PktRecvACK      uint64 `json:"recv_ack_pkt"`       // The total number of received ACK (Acknowledgement) control packets
	PktSentNAK      uint64 `json:"sent_nak_pkt"`       // The total number of sent NAK (Negative Acknowledgement) control packets
	PktRecvNAK      uint64 `json:"recv_nak_pkt"`       // The total number of received NAK (Negative Acknowledgement) control packets
	PktSentKM       uint64 `json:"send_km_pkt"`        // The total number of sent KM (Key Material) control packets
	PktRecvKM       uint64 `json:"recv_km_pkt"`        // The total number of received KM (Key Material) control packets
	UsSndDuration   uint64 `json:"send_duration_us"`   // The total accumulated time in microseconds, during which the SRT sender has some data to transmit, including packets that have been sent, but not yet acknowledged
	PktSndDrop      uint64 `json:"send_drop_pkt"`      // The total number of dropped by the SRT sender DATA packets that have no chance to be delivered in time
	PktRcvDrop      uint64 `json:"recv_drop_pkt"`      // The total number of dropped by the SRT receiver and, as a result, not delivered to the upstream application DATA packets
	PktRcvUndecrypt uint64 `json:"recv_undecrypt_pkt"` // The total number of packets that failed to be decrypted at the receiver side

	ByteSent uint64 `json:"sent_bytes"` // Same as pktSent, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	ByteRecv uint64 `json:"recv_bytes"` // Same as pktRecv, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	// NOTE(review): tag "sent_unique__bytes" contains a double underscore —
	// likely a typo, but it is the published wire format; confirm before fixing.
	ByteSentUnique uint64 `json:"sent_unique__bytes"` // Same as pktSentUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	ByteRecvUnique uint64 `json:"recv_unique_bytes"`  // Same as pktRecvUnique, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	// NOTE(review): tag "recv_loss__bytes" contains a double underscore —
	// likely a typo, but it is the published wire format; confirm before fixing.
	ByteRcvLoss      uint64 `json:"recv_loss__bytes"`     // Same as pktRcvLoss, but expressed in bytes, including payload and all the headers (IP, TCP, SRT), bytes for the presently missing (either reordered or lost) packets' payloads are estimated based on the average packet size
	ByteRetrans      uint64 `json:"sent_retrans_bytes"`   // Same as pktRetrans, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	ByteSndDrop      uint64 `json:"send_drop_bytes"`      // Same as pktSndDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	ByteRcvDrop      uint64 `json:"recv_drop_bytes"`      // Same as pktRcvDrop, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)
	ByteRcvUndecrypt uint64 `json:"recv_undecrypt_bytes"` // Same as pktRcvUndecrypt, but expressed in bytes, including payload and all the headers (IP, TCP, SRT)

	// Instantaneous values (current snapshot, not accumulated).
	// NOTE(review): none of the fields below are populated by
	// (*SRTStatistics).Unmarshal — confirm whether that is intentional.
	UsPktSndPeriod       float64 `json:"pkt_send_period_us"`           // Current minimum time interval between which consecutive packets are sent, in microseconds
	PktFlowWindow        uint64  `json:"flow_window_pkt"`              // The maximum number of packets that can be "in flight"
	PktFlightSize        uint64  `json:"flight_size_pkt"`              // The number of packets in flight
	MsRTT                float64 `json:"rtt_ms"`                       // Smoothed round-trip time (SRTT), an exponentially-weighted moving average (EWMA) of an endpoint's RTT samples, in milliseconds
	MbpsBandwidth        float64 `json:"bandwidth_mbit"`               // Estimated bandwidth of the network link, in Mbps
	ByteAvailSndBuf      uint64  `json:"avail_send_buf_bytes"`         // The available space in the sender's buffer, in bytes
	ByteAvailRcvBuf      uint64  `json:"avail_recv_buf_bytes"`         // The available space in the receiver's buffer, in bytes
	MbpsMaxBW            float64 `json:"max_bandwidth_mbit"`           // Transmission bandwidth limit, in Mbps
	ByteMSS              uint64  `json:"mss_bytes"`                    // Maximum Segment Size (MSS), in bytes
	PktSndBuf            uint64  `json:"send_buf_pkt"`                 // The number of packets in the sender's buffer that are already scheduled for sending or even possibly sent, but not yet acknowledged
	ByteSndBuf           uint64  `json:"send_buf_bytes"`               // Instantaneous (current) value of pktSndBuf, but expressed in bytes, including payload and all headers (IP, TCP, SRT)
	MsSndBuf             uint64  `json:"send_buf_ms"`                  // The timespan (msec) of packets in the sender's buffer (unacknowledged packets)
	MsSndTsbPdDelay      uint64  `json:"send_tsbpd_delay_ms"`          // Timestamp-based Packet Delivery Delay value of the peer
	PktRcvBuf            uint64  `json:"recv_buf_pkt"`                 // The number of acknowledged packets in receiver's buffer
	ByteRcvBuf           uint64  `json:"recv_buf_bytes"`               // Instantaneous (current) value of pktRcvBuf, expressed in bytes, including payload and all headers (IP, TCP, SRT)
	MsRcvBuf             uint64  `json:"recv_buf_ms"`                  // The timespan (msec) of acknowledged packets in the receiver's buffer
	MsRcvTsbPdDelay      uint64  `json:"recv_tsbpd_delay_ms"`          // Timestamp-based Packet Delivery Delay value set on the socket via SRTO_RCVLATENCY or SRTO_LATENCY
	PktReorderTolerance  uint64  `json:"reorder_tolerance_pkt"`        // Instant value of the packet reorder tolerance
	PktRcvAvgBelatedTime uint64  `json:"pkt_recv_avg_belated_time_ms"` // Accumulated difference between the current time and the time-to-play of a packet that is received late
}
// Unmarshal converts the SRT statistics into API representation.
//
// NOTE(review): only the accumulated counters are transferred; the
// instantaneous fields of SRTStatistics (UsPktSndPeriod, MsRTT,
// MbpsBandwidth, ...) keep their zero values — confirm whether
// gosrt.Statistics exposes them at all.
func (s *SRTStatistics) Unmarshal(stats *gosrt.Statistics) {
	s.MsTimeStamp = stats.MsTimeStamp

	// Packet counters.
	s.PktSent = stats.PktSent
	s.PktRecv = stats.PktRecv
	s.PktSentUnique = stats.PktSentUnique
	s.PktRecvUnique = stats.PktRecvUnique
	s.PktSndLoss = stats.PktSndLoss
	s.PktRcvLoss = stats.PktRcvLoss
	s.PktRetrans = stats.PktRetrans
	s.PktRcvRetrans = stats.PktRcvRetrans
	s.PktSentACK = stats.PktSentACK
	s.PktRecvACK = stats.PktRecvACK
	s.PktSentNAK = stats.PktSentNAK
	s.PktRecvNAK = stats.PktRecvNAK
	s.PktSentKM = stats.PktSentKM
	s.PktRecvKM = stats.PktRecvKM
	s.UsSndDuration = stats.UsSndDuration
	s.PktSndDrop = stats.PktSndDrop
	s.PktRcvDrop = stats.PktRcvDrop
	s.PktRcvUndecrypt = stats.PktRcvUndecrypt

	// Byte counters.
	s.ByteSent = stats.ByteSent
	s.ByteRecv = stats.ByteRecv
	s.ByteSentUnique = stats.ByteSentUnique
	s.ByteRecvUnique = stats.ByteRecvUnique
	s.ByteRcvLoss = stats.ByteRcvLoss
	s.ByteRetrans = stats.ByteRetrans
	s.ByteSndDrop = stats.ByteSndDrop
	s.ByteRcvDrop = stats.ByteRcvDrop
	s.ByteRcvUndecrypt = stats.ByteRcvUndecrypt
}
// SRTLog is a single entry of the SRT server log.
type SRTLog struct {
	Timestamp int64    `json:"ts"`  // Unix timestamp in milliseconds (filled from time.Time.UnixMilli by the Unmarshal methods below)
	Message   []string `json:"msg"` // the log lines of this entry
}
// SRTConnection represents a SRT connection with statistics and logs
type SRTConnection struct {
	Log   map[string][]SRTLog `json:"log"`   // log entries grouped by key (presumably a log topic; see srt.Connection)
	Stats SRTStatistics       `json:"stats"` // statistics of this connection
}
// Unmarshal converts the SRT connection into API representation.
// Timestamps are converted to Unix milliseconds; the log message
// slices are shared with the source, not copied.
func (s *SRTConnection) Unmarshal(conn *srt.Connection) {
	s.Stats.Unmarshal(&conn.Stats)

	s.Log = make(map[string][]SRTLog, len(conn.Log))

	for name, entries := range conn.Log {
		logs := make([]SRTLog, len(entries))
		for i, entry := range entries {
			logs[i] = SRTLog{
				Timestamp: entry.Timestamp.UnixMilli(),
				Message:   entry.Message,
			}
		}
		s.Log[name] = logs
	}
}
// SRTChannels represents all current SRT connections
type SRTChannels struct {
	Publisher   map[string]uint32        `json:"publisher"`   // channel name -> ID of the publishing connection (presumably; see srt.Channels)
	Subscriber  map[string][]uint32      `json:"subscriber"`  // channel name -> IDs of the subscribing connections
	Connections map[uint32]SRTConnection `json:"connections"` // connection ID -> connection details
	Log         map[string][]SRTLog      `json:"log"`         // server log entries grouped by key
}
// Unmarshal converts the SRT channels into API representation.
// All maps are rebuilt so the API value doesn't share map storage
// with the live server state (the log message slices are still shared).
func (s *SRTChannels) Unmarshal(src *srt.Channels) {
	s.Publisher = make(map[string]uint32, len(src.Publisher))
	for name, connID := range src.Publisher {
		s.Publisher[name] = connID
	}

	s.Subscriber = make(map[string][]uint32, len(src.Subscriber))
	for name, connIDs := range src.Subscriber {
		s.Subscriber[name] = append([]uint32(nil), connIDs...)
	}

	s.Connections = make(map[uint32]SRTConnection, len(src.Connections))
	for connID, conn := range src.Connections {
		conn := conn // local copy so taking its address is safe
		c := SRTConnection{}
		c.Unmarshal(&conn)
		s.Connections[connID] = c
	}

	s.Log = make(map[string][]SRTLog, len(src.Log))
	for name, entries := range src.Log {
		logs := make([]SRTLog, len(entries))
		for i, entry := range entries {
			logs[i] = SRTLog{
				Timestamp: entry.Timestamp.UnixMilli(),
				Message:   entry.Message,
			}
		}
		s.Log[name] = logs
	}
}

2
http/cache/lru.go vendored
View File

@ -6,7 +6,7 @@ import (
"sync"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/log"
)
// LRUConfig is the configuration for a new LRU cache

View File

@ -5,7 +5,7 @@ import (
"net/http"
"strings"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/v16/http/api"
"github.com/labstack/echo/v4"
)

View File

@ -11,9 +11,9 @@ model:
models:
Uint64:
model: github.com/datarhei/core/http/graph/scalars.Uint64
model: github.com/datarhei/core/v16/http/graph/scalars.Uint64
MetricsResponseValue:
model: github.com/datarhei/core/http/graph/scalars.MetricsResponseValue
model: github.com/datarhei/core/v16/http/graph/scalars.MetricsResponseValue
resolver:
layout: follow-schema

File diff suppressed because it is too large Load Diff

View File

@ -3,9 +3,9 @@ package models
import (
"time"
"github.com/datarhei/core/http/graph/scalars"
"github.com/datarhei/core/playout"
"github.com/datarhei/core/restream/app"
"github.com/datarhei/core/v16/http/graph/scalars"
"github.com/datarhei/core/v16/playout"
"github.com/datarhei/core/v16/restream/app"
)
func (s *RawAVstream) UnmarshalPlayout(status playout.Status) {

View File

@ -8,7 +8,7 @@ import (
"strconv"
"time"
"github.com/datarhei/core/http/graph/scalars"
"github.com/datarhei/core/v16/http/graph/scalars"
)
type IProcessReportHistoryEntry interface {

View File

@ -7,9 +7,9 @@ import (
"context"
"time"
"github.com/datarhei/core/app"
"github.com/datarhei/core/http/graph/models"
"github.com/datarhei/core/http/graph/scalars"
"github.com/datarhei/core/v16/app"
"github.com/datarhei/core/v16/http/graph/models"
"github.com/datarhei/core/v16/http/graph/scalars"
)
func (r *queryResolver) About(ctx context.Context) (*models.About, error) {

View File

@ -7,7 +7,7 @@ import (
"context"
"strings"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/log"
)
func (r *queryResolver) Log(ctx context.Context) ([]string, error) {

View File

@ -7,9 +7,9 @@ import (
"context"
"time"
"github.com/datarhei/core/http/graph/models"
"github.com/datarhei/core/http/graph/scalars"
"github.com/datarhei/core/monitor/metric"
"github.com/datarhei/core/v16/http/graph/models"
"github.com/datarhei/core/v16/http/graph/scalars"
"github.com/datarhei/core/v16/monitor/metric"
)
func (r *queryResolver) Metrics(ctx context.Context, query models.MetricsInput) (*models.Metrics, error) {

View File

@ -9,8 +9,8 @@ import (
"fmt"
"net/http"
"github.com/datarhei/core/http/graph/models"
"github.com/datarhei/core/playout"
"github.com/datarhei/core/v16/http/graph/models"
"github.com/datarhei/core/v16/playout"
)
func (r *queryResolver) PlayoutStatus(ctx context.Context, id string, input string) (*models.RawAVstream, error) {

View File

@ -6,7 +6,7 @@ package resolver
import (
"context"
"github.com/datarhei/core/http/graph/models"
"github.com/datarhei/core/v16/http/graph/models"
)
func (r *queryResolver) Processes(ctx context.Context) ([]*models.Process, error) {

View File

@ -6,10 +6,10 @@ import (
"net/http"
"time"
"github.com/datarhei/core/http/graph/models"
"github.com/datarhei/core/log"
"github.com/datarhei/core/monitor"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/v16/http/graph/models"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/monitor"
"github.com/datarhei/core/v16/restream"
)
// This file will not be regenerated automatically.

View File

@ -6,7 +6,7 @@ package resolver
import (
"context"
"github.com/datarhei/core/http/graph/graph"
"github.com/datarhei/core/v16/http/graph/graph"
)
func (r *mutationResolver) Ping(ctx context.Context) (string, error) {

View File

@ -4,9 +4,9 @@ import (
"net/http"
"time"
"github.com/datarhei/core/app"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/v16/app"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/restream"
"github.com/labstack/echo/v4"
)

View File

@ -4,25 +4,31 @@ import (
"net/http"
"testing"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/mock"
"github.com/stretchr/testify/require"
"github.com/labstack/echo/v4"
)
func getDummyAboutRouter() *echo.Echo {
func getDummyAboutRouter() (*echo.Echo, error) {
router := mock.DummyEcho()
rs := mock.DummyRestreamer()
rs, err := mock.DummyRestreamer("../../mock")
if err != nil {
return nil, err
}
handler := NewAbout(rs, []string{})
router.Add("GET", "/", handler.About)
return router
return router, nil
}
func TestAbout(t *testing.T) {
router := getDummyAboutRouter()
router, err := getDummyAboutRouter()
require.NoError(t, err)
response := mock.Request(t, http.StatusOK, router, "GET", "/", nil)

View File

@ -3,9 +3,9 @@ package api
import (
"net/http"
"github.com/datarhei/core/config"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/v16/config"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/labstack/echo/v4"
)

View File

@ -6,8 +6,8 @@ import (
"net/http"
"testing"
"github.com/datarhei/core/config"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/v16/config"
"github.com/datarhei/core/v16/http/mock"
"github.com/labstack/echo/v4"
)
@ -49,8 +49,10 @@ func TestConfigSet(t *testing.T) {
var data bytes.Buffer
cfg := config.New()
cfg.FFmpeg.Binary = "true"
cfg.DB.Dir = "."
cfg.Storage.Disk.Dir = "."
cfg.Storage.MimeTypes = ""
encoder := json.NewEncoder(&data)
encoder.Encode(cfg)

View File

@ -5,11 +5,11 @@ import (
"path/filepath"
"sort"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/cache"
"github.com/datarhei/core/http/handler"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/cache"
"github.com/datarhei/core/v16/http/handler"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/io/fs"
"github.com/labstack/echo/v4"
)

View File

@ -3,8 +3,8 @@ package api
import (
"net/http"
"github.com/datarhei/core/http/graph/graph"
"github.com/datarhei/core/http/graph/resolver"
"github.com/datarhei/core/v16/http/graph/graph"
"github.com/datarhei/core/v16/http/graph/resolver"
"github.com/99designs/gqlgen/graphql/handler"
"github.com/99designs/gqlgen/graphql/playground"

View File

@ -4,8 +4,8 @@ import (
"net/http"
"strings"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/log"
"github.com/labstack/echo/v4"
)

View File

@ -4,8 +4,8 @@ import (
"net/http"
"testing"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/mock"
"github.com/labstack/echo/v4"
)

View File

@ -6,10 +6,10 @@ import (
"net/url"
"sort"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/io/fs"
"github.com/labstack/echo/v4"
)

View File

@ -4,10 +4,10 @@ import (
"net/http"
"time"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/monitor"
"github.com/datarhei/core/monitor/metric"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/monitor"
"github.com/datarhei/core/v16/monitor/metric"
"github.com/labstack/echo/v4"
)

View File

@ -8,10 +8,10 @@ import (
"strings"
"time"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/playout"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/playout"
"github.com/datarhei/core/v16/restream"
"github.com/labstack/echo/v4"
)

View File

@ -4,9 +4,9 @@ import (
"net/http"
"strings"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/restream"
"github.com/labstack/echo/v4"
"github.com/lithammer/shortuuid/v4"
@ -60,7 +60,9 @@ func (h *RestreamHandler) Add(c echo.Context) error {
return api.Err(http.StatusBadRequest, "Invalid process config", "%s", err.Error())
}
return c.JSON(http.StatusOK, process)
p, _ := h.getProcess(config.ID, "config")
return c.JSON(http.StatusOK, p.Config)
}
// GetAll returns all known processes
@ -182,40 +184,19 @@ func (h *RestreamHandler) Update(c echo.Context) error {
return api.Err(http.StatusBadRequest, "Invalid JSON", "%s", err)
}
if process.Type != "ffmpeg" {
return api.Err(http.StatusBadRequest, "Unsupported process type", "Supported process types are: ffmpeg")
}
if len(process.Input) == 0 && len(process.Output) == 0 {
return api.Err(http.StatusBadRequest, "At least one input and one output need to be defined")
}
config := process.Marshal()
fstate, err := h.restream.GetProcessState(id)
if err != nil {
return api.Err(http.StatusNotFound, "Unknown process ID", "%s", err)
if err := h.restream.UpdateProcess(id, config); err != nil {
if err == restream.ErrUnknownProcess {
return api.Err(http.StatusNotFound, "Process not found", "%s", id)
}
return api.Err(http.StatusBadRequest, "Process can't be updated", "%s", err)
}
order := fstate.Order
p, _ := h.getProcess(config.ID, "config")
if err := h.restream.StopProcess(id); err != nil {
return api.Err(http.StatusNotFound, "Unknown process ID", "%s", err)
}
if err := h.restream.DeleteProcess(id); err != nil {
return api.Err(http.StatusBadRequest, "Process can't be deleted", "%s", err)
}
if err := h.restream.AddProcess(config); err != nil {
return api.Err(http.StatusBadRequest, "Invalid process config", "%s", err)
}
if order == "start" {
h.restream.StartProcess(process.ID)
}
return c.JSON(http.StatusOK, process)
return c.JSON(http.StatusOK, p.Config)
}
// Command issues a command to a process

View File

@ -1,11 +1,14 @@
package api
import (
"bytes"
"encoding/json"
"net/http"
"testing"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/mock"
"github.com/stretchr/testify/require"
"github.com/labstack/echo/v4"
)
@ -16,18 +19,24 @@ type Response struct {
Data interface{}
}
func getDummyRestreamHandler() *RestreamHandler {
rs := mock.DummyRestreamer()
func getDummyRestreamHandler() (*RestreamHandler, error) {
rs, err := mock.DummyRestreamer("../../mock")
if err != nil {
return nil, err
}
handler := NewRestream(rs)
return handler
return handler, nil
}
func getDummyRestreamRouter() *echo.Echo {
func getDummyRestreamRouter() (*echo.Echo, error) {
router := mock.DummyEcho()
restream := getDummyRestreamHandler()
restream, err := getDummyRestreamHandler()
if err != nil {
return nil, err
}
router.GET("/", restream.GetAll)
router.POST("/", restream.Add)
@ -37,19 +46,21 @@ func getDummyRestreamRouter() *echo.Echo {
router.DELETE("/:id", restream.Delete)
router.PUT("/:id/command", restream.Command)
return router
return router, nil
}
func TestAddProcessMissingField(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcessMissingField.json")
mock.Request(t, http.StatusBadRequest, router, "POST", "/", data)
mock.Request(t, http.StatusOK, router, "POST", "/", data)
}
func TestAddProcessInvalidType(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcessInvalidType.json")
@ -57,7 +68,8 @@ func TestAddProcessInvalidType(t *testing.T) {
}
func TestAddProcess(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
@ -66,14 +78,124 @@ func TestAddProcess(t *testing.T) {
mock.Validate(t, &api.ProcessConfig{}, response.Data)
}
// TestUpdateProcessInvalid verifies that updating a process with an
// invalid config (empty output address) is rejected with 400 and
// leaves the existing process untouched.
func TestUpdateProcessInvalid(t *testing.T) {
	router, err := getDummyRestreamRouter()
	require.NoError(t, err)

	// Create the process we will later try to update.
	response := mock.Request(t, http.StatusOK, router, "POST", "/", mock.Read(t, "./fixtures/addProcess.json"))
	mock.Validate(t, &api.ProcessConfig{}, response.Data)

	// Decode the same fixture into a config we can tamper with.
	raw := bytes.Buffer{}
	_, err = raw.ReadFrom(mock.Read(t, "./fixtures/addProcess.json"))
	require.NoError(t, err)

	pcfg := api.ProcessConfig{}
	err = json.Unmarshal(raw.Bytes(), &pcfg)
	require.NoError(t, err)

	// An empty output address makes the config invalid.
	pcfg.Output[0].Address = ""

	payload, err := json.Marshal(&pcfg)
	require.NoError(t, err)

	// The update must be rejected ...
	mock.Request(t, http.StatusBadRequest, router, "PUT", "/"+pcfg.ID, bytes.NewBuffer(payload))

	// ... and the original process must still exist.
	mock.Request(t, http.StatusOK, router, "GET", "/"+pcfg.ID, nil)
}
func TestUpdateReplaceProcess(t *testing.T) {
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
response := mock.Request(t, http.StatusOK, router, "POST", "/", data)
mock.Validate(t, &api.ProcessConfig{}, response.Data)
update := bytes.Buffer{}
_, err = update.ReadFrom(mock.Read(t, "./fixtures/addProcess.json"))
require.NoError(t, err)
proc := api.ProcessConfig{}
err = json.Unmarshal(update.Bytes(), &proc)
require.NoError(t, err)
encoded, err := json.Marshal(&proc)
require.NoError(t, err)
update.Reset()
_, err = update.Write(encoded)
require.NoError(t, err)
response = mock.Request(t, http.StatusOK, router, "PUT", "/test", &update)
mock.Validate(t, &api.ProcessConfig{}, response.Data)
mock.Request(t, http.StatusOK, router, "GET", "/test", nil)
}
func TestUpdateNewProcess(t *testing.T) {
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
response := mock.Request(t, http.StatusOK, router, "POST", "/", data)
mock.Validate(t, &api.ProcessConfig{}, response.Data)
update := bytes.Buffer{}
_, err = update.ReadFrom(mock.Read(t, "./fixtures/addProcess.json"))
require.NoError(t, err)
proc := api.ProcessConfig{}
err = json.Unmarshal(update.Bytes(), &proc)
require.NoError(t, err)
proc.ID = "test2"
encoded, err := json.Marshal(&proc)
require.NoError(t, err)
update.Reset()
_, err = update.Write(encoded)
require.NoError(t, err)
response = mock.Request(t, http.StatusOK, router, "PUT", "/test", &update)
mock.Validate(t, &api.ProcessConfig{}, response.Data)
mock.Request(t, http.StatusNotFound, router, "GET", "/test", nil)
mock.Request(t, http.StatusOK, router, "GET", "/test2", nil)
}
func TestUpdateNonExistentProcess(t *testing.T) {
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
mock.Request(t, http.StatusNotFound, router, "PUT", "/test", data)
}
func TestRemoveUnknownProcess(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
mock.Request(t, http.StatusNotFound, router, "DELETE", "/foobar", nil)
}
func TestRemoveProcess(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/removeProcess.json")
@ -82,7 +204,8 @@ func TestRemoveProcess(t *testing.T) {
}
func TestProcessInfo(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
@ -93,13 +216,15 @@ func TestProcessInfo(t *testing.T) {
}
func TestProcessReportNotFound(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
mock.Request(t, http.StatusNotFound, router, "GET", "/test/report", nil)
}
func TestProcessReport(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
@ -110,14 +235,16 @@ func TestProcessReport(t *testing.T) {
}
func TestProcessCommandNotFound(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
command := mock.Read(t, "./fixtures/commandStart.json")
mock.Request(t, http.StatusBadRequest, router, "PUT", "/test/command", command)
}
func TestProcessCommandInvalid(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")
@ -128,7 +255,8 @@ func TestProcessCommandInvalid(t *testing.T) {
}
func TestProcessCommand(t *testing.T) {
router := getDummyRestreamRouter()
router, err := getDummyRestreamRouter()
require.NoError(t, err)
data := mock.Read(t, "./fixtures/addProcess.json")

View File

@ -3,8 +3,8 @@ package api
import (
"net/http"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/rtmp"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/rtmp"
"github.com/labstack/echo/v4"
)
@ -21,9 +21,9 @@ func NewRTMP(rtmp rtmp.Server) *RTMPHandler {
}
}
// ListChannels lists all currently publishing streams
// @Summary List all publishing streams
// @Description List all currently publishing streams
// ListChannels lists all currently publishing RTMP streams
// @Summary List all publishing RTMP streams
// @Description List all currently publishing RTMP streams
// @ID rtmp-3-list-channels
// @Produce json
// @Success 200 {array} api.RTMPChannel

View File

@ -4,9 +4,9 @@ import (
"net/http"
"strings"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/session"
"github.com/labstack/echo/v4"
)
@ -63,9 +63,8 @@ func (s *SessionHandler) Active(c echo.Context) error {
for _, name := range collectors {
sessions := s.registry.Active(name)
active := []api.Session{}
active = make([]api.Session, len(sessions))
active := make([]api.Session, len(sessions))
for i, s := range sessions {
active[i].Unmarshal(s)

View File

@ -4,9 +4,9 @@ import (
"net/http"
"testing"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/mock"
"github.com/datarhei/core/v16/session"
"github.com/labstack/echo/v4"
)

39
http/handler/api/srt.go Normal file
View File

@ -0,0 +1,39 @@
package api
import (
"net/http"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/srt"
"github.com/labstack/echo/v4"
)
// The SRTHandler type provides a handler for retrieving details from the SRTHandler server
type SRTHandler struct {
srt srt.Server
}
// NewSRT returns a new SRT type. You have to provide a SRT server instance.
func NewSRT(srt srt.Server) *SRTHandler {
return &SRTHandler{
srt: srt,
}
}
// ListChannels lists all currently publishing SRT streams
// @Summary List all publishing SRT treams
// @Description List all currently publishing SRT streams. This endpoint is EXPERIMENTAL and may change in future.
// @ID srt-3-list-channels
// @Produce json
// @Success 200 {array} api.SRTChannels
// @Security ApiKeyAuth
// @Router /api/v3/srt [get]
func (srth *SRTHandler) ListChannels(c echo.Context) error {
channels := srth.srt.Channels()
srtchannels := api.SRTChannels{}
srtchannels.Unmarshal(&channels)
return c.JSON(http.StatusOK, srtchannels)
}

View File

@ -3,10 +3,10 @@ package api
import (
"net/http"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/restream"
"github.com/datarhei/core/v16/session"
"github.com/labstack/echo/v4"
)

View File

@ -4,10 +4,10 @@ import (
"net/http"
"path/filepath"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/cache"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/cache"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/io/fs"
"github.com/labstack/echo/v4"
)

View File

@ -4,9 +4,9 @@ import (
"net/http"
"path/filepath"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/io/fs"
"github.com/labstack/echo/v4"
)

View File

@ -4,7 +4,7 @@ import (
"net/http"
"testing"
"github.com/datarhei/core/http/mock"
"github.com/datarhei/core/v16/http/mock"
"github.com/labstack/echo/v4"
"github.com/stretchr/testify/require"
)

View File

@ -6,7 +6,7 @@ import (
"net/url"
"strings"
"github.com/datarhei/core/encoding/json"
"github.com/datarhei/core/v16/encoding/json"
"github.com/labstack/echo/v4"
)

View File

@ -7,8 +7,8 @@ import (
"sync"
"time"
"github.com/datarhei/core/app"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/v16/app"
"github.com/datarhei/core/v16/http/api"
jwtgo "github.com/golang-jwt/jwt/v4"
"github.com/google/uuid"

View File

@ -4,9 +4,9 @@ import (
"fmt"
"strings"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/handler/util"
"github.com/datarhei/core/http/jwt/jwks"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/handler/util"
"github.com/datarhei/core/v16/http/jwt/jwks"
jwtgo "github.com/golang-jwt/jwt/v4"
"github.com/labstack/echo/v4"

View File

@ -8,7 +8,7 @@ import (
"path"
"strings"
"github.com/datarhei/core/http/cache"
"github.com/datarhei/core/v16/http/cache"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"

View File

@ -5,7 +5,7 @@ import (
"strings"
"time"
"github.com/datarhei/core/http/cors"
"github.com/datarhei/core/v16/http/cors"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"

View File

@ -3,7 +3,7 @@ package iplimit
import (
"net/http"
"github.com/datarhei/core/net"
"github.com/datarhei/core/v16/net"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"

View File

@ -5,7 +5,7 @@ import (
"net/http"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/log"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"

View File

@ -13,8 +13,8 @@ import (
"strings"
"sync"
"github.com/datarhei/core/net"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/net"
"github.com/datarhei/core/v16/session"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
@ -113,11 +113,12 @@ func (h *hls) handleIngress(c echo.Context, next echo.HandlerFunc) error {
// Register a new session
reference := strings.TrimSuffix(filepath.Base(path), filepath.Ext(path))
h.ingressCollector.RegisterAndActivate(path, reference, path, "")
h.ingressCollector.Ingress(path, headerSize(req.Header))
h.ingressCollector.Ingress(path, r.size)
h.ingressCollector.Extra(path, req.Header.Get("User-Agent"))
}
h.ingressCollector.Ingress(path, headerSize(req.Header))
h.ingressCollector.Ingress(path, r.size)
segments := r.getSegments(urlpath.Dir(path))
if len(segments) != 0 {

View File

@ -5,7 +5,7 @@ import (
"net"
"net/http"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/session"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"

View File

@ -3,39 +3,53 @@ package mock
import (
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"path/filepath"
"strings"
"testing"
"github.com/datarhei/core/ffmpeg"
"github.com/datarhei/core/http/api"
"github.com/datarhei/core/http/errorhandler"
"github.com/datarhei/core/http/validator"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/restream/store"
"github.com/datarhei/core/v16/ffmpeg"
"github.com/datarhei/core/v16/http/api"
"github.com/datarhei/core/v16/http/errorhandler"
"github.com/datarhei/core/v16/http/validator"
"github.com/datarhei/core/v16/internal/testhelper"
"github.com/datarhei/core/v16/restream"
"github.com/datarhei/core/v16/restream/store"
"github.com/alecthomas/jsonschema"
"github.com/invopop/jsonschema"
"github.com/labstack/echo/v4"
"github.com/stretchr/testify/require"
"github.com/xeipuuv/gojsonschema"
)
func DummyRestreamer() restream.Restreamer {
func DummyRestreamer(pathPrefix string) (restream.Restreamer, error) {
binary, err := testhelper.BuildBinary("ffmpeg", filepath.Join(pathPrefix, "../../internal/testhelper"))
if err != nil {
return nil, fmt.Errorf("failed to build helper program: %w", err)
}
store := store.NewDummyStore(store.DummyConfig{})
ffmpeg, _ := ffmpeg.New(ffmpeg.Config{
Binary: "ffmpeg",
ffmpeg, err := ffmpeg.New(ffmpeg.Config{
Binary: binary,
})
if err != nil {
return nil, err
}
rs, _ := restream.New(restream.Config{
rs, err := restream.New(restream.Config{
Store: store,
FFmpeg: ffmpeg,
})
if err != nil {
return nil, err
}
return rs
return rs, nil
}
func DummyEcho() *echo.Echo {

View File

@ -7,13 +7,14 @@
// @contact.email hello@datarhei.com
// @license.name Apache 2.0
// @license.url https://github.com/datarhei/core/blob/main/LICENSE
// @license.url https://github.com/datarhei/core/v16/blob/main/LICENSE
// @BasePath /
// @securityDefinitions.apikey ApiKeyAuth
// @in header
// @name Authorization
// @Param Authorization header string true "Insert your access token" default(Bearer <Add access token here>)
// @securityDefinitions.apikey ApiRefreshKeyAuth
// @in header
@ -29,34 +30,36 @@ package http
import (
"net/http"
"strings"
"github.com/datarhei/core/config"
"github.com/datarhei/core/http/cache"
"github.com/datarhei/core/http/errorhandler"
"github.com/datarhei/core/http/graph/resolver"
"github.com/datarhei/core/http/handler"
api "github.com/datarhei/core/http/handler/api"
"github.com/datarhei/core/http/jwt"
"github.com/datarhei/core/http/router"
"github.com/datarhei/core/http/validator"
"github.com/datarhei/core/io/fs"
"github.com/datarhei/core/log"
"github.com/datarhei/core/monitor"
"github.com/datarhei/core/net"
"github.com/datarhei/core/prometheus"
"github.com/datarhei/core/restream"
"github.com/datarhei/core/rtmp"
"github.com/datarhei/core/session"
"github.com/datarhei/core/v16/config"
"github.com/datarhei/core/v16/http/cache"
"github.com/datarhei/core/v16/http/errorhandler"
"github.com/datarhei/core/v16/http/graph/resolver"
"github.com/datarhei/core/v16/http/handler"
api "github.com/datarhei/core/v16/http/handler/api"
"github.com/datarhei/core/v16/http/jwt"
"github.com/datarhei/core/v16/http/router"
"github.com/datarhei/core/v16/http/validator"
"github.com/datarhei/core/v16/io/fs"
"github.com/datarhei/core/v16/log"
"github.com/datarhei/core/v16/monitor"
"github.com/datarhei/core/v16/net"
"github.com/datarhei/core/v16/prometheus"
"github.com/datarhei/core/v16/restream"
"github.com/datarhei/core/v16/rtmp"
"github.com/datarhei/core/v16/session"
"github.com/datarhei/core/v16/srt"
mwbodysize "github.com/datarhei/core/http/middleware/bodysize"
mwcache "github.com/datarhei/core/http/middleware/cache"
mwcors "github.com/datarhei/core/http/middleware/cors"
mwgzip "github.com/datarhei/core/http/middleware/gzip"
mwiplimit "github.com/datarhei/core/http/middleware/iplimit"
mwlog "github.com/datarhei/core/http/middleware/log"
mwmime "github.com/datarhei/core/http/middleware/mime"
mwredirect "github.com/datarhei/core/http/middleware/redirect"
mwsession "github.com/datarhei/core/http/middleware/session"
mwbodysize "github.com/datarhei/core/v16/http/middleware/bodysize"
mwcache "github.com/datarhei/core/v16/http/middleware/cache"
mwcors "github.com/datarhei/core/v16/http/middleware/cors"
mwgzip "github.com/datarhei/core/v16/http/middleware/gzip"
mwiplimit "github.com/datarhei/core/v16/http/middleware/iplimit"
mwlog "github.com/datarhei/core/v16/http/middleware/log"
mwmime "github.com/datarhei/core/v16/http/middleware/mime"
mwredirect "github.com/datarhei/core/v16/http/middleware/redirect"
mwsession "github.com/datarhei/core/v16/http/middleware/session"
"github.com/labstack/echo/v4"
"github.com/labstack/echo/v4/middleware"
@ -64,7 +67,7 @@ import (
echoSwagger "github.com/swaggo/echo-swagger" // echo-swagger middleware
// Expose the API docs
_ "github.com/datarhei/core/docs"
_ "github.com/datarhei/core/v16/docs"
)
var ListenAndServe = http.ListenAndServe
@ -82,6 +85,7 @@ type Config struct {
Profiling bool
Cors CorsConfig
RTMP rtmp.Server
SRT srt.Server
JWT jwt.JWT
Config config.Store
Cache cache.Cacher
@ -126,6 +130,7 @@ type server struct {
memfs *api.MemFSHandler
diskfs *api.DiskFSHandler
rtmp *api.RTMPHandler
srt *api.SRTHandler
config *api.ConfigHandler
session *api.SessionHandler
widget *api.WidgetHandler
@ -248,6 +253,12 @@ func NewServer(config Config) (Server, error) {
)
}
if config.SRT != nil {
s.v3handler.srt = api.NewSRT(
config.SRT,
)
}
if config.Config != nil {
s.v3handler.config = api.NewConfig(
config.Config,
@ -323,7 +334,13 @@ func NewServer(config Config) (Server, error) {
s.router.HTTPErrorHandler = errorhandler.HTTPErrorHandler
s.router.Validator = validator.New()
s.router.Use(s.middleware.log)
s.router.Use(middleware.Recover())
s.router.Use(middleware.RecoverWithConfig(middleware.RecoverConfig{
LogErrorFunc: func(c echo.Context, err error, stack []byte) error {
rows := strings.Split(string(stack), "\n")
s.logger.Error().WithField("stack", rows).Log("recovered from a panic")
return nil
},
}))
s.router.Use(mwbodysize.New())
s.router.Use(mwsession.NewHTTPWithConfig(mwsession.HTTPConfig{
Collector: config.Sessions.Collector("http"),
@ -338,13 +355,15 @@ func NewServer(config Config) (Server, error) {
s.router.Use(s.middleware.cors)
}
s.router.Use(middleware.RemoveTrailingSlashWithConfig(middleware.TrailingSlashConfig{
RedirectCode: 301,
}))
// Add static routes
if path, target := config.Router.StaticRoute(); len(target) != 0 {
group := s.router.Group(path)
group.Use(middleware.AddTrailingSlashWithConfig(middleware.TrailingSlashConfig{
Skipper: func(c echo.Context) bool {
return path != c.Request().URL.Path
},
RedirectCode: 301,
}))
group.Use(middleware.StaticWithConfig(middleware.StaticConfig{
Skipper: middleware.DefaultSkipper,
Root: target,
@ -359,6 +378,14 @@ func NewServer(config Config) (Server, error) {
for prefix, target := range config.Router.DirRoutes() {
group := s.router.Group(prefix)
group.Use(middleware.AddTrailingSlashWithConfig(middleware.TrailingSlashConfig{
Skipper: func(prefix string) func(c echo.Context) bool {
return func(c echo.Context) bool {
return prefix != c.Request().URL.Path
}
}(prefix),
RedirectCode: 301,
}))
group.Use(middleware.StaticWithConfig(middleware.StaticConfig{
Skipper: middleware.DefaultSkipper,
Root: target,
@ -591,13 +618,18 @@ func (s *server) setRoutesV3(v3 *echo.Group) {
v3.GET("/rtmp", s.v3handler.rtmp.ListChannels)
}
// v3 SRT
if s.v3handler.srt != nil {
v3.GET("/srt", s.v3handler.srt.ListChannels)
}
// v3 Config
if s.v3handler.config != nil {
v3.GET("/config", s.v3handler.config.Get)
v3.GET("/config/reload", s.v3handler.config.Reload)
if !s.readOnly {
v3.PUT("/config", s.v3handler.config.Set)
v3.GET("/config/reload", s.v3handler.config.Reload)
}
}

3
internal/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
testhelper/ignoresigint/ignoresigint
testhelper/sigint/sigint
testhelper/ffmpeg/ffmpeg

View File

@ -0,0 +1,103 @@
package main
import (
"context"
"fmt"
"os"
"os/signal"
"time"
)
func main() {
header := `ffmpeg version 4.0.2 Copyright (c) 2000-2018 the FFmpeg developers
built with Apple LLVM version 9.1.0 (clang-902.0.39.2)
configuration: --prefix=/usr/local/Cellar/ffmpeg/4.0.2 --enable-shared --enable-pthreads --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-gpl --enable-libmp3lame --enable-libx264 --enable-libx265 --enable-libxvid --enable-opencl --enable-videotoolbox --disable-lzma
libavutil 56. 14.100 / 56. 14.100
libavcodec 58. 18.100 / 58. 18.100
libavformat 58. 12.100 / 58. 12.100
libavdevice 58. 3.100 / 58. 3.100
libavfilter 7. 16.100 / 7. 16.100
libavresample 4. 0. 0 / 4. 0. 0
libswscale 5. 1.100 / 5. 1.100
libswresample 3. 1.100 / 3. 1.100
libpostproc 55. 1.100 / 55. 1.100`
prelude := `Input #0, lavfi, from 'testsrc=size=1280x720:rate=25':
Duration: N/A, start: 0.000000, bitrate: N/A
Stream #0:0: Video: rawvideo (RGB[24] / 0x18424752), rgb24, 1280x720 [SAR 1:1 DAR 16:9], 25 tbr, 25 tbn, 25 tbc
Input #1, lavfi, from 'anullsrc=r=44100:cl=stereo':
Duration: N/A, start: 0.000000, bitrate: 705 kb/s
Stream #1:0: Audio: pcm_u8, 44100 Hz, stereo, u8, 705 kb/s
Stream #1:1(eng): Audio: aac (LC), 48000 Hz, stereo, fltp (default)
Stream mapping:
Stream #0:0 -> #0:0 (rawvideo (native) -> h264 (libx264))
Stream #1:0 -> #0:1 (pcm_u8 (native) -> aac (native))
Press [q] to stop, [?] for help
[libx264 @ 0x7fa96a800600] using SAR=1/1
[libx264 @ 0x7fa96a800600] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
[libx264 @ 0x7fa96a800600] profile Constrained Baseline, level 3.1
[libx264 @ 0x7fa96a800600] 264 - core 152 r2854 e9a5903 - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=50 keyint_min=5 scenecut=0 intra_refresh=0 rc=crf mbtree=0 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=0
[hls @ 0x7fa969803a00] Opening './data/testsrc5375.ts.tmp' for writing
Output #0, hls, to './data/testsrc.m3u8':
Metadata:
encoder : Lavf58.12.100
Stream #0:0: Video: h264 (libx264), yuv420p(progressive), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 25 fps, 90k tbn, 25 tbc
Metadata:
encoder : Lavc58.18.100 libx264
Side data:
cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp, 64 kb/s
Metadata:
encoder : Lavc58.18.100 aac
[hls @ 0x7fa969803a00] Opening './data/testsrc5376.ts.tmp' for writing=0.872x
[hls @ 0x7fa969803a00] Opening './data/testsrc.m3u8.tmp' for writing
[hls @ 0x7fa969803a00] Opening './data/testsrc.m3u8.tmp' for writing`
fmt.Fprintf(os.Stderr, "%s\n", header)
if len(os.Args) <= 1 {
os.Exit(2)
}
lastArg := os.Args[len(os.Args)-1]
if lastArg == "-version" {
os.Exit(0)
}
if len(lastArg) > 1 && lastArg[0] == '-' {
os.Exit(2)
}
fmt.Fprintf(os.Stderr, "%s\n", prelude)
ctx, cancel := context.WithCancel(context.Background())
go func(ctx context.Context) {
ticker := time.NewTicker(time.Second)
defer ticker.Stop()
frame := uint64(0)
for {
select {
case <-ctx.Done():
return
case <-ticker.C:
frame += 25
fmt.Fprintf(os.Stderr, "frame=%5d fps= 25 q=-1.0 Lsize=N/A time=00:00:02.32 bitrate=N/A speed=1.0x \r", frame)
}
}
}(ctx)
// Wait for interrupt signal to gracefully shutdown the app
quit := make(chan os.Signal, 1)
signal.Notify(quit, os.Interrupt)
<-quit
cancel()
fmt.Fprintf(os.Stderr, "\nExiting normally, received signal 2.\n")
os.Exit(255)
}

View File

@ -0,0 +1,15 @@
package main
import (
"os"
"os/signal"
)
func main() {
// Wait for interrupt signal to gracefully shutdown the app
quit := make(chan os.Signal, 1)
signal.Notify(quit, os.Interrupt)
<-quit
os.Exit(255)
}

View File

@ -0,0 +1,19 @@
package testhelper
import (
"fmt"
"os/exec"
"path/filepath"
)
func BuildBinary(name, pathprefix string) (string, error) {
dir := filepath.Join(pathprefix, name)
aout := filepath.Join(dir, name)
err := exec.Command("go", "build", "-o", aout, dir).Run()
if err != nil {
return "", fmt.Errorf("build command: %w", err)
}
return aout, nil
}

View File

@ -8,7 +8,7 @@ import (
"strings"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/log"
)
// DiskConfig is the config required to create a new disk
@ -241,7 +241,7 @@ func (fs *diskFilesystem) Store(path string, r io.Reader) (int64, bool, error) {
dir := filepath.Dir(path)
if err := os.MkdirAll(dir, 0755); err != nil {
return -1, false, fmt.Errorf("Creating file failed: %w", err)
return -1, false, fmt.Errorf("creating file failed: %w", err)
}
var f *os.File
@ -251,7 +251,7 @@ func (fs *diskFilesystem) Store(path string, r io.Reader) (int64, bool, error) {
if err != nil {
f, err = os.OpenFile(path, os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
return -1, false, fmt.Errorf("Creating file failed: %w", err)
return -1, false, fmt.Errorf("creating file failed: %w", err)
}
replace = false
@ -259,7 +259,7 @@ func (fs *diskFilesystem) Store(path string, r io.Reader) (int64, bool, error) {
size, err := f.ReadFrom(r)
if err != nil {
return -1, false, fmt.Errorf("Reading data failed: %w", err)
return -1, false, fmt.Errorf("reading data failed: %w", err)
}
return size, !replace, nil

View File

@ -9,7 +9,7 @@ import (
"sync"
"time"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/log"
)
// MemConfig is the config that is required for creating
@ -305,7 +305,7 @@ func (fs *memFilesystem) Store(path string, r io.Reader) (int64, bool, error) {
if newSize > fs.maxSize {
if !fs.purge {
fs.dataPool.Put(data)
return -1, false, fmt.Errorf("Not enough space on device")
return -1, false, fmt.Errorf("not enough space on device")
}
if replace {

View File

@ -168,9 +168,9 @@ rules:
continue rules
}
switch value.(type) {
switch value := value.(type) {
case string:
if !re.MatchString(value.(string)) {
if !re.MatchString(value) {
continue rules
}
}

View File

@ -4,8 +4,8 @@ import (
"os"
"os/signal"
"github.com/datarhei/core/app/api"
"github.com/datarhei/core/log"
"github.com/datarhei/core/v16/app/api"
"github.com/datarhei/core/v16/log"
_ "github.com/joho/godotenv/autoload"
)

View File

@ -1,8 +1,8 @@
package monitor
import (
"github.com/datarhei/core/monitor/metric"
"github.com/datarhei/core/psutil"
"github.com/datarhei/core/v16/monitor/metric"
"github.com/datarhei/core/v16/psutil"
)
type cpuCollector struct {

View File

@ -1,8 +1,8 @@
package monitor
import (
"github.com/datarhei/core/monitor/metric"
"github.com/datarhei/core/psutil"
"github.com/datarhei/core/v16/monitor/metric"
"github.com/datarhei/core/v16/psutil"
)
type diskCollector struct {

Some files were not shown because too many files have changed in this diff Show More