Collect statistics about buffer pool, export as metrics

This commit is contained in:
Ingo Oppermann 2024-10-14 14:55:29 +02:00
parent 519f0cfb2b
commit 7a58c52a17
No known key found for this signature in database
GPG Key ID: 2AB32426E9DD229E
3 changed files with 99 additions and 0 deletions

View File

@ -1235,6 +1235,7 @@ func (a *api) start(ctx context.Context) error {
for name, fs := range a.s3fs {
metrics.Register(monitor.NewFilesystemCollector(name, fs))
}
metrics.Register(monitor.NewSelfCollector())
metrics.Register(monitor.NewRestreamCollector(a.restream))
metrics.Register(monitor.NewFFmpegCollector(a.ffmpeg))
metrics.Register(monitor.NewSessionCollector(a.sessions, []string{}))

View File

@ -26,9 +26,22 @@ type BufferPool struct {
	// defaultSize is the initial capacity given to newly allocated buffers.
	defaultSize uint64
	// maxSize is the largest buffer capacity that will be recycled on Put;
	// 0 means no limit.
	maxSize uint64
	// alloc, recycle, and dump are monotonically increasing counters,
	// read and written atomically: alloc counts fresh allocations,
	// recycle counts buffers returned to the pool, dump counts buffers
	// discarded for exceeding maxSize.
	alloc   uint64
	recycle uint64
	dump    uint64
	pool sync.Pool
}
// PoolStats is a point-in-time snapshot of a BufferPool's counters
// and size configuration, as returned by Stats.
type PoolStats struct {
	Alloc       uint64 // number of fresh buffer allocations
	Recycle     uint64 // number of buffers returned to the pool
	Dump        uint64 // number of buffers discarded as too large to recycle
	DefaultSize uint64 // initial capacity of newly allocated buffers
	MaxSize     uint64 // largest capacity eligible for recycling (0 = unlimited)
}
func NewBufferPool() *BufferPool {
p := &BufferPool{
pool: sync.Pool{},
@ -37,12 +50,26 @@ func NewBufferPool() *BufferPool {
return p
}
// Stats returns a snapshot of the pool's counters and size configuration.
// Each field is loaded atomically, but the snapshot as a whole is not
// taken under a single lock, so the fields may reflect slightly
// different instants.
func (p *BufferPool) Stats() PoolStats {
	return PoolStats{
		Alloc:       atomic.LoadUint64(&p.alloc),
		Recycle:     atomic.LoadUint64(&p.recycle),
		Dump:        atomic.LoadUint64(&p.dump),
		DefaultSize: atomic.LoadUint64(&p.defaultSize),
		MaxSize:     atomic.LoadUint64(&p.maxSize),
	}
}
func (p *BufferPool) Get() *Buffer {
v := p.pool.Get()
if v != nil {
return v.(*Buffer)
}
atomic.AddUint64(&p.alloc, 1)
return &Buffer{
data: make([]byte, 0, atomic.LoadUint64(&p.defaultSize)),
}
@ -59,6 +86,10 @@ func (p *BufferPool) Put(buf *Buffer) {
if maxSize == 0 || cap(buf.data) <= maxSize {
buf.Reset()
p.pool.Put(buf)
atomic.AddUint64(&p.recycle, 1)
} else {
atomic.AddUint64(&p.dump, 1)
}
}
@ -140,6 +171,10 @@ func init() {
	// DefaultBufferPool is the shared pool used by the package-level
	// Get/Put/Stats helpers.
	DefaultBufferPool = NewBufferPool()
}
// Stats returns a snapshot of the counters of the package-level
// DefaultBufferPool.
func Stats() PoolStats {
	return DefaultBufferPool.Stats()
}
// Get returns a buffer from the package-level DefaultBufferPool.
func Get() *Buffer {
	return DefaultBufferPool.Get()
}

63
monitor/self.go Normal file
View File

@ -0,0 +1,63 @@
package monitor
import (
"runtime"
"github.com/datarhei/core/v16/mem"
"github.com/datarhei/core/v16/monitor/metric"
)
// selfCollector exports the buffer pool statistics of the mem package
// as metrics. It holds one metric description per exported counter.
type selfCollector struct {
	allocDescr       *metric.Description // number of buffer allocations
	recycleDescr     *metric.Description // number of buffer recycles
	dumpDescr        *metric.Description // number of buffer dumps
	defaultSizeDescr *metric.Description // default buffer size
	maxSizeDescr     *metric.Description // max. buffer size for recycling
}
// NewSelfCollector returns a collector that exposes the buffer pool
// statistics of the mem package (allocations, recycles, dumps, and
// size configuration) as metrics.
func NewSelfCollector() metric.Collector {
	return &selfCollector{
		allocDescr:       metric.NewDesc("bufferpool_alloc", "Number of buffer allocations", nil),
		recycleDescr:     metric.NewDesc("bufferpool_recycle", "Number of buffer recycles", nil),
		dumpDescr:        metric.NewDesc("bufferpool_dump", "Number of buffer dumps", nil),
		defaultSizeDescr: metric.NewDesc("bufferpool_default_size", "Default buffer size", nil),
		maxSizeDescr:     metric.NewDesc("bufferpool_max_size", "Max. buffer size for recycling", nil),
	}
}
func (c *selfCollector) Stop() {}
// Prefix returns the common name prefix for all metrics of this collector.
func (c *selfCollector) Prefix() string {
	return "bufferpool"
}
// Describe lists the descriptions of every metric this collector emits.
func (c *selfCollector) Describe() []*metric.Description {
	descriptions := make([]*metric.Description, 0, 5)
	descriptions = append(descriptions,
		c.allocDescr,
		c.recycleDescr,
		c.dumpDescr,
		c.defaultSizeDescr,
		c.maxSizeDescr,
	)

	return descriptions
}
// Collect gathers the current buffer pool statistics from the mem
// package and converts them into metric values.
func (c *selfCollector) Collect() metric.Metrics {
	stats := mem.Stats()
	metrics := metric.NewMetrics()
	// The counters and sizes are exported as-is; the float64 conversion
	// is what the metric value constructor expects.
	metrics.Add(metric.NewValue(c.allocDescr, float64(stats.Alloc)))
	metrics.Add(metric.NewValue(c.recycleDescr, float64(stats.Recycle)))
	metrics.Add(metric.NewValue(c.dumpDescr, float64(stats.Dump)))
	metrics.Add(metric.NewValue(c.defaultSizeDescr, float64(stats.DefaultSize)))
	metrics.Add(metric.NewValue(c.maxSizeDescr, float64(stats.MaxSize)))
	// NOTE(review): memstats is populated but never exported — this is
	// dead code, and runtime.ReadMemStats is documented to briefly stop
	// the world on every call. Either add the intended runtime metrics
	// here or remove this call together with the runtime import.
	memstats := runtime.MemStats{}
	runtime.ReadMemStats(&memstats)
	// TODO: export the runtime memory statistics gathered above.
	//metrics.Add(metric.NewValue())
	return metrics
}