catfile: Introduce request queues to allow batching reads

Merged Patrick Steinhardt requested to merge pks-catfile-queue into master
1 unresolved thread
1 file changed: +8 −1
Commit 5e1194f5
    We're about to convert the catfile package to use request queues, and in
    that world concurrent requests can cause data races in our tracing code.
    Prepare for that by locking the trace both when recording new requests
    and when finishing the tracing span.
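The change boils down to guarding the trace's per-type request counters with a mutex, so that goroutines recording requests and the goroutine finishing the span never touch the map concurrently. Below is a minimal, self-contained sketch of that pattern. It reuses the field and method names from the diff, but the OpenTracing span and Prometheus counter wiring are omitted and the main function is purely illustrative, so treat it as an approximation rather than the actual Gitaly code.

package main

import (
	"fmt"
	"sync"
)

// trace counts requests by type. requestsLock protects the map against
// concurrent recordRequest calls and the final read in finish.
type trace struct {
	requestsLock sync.Mutex
	requests     map[string]int
}

func (t *trace) recordRequest(requestType string) {
	t.requestsLock.Lock()
	defer t.requestsLock.Unlock()

	t.requests[requestType]++
}

func (t *trace) finish() {
	t.requestsLock.Lock()
	defer t.requestsLock.Unlock()

	for requestType, requestCount := range t.requests {
		if requestCount == 0 {
			continue
		}
		fmt.Printf("%s: %d\n", requestType, requestCount)
	}
}

func main() {
	t := &trace{requests: map[string]int{"info": 0, "contents": 0}}

	// Simulate concurrent producers recording requests while the trace is live.
	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			t.recordRequest("contents")
		}()
	}
	wg.Wait()

	t.finish() // prints "contents: 10"; "info" is skipped because its count is 0
}

Without the lock, running a program like this under "go run -race" would flag the concurrent map writes, which is exactly what the commit message anticipates once request queues allow batched, concurrent reads.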
@@ -2,6 +2,7 @@ package catfile
 
 import (
 	"context"
+	"sync"
 
 	"github.com/opentracing/opentracing-go"
 	"github.com/prometheus/client_golang/prometheus"
@@ -11,7 +12,8 @@ type trace struct {
 	span    opentracing.Span
 	counter *prometheus.CounterVec
 
-	requests map[string]int
+	requestsLock sync.Mutex
+	requests     map[string]int
 }
 
 // startTrace starts a new tracing span and updates metrics according to how many requests have been
@@ -41,10 +43,15 @@ func startTrace(
 }
 
 func (t *trace) recordRequest(requestType string) {
+	t.requestsLock.Lock()
+	defer t.requestsLock.Unlock()
+
 	t.requests[requestType]++
 }
 
 func (t *trace) finish() {
+	t.requestsLock.Lock()
+	defer t.requestsLock.Unlock()
 	for requestType, requestCount := range t.requests {
 		if requestCount == 0 {
 			continue