Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -282,6 +282,20 @@ jobs:

We can also define multiple jobs to run different collectors against different target sets.

Since v0.14, sql_exporter accepts an optional list of job names that restricts scraping to those jobs only. The `jobs[]` query
parameter may be used multiple times. In Prometheus configuration we can use this syntax under the [scrape
config](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#%3Cscrape_config%3E):

```yaml
params:
jobs[]:
- db_targets1
- db_targets2
```

This can be useful for scraping targets at different intervals, or for other advanced use cases where scraping all
jobs at once is undesired.

### TLS and Basic Authentication

SQL Exporter supports TLS and Basic Authentication. This enables better control of the various HTTP endpoints.
Expand Down
2 changes: 1 addition & 1 deletion cmd/sql_exporter/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ func reloadCollectors(e sql_exporter.Exporter) func(http.ResponseWriter, *http.R
if currentConfig.Target != nil {
klog.Warning("Reloading target collectors...")
// FIXME: Should be t.Collectors() instead of config.Collectors
target, err := sql_exporter.NewTarget("", currentConfig.Target.Name, string(currentConfig.Target.DSN),
target, err := sql_exporter.NewTarget("", currentConfig.Target.Name, "", string(currentConfig.Target.DSN),
exporterNewConfig.Target.Collectors(), nil, currentConfig.Globals, currentConfig.Target.EnablePing)
if err != nil {
klog.Errorf("Error recreating a target - %v", err)
Expand Down
4 changes: 4 additions & 0 deletions cmd/sql_exporter/promhttp.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,10 @@ func ExporterHandlerFor(exporter sql_exporter.Exporter) http.Handler {
ctx, cancel := contextFor(req, exporter)
defer cancel()

// Parse the query params and set the job filters if any
jobFilters := req.URL.Query()["jobs[]"]
exporter.SetJobFilters(jobFilters)

// Go through prometheus.Gatherers to sanitize and sort metrics.
gatherer := prometheus.Gatherers{exporter.WithContext(ctx), sql_exporter.SvcRegistry}
mfs, err := gatherer.Gather()
Expand Down
56 changes: 47 additions & 9 deletions exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
dto "github.com/prometheus/client_model/go"

"google.golang.org/protobuf/proto"
"k8s.io/klog/v2"
)

var (
Expand All @@ -28,12 +29,16 @@ type Exporter interface {
WithContext(context.Context) Exporter
// Config returns the Exporter's underlying Config object.
Config() *config.Config
// UpdateTarget updates the targets field
UpdateTarget([]Target)
// SetJobFilters sets the jobFilters field
SetJobFilters([]string)
}

type exporter struct {
config *config.Config
targets []Target
config *config.Config
targets []Target
jobFilters []string

ctx context.Context
}
Expand All @@ -58,7 +63,7 @@ func NewExporter(configFile string) (Exporter, error) {
if c.Target.EnablePing == nil {
c.Target.EnablePing = &config.EnablePing
}
target, err := NewTarget("", c.Target.Name, string(c.Target.DSN), c.Target.Collectors(), nil, c.Globals, c.Target.EnablePing)
target, err := NewTarget("", c.Target.Name, "", string(c.Target.DSN), c.Target.Collectors(), nil, c.Globals, c.Target.EnablePing)
if err != nil {
return nil, err
}
Expand All @@ -80,17 +85,19 @@ func NewExporter(configFile string) (Exporter, error) {
scrapeErrorsMetric = registerScrapeErrorMetric()

return &exporter{
config: c,
targets: targets,
ctx: context.Background(),
config: c,
targets: targets,
jobFilters: []string{},
ctx: context.Background(),
}, nil
}

func (e *exporter) WithContext(ctx context.Context) Exporter {
return &exporter{
config: e.config,
targets: e.targets,
ctx: ctx,
config: e.config,
targets: e.targets,
jobFilters: e.jobFilters,
ctx: ctx,
}
}

Expand All @@ -101,6 +108,13 @@ func (e *exporter) Gather() ([]*dto.MetricFamily, error) {
errs prometheus.MultiError
)

// Filter out jobs that are not in the jobFilters list
e.filterTargets(e.jobFilters)

if len(e.targets) == 0 {
return nil, errors.New("no targets found")
}

var wg sync.WaitGroup
wg.Add(len(e.targets))
for _, t := range e.targets {
Expand Down Expand Up @@ -166,15 +180,39 @@ func (e *exporter) Gather() ([]*dto.MetricFamily, error) {
return result, errs
}

// filterTargets narrows e.targets down to the targets whose job group matches
// one of the given job filters. With an empty filter list, the target set is
// left untouched. If no target matches, the target set becomes empty and an
// error is logged (Gather then reports "no targets found").
//
// Fix: the original body ignored the jf parameter and read e.jobFilters
// directly, making the signature misleading; behavior is unchanged because the
// sole caller passes e.jobFilters. Also use klog.Error for a constant message
// with no format verbs (keeps `go vet` printf checks clean).
func (e *exporter) filterTargets(jf []string) {
	if len(jf) == 0 {
		return
	}
	var filtered []Target
	for _, t := range e.targets {
		for _, filter := range jf {
			if filter == t.JobGroup() {
				filtered = append(filtered, t)
				break
			}
		}
	}
	if len(filtered) == 0 {
		klog.Error("No targets found for job filters. Nothing to scrape.")
	}
	e.targets = filtered
}

// Config implements Exporter. It exposes the exporter's parsed
// configuration object to callers.
func (e *exporter) Config() *config.Config {
	return e.config
}

// UpdateTarget implements Exporter. It replaces the exporter's current
// target set wholesale, e.g. after a configuration reload.
func (e *exporter) UpdateTarget(target []Target) {
	e.targets = target
}

// SetJobFilters implements Exporter. It stores the job names (taken from the
// jobs[] query parameter) that the next Gather call will filter targets by.
func (e *exporter) SetJobFilters(filters []string) {
	e.jobFilters = filters
}

// registerScrapeErrorMetric registers the metrics for the exporter itself.
func registerScrapeErrorMetric() *prometheus.CounterVec {
scrapeErrors := prometheus.NewCounterVec(prometheus.CounterOpts{
Expand Down
2 changes: 1 addition & 1 deletion job.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ func NewJob(jc *config.JobConfig, gc *config.GlobalConfig) (Job, errors.WithCont
}
constLabels[name] = value
}
t, err := NewTarget(j.logContext, tname, string(dsn), jc.Collectors(), constLabels, gc, jc.EnablePing)
t, err := NewTarget(j.logContext, tname, jc.Name, string(dsn), jc.Collectors(), constLabels, gc, jc.EnablePing)
if err != nil {
return nil, err
}
Expand Down
9 changes: 8 additions & 1 deletion target.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,13 @@ const (
type Target interface {
// Collect is the equivalent of prometheus.Collector.Collect(), but takes a context to run in.
Collect(ctx context.Context, ch chan<- Metric)
JobGroup() string
}

// target implements Target. It wraps a sql.DB, which is initially nil but never changes once instantianted.
type target struct {
name string
jobGroup string
dsn string
collectors []Collector
constLabels prometheus.Labels
Expand All @@ -52,7 +54,7 @@ type target struct {
// NewTarget returns a new Target with the given target name, data source name, collectors and constant labels.
// An empty target name means the exporter is running in single target mode: no synthetic metrics will be exported.
func NewTarget(
logContext, tname, dsn string, ccs []*config.CollectorConfig, constLabels prometheus.Labels, gc *config.GlobalConfig, ep *bool) (
logContext, tname, jg, dsn string, ccs []*config.CollectorConfig, constLabels prometheus.Labels, gc *config.GlobalConfig, ep *bool) (
Target, errors.WithContext,
) {

Expand Down Expand Up @@ -88,6 +90,7 @@ func NewTarget(
scrapeDurationDesc := NewAutomaticMetricDesc(logContext, scrapeDurationName, scrapeDurationHelp, prometheus.GaugeValue, constLabelPairs)
t := target{
name: tname,
jobGroup: jg,
dsn: dsn,
collectors: collectors,
constLabels: constLabels,
Expand Down Expand Up @@ -189,3 +192,7 @@ func boolToFloat64(value bool) float64 {
// OfBool returns a pointer to a fresh copy of the supplied bool value.
func OfBool(v bool) *bool {
	b := v
	return &b
}

// JobGroup returns the name of the job this target belongs to; it is what
// the jobs[] query-parameter filtering matches against.
func (t *target) JobGroup() string {
	return t.jobGroup
}