Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,17 @@ The exporter returns the following `Buildpacks` metrics:
| *metrics.namespace*_last_buildpacks_scrape_timestamp | Number of seconds since 1970 since last scrape of Buildpacks metrics from Cloud Foundry | `environment`, `deployment` |
| *metrics.namespace*_last_buildpacks_scrape_duration_seconds | Duration of the last scrape of Buildpacks metrics from Cloud Foundry | `environment`, `deployment` |

The exporter returns the following `Domain` metrics:

| Metric | Description | Labels |
| ------ | ----------- | ------ |
| *metrics.namespace*_domain_info | Cloud Foundry domains, labeled by domain ID, name, whether it is internal, and supported protocol. Metric value is set to 1. | `environment`, `deployment`, `domain_id`, `domain_name`, `internal`, `protocol` |
| *metrics.namespace*_domain_scrapes_total | Total number of scrapes for Cloud Foundry Domains | `environment`, `deployment` |
| *metrics.namespace*_domain_scrape_errors_total | Total number of scrape errors of Cloud Foundry Domains | `environment`, `deployment` |
| *metrics.namespace*_last_domains_scrape_error | Whether the last scrape of Domain metrics from Cloud Foundry resulted in an error (`1` for error, `0` for success) | `environment`, `deployment` |
| *metrics.namespace*_last_domains_scrape_timestamp | Number of seconds since 1970 since last scrape of Domain metrics from Cloud Foundry | `environment`, `deployment` |
| *metrics.namespace*_last_domains_scrape_duration_seconds | Duration of the last scrape of Domain metrics from Cloud Foundry | `environment`, `deployment` |

The exporter returns the following `Events` metrics:

| Metric | Description | Labels |
Expand Down
6 changes: 6 additions & 0 deletions collectors/collectors.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,12 @@ func NewCollector(
res.collectors = append(res.collectors, collector)
}

if filter.Enabled(filters.Domains) {
collector := NewDomainsCollector(namespace, environment, deployment)
res.collectors = append(res.collectors, collector)

}

if filter.Enabled(filters.IsolationSegments) {
collector := NewIsolationSegmentsCollector(namespace, environment, deployment)
res.collectors = append(res.collectors, collector)
Expand Down
146 changes: 146 additions & 0 deletions collectors/domains.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,146 @@
package collectors

import (
"time"

"github.com/bosh-prometheus/cf_exporter/models"
"github.com/prometheus/client_golang/prometheus"
)

// DomainsCollector gathers Cloud Foundry domain information and the
// scrape bookkeeping metrics (scrape totals, errors, last-scrape
// error flag, timestamp and duration) that accompany it.
type DomainsCollector struct {
	namespace   string
	environment string
	deployment  string
	// Info-style gauge: one series per (domain, protocol), value fixed at 1,
	// labeled with domain_id, domain_name, internal and protocol.
	domainInfoMetric *prometheus.GaugeVec
	// Bookkeeping metrics for the domain scrape itself.
	domainInfoScrapesTotalMetric              prometheus.Counter
	domainInfoScrapeErrorsTotalMetric         prometheus.Counter
	lastDomainInfoScrapeErrorMetric           prometheus.Gauge
	lastDomainInfoScrapeTimestampMetric       prometheus.Gauge
	lastDomainInfoScrapeDurationSecondsMetric prometheus.Gauge
}

// NewDomainsCollector builds a DomainsCollector whose metrics live under
// the given namespace and all carry the environment/deployment constant
// labels.
func NewDomainsCollector(namespace string, environment string, deployment string) *DomainsCollector {
	// Shared by every metric below; prometheus copies label values into
	// each metric's descriptor.
	constLabels := prometheus.Labels{"environment": environment, "deployment": deployment}

	return &DomainsCollector{
		namespace:   namespace,
		environment: environment,
		deployment:  deployment,
		domainInfoMetric: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Namespace:   namespace,
				Subsystem:   "domain",
				Name:        "info",
				Help:        "Cloud Foundry domains, labeled by domain ID, name, whether it is internal, and supported protocol. Metric value is set to 1.",
				ConstLabels: constLabels,
			},
			[]string{"domain_id", "domain_name", "internal", "protocol"},
		),
		domainInfoScrapesTotalMetric: prometheus.NewCounter(
			prometheus.CounterOpts{
				Namespace:   namespace,
				Subsystem:   "domain_scrapes",
				Name:        "total",
				Help:        "Total number of scrapes for Cloud Foundry Domains.",
				ConstLabels: constLabels,
			},
		),
		domainInfoScrapeErrorsTotalMetric: prometheus.NewCounter(
			prometheus.CounterOpts{
				Namespace:   namespace,
				Subsystem:   "domain_scrape_errors",
				Name:        "total",
				Help:        "Total number of scrape errors of Cloud Foundry Domains.",
				ConstLabels: constLabels,
			},
		),
		lastDomainInfoScrapeErrorMetric: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace:   namespace,
				Subsystem:   "",
				Name:        "last_domains_scrape_error",
				Help:        "Whether the last scrape of Domains metrics from Cloud Foundry resulted in an error (1 for error, 0 for success).",
				ConstLabels: constLabels,
			},
		),
		lastDomainInfoScrapeTimestampMetric: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace:   namespace,
				Subsystem:   "",
				Name:        "last_domains_scrape_timestamp",
				Help:        "Number of seconds since 1970 since last scrape of Domains metrics from Cloud Foundry.",
				ConstLabels: constLabels,
			},
		),
		lastDomainInfoScrapeDurationSecondsMetric: prometheus.NewGauge(
			prometheus.GaugeOpts{
				Namespace:   namespace,
				Subsystem:   "",
				Name:        "last_domains_scrape_duration_seconds",
				Help:        "Duration of the last scrape of Domains metrics from Cloud Foundry.",
				ConstLabels: constLabels,
			},
		),
	}
}

// Collect emits all domain metrics on ch. On a successful fetch the
// per-domain info series are reported; on a failed fetch only the
// bookkeeping metrics are emitted and the error counter and gauge are
// bumped.
func (c *DomainsCollector) Collect(objs *models.CFObjects, ch chan<- prometheus.Metric) {
	var lastScrapeError float64
	if objs.Error == nil {
		c.reportDomainsMetrics(objs, ch)
	} else {
		lastScrapeError = 1
		c.domainInfoScrapeErrorsTotalMetric.Inc()
	}

	c.domainInfoScrapeErrorsTotalMetric.Collect(ch)

	c.domainInfoScrapesTotalMetric.Inc()
	c.domainInfoScrapesTotalMetric.Collect(ch)

	c.lastDomainInfoScrapeErrorMetric.Set(lastScrapeError)
	c.lastDomainInfoScrapeErrorMetric.Collect(ch)

	c.lastDomainInfoScrapeTimestampMetric.Set(float64(time.Now().Unix()))
	c.lastDomainInfoScrapeTimestampMetric.Collect(ch)

	c.lastDomainInfoScrapeDurationSecondsMetric.Set(objs.Took)
	c.lastDomainInfoScrapeDurationSecondsMetric.Collect(ch)
}

// Describe forwards the descriptors of every metric owned by this
// collector to ch.
func (c *DomainsCollector) Describe(ch chan<- *prometheus.Desc) {
	for _, metric := range []prometheus.Collector{
		c.domainInfoMetric,
		c.domainInfoScrapesTotalMetric,
		c.domainInfoScrapeErrorsTotalMetric,
		c.lastDomainInfoScrapeErrorMetric,
		c.lastDomainInfoScrapeTimestampMetric,
		c.lastDomainInfoScrapeDurationSecondsMetric,
	} {
		metric.Describe(ch)
	}
}

// reportDomainsMetrics resets the info gauge and re-populates it with one
// series per (domain, protocol) pair, then emits the result on ch.
// NOTE(review): a domain whose Protocols slice is empty produces no series
// at all — confirm this is the intended behavior.
func (c *DomainsCollector) reportDomainsMetrics(objs *models.CFObjects, ch chan<- prometheus.Metric) {
	c.domainInfoMetric.Reset()

	for _, d := range objs.Domains {
		isInternal := "false"
		if d.Internal.Value {
			isInternal = "true"
		}

		for _, proto := range d.Protocols {
			c.domainInfoMetric.With(prometheus.Labels{
				"domain_id":   d.GUID,
				"domain_name": d.Name,
				"internal":    isInternal,
				"protocol":    proto,
			}).Set(1)
		}
	}

	c.domainInfoMetric.Collect(ch)
}
1 change: 1 addition & 0 deletions fetcher/fetcher.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ func (c *Fetcher) workInit() {
c.worker.PushIf("spaces", c.fetchSpaces, filters.Applications, filters.Spaces)
c.worker.PushIf("space_quotas", c.fetchSpaceQuotas, filters.Spaces)
c.worker.PushIf("applications", c.fetchApplications, filters.Applications)
c.worker.PushIf("domains", c.fetchDomains, filters.Domains)
c.worker.PushIf("process", c.fetchProcesses, filters.Applications)
c.worker.PushIf("routes", c.fetchRoutes, filters.Routes)
c.worker.PushIf("route_services", c.fetchRouteServices, filters.Routes)
Expand Down
9 changes: 9 additions & 0 deletions fetcher/fetcher_handlers.go
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,15 @@ func (c *Fetcher) fetchApplications(session *SessionExt, entry *models.CFObjects
return err
}

// fetchDomains retrieves all domains from the CF v3 API and indexes them
// into entry.Domains keyed by GUID.
func (c *Fetcher) fetchDomains(session *SessionExt, entry *models.CFObjects) error {
	domains, _, err := session.V3().GetDomains(LargeQuery)
	// Early return on error, matching the style of the sibling fetchers
	// (e.g. fetchProcesses).
	if err != nil {
		return err
	}
	loadIndex(entry.Domains, domains, func(r resources.Domain) string { return r.GUID })
	return nil
}

func (c *Fetcher) fetchProcesses(session *SessionExt, entry *models.CFObjects) error {
processes, _, err := session.V3().GetProcesses(LargeQuery)
if err != nil {
Expand Down
4 changes: 4 additions & 0 deletions filters/filters.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import (
const (
Applications = "applications"
Buildpacks = "buildpacks"
Domains = "domains"
Events = "events"
IsolationSegments = "isolationsegments"
Organizations = "organizations"
Expand All @@ -26,6 +27,7 @@ var (
All = []string{
Applications,
Buildpacks,
Domains,
Events,
IsolationSegments,
Organizations,
Expand All @@ -50,6 +52,7 @@ func NewFilter(active ...string) (*Filter, error) {
activated: map[string]bool{
Applications: true,
Buildpacks: true,
Domains: true,
IsolationSegments: true,
Organizations: true,
Routes: true,
Expand Down Expand Up @@ -79,6 +82,7 @@ func (f *Filter) setActive(active []string) error {
f.activated = map[string]bool{
Applications: false,
Buildpacks: false,
Domains: false,
IsolationSegments: false,
Organizations: false,
Routes: false,
Expand Down