
Add special handling of "buffer_page_io" subsystem

For information_schema.innodb_metrics we can split out labels based on
the type of buffer page IO operation in the "buffer_page_io" subsystem.
* Simplify scrapeInnodbMetrics() by casting value to float64.
* Test for broken metric output from MySQL.
* Suppress log messages in the test.
Ben Kochie
2016-04-14 03:22:43 +02:00
parent 738baff868
commit b35a156663
2 changed files with 103 additions and 3 deletions
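For context, here is a minimal, hypothetical sketch of how a "buffer_page_io" metric name could be split into a counter name and a "type" label, as exercised by the test in the diff below. The helper name splitBufferPageType and the exact prefix handling are assumptions for illustration, not the code added by this commit.

package collector

import "strings"

// splitBufferPageType is a sketch of deriving a "type" label from a
// buffer_page_io metric name such as "buffer_page_read_system_page" or
// "buffer_page_written_undo_log". Names that do not match the expected
// pattern (like "NOPE" in the test below) return ok == false so the
// caller can skip them or report them unmodified.
func splitBufferPageType(name string) (counterName, pageType string, ok bool) {
	for _, prefix := range []string{"buffer_page_read_", "buffer_page_written_"} {
		if strings.HasPrefix(name, prefix) {
			return strings.TrimSuffix(prefix, "_"), strings.TrimPrefix(name, prefix), true
		}
	}
	return "", "", false
}

With this sketch, "buffer_page_read_system_page" yields type="system_page" and "buffer_page_written_undo_log" yields type="undo_log", matching the labels expected by the test, while the invalid name "NOPE" fails to match and produces no labeled metric.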


@@ -0,0 +1,62 @@
package collector

import (
	"flag"
	"testing"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
	"github.com/smartystreets/goconvey/convey"
	"gopkg.in/DATA-DOG/go-sqlmock.v1"
)

func TestScrapeInnodbMetrics(t *testing.T) {
	// Suppress log messages emitted by the collector during the test.
	err := flag.Set("log.level", "fatal")
	if err != nil {
		t.Fatal(err)
	}

	db, mock, err := sqlmock.New()
	if err != nil {
		t.Fatalf("error opening a stub database connection: %s", err)
	}
	defer db.Close()

	columns := []string{"name", "subsystem", "type", "comment", "count"}
	rows := sqlmock.NewRows(columns).
		AddRow("lock_timeouts", "lock", "counter", "Number of lock timeouts", 0).
		AddRow("buffer_pool_reads", "buffer", "status_counter", "Number of reads directly from disk (innodb_buffer_pool_reads)", 1).
		AddRow("buffer_pool_size", "server", "value", "Server buffer pool size (all buffer pools) in bytes", 2).
		AddRow("buffer_page_read_system_page", "buffer_page_io", "counter", "Number of System Pages read", 3).
		AddRow("buffer_page_written_undo_log", "buffer_page_io", "counter", "Number of Undo Log Pages written", 4).
		AddRow("NOPE", "buffer_page_io", "counter", "An invalid buffer_page_io metric", 5)
	mock.ExpectQuery(sanitizeQuery(infoSchemaInnodbMetricsQuery)).WillReturnRows(rows)

	ch := make(chan prometheus.Metric)
	go func() {
		if err = ScrapeInnodbMetrics(db, ch); err != nil {
			t.Errorf("error calling function on test: %s", err)
		}
		close(ch)
	}()

	// Only the five valid rows should produce metrics; the malformed "NOPE"
	// row is not expected in the output. The buffer_page_io rows carry a
	// "type" label derived from the metric name.
	metricExpected := []MetricResult{
		{labels: labelMap{}, value: 0, metricType: dto.MetricType_COUNTER},
		{labels: labelMap{}, value: 1, metricType: dto.MetricType_COUNTER},
		{labels: labelMap{}, value: 2, metricType: dto.MetricType_GAUGE},
		{labels: labelMap{"type": "system_page"}, value: 3, metricType: dto.MetricType_COUNTER},
		{labels: labelMap{"type": "undo_log"}, value: 4, metricType: dto.MetricType_COUNTER},
	}
	convey.Convey("Metrics comparison", t, func() {
		for _, expect := range metricExpected {
			got := readMetric(<-ch)
			convey.So(got, convey.ShouldResemble, expect)
		}
	})

	// Ensure all expected SQL queries were executed.
	if err := mock.ExpectationsWereMet(); err != nil {
		t.Errorf("there were unfulfilled expectations: %s", err)
	}
}