From 8d5e772a8c2dd36d8a01f8d9cfad405a9a91e016 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 3 Dec 2019 14:57:40 -0800 Subject: [PATCH 01/79] Remove non-existent field from net_response readme (cherry picked from commit cc9a8cd1c6968bcfc6a9390a79b9966cbd292568) --- plugins/inputs/net_response/README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/inputs/net_response/README.md b/plugins/inputs/net_response/README.md index dcfb341d50dac..2c492408beef2 100644 --- a/plugins/inputs/net_response/README.md +++ b/plugins/inputs/net_response/README.md @@ -43,7 +43,6 @@ verify text in the response. - result - fields: - response_time (float, seconds) - - success (int) # success 0, failure 1 - result_code (int, success = 0, timeout = 1, connection_failed = 2, read_failed = 3, string_mismatch = 4) - result_type (string) **DEPRECATED in 1.7; use result tag** - string_found (boolean) **DEPRECATED in 1.4; use result tag** From 5a3ac977efe4e0e81b440b0f8f7e602b22366eb7 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 3 Dec 2019 16:43:05 -0800 Subject: [PATCH 02/79] Document that json tag_keys are not saved as fields. (cherry picked from commit 48f9f22f33b35139433e5ebd2008244c6c929616) --- plugins/parsers/json/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/plugins/parsers/json/README.md b/plugins/parsers/json/README.md index 45f4a98c6e43a..b4975bcd334f4 100644 --- a/plugins/parsers/json/README.md +++ b/plugins/parsers/json/README.md @@ -29,7 +29,8 @@ ignored unless specified in the `tag_key` or `json_string_fields` options. ## https://github.com/tidwall/gjson/tree/v1.3.0#path-syntax json_query = "" - ## Tag keys is an array of keys that should be added as tags. + ## Tag keys is an array of keys that should be added as tags. Matching keys + ## are no longer saved as fields. 
tag_keys = [ "my_tag_1", "my_tag_2" From f7ab7de8336c28eaa0d5034b72fdb3f52de62a91 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Wed, 4 Dec 2019 13:41:33 -0800 Subject: [PATCH 03/79] Fix prometheus histogram and summary merging (#6756) (cherry picked from commit eeb46906866f8a10b89040ae62a88e7e98dff366) --- plugins/serializers/prometheus/collection.go | 25 +- .../serializers/prometheus/collection_test.go | 231 ++++++++++++++++++ 2 files changed, 253 insertions(+), 3 deletions(-) diff --git a/plugins/serializers/prometheus/collection.go b/plugins/serializers/prometheus/collection.go index 8ca06520bc7ec..5c385caad0881 100644 --- a/plugins/serializers/prometheus/collection.go +++ b/plugins/serializers/prometheus/collection.go @@ -52,12 +52,32 @@ type Histogram struct { Sum float64 } +func (h *Histogram) merge(b Bucket) { + for i := range h.Buckets { + if h.Buckets[i].Bound == b.Bound { + h.Buckets[i].Count = b.Count + return + } + } + h.Buckets = append(h.Buckets, b) +} + type Summary struct { Quantiles []Quantile Count uint64 Sum float64 } +func (s *Summary) merge(q Quantile) { + for i := range s.Quantiles { + if s.Quantiles[i].Quantile == q.Quantile { + s.Quantiles[i].Value = q.Value + return + } + } + s.Quantiles = append(s.Quantiles, q) +} + type MetricKey uint64 func MakeMetricKey(labels []LabelPair) MetricKey { @@ -210,7 +230,6 @@ func (c *Collection) Add(metric telegraf.Metric) { Scaler: &Scaler{Value: value}, } - // what if already here entry.Metrics[metricKey] = m case telegraf.Histogram: if m == nil { @@ -236,7 +255,7 @@ func (c *Collection) Add(metric telegraf.Metric) { continue } - m.Histogram.Buckets = append(m.Histogram.Buckets, Bucket{ + m.Histogram.merge(Bucket{ Bound: bound, Count: count, }) @@ -297,7 +316,7 @@ func (c *Collection) Add(metric telegraf.Metric) { continue } - m.Summary.Quantiles = append(m.Summary.Quantiles, Quantile{ + m.Summary.merge(Quantile{ Quantile: quantile, Value: value, }) diff --git 
a/plugins/serializers/prometheus/collection_test.go b/plugins/serializers/prometheus/collection_test.go index 589c306b58f0f..70f26dac788d7 100644 --- a/plugins/serializers/prometheus/collection_test.go +++ b/plugins/serializers/prometheus/collection_test.go @@ -1,6 +1,7 @@ package prometheus import ( + "math" "testing" "time" @@ -47,6 +48,78 @@ func TestCollectionExpire(t *testing.T) { }, }, }, + { + name: "update metric expiration", + now: time.Unix(20, 0), + age: 10 * time.Second, + metrics: []telegraf.Metric{ + testutil.MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{ + "time_idle": 42.0, + }, + time.Unix(0, 0), + ), + testutil.MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{ + "time_idle": 43.0, + }, + time.Unix(12, 0), + ), + }, + expected: []*dto.MetricFamily{ + { + Name: proto.String("cpu_time_idle"), + Help: proto.String(helpString), + Type: dto.MetricType_UNTYPED.Enum(), + Metric: []*dto.Metric{ + { + Label: []*dto.LabelPair{}, + Untyped: &dto.Untyped{Value: proto.Float64(43.0)}, + }, + }, + }, + }, + }, + { + name: "update metric expiration descending order", + now: time.Unix(20, 0), + age: 10 * time.Second, + metrics: []telegraf.Metric{ + testutil.MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{ + "time_idle": 42.0, + }, + time.Unix(12, 0), + ), + testutil.MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{ + "time_idle": 43.0, + }, + time.Unix(0, 0), + ), + }, + expected: []*dto.MetricFamily{ + { + Name: proto.String("cpu_time_idle"), + Help: proto.String(helpString), + Type: dto.MetricType_UNTYPED.Enum(), + Metric: []*dto.Metric{ + { + Label: []*dto.LabelPair{}, + Untyped: &dto.Untyped{Value: proto.Float64(42.0)}, + }, + }, + }, + }, + }, { name: "expired single metric in metric family", now: time.Unix(20, 0), @@ -99,6 +172,164 @@ func TestCollectionExpire(t *testing.T) { }, }, }, + { + name: "histogram bucket updates", + now: time.Unix(0, 0), + age: 10 * time.Second, + 
metrics: []telegraf.Metric{ + testutil.MustMetric( + "prometheus", + map[string]string{}, + map[string]interface{}{ + "http_request_duration_seconds_sum": 10.0, + "http_request_duration_seconds_count": 2, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"le": "0.05"}, + map[string]interface{}{ + "http_request_duration_seconds_bucket": 1.0, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"le": "+Inf"}, + map[string]interface{}{ + "http_request_duration_seconds_bucket": 1.0, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + // Next interval + testutil.MustMetric( + "prometheus", + map[string]string{}, + map[string]interface{}{ + "http_request_duration_seconds_sum": 20.0, + "http_request_duration_seconds_count": 4, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"le": "0.05"}, + map[string]interface{}{ + "http_request_duration_seconds_bucket": 2.0, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"le": "+Inf"}, + map[string]interface{}{ + "http_request_duration_seconds_bucket": 2.0, + }, + time.Unix(0, 0), + telegraf.Histogram, + ), + }, + expected: []*dto.MetricFamily{ + { + Name: proto.String("http_request_duration_seconds"), + Help: proto.String(helpString), + Type: dto.MetricType_HISTOGRAM.Enum(), + Metric: []*dto.Metric{ + { + Label: []*dto.LabelPair{}, + Histogram: &dto.Histogram{ + SampleCount: proto.Uint64(4), + SampleSum: proto.Float64(20.0), + Bucket: []*dto.Bucket{ + { + UpperBound: proto.Float64(0.05), + CumulativeCount: proto.Uint64(2), + }, + { + UpperBound: proto.Float64(math.Inf(1)), + CumulativeCount: proto.Uint64(2), + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "summary quantile updates", + now: time.Unix(0, 0), + age: 10 * time.Second, + metrics: []telegraf.Metric{ + testutil.MustMetric( + 
"prometheus", + map[string]string{}, + map[string]interface{}{ + "rpc_duration_seconds_sum": 1.0, + "rpc_duration_seconds_count": 1, + }, + time.Unix(0, 0), + telegraf.Summary, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"quantile": "0.01"}, + map[string]interface{}{ + "rpc_duration_seconds": 1.0, + }, + time.Unix(0, 0), + telegraf.Summary, + ), + // Updated Summary + testutil.MustMetric( + "prometheus", + map[string]string{}, + map[string]interface{}{ + "rpc_duration_seconds_sum": 2.0, + "rpc_duration_seconds_count": 2, + }, + time.Unix(0, 0), + telegraf.Summary, + ), + testutil.MustMetric( + "prometheus", + map[string]string{"quantile": "0.01"}, + map[string]interface{}{ + "rpc_duration_seconds": 2.0, + }, + time.Unix(0, 0), + telegraf.Summary, + ), + }, + expected: []*dto.MetricFamily{ + { + Name: proto.String("rpc_duration_seconds"), + Help: proto.String(helpString), + Type: dto.MetricType_SUMMARY.Enum(), + Metric: []*dto.Metric{ + { + Label: []*dto.LabelPair{}, + Summary: &dto.Summary{ + SampleCount: proto.Uint64(2), + SampleSum: proto.Float64(2.0), + Quantile: []*dto.Quantile{ + { + Quantile: proto.Float64(0.01), + Value: proto.Float64(2), + }, + }, + }, + }, + }, + }, + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { From 280a41445c4bba94cff47618c583bc51b12eedfc Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Wed, 4 Dec 2019 17:16:00 -0800 Subject: [PATCH 04/79] Add minimum system requirements to readme (cherry picked from commit 1f5be2bac7b895867411406b2eeac892b9ff9258) --- README.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/README.md b/README.md index 73f4268bb75ea..b34451df2a568 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,16 @@ There are many ways to contribute: - Answer questions and discuss here on github and on the [Community Site](https://community.influxdata.com/) - [Contribute plugins](CONTRIBUTING.md) +## Minimum Requirements + +Telegraf shares the same [minimum 
requirements][] as Go: +- Linux kernel version 2.6.23 or later +- Windows 7 or later +- FreeBSD 11.2 or later +- MacOS 10.11 El Capitan or later + +[minimum requirements]: https://github.com/golang/go/wiki/MinimumRequirements#minimum-requirements + ## Installation: You can download the binaries directly from the [downloads](https://www.influxdata.com/downloads) page From bf03bb355f8db8ec031b1aef9437591689e9dc26 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Fri, 6 Dec 2019 17:10:59 -0800 Subject: [PATCH 05/79] Build packages with Go 1.13.5 (#6767) (cherry picked from commit 613d0dbd162c154c8d20394b9913757e71c701d3) --- .circleci/config.yml | 4 ++-- CHANGELOG.md | 2 +- Makefile | 8 ++++---- appveyor.yml | 4 ++-- scripts/ci-1.12.docker | 2 +- scripts/ci-1.13.docker | 2 +- 6 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a32bd77a4d6e6..e070c2957c578 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,10 +6,10 @@ defaults: GOFLAGS: -p=8 go-1_12: &go-1_12 docker: - - image: 'quay.io/influxdb/telegraf-ci:1.12.12' + - image: 'quay.io/influxdb/telegraf-ci:1.12.14' go-1_13: &go-1_13 docker: - - image: 'quay.io/influxdb/telegraf-ci:1.13.3' + - image: 'quay.io/influxdb/telegraf-ci:1.13.5' version: 2 jobs: diff --git a/CHANGELOG.md b/CHANGELOG.md index d82e3cb4b2884..efad9d6619b91 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ #### Release Notes -- Official packages built with Go 1.13.3. +- Official packages built with Go 1.13.5. - The `prometheus` input and `prometheus_client` output have a new mapping to and from Telegraf metrics, which can be enabled by setting `metric_version = 2`. The original mapping is deprecated. 
When both plugins have the same setting, diff --git a/Makefile b/Makefile index aeae48e4c617f..9202cc1f45803 100644 --- a/Makefile +++ b/Makefile @@ -131,10 +131,10 @@ plugin-%: .PHONY: ci-1.13 ci-1.13: - docker build -t quay.io/influxdb/telegraf-ci:1.13.3 - < scripts/ci-1.13.docker - docker push quay.io/influxdb/telegraf-ci:1.13.3 + docker build -t quay.io/influxdb/telegraf-ci:1.13.5 - < scripts/ci-1.13.docker + docker push quay.io/influxdb/telegraf-ci:1.13.5 .PHONY: ci-1.12 ci-1.12: - docker build -t quay.io/influxdb/telegraf-ci:1.12.12 - < scripts/ci-1.12.docker - docker push quay.io/influxdb/telegraf-ci:1.12.12 + docker build -t quay.io/influxdb/telegraf-ci:1.12.14 - < scripts/ci-1.12.docker + docker push quay.io/influxdb/telegraf-ci:1.12.14 diff --git a/appveyor.yml b/appveyor.yml index fba80d46fc2dd..66d17b0f4b2af 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -13,11 +13,11 @@ platform: x64 install: - IF NOT EXIST "C:\Cache" mkdir C:\Cache - - IF NOT EXIST "C:\Cache\go1.13.3.msi" curl -o "C:\Cache\go1.13.3.msi" https://storage.googleapis.com/golang/go1.13.3.windows-amd64.msi + - IF NOT EXIST "C:\Cache\go1.13.5.msi" curl -o "C:\Cache\go1.13.5.msi" https://storage.googleapis.com/golang/go1.13.5.windows-amd64.msi - IF NOT EXIST "C:\Cache\gnuwin32-bin.zip" curl -o "C:\Cache\gnuwin32-bin.zip" https://dl.influxdata.com/telegraf/ci/make-3.81-bin.zip - IF NOT EXIST "C:\Cache\gnuwin32-dep.zip" curl -o "C:\Cache\gnuwin32-dep.zip" https://dl.influxdata.com/telegraf/ci/make-3.81-dep.zip - IF EXIST "C:\Go" rmdir /S /Q C:\Go - - msiexec.exe /i "C:\Cache\go1.13.3.msi" /quiet + - msiexec.exe /i "C:\Cache\go1.13.5.msi" /quiet - 7z x "C:\Cache\gnuwin32-bin.zip" -oC:\GnuWin32 -y - 7z x "C:\Cache\gnuwin32-dep.zip" -oC:\GnuWin32 -y - go get -d github.com/golang/dep diff --git a/scripts/ci-1.12.docker b/scripts/ci-1.12.docker index f60f49a43659c..e68618dbcc11e 100644 --- a/scripts/ci-1.12.docker +++ b/scripts/ci-1.12.docker @@ -1,4 +1,4 @@ -FROM golang:1.12.12 +FROM 
golang:1.12.14 RUN chmod -R 755 "$GOPATH" diff --git a/scripts/ci-1.13.docker b/scripts/ci-1.13.docker index c3c9792d27a00..ad71addb9d6ba 100644 --- a/scripts/ci-1.13.docker +++ b/scripts/ci-1.13.docker @@ -1,4 +1,4 @@ -FROM golang:1.13.3 +FROM golang:1.13.5 RUN chmod -R 755 "$GOPATH" From 2ed29496fc64509887df89e6b2150f64de2cd627 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Fri, 6 Dec 2019 17:22:11 -0800 Subject: [PATCH 06/79] Override github.com/satori/go.uuid revision for transitive deps (#6768) While there has been a workaround in place for some time, this change is being made to reduce confusion around if Telegraf is affected by https://github.com/satori/go.uuid/issues/73 (cherry picked from commit f0b0295e3c1000335fe446935ab874b202bd3698) --- Gopkg.lock | 7 +++---- Gopkg.toml | 6 +++++- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/Gopkg.lock b/Gopkg.lock index 00116a7b5b21f..3fabcfb77cfe9 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -81,7 +81,6 @@ version = "v13.3.0" [[projects]] - branch = "master" digest = "1:298712a3ee36b59c3ca91f4183bd75d174d5eaa8b4aed5072831f126e2e752f6" name = "github.com/Microsoft/ApplicationInsights-Go" packages = [ @@ -90,6 +89,7 @@ ] pruneopts = "" revision = "d2df5d440eda5372f24fcac03839a64d6cb5f7e5" + version = "v0.4.2" [[projects]] digest = "1:45ec6eb579713a01991ad07f538fed3b576ee55f5ce9f248320152a9270d9258" @@ -1103,12 +1103,11 @@ revision = "c4fab1ac1bec58281ad0667dc3f0907a9476ac47" [[projects]] - digest = "1:7f569d906bdd20d906b606415b7d794f798f91a62fcfb6a4daa6d50690fb7a3f" + digest = "1:47081c00d00c1dfc9a530c2556e78be391a5c24db1043efe6d406af882a169a1" name = "github.com/satori/go.uuid" packages = ["."] pruneopts = "" - revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3" - version = "v1.2.0" + revision = "b2ce2384e17bbe0c6d34077efa39dbab3e09123b" [[projects]] digest = "1:9024df427b3c8a80a0c4b34e535e5e1ae922c7174e3242b6c7f30ffb3b9f715e" diff --git a/Gopkg.toml b/Gopkg.toml index 
7ecfae42527c4..5b0a2dba45ca8 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -100,7 +100,7 @@ [[constraint]] name = "github.com/Microsoft/ApplicationInsights-Go" - branch = "master" + version = "0.4.2" [[constraint]] name = "github.com/miekg/dns" @@ -304,3 +304,7 @@ [[constraint]] name = "github.com/safchain/ethtool" revision = "42ed695e3de80b9d695f280295fd7994639f209d" + +[[override]] + name = "github.com/satori/go.uuid" + revision = "b2ce2384e17bbe0c6d34077efa39dbab3e09123b" From edbb08bb8dfdbe2a2e932fdd3819c28443f61dbd Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Mon, 9 Dec 2019 12:15:27 -0800 Subject: [PATCH 07/79] Fix unix socket dial arguments in uwsgi input (#6769) (cherry picked from commit faca80fd000d0118e2ec38825a4bd3be00ec9a62) --- plugins/inputs/uwsgi/uwsgi.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/inputs/uwsgi/uwsgi.go b/plugins/inputs/uwsgi/uwsgi.go index 15a9bbe2261fb..a20f3b2bfcaf9 100644 --- a/plugins/inputs/uwsgi/uwsgi.go +++ b/plugins/inputs/uwsgi/uwsgi.go @@ -91,13 +91,13 @@ func (u *Uwsgi) gatherServer(acc telegraf.Accumulator, url *url.URL) error { } s.source = url.Host case "unix": - r, err = net.DialTimeout(url.Scheme, url.Host, u.Timeout.Duration) + r, err = net.DialTimeout(url.Scheme, url.Path, u.Timeout.Duration) if err != nil { return err } s.source, err = os.Hostname() if err != nil { - s.source = url.Host + s.source = "" } case "http": resp, err := u.client.Get(url.String()) From b4d2158ea0880eda48a878f46bd8e81079c6ec22 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Mon, 9 Dec 2019 12:24:11 -0800 Subject: [PATCH 08/79] Update changelog (cherry picked from commit 5034af7af2d475a2952364c7a17cb4cc5f541f4e) --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index efad9d6619b91..d459f3b590fbb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -78,6 +78,7 @@ - [#6705](https://github.com/influxdata/telegraf/issues/6705): Remove trailing 
underscore trimming from json flattener. - [#6421](https://github.com/influxdata/telegraf/issues/6421): Revert change causing cpu usage to be capped at 100 percent. - [#6523](https://github.com/influxdata/telegraf/issues/6523): Accept any media type in the prometheus input. +- [#6769](https://github.com/influxdata/telegraf/issues/6769): Fix unix socket dial arguments in uwsgi input. ## v1.12.6 [2019-11-19] From 78de395318dbad48ca26f1e32d2a32b88cd36e6f Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 10 Dec 2019 12:58:59 -0800 Subject: [PATCH 09/79] Use actual database name in db creation failed log (#6780) (cherry picked from commit eb00f41905999b74557545708b6cdcd7189ccf43) --- plugins/outputs/influxdb/influxdb.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/influxdb/influxdb.go b/plugins/outputs/influxdb/influxdb.go index 50161e8322fb3..be462ba03157f 100644 --- a/plugins/outputs/influxdb/influxdb.go +++ b/plugins/outputs/influxdb/influxdb.go @@ -278,7 +278,7 @@ func (i *InfluxDB) httpClient(ctx context.Context, url *url.URL, proxy *url.URL) err = c.CreateDatabase(ctx, c.Database()) if err != nil { i.Log.Warnf("When writing to [%s]: database %q creation failed: %v", - c.URL(), i.Database, err) + c.URL(), c.Database(), err) } } From 7df0ade79caba560b2fb099997aa66fd2209e4c2 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 10 Dec 2019 12:59:16 -0800 Subject: [PATCH 10/79] Replace colon chars in prometheus output labels with metric_version=1 (#6781) (cherry picked from commit aabc7e7d4f04171cd39fdd945461ae53728d7402) --- .../prometheus_client_v1_test.go | 28 +++++++++++++++++++ .../outputs/prometheus_client/v1/collector.go | 13 +++++---- .../serializers/prometheus/prometheus_test.go | 22 +++++++++++++++ 3 files changed, 57 insertions(+), 6 deletions(-) diff --git a/plugins/outputs/prometheus_client/prometheus_client_v1_test.go b/plugins/outputs/prometheus_client/prometheus_client_v1_test.go index 
6a9770fdcd2ad..adf18c9f0f076 100644 --- a/plugins/outputs/prometheus_client/prometheus_client_v1_test.go +++ b/plugins/outputs/prometheus_client/prometheus_client_v1_test.go @@ -103,6 +103,34 @@ cpu_time_idle{host="example.org"} 42 # HELP cpu_time_idle Telegraf collected metric # TYPE cpu_time_idle counter cpu_time_idle{host="example.org"} 42 +`), + }, + { + name: "replace characters when using string as label", + output: &PrometheusClient{ + Listen: ":0", + MetricVersion: 1, + CollectorsExclude: []string{"gocollector", "process"}, + Path: "/metrics", + StringAsLabel: true, + Log: Logger, + }, + metrics: []telegraf.Metric{ + testutil.MustMetric( + "cpu_time_idle", + map[string]string{}, + map[string]interface{}{ + "host:name": "example.org", + "counter": 42.0, + }, + time.Unix(0, 0), + telegraf.Counter, + ), + }, + expected: []byte(` +# HELP cpu_time_idle Telegraf collected metric +# TYPE cpu_time_idle counter +cpu_time_idle{host_name="example.org"} 42 `), }, { diff --git a/plugins/outputs/prometheus_client/v1/collector.go b/plugins/outputs/prometheus_client/v1/collector.go index 72b09be085f45..7932bbc59f44d 100644 --- a/plugins/outputs/prometheus_client/v1/collector.go +++ b/plugins/outputs/prometheus_client/v1/collector.go @@ -10,6 +10,7 @@ import ( "time" "github.com/influxdata/telegraf" + serializer "github.com/influxdata/telegraf/plugins/serializers/prometheus" "github.com/prometheus/client_golang/prometheus" ) @@ -201,11 +202,11 @@ func (c *Collector) Add(metrics []telegraf.Metric) error { labels := make(map[string]string) for k, v := range tags { - tName := sanitize(k) - if !isValidTagName(tName) { + name, ok := serializer.SanitizeLabelName(k) + if !ok { continue } - labels[tName] = v + labels[name] = v } // Prometheus doesn't have a string value type, so convert string @@ -214,11 +215,11 @@ func (c *Collector) Add(metrics []telegraf.Metric) error { for fn, fv := range point.Fields() { switch fv := fv.(type) { case string: - tName := sanitize(fn) - 
if !isValidTagName(tName) { + name, ok := serializer.SanitizeLabelName(fn) + if !ok { continue } - labels[tName] = fv + labels[name] = fv } } } diff --git a/plugins/serializers/prometheus/prometheus_test.go b/plugins/serializers/prometheus/prometheus_test.go index 632ca148ec6ea..ff082f7b26e3f 100644 --- a/plugins/serializers/prometheus/prometheus_test.go +++ b/plugins/serializers/prometheus/prometheus_test.go @@ -550,6 +550,28 @@ cpu_time_idle{cpu="cpu0"} 42 # HELP cpu_time_idle Telegraf collected metric # TYPE cpu_time_idle untyped cpu_time_idle{cpu="cpu0"} 42 +`), + }, + { + name: "replace characters when using string as label", + config: FormatConfig{ + StringHandling: StringAsLabel, + }, + metrics: []telegraf.Metric{ + testutil.MustMetric( + "cpu", + map[string]string{}, + map[string]interface{}{ + "host:name": "example.org", + "time_idle": 42.0, + }, + time.Unix(1574279268, 0), + ), + }, + expected: []byte(` +# HELP cpu_time_idle Telegraf collected metric +# TYPE cpu_time_idle untyped +cpu_time_idle{host_name="example.org"} 42 `), }, { From 54b72db839d4dc5cb47c23f0a01c6fd705a525a4 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 10 Dec 2019 13:01:53 -0800 Subject: [PATCH 11/79] Update changelog (cherry picked from commit 7cfde0cf4d59c3e44a9fe6baf5933b7b2dacda63) --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d459f3b590fbb..c7a9dc544500e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -79,6 +79,7 @@ - [#6421](https://github.com/influxdata/telegraf/issues/6421): Revert change causing cpu usage to be capped at 100 percent. - [#6523](https://github.com/influxdata/telegraf/issues/6523): Accept any media type in the prometheus input. - [#6769](https://github.com/influxdata/telegraf/issues/6769): Fix unix socket dial arguments in uwsgi input. +- [#6757](https://github.com/influxdata/telegraf/issues/6757): Replace colon chars in prometheus output labels with metric_version=1. 
## v1.12.6 [2019-11-19] From 91d7c6e4cc1b8a76075c45dedc310165599e7c90 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Tue, 10 Dec 2019 13:13:03 -0800 Subject: [PATCH 12/79] Document --service-display-name flag for Windows service (cherry picked from commit 05cefe61bd6bd183369dc36c5cc649083c6570cb) --- docs/WINDOWS_SERVICE.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/WINDOWS_SERVICE.md b/docs/WINDOWS_SERVICE.md index 5b630076c5265..b0b6ee5adf358 100644 --- a/docs/WINDOWS_SERVICE.md +++ b/docs/WINDOWS_SERVICE.md @@ -48,18 +48,21 @@ Telegraf can manage its own service through the --service flag: ## Install multiple services -You can install multiple telegraf instances with --service-name flag: +Running multiple instances of Telegraf is seldom needed, as you can run +multiple instances of each plugin and route metric flow using the metric +filtering options. However, if you do need to run multiple telegraf instances +on a single system, you can install the service with the `--service-name` and +`--service-display-name` flags to give the services unique names: ``` - > C:\"Program Files"\Telegraf\telegraf.exe --service install --service-name telegraf-1 - > C:\"Program Files"\Telegraf\telegraf.exe --service install --service-name telegraf-2 - > C:\"Program Files"\Telegraf\telegraf.exe --service uninstall --service-name telegraf-1 +> C:\"Program Files"\Telegraf\telegraf.exe --service install --service-name telegraf-1 --service-display-name "Telegraf 1" +> C:\"Program Files"\Telegraf\telegraf.exe --service install --service-name telegraf-2 --service-display-name "Telegraf 2" ``` ## Troubleshooting When Telegraf runs as a Windows service, Telegraf logs messages to Windows events log before configuration file with logging settings is loaded. 
-Check event log for an error reported by `telegraf` service in case of Telegraf service reports failure on its start: Event Viewer->Windows Logs->Application +Check event log for an error reported by `telegraf` service in case of Telegraf service reports failure on its start: Event Viewer->Windows Logs->Application **Troubleshooting common error #1067** From 6b01bb86ec0c7671df5e47b52b91f33bafb458d4 Mon Sep 17 00:00:00 2001 From: reimda Date: Wed, 11 Dec 2019 11:23:51 -0700 Subject: [PATCH 13/79] Add documentation about listening on port < 1024 (#6785) (cherry picked from commit 61fbc68279cbff44a08e920b66ed75c8a9bde683) --- plugins/inputs/snmp_trap/README.md | 31 ++++++++++++++++++++++++++- plugins/inputs/snmp_trap/snmp_trap.go | 6 +++++- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/plugins/inputs/snmp_trap/README.md b/plugins/inputs/snmp_trap/README.md index ec3c7ba4c6efa..8c1a2c132adc7 100644 --- a/plugins/inputs/snmp_trap/README.md +++ b/plugins/inputs/snmp_trap/README.md @@ -17,7 +17,11 @@ the SNMP [README.md](../snmp/README.md) for details. ## Transport, local address, and port to listen on. Transport must ## be "udp://". Omit local address to listen on all interfaces. ## example: "udp://127.0.0.1:1234" - # service_address = udp://:162 + ## + ## Special permissions may be required to listen on a port less than + ## 1024. See README.md for details + ## + # service_address = "udp://:162" ## Timeout running snmptranslate command # timeout = "5s" ``` @@ -41,3 +45,28 @@ the SNMP [README.md](../snmp/README.md) for details. 
snmp_trap,mib=SNMPv2-MIB,name=coldStart,oid=.1.3.6.1.6.3.1.1.5.1,source=192.168.122.102,version=2c snmpTrapEnterprise.0="linux",sysUpTimeInstance=1i 1574109187723429814 snmp_trap,mib=NET-SNMP-AGENT-MIB,name=nsNotifyShutdown,oid=.1.3.6.1.4.1.8072.4.0.2,source=192.168.122.102,version=2c sysUpTimeInstance=5803i,snmpTrapEnterprise.0="netSnmpNotificationPrefix" 1574109186555115459 ``` + +### Using a Privileged Port + +On many operating systems, listening on a privileged port (a port +number less than 1024) requires extra permission. Since the default +SNMP trap port 162 is in this category, using telegraf to receive SNMP +traps may need extra permission. + +Instructions for listening on a privileged port vary by operating +system. It is not recommended to run telegraf as superuser in order to +use a privileged port. Instead follow the principle of least privilege +and use a more specific operating system mechanism to allow telegraf to +use the port. You may also be able to have telegraf use an +unprivileged port and then configure a firewall port forward rule from +the privileged port. + +To use a privileged port on Linux, you can use setcap to enable the +CAP_NET_BIND_SERVICE capability on the telegraf binary: + +``` +setcap cap_net_bind_service=+ep /usr/bin/telegraf +``` + +On Mac OS, listening on privileged ports is unrestricted on versions +10.14 and later. diff --git a/plugins/inputs/snmp_trap/snmp_trap.go b/plugins/inputs/snmp_trap/snmp_trap.go index 4b9ce4a563844..7163a853e61bf 100644 --- a/plugins/inputs/snmp_trap/snmp_trap.go +++ b/plugins/inputs/snmp_trap/snmp_trap.go @@ -50,7 +50,11 @@ var sampleConfig = ` ## Transport, local address, and port to listen on. Transport must ## be "udp://". Omit local address to listen on all interfaces. ## example: "udp://127.0.0.1:1234" - # service_address = udp://:162 + ## + ## Special permissions may be required to listen on a port less than + ## 1024. 
See README.md for details + ## + # service_address = "udp://:162" ## Timeout running snmptranslate command # timeout = "5s" ` From 7cf5ea7f59b38e83539c7e4902893c226a3f5c09 Mon Sep 17 00:00:00 2001 From: Daniel Speichert Date: Wed, 11 Dec 2019 14:25:35 -0500 Subject: [PATCH 14/79] Set TrimLeadingSpace when TrimSpace is on in csv parser (#6773) (cherry picked from commit 98585a1853c8f75bc5a9b6c018e6d8565a2ae055) --- plugins/parsers/csv/parser.go | 1 + plugins/parsers/csv/parser_test.go | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/plugins/parsers/csv/parser.go b/plugins/parsers/csv/parser.go index 861844488a68c..b59ea97999426 100644 --- a/plugins/parsers/csv/parser.go +++ b/plugins/parsers/csv/parser.go @@ -45,6 +45,7 @@ func (p *Parser) compile(r *bytes.Reader) (*csv.Reader, error) { if p.Comment != "" { csvReader.Comment = []rune(p.Comment)[0] } + csvReader.TrimLeadingSpace = p.TrimSpace return csvReader, nil } diff --git a/plugins/parsers/csv/parser_test.go b/plugins/parsers/csv/parser_test.go index 6a10c083439eb..1b6fb8f3bc11c 100644 --- a/plugins/parsers/csv/parser_test.go +++ b/plugins/parsers/csv/parser_test.go @@ -243,6 +243,30 @@ func TestTrimSpace(t *testing.T) { require.Equal(t, expectedFields, metrics[0].Fields()) } +func TestTrimSpaceDelimetedBySpace(t *testing.T) { + p := Parser{ + Delimiter: " ", + HeaderRowCount: 1, + TrimSpace: true, + TimeFunc: DefaultTime, + } + testCSV := ` first second third fourth +abcdefgh 0 2 false + abcdef 3.3 4 true + f 0 2 false` + + expectedFields := map[string]interface{}{ + "first": "abcdef", + "second": 3.3, + "third": int64(4), + "fourth": true, + } + + metrics, err := p.Parse([]byte(testCSV)) + require.NoError(t, err) + require.Equal(t, expectedFields, metrics[1].Fields()) +} + func TestSkipRows(t *testing.T) { p := Parser{ HeaderRowCount: 1, From 5cd633935f724e18be570dbbc5a5a7759248d6e7 Mon Sep 17 00:00:00 2001 From: Ben Hymans <6125803+benhymans@users.noreply.github.com> Date: Wed, 11 
Dec 2019 13:42:54 -0600 Subject: [PATCH 15/79] Add option to control collecting global variables to mysql input (#6790) (cherry picked from commit 4def7cc5e17d1347c4cdce22a24b5c3e67c602ca) --- plugins/inputs/mysql/README.md | 3 +++ plugins/inputs/mysql/mysql.go | 22 +++++++++++++++------- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/plugins/inputs/mysql/README.md b/plugins/inputs/mysql/README.md index af00da03d77e5..3e07229da7e15 100644 --- a/plugins/inputs/mysql/README.md +++ b/plugins/inputs/mysql/README.md @@ -69,6 +69,9 @@ This plugin gathers the statistic data from MySQL server ## gather metrics from SHOW BINARY LOGS command output # gather_binary_logs = false + ## gather metrics from SHOW GLOBAL VARIABLES command output + # gather_global_variables = true + ## gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE # gather_table_io_waits = false diff --git a/plugins/inputs/mysql/mysql.go b/plugins/inputs/mysql/mysql.go index 3ca955beb023e..a2dc56505692a 100644 --- a/plugins/inputs/mysql/mysql.go +++ b/plugins/inputs/mysql/mysql.go @@ -36,6 +36,7 @@ type Mysql struct { GatherTableSchema bool `toml:"gather_table_schema"` GatherFileEventsStats bool `toml:"gather_file_events_stats"` GatherPerfEventsStatements bool `toml:"gather_perf_events_statements"` + GatherGlobalVars bool `toml:"gather_global_variables"` IntervalSlow string `toml:"interval_slow"` MetricVersion int `toml:"metric_version"` @@ -94,6 +95,9 @@ const sampleConfig = ` ## gather metrics from SHOW BINARY LOGS command output # gather_binary_logs = false + ## gather metrics from PERFORMANCE_SCHEMA.GLOBAL_VARIABLES + # gather_global_variables = true + ## gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE # gather_table_io_waits = false @@ -134,6 +138,7 @@ const ( defaultPerfEventsStatementsDigestTextLimit = 120 defaultPerfEventsStatementsLimit = 250 defaultPerfEventsStatementsTimeLimit = 86400 + defaultGatherGlobalVars = true ) func (m *Mysql) 
SampleConfig() string { @@ -431,14 +436,16 @@ func (m *Mysql) gatherServer(serv string, acc telegraf.Accumulator) error { return err } - // Global Variables may be gathered less often - if len(m.IntervalSlow) > 0 { - if uint32(time.Since(m.lastT).Seconds()) >= m.scanIntervalSlow { - err = m.gatherGlobalVariables(db, serv, acc) - if err != nil { - return err + if m.GatherGlobalVars { + // Global Variables may be gathered less often + if len(m.IntervalSlow) > 0 { + if uint32(time.Since(m.lastT).Seconds()) >= m.scanIntervalSlow { + err = m.gatherGlobalVariables(db, serv, acc) + if err != nil { + return err + } + m.lastT = time.Now() } - m.lastT = time.Now() } } @@ -1767,6 +1774,7 @@ func init() { PerfEventsStatementsDigestTextLimit: defaultPerfEventsStatementsDigestTextLimit, PerfEventsStatementsLimit: defaultPerfEventsStatementsLimit, PerfEventsStatementsTimeLimit: defaultPerfEventsStatementsTimeLimit, + GatherGlobalVars: defaultGatherGlobalVars, } }) } From cee32672d7b93f05e1bba0874c9ab29a6105e373 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Wed, 11 Dec 2019 11:27:00 -0800 Subject: [PATCH 16/79] Update changelog (cherry picked from commit 7cc3507f222b63c6313ca4f94f445b23c1727dbc) --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c7a9dc544500e..1de41a2bc9192 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -66,6 +66,7 @@ - [#6735](https://github.com/influxdata/telegraf/pull/6735): Support resolution of symlinks in filecount input. - [#6746](https://github.com/influxdata/telegraf/pull/6746): Set message timestamp to the metric time in kafka output. - [#6740](https://github.com/influxdata/telegraf/pull/6740): Add base64decode operation to string processor. +- [#6790](https://github.com/influxdata/telegraf/pull/6790): Add option to control collecting global variables to mysql input. 
#### Bugfixes @@ -80,6 +81,7 @@ - [#6523](https://github.com/influxdata/telegraf/issues/6523): Accept any media type in the prometheus input. - [#6769](https://github.com/influxdata/telegraf/issues/6769): Fix unix socket dial arguments in uwsgi input. - [#6757](https://github.com/influxdata/telegraf/issues/6757): Replace colon chars in prometheus output labels with metric_version=1. +- [#6773](https://github.com/influxdata/telegraf/issues/6773): Set TrimLeadingSpace when TrimSpace is on in csv parser. ## v1.12.6 [2019-11-19] From 44851949646cc2773f659675ec4c61bf33698cce Mon Sep 17 00:00:00 2001 From: reimda Date: Wed, 11 Dec 2019 15:29:18 -0700 Subject: [PATCH 17/79] Interpret SNMP v1 traps as described in RFC 2576 3.1 (#6793) (cherry picked from commit cae701c54bb805d2e715ae97c97f5752923f51aa) --- plugins/inputs/snmp_trap/snmp_trap.go | 38 ++++++- plugins/inputs/snmp_trap/snmp_trap_test.go | 112 +++++++++++++++++++++ 2 files changed, 147 insertions(+), 3 deletions(-) diff --git a/plugins/inputs/snmp_trap/snmp_trap.go b/plugins/inputs/snmp_trap/snmp_trap.go index 7163a853e61bf..03f6a3a2955ea 100644 --- a/plugins/inputs/snmp_trap/snmp_trap.go +++ b/plugins/inputs/snmp_trap/snmp_trap.go @@ -6,6 +6,7 @@ import ( "fmt" "net" "os/exec" + "strconv" "strings" "sync" "time" @@ -150,6 +151,12 @@ func (s *SnmpTrap) Stop() { } } +func setTrapOid(tags map[string]string, oid string, e mibEntry) { + tags["oid"] = oid + tags["name"] = e.oidText + tags["mib"] = e.mibName +} + func makeTrapHandler(s *SnmpTrap) handler { return func(packet *gosnmp.SnmpPacket, addr *net.UDPAddr) { tm := s.timeFunc() @@ -159,6 +166,33 @@ func makeTrapHandler(s *SnmpTrap) handler { tags["version"] = packet.Version.String() tags["source"] = addr.IP.String() + if packet.Version == gosnmp.Version1 { + // Follow the procedure described in RFC 2576 3.1 to + // translate a v1 trap to v2. 
+ var trapOid string + + if packet.GenericTrap > 0 && packet.GenericTrap < 6 { + trapOid = "1.3.6.1.6.3.1.1.5." + strconv.Itoa(packet.GenericTrap+1) + } else if packet.GenericTrap == 6 { + trapOid = packet.Enterprise + ".0." + strconv.Itoa(packet.SpecificTrap) + } + + if trapOid != "" { + e, err := s.lookup(trapOid) + if err != nil { + s.Log.Errorf("Error resolving V1 OID: %v", err) + return + } + setTrapOid(tags, trapOid, e) + } + + if packet.AgentAddress != "" { + tags["agent_address"] = packet.AgentAddress + } + + fields["sysUpTimeInstance"] = packet.Timestamp + } + for _, v := range packet.Variables { // Use system mibs to resolve oids. Don't fall back to // numeric oid because it's not useful enough to the end @@ -193,9 +227,7 @@ func makeTrapHandler(s *SnmpTrap) handler { // 1.3.6.1.6.3.1.1.4.1.0 is SNMPv2-MIB::snmpTrapOID.0. // If v.Name is this oid, set a tag of the trap name. if v.Name == ".1.3.6.1.6.3.1.1.4.1.0" { - tags["oid"] = val - tags["name"] = e.oidText - tags["mib"] = e.mibName + setTrapOid(tags, val, e) continue } default: diff --git a/plugins/inputs/snmp_trap/snmp_trap_test.go b/plugins/inputs/snmp_trap/snmp_trap_test.go index ed31786d81119..68121b0c8be70 100644 --- a/plugins/inputs/snmp_trap/snmp_trap_test.go +++ b/plugins/inputs/snmp_trap/snmp_trap_test.go @@ -220,3 +220,115 @@ func TestMissingOid(t *testing.T) { expected, acc.GetTelegrafMetrics(), testutil.SortMetrics()) } + +func sendV1Trap(t *testing.T, port uint16) (sentTimestamp uint) { + s := &gosnmp.GoSNMP{ + Port: port, + Community: "public", + Version: gosnmp.Version1, + Timeout: time.Duration(2) * time.Second, + Retries: 3, + MaxOids: gosnmp.MaxOids, + Target: "127.0.0.1", + } + + err := s.Connect() + if err != nil { + t.Fatalf("Connect() err: %v", err) + } + defer s.Conn.Close() + + now := uint(time.Now().Unix()) + + pdu := gosnmp.SnmpPDU{ + Name: ".1.2.3.4.5", + Type: gosnmp.OctetString, + Value: "payload", + } + + trap := gosnmp.SnmpTrap{ + Variables: []gosnmp.SnmpPDU{pdu}, + 
Enterprise: ".1.2.3", + AgentAddress: "10.20.30.40", + GenericTrap: 6, // enterpriseSpecific + SpecificTrap: 55, + Timestamp: now, + } + + _, err = s.SendTrap(trap) + if err != nil { + t.Fatalf("SendTrap() err: %v", err) + } + + return now +} + +func TestReceiveV1Trap(t *testing.T) { + const port = 12399 + var fakeTime = time.Now() + + received := make(chan int) + wrap := func(f handler) handler { + return func(p *gosnmp.SnmpPacket, a *net.UDPAddr) { + f(p, a) + received <- 0 + } + } + + s := &SnmpTrap{ + ServiceAddress: "udp://:" + strconv.Itoa(port), + makeHandlerWrapper: wrap, + timeFunc: func() time.Time { + return fakeTime + }, + Log: testutil.Logger{}, + } + require.Nil(t, s.Init()) + var acc testutil.Accumulator + require.Nil(t, s.Start(&acc)) + defer s.Stop() + + defer s.clear() + s.load(".1.2.3.4.5", + mibEntry{ + "valueMIB", + "valueOID", + }) + s.load(".1.2.3.0.55", + mibEntry{ + "enterpriseMIB", + "enterpriseOID", + }) + + sentTimestamp := sendV1Trap(t, port) + + select { + case <-received: + case <-time.After(2 * time.Second): + t.Fatal("timed out waiting for trap to be received") + } + + expected := []telegraf.Metric{ + testutil.MustMetric( + "snmp_trap", // name + map[string]string{ // tags + "oid": ".1.2.3.0.55", + "name": "enterpriseOID", + "mib": "enterpriseMIB", + "version": "1", + "source": "127.0.0.1", + "agent_address": "10.20.30.40", + }, + map[string]interface{}{ // fields + "sysUpTimeInstance": sentTimestamp, + "valueOID": "payload", + }, + fakeTime, + ), + } + + testutil.RequireMetricsEqual(t, + expected, acc.GetTelegrafMetrics(), + testutil.SortMetrics()) + +} From 5f5e314f9997295c95b0da704e2e04c7d13725b2 Mon Sep 17 00:00:00 2001 From: reimda Date: Thu, 12 Dec 2019 11:54:44 -0700 Subject: [PATCH 18/79] Fix off by one bug in snmp trap v1 generic trap field (#6797) (cherry picked from commit a7a639f6a3fb25d66e971306ce8abe6af33984ac) --- plugins/inputs/snmp_trap/snmp_trap.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/plugins/inputs/snmp_trap/snmp_trap.go b/plugins/inputs/snmp_trap/snmp_trap.go index 03f6a3a2955ea..a802762642734 100644 --- a/plugins/inputs/snmp_trap/snmp_trap.go +++ b/plugins/inputs/snmp_trap/snmp_trap.go @@ -171,7 +171,7 @@ func makeTrapHandler(s *SnmpTrap) handler { // translate a v1 trap to v2. var trapOid string - if packet.GenericTrap > 0 && packet.GenericTrap < 6 { + if packet.GenericTrap >= 0 && packet.GenericTrap < 6 { trapOid = "1.3.6.1.6.3.1.1.5." + strconv.Itoa(packet.GenericTrap+1) } else if packet.GenericTrap == 6 { trapOid = packet.Enterprise + ".0." + strconv.Itoa(packet.SpecificTrap) From 19876ee0e95b632043a83402afb615cd4bfa0d84 Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Thu, 12 Dec 2019 11:05:31 -0800 Subject: [PATCH 19/79] Update sample config (cherry picked from commit d6f2857c2b849fd982f602765e5a28bbd758b29f) --- etc/telegraf.conf | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/etc/telegraf.conf b/etc/telegraf.conf index c807c01c72447..dbafd2f8347c4 100644 --- a/etc/telegraf.conf +++ b/etc/telegraf.conf @@ -1516,6 +1516,10 @@ # # [[processors.strings.left]] # # field = "message" # # width = 10 +# +# ## Decode a base64 encoded utf-8 string +# # [[processors.strings.base64decode]] +# # field = "message" # # Restricts the number of tags that can pass through this filter and chooses which tags to preserve when over the limit. @@ -3469,6 +3473,9 @@ # ## gather metrics from SHOW BINARY LOGS command output # # gather_binary_logs = false # +# ## gather metrics from PERFORMANCE_SCHEMA.GLOBAL_VARIABLES +# # gather_global_variables = true +# # ## gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE # # gather_table_io_waits = false # @@ -5741,7 +5748,11 @@ # ## Transport, local address, and port to listen on. Transport must # ## be "udp://". Omit local address to listen on all interfaces. 
# ## example: "udp://127.0.0.1:1234" -# # service_address = udp://:162 +# ## +# ## Special permissions may be required to listen on a port less than +# ## 1024. See README.md for details +# ## +# # service_address = "udp://:162" # ## Timeout running snmptranslate command # # timeout = "5s" From a04530557312daafb3ba11c48accb94b67cc2b6a Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Thu, 12 Dec 2019 11:09:40 -0800 Subject: [PATCH 20/79] Set 1.13.0 release date (cherry picked from commit dde70118c081caa394876e747ae6d7d9f9c2ece6) --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1de41a2bc9192..81472e73793a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -## v1.13 [unreleased] +## v1.13 [2019-12-12] #### Release Notes From 773e4ca5c732154cabd53d6e93fd153ab8f08bcc Mon Sep 17 00:00:00 2001 From: Daniel Nelson Date: Thu, 12 Dec 2019 11:10:34 -0800 Subject: [PATCH 21/79] Telegraf 1.13.0 From c9731bf897daf17649d3bb8547f47342e3e061e0 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 19 Oct 2017 12:26:27 +0200 Subject: [PATCH 22/79] initial import of postgres output plugin --- plugins/outputs/all/all.go | 1 + plugins/outputs/postgresql/README.md | 11 ++++ plugins/outputs/postgresql/postgresql.go | 65 ++++++++++++++++++++++++ 3 files changed, 77 insertions(+) create mode 100644 plugins/outputs/postgresql/README.md create mode 100644 plugins/outputs/postgresql/postgresql.go diff --git a/plugins/outputs/all/all.go b/plugins/outputs/all/all.go index e40230993b51a..7e2f55ee12319 100644 --- a/plugins/outputs/all/all.go +++ b/plugins/outputs/all/all.go @@ -27,6 +27,7 @@ import ( _ "github.com/influxdata/telegraf/plugins/outputs/nats" _ "github.com/influxdata/telegraf/plugins/outputs/nsq" _ "github.com/influxdata/telegraf/plugins/outputs/opentsdb" + _ "github.com/influxdata/telegraf/plugins/outputs/postgresql" _ "github.com/influxdata/telegraf/plugins/outputs/prometheus_client" _ 
"github.com/influxdata/telegraf/plugins/outputs/riemann" _ "github.com/influxdata/telegraf/plugins/outputs/riemann_legacy" diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md new file mode 100644 index 0000000000000..df7e546f69d33 --- /dev/null +++ b/plugins/outputs/postgresql/README.md @@ -0,0 +1,11 @@ +# PostgreSQL Output Plugin + +This output plugin writes all metrics to PostgreSQL. + +### Configuration: + +```toml +# Send metrics to postgres +[[outputs.postgresql]] + # no configuration +``` diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go new file mode 100644 index 0000000000000..48acbc7540008 --- /dev/null +++ b/plugins/outputs/postgresql/postgresql.go @@ -0,0 +1,65 @@ +package postgresql + +import ( + "database/sql" + "fmt" + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs" + "strings" +) + +type Postgresql struct { + db *sql.DB +} + +func (p *Postgresql) Connect() error { + fmt.Println("Connect") + + db, err := sql.Open("pgx", "host=localhost database=postgres") + + if err != nil { + fmt.Println("DB Connect failed") + return nil + } + fmt.Println("DB Connect") + p.db = db + + return nil +} + +func (p *Postgresql) Close() error { + fmt.Println("Close") + return nil +} + +func (p *Postgresql) SampleConfig() string { return "" } +func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } + +func (p *Postgresql) Write(metrics []telegraf.Metric) error { + + for _, m := range metrics { + var keys, values []string + for k, v := range m.Tags() { + keys = append(keys, k) + values = append(values, fmt.Sprintf("'%s'", v)) + } + for k, v := range m.Fields() { + keys = append(keys, k) + switch value := v.(type) { + case int: + values = append(values, fmt.Sprintf("%d", value)) + case float64: + values = append(values, fmt.Sprintf("%f", value)) + case string: + values = append(values, fmt.Sprintf("'%s'", value)) + } + } 
+ fmt.Printf("INSERT INTO %v.%v (%v) VALUES (%v);\n", m.Tags()["host"], m.Name(), strings.Join(keys, ","), strings.Join(values, ",")) + } + + return nil +} + +func init() { + outputs.Add("postgresql", func() telegraf.Output { return &Postgresql{} }) +} From b57e5e481694f570e4d2a04c8570d90aa60c1ed0 Mon Sep 17 00:00:00 2001 From: Sven Date: Fri, 20 Oct 2017 23:32:03 +0200 Subject: [PATCH 23/79] make address configurable --- plugins/outputs/postgresql/postgresql.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 48acbc7540008..16bd6912fe8f4 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -9,13 +9,14 @@ import ( ) type Postgresql struct { - db *sql.DB + db *sql.DB + Address string } func (p *Postgresql) Connect() error { fmt.Println("Connect") - db, err := sql.Open("pgx", "host=localhost database=postgres") + db, err := sql.Open("pgx", p.Address) if err != nil { fmt.Println("DB Connect failed") From 9ff32b34f777e3c3fd2389ca2f8a8dd49a08c3b5 Mon Sep 17 00:00:00 2001 From: Sven Date: Sun, 22 Oct 2017 21:42:40 +0200 Subject: [PATCH 24/79] add helper functions for create table and insert --- plugins/outputs/postgresql/postgresql.go | 119 ++++++++++++++++++++--- 1 file changed, 104 insertions(+), 15 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 16bd6912fe8f4..f149d55ffc1b7 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -9,43 +9,131 @@ import ( ) type Postgresql struct { - db *sql.DB - Address string + db *sql.DB + Address string + CreateTables bool + TagsAsForeignkeys bool + Tables map[string]bool + SchemaTag string } func (p *Postgresql) Connect() error { - fmt.Println("Connect") - db, err := sql.Open("pgx", p.Address) - if err != nil { - fmt.Println("DB Connect failed") - return nil 
+ return err } - fmt.Println("DB Connect") p.db = db + p.Tables = make(map[string]bool) return nil } func (p *Postgresql) Close() error { - fmt.Println("Close") - return nil + return p.db.Close() } func (p *Postgresql) SampleConfig() string { return "" } func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } +func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { + var columns []string + var pk []string + + pk = append(pk, "time") + columns = append(columns, "time timestamptz") + + for column, _ := range metric.Tags() { + pk = append(pk, column) + columns = append(columns, fmt.Sprintf("%s text", column)) + } + + var datatype string + for column, v := range metric.Fields() { + switch v.(type) { + case int64: + datatype = "int" + case float64: + datatype = "real" + } + columns = append(columns, fmt.Sprintf("%s %s", column, datatype)) + } + + sql := fmt.Sprintf("CREATE TABLE %s(%s,PRIMARY KEY(%s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ",")) + fmt.Println(sql) + return sql +} + +func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface{}) { + var columns []string + var placeholder []string + var values []interface{} + + columns = append(columns, "time") + values = append(values, metric.Time().Format("2006-01-02 15:04:05 -0700")) + placeholder = append(placeholder, "?") + + for column, value := range metric.Tags() { + columns = append(columns, column) + values = append(values, value) + placeholder = append(placeholder, "?") + } + + for column, value := range metric.Fields() { + columns = append(columns, column) + values = append(values, value) + placeholder = append(placeholder, "?") + } + + sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", metric.Name(), strings.Join(columns, ","), strings.Join(placeholder, ",")) + fmt.Println(sql) + fmt.Println(values) + return sql, values +} + +func (p *Postgresql) writeMetric(metric telegraf.Metric) error { + tableName := metric.Name() + + 
if p.Tables[tableName] == false { + createStmt := p.generateCreateTable(metric) + _, err := p.db.Exec(createStmt) + if err != nil { + fmt.Println("Error creating table", err) + return err + } + p.Tables[tableName] = true + } + + sql, values := p.generateInsert(metric) + _, err := p.db.Exec(sql, values...) + if err != nil { + fmt.Println("Error during insert", err) + return err + } + + return nil +} + func (p *Postgresql) Write(metrics []telegraf.Metric) error { + for _, metric := range metrics { + p.writeMetric(metric) + } + return nil + var tableName string - for _, m := range metrics { + for _, metric := range metrics { + var columns []string var keys, values []string - for k, v := range m.Tags() { - keys = append(keys, k) + + tableName = metric.Name() + + for name, v := range metric.Tags() { + keys = append(keys, name) values = append(values, fmt.Sprintf("'%s'", v)) } - for k, v := range m.Fields() { + + for k, v := range metric.Fields() { keys = append(keys, k) + columns = append(columns, k) switch value := v.(type) { case int: values = append(values, fmt.Sprintf("%d", value)) @@ -55,7 +143,8 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { values = append(values, fmt.Sprintf("'%s'", value)) } } - fmt.Printf("INSERT INTO %v.%v (%v) VALUES (%v);\n", m.Tags()["host"], m.Name(), strings.Join(keys, ","), strings.Join(values, ",")) + fmt.Printf("INSERT INTO %v (%v) VALUES (%v);\n", tableName, strings.Join(keys, ","), strings.Join(values, ",")) + } return nil From eeecc47d435ed1ea444c6015fd58a34309c91b88 Mon Sep 17 00:00:00 2001 From: Sven Date: Sun, 22 Oct 2017 21:43:20 +0200 Subject: [PATCH 25/79] add tests --- plugins/outputs/postgresql/postgresql_test.go | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 plugins/outputs/postgresql/postgresql_test.go diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go new file mode 100644 index 0000000000000..90780aa45aee2 --- 
/dev/null +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -0,0 +1,59 @@ +package postgresql + +import ( + "testing" + "time" + + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/metric" + + "github.com/stretchr/testify/assert" +) + +func TestPostgresqlCreateStatement(t *testing.T) { + p := Postgresql{} + timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) + + var m telegraf.Metric + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) + assert.Equal(t, "CREATE TABLE m(time timestamptz,f real,PRIMARY KEY(time))", p.generateCreateTable(m)) + + m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, "CREATE TABLE m(time timestamptz,i int,PRIMARY KEY(time))", p.generateCreateTable(m)) + + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) + assert.Equal(t, "CREATE TABLE m(time timestamptz,f real,i int,PRIMARY KEY(time))", p.generateCreateTable(m)) + + m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, "CREATE TABLE m(time timestamptz,k text,i int,PRIMARY KEY(time,k))", p.generateCreateTable(m)) + + m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, "CREATE TABLE m(time timestamptz,k1 text,k2 text,i int,PRIMARY KEY(time,k1,k2))", p.generateCreateTable(m)) +} + +func TestPostgresqlInsertStatement(t *testing.T) { + p := Postgresql{} + timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) + + var m telegraf.Metric + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) + sql, values := p.generateInsert(m) + assert.Equal(t, "INSERT INTO m(time,f) VALUES(?,?)", sql) + assert.Equal(t, []interface{}{timestamp, float64(3.14)}, values) + + m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) + sql, 
values = p.generateInsert(m) + assert.Equal(t, "INSERT INTO m(time,i) VALUES(?,?)", sql) + + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) + sql, values = p.generateInsert(m) + assert.Equal(t, "INSERT INTO m(time,f,i) VALUES(?,?,?)", sql) + + m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) + sql, values = p.generateInsert(m) + assert.Equal(t, "INSERT INTO m(time,k,i) VALUES(?,?,?)", sql) + + m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) + sql, values = p.generateInsert(m) + assert.Equal(t, "INSERT INTO m(time,k1,k2,i) VALUES(?,?,?,?)", sql) +} From 4337a32e0d479c0373c3c81738f2f5d3a52245cf Mon Sep 17 00:00:00 2001 From: Sven Date: Tue, 24 Oct 2017 21:39:31 +0200 Subject: [PATCH 26/79] fix sql placeholder --- plugins/outputs/postgresql/postgresql.go | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index f149d55ffc1b7..13f2d77944c6f 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -51,9 +51,9 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { for column, v := range metric.Fields() { switch v.(type) { case int64: - datatype = "int" + datatype = "int8" case float64: - datatype = "real" + datatype = "float8" } columns = append(columns, fmt.Sprintf("%s %s", column, datatype)) } @@ -65,28 +65,27 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface{}) { var columns []string - var placeholder []string var values []interface{} columns = append(columns, "time") values = append(values, metric.Time().Format("2006-01-02 15:04:05 -0700")) - placeholder = append(placeholder, "?") for column, value := range 
metric.Tags() { columns = append(columns, column) values = append(values, value) - placeholder = append(placeholder, "?") } for column, value := range metric.Fields() { columns = append(columns, column) values = append(values, value) - placeholder = append(placeholder, "?") + } + + var placeholder []string + for i := 1; i <= len(values); i++ { + placeholder = append(placeholder, fmt.Sprintf("$%d", i)) } sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", metric.Name(), strings.Join(columns, ","), strings.Join(placeholder, ",")) - fmt.Println(sql) - fmt.Println(values) return sql, values } From 21cabc0f3f256139aa521630dfbfaa611f98939b Mon Sep 17 00:00:00 2001 From: Sven Date: Tue, 24 Oct 2017 22:20:48 +0200 Subject: [PATCH 27/79] let pgx handle time conversion, remove old code --- plugins/outputs/postgresql/postgresql.go | 35 +++--------------------- 1 file changed, 4 insertions(+), 31 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 13f2d77944c6f..6c84309a3ee07 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -68,7 +68,7 @@ func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface var values []interface{} columns = append(columns, "time") - values = append(values, metric.Time().Format("2006-01-02 15:04:05 -0700")) + values = append(values, metric.Time()) for column, value := range metric.Tags() { columns = append(columns, column) @@ -114,38 +114,11 @@ func (p *Postgresql) writeMetric(metric telegraf.Metric) error { func (p *Postgresql) Write(metrics []telegraf.Metric) error { for _, metric := range metrics { - p.writeMetric(metric) - } - return nil - var tableName string - - for _, metric := range metrics { - var columns []string - var keys, values []string - - tableName = metric.Name() - - for name, v := range metric.Tags() { - keys = append(keys, name) - values = append(values, fmt.Sprintf("'%s'", v)) - } - - for k, v := 
range metric.Fields() { - keys = append(keys, k) - columns = append(columns, k) - switch value := v.(type) { - case int: - values = append(values, fmt.Sprintf("%d", value)) - case float64: - values = append(values, fmt.Sprintf("%f", value)) - case string: - values = append(values, fmt.Sprintf("'%s'", value)) - } + err := p.writeMetric(metric) + if err != nil { + return err } - fmt.Printf("INSERT INTO %v (%v) VALUES (%v);\n", tableName, strings.Join(keys, ","), strings.Join(values, ",")) - } - return nil } From 1380298a46330b4c92bac592d267f45229dae875 Mon Sep 17 00:00:00 2001 From: Sven Date: Tue, 24 Oct 2017 22:21:41 +0200 Subject: [PATCH 28/79] adjust test cases to datatype changes --- plugins/outputs/postgresql/postgresql_test.go | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 90780aa45aee2..502763fef3de3 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -16,19 +16,19 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,f real,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, "CREATE TABLE m(time timestamptz,f float8,PRIMARY KEY(time))", p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,i int,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, "CREATE TABLE m(time timestamptz,i int8,PRIMARY KEY(time))", p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,f real,i int,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, "CREATE TABLE m(time 
timestamptz,f float8,i int8,PRIMARY KEY(time))", p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,k text,i int,PRIMARY KEY(time,k))", p.generateCreateTable(m)) + assert.Equal(t, "CREATE TABLE m(time timestamptz,k text,i int8,PRIMARY KEY(time,k))", p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,k1 text,k2 text,i int,PRIMARY KEY(time,k1,k2))", p.generateCreateTable(m)) + assert.Equal(t, "CREATE TABLE m(time timestamptz,k1 text,k2 text,i int8,PRIMARY KEY(time,k1,k2))", p.generateCreateTable(m)) } func TestPostgresqlInsertStatement(t *testing.T) { @@ -38,22 +38,22 @@ func TestPostgresqlInsertStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) sql, values := p.generateInsert(m) - assert.Equal(t, "INSERT INTO m(time,f) VALUES(?,?)", sql) - assert.Equal(t, []interface{}{timestamp, float64(3.14)}, values) + assert.Equal(t, "INSERT INTO m(time,f) VALUES($1,$2)", sql) + assert.EqualValues(t, []interface{}{timestamp, float64(3.14)}, values) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) sql, values = p.generateInsert(m) - assert.Equal(t, "INSERT INTO m(time,i) VALUES(?,?)", sql) + assert.Equal(t, "INSERT INTO m(time,i) VALUES($1,$2)", sql) m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) sql, values = p.generateInsert(m) - assert.Equal(t, "INSERT INTO m(time,f,i) VALUES(?,?,?)", sql) + assert.Equal(t, "INSERT INTO m(time,f,i) VALUES($1,$2,$3)", sql) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) sql, values = p.generateInsert(m) - assert.Equal(t, "INSERT INTO m(time,k,i) VALUES(?,?,?)", sql) + 
assert.Equal(t, "INSERT INTO m(time,k,i) VALUES($1,$2,$3)", sql) m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) sql, values = p.generateInsert(m) - assert.Equal(t, "INSERT INTO m(time,k1,k2,i) VALUES(?,?,?,?)", sql) + assert.Equal(t, "INSERT INTO m(time,k1,k2,i) VALUES($1,$2,$3,$4)", sql) } From cf16e1f877dd2a64ad64c32c6adbc2a77afe4a0b Mon Sep 17 00:00:00 2001 From: Sven Date: Fri, 27 Oct 2017 17:08:30 +0200 Subject: [PATCH 29/79] remove debug prints --- plugins/outputs/postgresql/postgresql.go | 2 -- 1 file changed, 2 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 6c84309a3ee07..e8a239b836f55 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -59,7 +59,6 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } sql := fmt.Sprintf("CREATE TABLE %s(%s,PRIMARY KEY(%s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ",")) - fmt.Println(sql) return sql } @@ -96,7 +95,6 @@ func (p *Postgresql) writeMetric(metric telegraf.Metric) error { createStmt := p.generateCreateTable(metric) _, err := p.db.Exec(createStmt) if err != nil { - fmt.Println("Error creating table", err) return err } p.Tables[tableName] = true From c4a8a248dc2535f4814e9636cffa7c57e9efdd60 Mon Sep 17 00:00:00 2001 From: Sven Date: Fri, 27 Oct 2017 23:37:28 +0200 Subject: [PATCH 30/79] check if table exists before creating --- plugins/outputs/postgresql/postgresql.go | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index e8a239b836f55..e49a064ddd890 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -88,10 +88,24 @@ func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface return sql, values } 
+func (p *Postgresql) tableExists(tableName string) bool { + stmt := "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname NOT IN ('information_schema','pg_catalog');" + result, err := p.db.Exec(stmt, tableName) + if err != nil { + return false + } + if count, _ := result.RowsAffected(); count == 1 { + p.Tables[tableName] = true + return true + } + return false + +} + func (p *Postgresql) writeMetric(metric telegraf.Metric) error { tableName := metric.Name() - if p.Tables[tableName] == false { + if p.Tables[tableName] == false && p.tableExists(tableName) == false { createStmt := p.generateCreateTable(metric) _, err := p.db.Exec(createStmt) if err != nil { From 26b12dc01e099670ec854393f3716bf0b4436ba1 Mon Sep 17 00:00:00 2001 From: Sven Date: Sat, 28 Oct 2017 17:13:58 +0200 Subject: [PATCH 31/79] allow skipping tags --- plugins/outputs/postgresql/postgresql.go | 30 ++++++++++++++++++------ 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index e49a064ddd890..add76b0713c4a 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -3,18 +3,18 @@ package postgresql import ( "database/sql" "fmt" + "log" + "strings" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" - "strings" ) type Postgresql struct { - db *sql.DB - Address string - CreateTables bool - TagsAsForeignkeys bool - Tables map[string]bool - SchemaTag string + db *sql.DB + Address string + IgnoredTags []string + Tables map[string]bool } func (p *Postgresql) Connect() error { @@ -32,6 +32,15 @@ func (p *Postgresql) Close() error { return p.db.Close() } +func contains(haystack []string, needle string) bool { + for _, key := range haystack { + if key == needle { + return true + } + } + return false +} + func (p *Postgresql) SampleConfig() string { return "" } func (p *Postgresql) Description() string { return "Send 
metrics to PostgreSQL" } @@ -43,6 +52,9 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { columns = append(columns, "time timestamptz") for column, _ := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } pk = append(pk, column) columns = append(columns, fmt.Sprintf("%s text", column)) } @@ -70,6 +82,9 @@ func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface values = append(values, metric.Time()) for column, value := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } columns = append(columns, column) values = append(values, value) } @@ -92,6 +107,7 @@ func (p *Postgresql) tableExists(tableName string) bool { stmt := "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname NOT IN ('information_schema','pg_catalog');" result, err := p.db.Exec(stmt, tableName) if err != nil { + log.Printf("E! Error checking for existence of metric table %s: %v", tableName, err) return false } if count, _ := result.RowsAffected(); count == 1 { From 96639c2cb0fc944707f4c0e735c1be27ddcc93f6 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 18:26:26 +0100 Subject: [PATCH 32/79] refactoring --- plugins/outputs/postgresql/postgresql.go | 111 +++++++++++++---------- 1 file changed, 64 insertions(+), 47 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index add76b0713c4a..4f0bc2e3b642a 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -11,10 +11,11 @@ import ( ) type Postgresql struct { - db *sql.DB - Address string - IgnoredTags []string - Tables map[string]bool + db *sql.DB + Address string + IgnoredTags []string + TagsAsForeignkeys bool + Tables map[string]bool } func (p *Postgresql) Connect() error { @@ -55,8 +56,13 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { if contains(p.IgnoredTags, column) { continue } - pk = 
append(pk, column) - columns = append(columns, fmt.Sprintf("%s text", column)) + if p.TagsAsForeignkeys { + pk = append(pk, column+"_id") + columns = append(columns, fmt.Sprintf("%s_id int8", column)) + } else { + pk = append(pk, column) + columns = append(columns, fmt.Sprintf("%s text", column)) + } } var datatype string @@ -74,32 +80,14 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { return sql } -func (p *Postgresql) generateInsert(metric telegraf.Metric) (string, []interface{}) { - var columns []string - var values []interface{} - - columns = append(columns, "time") - values = append(values, metric.Time()) - - for column, value := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue - } - columns = append(columns, column) - values = append(values, value) - } - - for column, value := range metric.Fields() { - columns = append(columns, column) - values = append(values, value) - } +func (p *Postgresql) generateInsert(tablename string, columns []string, values []interface{}) (string, []interface{}) { var placeholder []string for i := 1; i <= len(values); i++ { placeholder = append(placeholder, fmt.Sprintf("$%d", i)) } - sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", metric.Name(), strings.Join(columns, ","), strings.Join(placeholder, ",")) + sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", tablename, strings.Join(columns, ","), strings.Join(placeholder, ",")) return sql, values } @@ -115,35 +103,64 @@ func (p *Postgresql) tableExists(tableName string) bool { return true } return false - } -func (p *Postgresql) writeMetric(metric telegraf.Metric) error { - tableName := metric.Name() +func (p *Postgresql) Write(metrics []telegraf.Metric) error { + for _, metric := range metrics { + tablename := metric.Name() + + // create table if needed + if p.Tables[tablename] == false && p.tableExists(tablename) == false { + createStmt := p.generateCreateTable(metric) + _, err := p.db.Exec(createStmt) + if err != nil { + 
return err + } + p.Tables[tablename] = true + } - if p.Tables[tableName] == false && p.tableExists(tableName) == false { - createStmt := p.generateCreateTable(metric) - _, err := p.db.Exec(createStmt) - if err != nil { - return err + var columns []string + var values []interface{} + + columns = append(columns, "time") + values = append(values, metric.Time()) + + for column, value := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } + + if p.TagsAsForeignkeys { + // var value_id int + // query := fmt.Sprintf("SELECT %s_id FROM %s_%s WHERE %s=$1", column, metric.Name(), column, column) + // err := p.db.QueryRow(query, value).Scan(&value_id) + // + // if err != nil { + // query := fmt.Sprintf("INSERT INTO %s_%s(%s) VALUES($1) RETURNING %s_id", metric.Name(), column, column, column) + // err := p.db.QueryRow(query, value).Scan(&value_id) + // } + // columns = append(columns, column+"_id") + // values = append(values, value_id) + } else { + columns = append(columns, column) + values = append(values, value) + } } - p.Tables[tableName] = true - } - sql, values := p.generateInsert(metric) - _, err := p.db.Exec(sql, values...) - if err != nil { - fmt.Println("Error during insert", err) - return err - } + for column, value := range metric.Fields() { + columns = append(columns, column) + values = append(values, value) + } - return nil -} + var placeholder []string + for i := 1; i <= len(values); i++ { + placeholder = append(placeholder, fmt.Sprintf("$%d", i)) + } -func (p *Postgresql) Write(metrics []telegraf.Metric) error { - for _, metric := range metrics { - err := p.writeMetric(metric) + sql, values := p.generateInsert(tablename, columns, values) + _, err := p.db.Exec(sql, values...) 
if err != nil { + fmt.Println("Error during insert", err) return err } } From 01f9de110333db763bc005d3c280a1b3da9fb6e1 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 20:05:15 +0100 Subject: [PATCH 33/79] implement TagsAsForeignkeys --- plugins/outputs/postgresql/postgresql.go | 30 ++++++++++++++---------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 4f0bc2e3b642a..8005ca76b0fc3 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -48,6 +48,7 @@ func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { var columns []string var pk []string + var sql []string pk = append(pk, "time") columns = append(columns, "time timestamptz") @@ -59,6 +60,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { if p.TagsAsForeignkeys { pk = append(pk, column+"_id") columns = append(columns, fmt.Sprintf("%s_id int8", column)) + sql = append(sql, fmt.Sprintf("CREATE TABLE %s_%s(%s_id serial primary key,%s text unique)", metric.Name(), column, column, column)) } else { pk = append(pk, column) columns = append(columns, fmt.Sprintf("%s text", column)) @@ -76,8 +78,8 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { columns = append(columns, fmt.Sprintf("%s %s", column, datatype)) } - sql := fmt.Sprintf("CREATE TABLE %s(%s,PRIMARY KEY(%s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ",")) - return sql + sql = append(sql, fmt.Sprintf("CREATE TABLE %s(%s,PRIMARY KEY(%s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ","))) + return strings.Join(sql, ";") } func (p *Postgresql) generateInsert(tablename string, columns []string, values []interface{}) (string, []interface{}) { @@ -131,16 +133,20 @@ func (p *Postgresql) Write(metrics 
[]telegraf.Metric) error { } if p.TagsAsForeignkeys { - // var value_id int - // query := fmt.Sprintf("SELECT %s_id FROM %s_%s WHERE %s=$1", column, metric.Name(), column, column) - // err := p.db.QueryRow(query, value).Scan(&value_id) - // - // if err != nil { - // query := fmt.Sprintf("INSERT INTO %s_%s(%s) VALUES($1) RETURNING %s_id", metric.Name(), column, column, column) - // err := p.db.QueryRow(query, value).Scan(&value_id) - // } - // columns = append(columns, column+"_id") - // values = append(values, value_id) + var value_id int + + query := fmt.Sprintf("SELECT %s_id FROM %s_%s WHERE %s=$1", column, tablename, column, column) + err := p.db.QueryRow(query, value).Scan(&value_id) + if err != nil { + query := fmt.Sprintf("INSERT INTO %s_%s(%s) VALUES($1) RETURNING %s_id", tablename, column, column, column) + err := p.db.QueryRow(query, value).Scan(&value_id) + if err != nil { + return err + } + } + + columns = append(columns, column+"_id") + values = append(values, value_id) } else { columns = append(columns, column) values = append(values, value) From 8d9b9a2b19930ddf89d75e97f89f84040780a0a7 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 20:29:49 +0100 Subject: [PATCH 34/79] fix tests --- plugins/outputs/postgresql/postgresql_test.go | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 502763fef3de3..905323080b4a6 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -35,25 +35,18 @@ func TestPostgresqlInsertStatement(t *testing.T) { p := Postgresql{} timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) - var m telegraf.Metric - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - sql, values := p.generateInsert(m) + sql, _ := p.generateInsert("m", []string{"time", "f"}, []interface{}{timestamp, 3.1}) 
assert.Equal(t, "INSERT INTO m(time,f) VALUES($1,$2)", sql) - assert.EqualValues(t, []interface{}{timestamp, float64(3.14)}, values) - m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - sql, values = p.generateInsert(m) + sql, _ = p.generateInsert("m", []string{"time", "i"}, []interface{}{timestamp, 3}) assert.Equal(t, "INSERT INTO m(time,i) VALUES($1,$2)", sql) - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) - sql, values = p.generateInsert(m) + sql, _ = p.generateInsert("m", []string{"time", "f", "i"}, []interface{}{timestamp, 3.1, 3}) assert.Equal(t, "INSERT INTO m(time,f,i) VALUES($1,$2,$3)", sql) - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - sql, values = p.generateInsert(m) + sql, _ = p.generateInsert("m", []string{"time", "k", "i"}, []interface{}{timestamp, "v", 3}) assert.Equal(t, "INSERT INTO m(time,k,i) VALUES($1,$2,$3)", sql) - m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) - sql, values = p.generateInsert(m) + sql, _ = p.generateInsert("m", []string{"time", "k1", "k2", "i"}, []interface{}{timestamp, "v1", "v2", 3}) assert.Equal(t, "INSERT INTO m(time,k1,k2,i) VALUES($1,$2,$3,$4)", sql) } From bb78f5f1d6ae6e9956aedeba97d2d544c19ba37e Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 20:58:23 +0100 Subject: [PATCH 35/79] add SampleConfig --- plugins/outputs/postgresql/postgresql.go | 26 +++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 8005ca76b0fc3..ea35205a50a9f 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -42,7 +42,31 @@ func contains(haystack []string, needle string) bool { return false } -func (p *Postgresql) SampleConfig() string { return "" } +var 
sampleConfig = ` + ## specify address via a url matching: + ## postgres://[pqgotest[:password]]@localhost[/dbname]\ + ## ?sslmode=[disable|verify-ca|verify-full] + ## or a simple string: + ## host=localhost user=pqotest password=... sslmode=... dbname=app_production + ## + ## All connection parameters are optional. + ## + ## Without the dbname parameter, the driver will default to a database + ## with the same name as the user. This dbname is just for instantiating a + ## connection with the server and doesn't restrict the databases we are trying + ## to grab metrics for. + ## + address = "host=localhost user=postgres sslmode=verify-full" + + ## A list of tags to exclude from storing. If not specified, all tags are stored. + # ignored_tags = ["foo", "bar"] + + ## Store tags as foreign keys in the metrics table. Default is false. + # tags_as_foreignkeys = false + +` + +func (p *Postgresql) SampleConfig() string { return sampleConfig } func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { From 6ecd59d6b19aafc7ebb68ba773deaf3271447584 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 21:00:38 +0100 Subject: [PATCH 36/79] register driver --- plugins/outputs/postgresql/postgresql.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index ea35205a50a9f..5f3641b6a5ae9 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -6,6 +6,9 @@ import ( "log" "strings" + // register in driver. 
+ _ "github.com/jackc/pgx/stdlib" + "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" ) From db5609d518a739ba9de06774701ccba87fa175a6 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 5 Nov 2017 21:04:43 +0100 Subject: [PATCH 37/79] update README --- plugins/outputs/postgresql/README.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index df7e546f69d33..61d5ace8d307d 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -7,5 +7,11 @@ This output plugin writes all metrics to PostgreSQL. ```toml # Send metrics to postgres [[outputs.postgresql]] - # no configuration + address = "host=localhost user=postgres sslmode=verify-full" + + ## A list of tags to exclude from storing. If not specified, all tags are stored. + # ignored_tags = ["foo", "bar"] + + ## Store tags as foreign keys in the metrics table. Default is false. 
+ # tags_as_foreignkeys = false ``` From 89f87d586c2f1154e7d95f4e691996ee7d6a64d1 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Mon, 6 Nov 2017 20:31:12 +0100 Subject: [PATCH 38/79] prepare for create table template --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 5f3641b6a5ae9..a18fc0499402e 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -105,7 +105,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { columns = append(columns, fmt.Sprintf("%s %s", column, datatype)) } - sql = append(sql, fmt.Sprintf("CREATE TABLE %s(%s,PRIMARY KEY(%s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ","))) + sql = append(sql, fmt.Sprintf("CREATE TABLE %[1]s(%[2]s,PRIMARY KEY(%[3]s))", metric.Name(), strings.Join(columns, ","), strings.Join(pk, ","))) return strings.Join(sql, ";") } From bdd220f4c1b6d393c473856d4b9acbe24d541433 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 7 Nov 2017 09:24:27 +0100 Subject: [PATCH 39/79] quote identifier --- plugins/outputs/postgresql/postgresql.go | 49 ++++++++++--------- plugins/outputs/postgresql/postgresql_test.go | 20 ++++---- 2 files changed, 36 insertions(+), 33 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index a18fc0499402e..c81685ff2b932 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -6,8 +6,7 @@ import ( "log" "strings" - // register in driver. 
- _ "github.com/jackc/pgx/stdlib" + "github.com/jackc/pgx" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" @@ -45,6 +44,10 @@ func contains(haystack []string, needle string) bool { return false } +func quoteIdent(name string) string { + return pgx.Identifier{name}.Sanitize() +} + var sampleConfig = ` ## specify address via a url matching: ## postgres://[pqgotest[:password]]@localhost[/dbname]\ @@ -77,20 +80,20 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { var pk []string var sql []string - pk = append(pk, "time") - columns = append(columns, "time timestamptz") + pk = append(pk, quoteIdent("time")) + columns = append(columns, quoteIdent("time")+" timestamptz") for column, _ := range metric.Tags() { if contains(p.IgnoredTags, column) { continue } if p.TagsAsForeignkeys { - pk = append(pk, column+"_id") - columns = append(columns, fmt.Sprintf("%s_id int8", column)) - sql = append(sql, fmt.Sprintf("CREATE TABLE %s_%s(%s_id serial primary key,%s text unique)", metric.Name(), column, column, column)) + pk = append(pk, quoteIdent(column+"_id")) + columns = append(columns, fmt.Sprintf("%s int8", quoteIdent(column+"_id"))) + sql = append(sql, fmt.Sprintf("CREATE TABLE %s(%s serial primary key,%s text unique)", quoteIdent(metric.Name()+"_"+column), quoteIdent(column+"_id"), quoteIdent(column))) } else { - pk = append(pk, column) - columns = append(columns, fmt.Sprintf("%s text", column)) + pk = append(pk, quoteIdent(column)) + columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) } } @@ -102,10 +105,10 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { case float64: datatype = "float8" } - columns = append(columns, fmt.Sprintf("%s %s", column, datatype)) + columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } - sql = append(sql, fmt.Sprintf("CREATE TABLE %[1]s(%[2]s,PRIMARY KEY(%[3]s))", metric.Name(), strings.Join(columns, ","), 
strings.Join(pk, ","))) + sql = append(sql, fmt.Sprintf("CREATE TABLE %[1]s(%[2]s,PRIMARY KEY(%[3]s))", quoteIdent(metric.Name()), strings.Join(columns, ","), strings.Join(pk, ","))) return strings.Join(sql, ";") } @@ -116,7 +119,12 @@ func (p *Postgresql) generateInsert(tablename string, columns []string, values [ placeholder = append(placeholder, fmt.Sprintf("$%d", i)) } - sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", tablename, strings.Join(columns, ","), strings.Join(placeholder, ",")) + var quoted []string + for _, column := range columns { + quoted = append(quoted, quoteIdent(column)) + } + + sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", quoteIdent(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) return sql, values } @@ -151,7 +159,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { var columns []string var values []interface{} - columns = append(columns, "time") + columns = append(columns, quoteIdent("time")) values = append(values, metric.Time()) for column, value := range metric.Tags() { @@ -162,34 +170,29 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { if p.TagsAsForeignkeys { var value_id int - query := fmt.Sprintf("SELECT %s_id FROM %s_%s WHERE %s=$1", column, tablename, column, column) + query := fmt.Sprintf("SELECT %s FROM %s WHERE %s=$1", quoteIdent(column+"_id"), quoteIdent(tablename+"_"+column), quoteIdent(column)) err := p.db.QueryRow(query, value).Scan(&value_id) if err != nil { - query := fmt.Sprintf("INSERT INTO %s_%s(%s) VALUES($1) RETURNING %s_id", tablename, column, column, column) + query := fmt.Sprintf("INSERT INTO %s(%s) VALUES($1) RETURNING %s", quoteIdent(tablename+"_"+column), quoteIdent(column), quoteIdent(column+"_id")) err := p.db.QueryRow(query, value).Scan(&value_id) if err != nil { return err } } - columns = append(columns, column+"_id") + columns = append(columns, quoteIdent(column+"_id")) values = append(values, value_id) } else { - columns = append(columns, 
column) + columns = append(columns, quoteIdent(column)) values = append(values, value) } } for column, value := range metric.Fields() { - columns = append(columns, column) + columns = append(columns, quoteIdent(column)) values = append(values, value) } - var placeholder []string - for i := 1; i <= len(values); i++ { - placeholder = append(placeholder, fmt.Sprintf("$%d", i)) - } - sql, values := p.generateInsert(tablename, columns, values) _, err := p.db.Exec(sql, values...) if err != nil { diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 905323080b4a6..4888cf56698c4 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -16,19 +16,19 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,f float8,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"f" float8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,i int8,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,f float8,i int8,PRIMARY KEY(time))", p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"f" float8,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,k text,i int8,PRIMARY KEY(time,k))", p.generateCreateTable(m)) + 
assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"k" text,"i" int8,PRIMARY KEY("time","k"))`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, "CREATE TABLE m(time timestamptz,k1 text,k2 text,i int8,PRIMARY KEY(time,k1,k2))", p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"k1" text,"k2" text,"i" int8,PRIMARY KEY("time","k1","k2"))`, p.generateCreateTable(m)) } func TestPostgresqlInsertStatement(t *testing.T) { @@ -36,17 +36,17 @@ func TestPostgresqlInsertStatement(t *testing.T) { timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) sql, _ := p.generateInsert("m", []string{"time", "f"}, []interface{}{timestamp, 3.1}) - assert.Equal(t, "INSERT INTO m(time,f) VALUES($1,$2)", sql) + assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) sql, _ = p.generateInsert("m", []string{"time", "i"}, []interface{}{timestamp, 3}) - assert.Equal(t, "INSERT INTO m(time,i) VALUES($1,$2)", sql) + assert.Equal(t, `INSERT INTO "m"("time","i") VALUES($1,$2)`, sql) sql, _ = p.generateInsert("m", []string{"time", "f", "i"}, []interface{}{timestamp, 3.1, 3}) - assert.Equal(t, "INSERT INTO m(time,f,i) VALUES($1,$2,$3)", sql) + assert.Equal(t, `INSERT INTO "m"("time","f","i") VALUES($1,$2,$3)`, sql) sql, _ = p.generateInsert("m", []string{"time", "k", "i"}, []interface{}{timestamp, "v", 3}) - assert.Equal(t, "INSERT INTO m(time,k,i) VALUES($1,$2,$3)", sql) + assert.Equal(t, `INSERT INTO "m"("time","k","i") VALUES($1,$2,$3)`, sql) sql, _ = p.generateInsert("m", []string{"time", "k1", "k2", "i"}, []interface{}{timestamp, "v1", "v2", 3}) - assert.Equal(t, "INSERT INTO m(time,k1,k2,i) VALUES($1,$2,$3,$4)", sql) + assert.Equal(t, `INSERT INTO "m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) } From f1819f8e732ceb6200e210013c507151613d6314 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 7 Nov 2017 12:40:40 +0100 
Subject: [PATCH 40/79] refactor generateInsert --- plugins/outputs/postgresql/postgresql.go | 33 +++++++++---------- plugins/outputs/postgresql/postgresql_test.go | 11 +++---- 2 files changed, 21 insertions(+), 23 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index c81685ff2b932..443116e676310 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -88,9 +88,12 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { continue } if p.TagsAsForeignkeys { - pk = append(pk, quoteIdent(column+"_id")) - columns = append(columns, fmt.Sprintf("%s int8", quoteIdent(column+"_id"))) - sql = append(sql, fmt.Sprintf("CREATE TABLE %s(%s serial primary key,%s text unique)", quoteIdent(metric.Name()+"_"+column), quoteIdent(column+"_id"), quoteIdent(column))) + key := quoteIdent(column + "_id") + table := quoteIdent(metric.Name() + "_" + column) + + pk = append(pk, key) + columns = append(columns, fmt.Sprintf("%s int8", key)) + sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(%s serial primary key,%s text unique)", table, key, quoteIdent(column))) } else { pk = append(pk, quoteIdent(column)) columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) @@ -112,20 +115,16 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { return strings.Join(sql, ";") } -func (p *Postgresql) generateInsert(tablename string, columns []string, values []interface{}) (string, []interface{}) { - - var placeholder []string - for i := 1; i <= len(values); i++ { - placeholder = append(placeholder, fmt.Sprintf("$%d", i)) - } +func (p *Postgresql) generateInsert(tablename string, columns []string) string { - var quoted []string - for _, column := range columns { + var placeholder, quoted []string + for i, column := range columns { + placeholder = append(placeholder, fmt.Sprintf("$%d", i+1)) quoted = append(quoted, quoteIdent(column)) 
} sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", quoteIdent(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) - return sql, values + return sql } func (p *Postgresql) tableExists(tableName string) bool { @@ -159,7 +158,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { var columns []string var values []interface{} - columns = append(columns, quoteIdent("time")) + columns = append(columns, "time") values = append(values, metric.Time()) for column, value := range metric.Tags() { @@ -180,20 +179,20 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } } - columns = append(columns, quoteIdent(column+"_id")) + columns = append(columns, column+"_id") values = append(values, value_id) } else { - columns = append(columns, quoteIdent(column)) + columns = append(columns, column) values = append(values, value) } } for column, value := range metric.Fields() { - columns = append(columns, quoteIdent(column)) + columns = append(columns, column) values = append(values, value) } - sql, values := p.generateInsert(tablename, columns, values) + sql := p.generateInsert(tablename, columns) _, err := p.db.Exec(sql, values...) 
if err != nil { fmt.Println("Error during insert", err) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 4888cf56698c4..1e6cda6136c81 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -33,20 +33,19 @@ func TestPostgresqlCreateStatement(t *testing.T) { func TestPostgresqlInsertStatement(t *testing.T) { p := Postgresql{} - timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) - sql, _ := p.generateInsert("m", []string{"time", "f"}, []interface{}{timestamp, 3.1}) + sql := p.generateInsert("m", []string{"time", "f"}) assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) - sql, _ = p.generateInsert("m", []string{"time", "i"}, []interface{}{timestamp, 3}) + sql = p.generateInsert("m", []string{"time", "i"}) assert.Equal(t, `INSERT INTO "m"("time","i") VALUES($1,$2)`, sql) - sql, _ = p.generateInsert("m", []string{"time", "f", "i"}, []interface{}{timestamp, 3.1, 3}) + sql = p.generateInsert("m", []string{"time", "f", "i"}) assert.Equal(t, `INSERT INTO "m"("time","f","i") VALUES($1,$2,$3)`, sql) - sql, _ = p.generateInsert("m", []string{"time", "k", "i"}, []interface{}{timestamp, "v", 3}) + sql = p.generateInsert("m", []string{"time", "k", "i"}) assert.Equal(t, `INSERT INTO "m"("time","k","i") VALUES($1,$2,$3)`, sql) - sql, _ = p.generateInsert("m", []string{"time", "k1", "k2", "i"}, []interface{}{timestamp, "v1", "v2", 3}) + sql = p.generateInsert("m", []string{"time", "k1", "k2", "i"}) assert.Equal(t, `INSERT INTO "m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) } From 5123a63ac60ac0ee3c1e50de2e0759626f1127ad Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Wed, 8 Nov 2017 00:01:40 +0100 Subject: [PATCH 41/79] use timestamp for time column to allow pg10 partitioning --- plugins/outputs/postgresql/postgresql.go | 2 +- plugins/outputs/postgresql/postgresql_test.go | 10 +++++----- 2 files changed, 6 insertions(+), 6 
deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 443116e676310..fd37c14ae8e0e 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -81,7 +81,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { var sql []string pk = append(pk, quoteIdent("time")) - columns = append(columns, quoteIdent("time")+" timestamptz") + columns = append(columns, quoteIdent("time")+" timestamp") for column, _ := range metric.Tags() { if contains(p.IgnoredTags, column) { diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 1e6cda6136c81..6208088897f66 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -16,19 +16,19 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"f" float8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"f" float8,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - 
assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"k" text,"i" int8,PRIMARY KEY("time","k"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k" text,"i" int8,PRIMARY KEY("time","k"))`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamptz,"k1" text,"k2" text,"i" int8,PRIMARY KEY("time","k1","k2"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k1" text,"k2" text,"i" int8,PRIMARY KEY("time","k1","k2"))`, p.generateCreateTable(m)) } func TestPostgresqlInsertStatement(t *testing.T) { From af02b9b2c368b206e3fa0cf0cca467c26053347d Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Wed, 8 Nov 2017 16:46:03 +0100 Subject: [PATCH 42/79] remove nondeterministic tests --- plugins/outputs/postgresql/postgresql_test.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 6208088897f66..a065a222e8051 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -21,14 +21,9 @@ func TestPostgresqlCreateStatement(t *testing.T) { m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14), "i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k" text,"i" int8,PRIMARY KEY("time","k"))`, p.generateCreateTable(m)) - m, _ = metric.New("m", map[string]string{"k1": "v1", "k2": "v2"}, 
map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k1" text,"k2" text,"i" int8,PRIMARY KEY("time","k1","k2"))`, p.generateCreateTable(m)) } func TestPostgresqlInsertStatement(t *testing.T) { From 1a909dc399254248d5097854c5d718968eab17be Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Wed, 8 Nov 2017 16:46:21 +0100 Subject: [PATCH 43/79] use template for create table query generation --- plugins/outputs/postgresql/postgresql.go | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index fd37c14ae8e0e..34cedd862619a 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -111,7 +111,13 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } - sql = append(sql, fmt.Sprintf("CREATE TABLE %[1]s(%[2]s,PRIMARY KEY(%[3]s))", quoteIdent(metric.Name()), strings.Join(columns, ","), strings.Join(pk, ","))) + template := "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" + + query := strings.Replace(template, "{TABLE}", quoteIdent(metric.Name()), -1) + query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) + query = strings.Replace(query, "{PK_COLUMNS}", strings.Join(pk, ","), -1) + + sql = append(sql, query) return strings.Join(sql, ";") } From 62629ac4a9c0a82713e8fbe17b9f0f51f4e8a30e Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Wed, 8 Nov 2017 16:55:56 +0100 Subject: [PATCH 44/79] make TableTemplate configurable --- plugins/outputs/postgresql/postgresql.go | 15 +++++++++++---- plugins/outputs/postgresql/postgresql_test.go | 4 ++-- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 34cedd862619a..e16431905cb44 100644 --- 
a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -17,6 +17,7 @@ type Postgresql struct { Address string IgnoredTags []string TagsAsForeignkeys bool + TableTemplate string Tables map[string]bool } @@ -111,9 +112,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } - template := "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" - - query := strings.Replace(template, "{TABLE}", quoteIdent(metric.Name()), -1) + query := strings.Replace(p.TableTemplate, "{TABLE}", quoteIdent(metric.Name()), -1) query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) query = strings.Replace(query, "{PK_COLUMNS}", strings.Join(pk, ","), -1) @@ -209,5 +208,13 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } func init() { - outputs.Add("postgresql", func() telegraf.Output { return &Postgresql{} }) + outputs.Add("postgresql", func() telegraf.Output { return newPostgresql() }) +} + +func newPostgresql() *Postgresql { + p := Postgresql{} + if p.TableTemplate == "" { + p.TableTemplate = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" + } + return &p } diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index a065a222e8051..a93bc11c6bb33 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -11,7 +11,7 @@ import ( ) func TestPostgresqlCreateStatement(t *testing.T) { - p := Postgresql{} + p := newPostgresql() timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) var m telegraf.Metric @@ -27,7 +27,7 @@ func TestPostgresqlCreateStatement(t *testing.T) { } func TestPostgresqlInsertStatement(t *testing.T) { - p := Postgresql{} + p := newPostgresql() sql := p.generateInsert("m", []string{"time", "f"}) assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) 
From 8c0953cc68a208cdc13e2a83b4aedb80859acd9f Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 18 Nov 2017 17:41:21 +0100 Subject: [PATCH 45/79] add quoteLiteral helper function --- plugins/outputs/postgresql/postgresql.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index e16431905cb44..332a6e33fc653 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -49,6 +49,10 @@ func quoteIdent(name string) string { return pgx.Identifier{name}.Sanitize() } +func quoteLiteral(name string) string { + return "'" + strings.Replace(name, "'", "''", -1) + "'" +} + var sampleConfig = ` ## specify address via a url matching: ## postgres://[pqgotest[:password]]@localhost[/dbname]\ From e53efb3aab47f7fe4e1593e60e5a7100b15bfb06 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 18 Nov 2017 17:43:41 +0100 Subject: [PATCH 46/79] add tests for quoting --- plugins/outputs/postgresql/postgresql_test.go | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index a93bc11c6bb33..e4fc2e1c19076 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -10,6 +10,16 @@ import ( "github.com/stretchr/testify/assert" ) +func TestPostgresqlQuote(t *testing.T) { + assert.Equal(t, `"foo"`, quoteIdent("foo")) + assert.Equal(t, `"fo'o"`, quoteIdent("fo'o")) + assert.Equal(t, `"fo""o"`, quoteIdent("fo\"o")) + + assert.Equal(t, "'foo'", quoteLiteral("foo")) + assert.Equal(t, "'fo''o'", quoteLiteral("fo'o")) + assert.Equal(t, "'fo\"o'", quoteLiteral("fo\"o")) +} + func TestPostgresqlCreateStatement(t *testing.T) { p := newPostgresql() timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) From 544c1b1ef7fa898c2fa427b4085454dbf406688e Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 
18 Nov 2017 17:50:06 +0100 Subject: [PATCH 47/79] add TABLELITERAL to template variables --- plugins/outputs/postgresql/postgresql.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 332a6e33fc653..1883b979dac67 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -75,6 +75,8 @@ var sampleConfig = ` ## Store tags as foreign keys in the metrics table. Default is false. # tags_as_foreignkeys = false + ## Template to use for generating tables + # table_template = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" ` func (p *Postgresql) SampleConfig() string { return sampleConfig } @@ -117,8 +119,9 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } query := strings.Replace(p.TableTemplate, "{TABLE}", quoteIdent(metric.Name()), -1) + query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(metric.Name()), -1) query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) - query = strings.Replace(query, "{PK_COLUMNS}", strings.Join(pk, ","), -1) + query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) sql = append(sql, query) return strings.Join(sql, ";") @@ -218,7 +221,7 @@ func init() { func newPostgresql() *Postgresql { p := Postgresql{} if p.TableTemplate == "" { - p.TableTemplate = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" + p.TableTemplate = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({KEY_COLUMNS}))" } return &p } From a8ccb891c9459e95b6693205e9bd42bdfecfd59f Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 18 Nov 2017 19:11:56 +0100 Subject: [PATCH 48/79] fix template in doc --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 1883b979dac67..bc7e77f9bad62 
100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -76,7 +76,7 @@ var sampleConfig = ` # tags_as_foreignkeys = false ## Template to use for generating tables - # table_template = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({PK_COLUMNS}))" + # table_template = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({KEY_COLUMNS}))" ` func (p *Postgresql) SampleConfig() string { return sampleConfig } From 235d12d4906291e52b01c916a2b4496111a8ba9a Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 19 Nov 2017 00:22:50 +0100 Subject: [PATCH 49/79] dont add primary key --- plugins/outputs/postgresql/postgresql.go | 5 +++-- plugins/outputs/postgresql/postgresql_test.go | 6 +++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index bc7e77f9bad62..76fd5c631aa4b 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -76,7 +76,7 @@ var sampleConfig = ` # tags_as_foreignkeys = false ## Template to use for generating tables - # table_template = "CREATE TABLE {TABLE}({COLUMNS},PRIMARY KEY({KEY_COLUMNS}))" + # table_template = "CREATE TABLE {TABLE}({COLUMNS})" ` func (p *Postgresql) SampleConfig() string { return sampleConfig } @@ -184,6 +184,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { query := fmt.Sprintf("SELECT %s FROM %s WHERE %s=$1", quoteIdent(column+"_id"), quoteIdent(tablename+"_"+column), quoteIdent(column)) err := p.db.QueryRow(query, value).Scan(&value_id) if err != nil { + println(err) query := fmt.Sprintf("INSERT INTO %s(%s) VALUES($1) RETURNING %s", quoteIdent(tablename+"_"+column), quoteIdent(column), quoteIdent(column+"_id")) err := p.db.QueryRow(query, value).Scan(&value_id) if err != nil { @@ -221,7 +222,7 @@ func init() { func newPostgresql() *Postgresql { p := Postgresql{} if p.TableTemplate == "" { - p.TableTemplate = "CREATE TABLE 
{TABLE}({COLUMNS},PRIMARY KEY({KEY_COLUMNS}))" + p.TableTemplate = "CREATE TABLE {TABLE}({COLUMNS})" } return &p } diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index e4fc2e1c19076..d195d0d8a0565 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -26,13 +26,13 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8)`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"i" int8,PRIMARY KEY("time"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"i" int8)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k" text,"i" int8,PRIMARY KEY("time","k"))`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k" text,"i" int8)`, p.generateCreateTable(m)) } From b2f2bab42024a58a973edc8e83cfb4c59b2b10ab Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 26 Nov 2017 15:06:43 +0100 Subject: [PATCH 50/79] allow using jsonb for fields and tags and make it default --- plugins/outputs/postgresql/postgresql.go | 136 ++++++++++++------ plugins/outputs/postgresql/postgresql_test.go | 18 ++- 2 files changed, 107 insertions(+), 47 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 76fd5c631aa4b..5905d6a1acb07 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -2,6 +2,7 @@ package 
postgresql import ( "database/sql" + "encoding/json" "fmt" "log" "strings" @@ -17,6 +18,8 @@ type Postgresql struct { Address string IgnoredTags []string TagsAsForeignkeys bool + TagsAsJsonb bool + FieldsAsJsonb bool TableTemplate string Tables map[string]bool } @@ -88,34 +91,44 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { var sql []string pk = append(pk, quoteIdent("time")) - columns = append(columns, quoteIdent("time")+" timestamp") + columns = append(columns, "time timestamp") - for column, _ := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue + if p.TagsAsJsonb { + if len(metric.Tags()) > 0 { + columns = append(columns, "tags jsonb") } - if p.TagsAsForeignkeys { - key := quoteIdent(column + "_id") - table := quoteIdent(metric.Name() + "_" + column) + } else { + for column, _ := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } + if p.TagsAsForeignkeys { + key := quoteIdent(column + "_id") + table := quoteIdent(metric.Name() + "_" + column) - pk = append(pk, key) - columns = append(columns, fmt.Sprintf("%s int8", key)) - sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(%s serial primary key,%s text unique)", table, key, quoteIdent(column))) - } else { - pk = append(pk, quoteIdent(column)) - columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) + pk = append(pk, key) + columns = append(columns, fmt.Sprintf("%s int8", key)) + sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(%s serial primary key,%s text unique)", table, key, quoteIdent(column))) + } else { + pk = append(pk, quoteIdent(column)) + columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) + } } } - var datatype string - for column, v := range metric.Fields() { - switch v.(type) { - case int64: - datatype = "int8" - case float64: - datatype = "float8" + if p.FieldsAsJsonb { + columns = append(columns, "fields jsonb") + } else { + var datatype string + for column, v := 
range metric.Fields() { + switch v.(type) { + case int64: + datatype = "int8" + case float64: + datatype = "float8" + } + columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } - columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } query := strings.Replace(p.TableTemplate, "{TABLE}", quoteIdent(metric.Name()), -1) @@ -169,42 +182,77 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { var columns []string var values []interface{} + var js map[string]interface{} columns = append(columns, "time") values = append(values, metric.Time()) - for column, value := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue + if p.TagsAsJsonb { + js = make(map[string]interface{}) + for column, value := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } + js[column] = value } - if p.TagsAsForeignkeys { - var value_id int - - query := fmt.Sprintf("SELECT %s FROM %s WHERE %s=$1", quoteIdent(column+"_id"), quoteIdent(tablename+"_"+column), quoteIdent(column)) - err := p.db.QueryRow(query, value).Scan(&value_id) + if len(js) > 0 { + d, err := json.Marshal(js) if err != nil { - println(err) - query := fmt.Sprintf("INSERT INTO %s(%s) VALUES($1) RETURNING %s", quoteIdent(tablename+"_"+column), quoteIdent(column), quoteIdent(column+"_id")) + return err + } + + columns = append(columns, "tags") + values = append(values, d) + } + } else { + for column, value := range metric.Tags() { + if contains(p.IgnoredTags, column) { + continue + } + if p.TagsAsForeignkeys { + var value_id int + + query := fmt.Sprintf("SELECT %s FROM %s WHERE %s=$1", quoteIdent(column+"_id"), quoteIdent(tablename+"_"+column), quoteIdent(column)) err := p.db.QueryRow(query, value).Scan(&value_id) if err != nil { - return err + log.Printf("W! 
Foreign key reference not found %s: %v", tablename, err) + query := fmt.Sprintf("INSERT INTO %s(%s) VALUES($1) RETURNING %s", quoteIdent(tablename+"_"+column), quoteIdent(column), quoteIdent(column+"_id")) + err := p.db.QueryRow(query, value).Scan(&value_id) + if err != nil { + return err + } } + + columns = append(columns, column+"_id") + values = append(values, value_id) + } else { + columns = append(columns, column) + values = append(values, value) } + } + } - columns = append(columns, column+"_id") - values = append(values, value_id) - } else { + if p.FieldsAsJsonb { + js = make(map[string]interface{}) + for column, value := range metric.Fields() { + js[column] = value + } + + d, err := json.Marshal(js) + if err != nil { + return err + } + + columns = append(columns, "fields") + values = append(values, d) + } else { + for column, value := range metric.Fields() { columns = append(columns, column) values = append(values, value) } } - for column, value := range metric.Fields() { - columns = append(columns, column) - values = append(values, value) - } - sql := p.generateInsert(tablename, columns) _, err := p.db.Exec(sql, values...) 
if err != nil { @@ -220,9 +268,9 @@ func init() { } func newPostgresql() *Postgresql { - p := Postgresql{} - if p.TableTemplate == "" { - p.TableTemplate = "CREATE TABLE {TABLE}({COLUMNS})" + return &Postgresql{ + TableTemplate: "CREATE TABLE {TABLE}({COLUMNS})", + TagsAsJsonb: true, + FieldsAsJsonb: true, } - return &p } diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index d195d0d8a0565..4180f9900ca7b 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -26,19 +26,31 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"f" float8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamp,fields jsonb)`, p.generateCreateTable(m)) + + m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, `CREATE TABLE "m"(time timestamp,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) + + p.TagsAsJsonb = false + p.FieldsAsJsonb = false + + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) + assert.Equal(t, `CREATE TABLE "m"(time timestamp,"f" float8)`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamp,"i" int8)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"("time" timestamp,"k" text,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamp,"k" text,"i" int8)`, p.generateCreateTable(m)) } func TestPostgresqlInsertStatement(t *testing.T) { p := 
newPostgresql() + p.TagsAsJsonb = false + p.FieldsAsJsonb = false + sql := p.generateInsert("m", []string{"time", "f"}) assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) From 571b545cb7e403ea2bb0720de1c4f636b69db739 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 28 Nov 2017 11:54:52 +0100 Subject: [PATCH 51/79] document jsonb settings remove IgnoredTags since tagexclude achieves the same --- plugins/outputs/postgresql/postgresql.go | 20 +++++++------------- 1 file changed, 7 insertions(+), 13 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 5905d6a1acb07..8ef18279faacb 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -16,7 +16,6 @@ import ( type Postgresql struct { db *sql.DB Address string - IgnoredTags []string TagsAsForeignkeys bool TagsAsJsonb bool FieldsAsJsonb bool @@ -72,14 +71,18 @@ var sampleConfig = ` ## address = "host=localhost user=postgres sslmode=verify-full" - ## A list of tags to exclude from storing. If not specified, all tags are stored. - # ignored_tags = ["foo", "bar"] - ## Store tags as foreign keys in the metrics table. Default is false. 
# tags_as_foreignkeys = false ## Template to use for generating tables # table_template = "CREATE TABLE {TABLE}({COLUMNS})" + + ## Use jsonb datatype for tags + # tags_as_jsonb = true + + ## Use jsonb datatype for fields + # fields_as_jsonb = true + ` func (p *Postgresql) SampleConfig() string { return sampleConfig } @@ -99,9 +102,6 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } } else { for column, _ := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue - } if p.TagsAsForeignkeys { key := quoteIdent(column + "_id") table := quoteIdent(metric.Name() + "_" + column) @@ -190,9 +190,6 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { if p.TagsAsJsonb { js = make(map[string]interface{}) for column, value := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue - } js[column] = value } @@ -207,9 +204,6 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } } else { for column, value := range metric.Tags() { - if contains(p.IgnoredTags, column) { - continue - } if p.TagsAsForeignkeys { var value_id int From cd21135abaf5c2bded50e1ac90cdbd6bdc4d7ae6 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Wed, 17 Jan 2018 17:12:33 +0100 Subject: [PATCH 52/79] document template better --- plugins/outputs/postgresql/postgresql.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 8ef18279faacb..d83e5891c0a8e 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -75,7 +75,16 @@ var sampleConfig = ` # tags_as_foreignkeys = false ## Template to use for generating tables + ## Available Variables: + ## {TABLE} - tablename as identifier + ## {TABLELITERAL} - tablename as string literal + ## {COLUMNS} - column definitions + ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) + + ## Default template # table_template = "CREATE TABLE 
{TABLE}({COLUMNS})" + ## Example for timescale + # table_template = "CREATE TABLE {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval);" ## Use jsonb datatype for tags # tags_as_jsonb = true From 89059a0c08a9f244507c21079ac9a7fbe9efbf7c Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 23 Jan 2018 15:25:46 +0100 Subject: [PATCH 53/79] rework TagsAsForeignkeys to have produce 1 foreign key in measurement table --- plugins/outputs/postgresql/postgresql.go | 129 ++++++++++++++++------- 1 file changed, 88 insertions(+), 41 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index d83e5891c0a8e..02ec38d5cba94 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -13,6 +13,8 @@ import ( "github.com/influxdata/telegraf/plugins/outputs" ) +var tag_table_suffix = "_tag" + type Postgresql struct { db *sql.DB Address string @@ -105,22 +107,34 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { pk = append(pk, quoteIdent("time")) columns = append(columns, "time timestamp") - if p.TagsAsJsonb { - if len(metric.Tags()) > 0 { - columns = append(columns, "tags jsonb") - } - } else { - for column, _ := range metric.Tags() { - if p.TagsAsForeignkeys { - key := quoteIdent(column + "_id") - table := quoteIdent(metric.Name() + "_" + column) - - pk = append(pk, key) - columns = append(columns, fmt.Sprintf("%s int8", key)) - sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(%s serial primary key,%s text unique)", table, key, quoteIdent(column))) + // handle tags if necessary + if len(metric.Tags()) > 0 { + if p.TagsAsForeignkeys { + // tags in separate table + var tag_columns []string + var tag_columndefs []string + columns = append(columns, "tag_id int") + + if p.TagsAsJsonb { + tag_columns = append(tag_columns, "tags") + tag_columndefs = append(tag_columndefs, "tags jsonb") } else { 
- pk = append(pk, quoteIdent(column)) - columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) + for column, _ := range metric.Tags() { + tag_columns = append(tag_columns, quoteIdent(column)) + tag_columndefs = append(tag_columndefs, fmt.Sprintf("%s text", quoteIdent(column))) + } + } + table := quoteIdent(metric.Name() + "_tag") + sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(tag_id serial primary key,%s,UNIQUE(%s))", table, strings.Join(tag_columndefs, ","), strings.Join(tag_columns, ","))) + } else { + // tags in measurement table + if p.TagsAsJsonb { + columns = append(columns, "tags jsonb") + } else { + for column, _ := range metric.Tags() { + pk = append(pk, quoteIdent(column)) + columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) + } } } } @@ -196,42 +210,75 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { columns = append(columns, "time") values = append(values, metric.Time()) - if p.TagsAsJsonb { - js = make(map[string]interface{}) - for column, value := range metric.Tags() { - js[column] = value - } + if len(metric.Tags()) > 0 { + if p.TagsAsForeignkeys { + // tags in separate table + var tag_id int + var where_columns []string + var where_values []interface{} + + if p.TagsAsJsonb { + js = make(map[string]interface{}) + for column, value := range metric.Tags() { + js[column] = value + } - if len(js) > 0 { - d, err := json.Marshal(js) - if err != nil { - return err + if len(js) > 0 { + d, err := json.Marshal(js) + if err != nil { + return err + } + + where_columns = append(where_columns, "tags") + where_values = append(where_values, d) + } + } else { + for column, value := range metric.Tags() { + where_columns = append(where_columns, column) + where_values = append(where_values, value) + } } - columns = append(columns, "tags") - values = append(values, d) - } - } else { - for column, value := range metric.Tags() { - if p.TagsAsForeignkeys { - var value_id int + var where_parts []string + for 
i, column := range where_columns { + where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) + } + query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", quoteIdent(tablename+"_tag"), strings.Join(where_parts, " AND ")) - query := fmt.Sprintf("SELECT %s FROM %s WHERE %s=$1", quoteIdent(column+"_id"), quoteIdent(tablename+"_"+column), quoteIdent(column)) - err := p.db.QueryRow(query, value).Scan(&value_id) + err := p.db.QueryRow(query, where_values...).Scan(&tag_id) + if err != nil { + log.Printf("I! Foreign key reference not found %s: %v", tablename, err) + query := p.generateInsert(tablename+"_tag", where_columns) + " RETURNING tag_id" + err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { - log.Printf("W! Foreign key reference not found %s: %v", tablename, err) - query := fmt.Sprintf("INSERT INTO %s(%s) VALUES($1) RETURNING %s", quoteIdent(tablename+"_"+column), quoteIdent(column), quoteIdent(column+"_id")) - err := p.db.QueryRow(query, value).Scan(&value_id) + return err + } + } + + columns = append(columns, "tag_id") + values = append(values, tag_id) + } else { + // tags in measurement table + if p.TagsAsJsonb { + js = make(map[string]interface{}) + for column, value := range metric.Tags() { + js[column] = value + } + + if len(js) > 0 { + d, err := json.Marshal(js) if err != nil { return err } - } - columns = append(columns, column+"_id") - values = append(values, value_id) + columns = append(columns, "tags") + values = append(values, d) + } } else { - columns = append(columns, column) - values = append(values, value) + for column, value := range metric.Tags() { + columns = append(columns, column) + values = append(values, value) + } } } } From c7321741fb27200499250fe5428ad886d2a9d273 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 23 Jan 2018 15:31:34 +0100 Subject: [PATCH 54/79] make tag table suffix configurable --- plugins/outputs/postgresql/postgresql.go | 16 ++++++++-------- 1 file changed, 8 
insertions(+), 8 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 02ec38d5cba94..60036e9a209f4 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -13,8 +13,6 @@ import ( "github.com/influxdata/telegraf/plugins/outputs" ) -var tag_table_suffix = "_tag" - type Postgresql struct { db *sql.DB Address string @@ -22,6 +20,7 @@ type Postgresql struct { TagsAsJsonb bool FieldsAsJsonb bool TableTemplate string + TagTableSuffix string Tables map[string]bool } @@ -124,7 +123,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { tag_columndefs = append(tag_columndefs, fmt.Sprintf("%s text", quoteIdent(column))) } } - table := quoteIdent(metric.Name() + "_tag") + table := quoteIdent(metric.Name() + p.TagTableSuffix) sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(tag_id serial primary key,%s,UNIQUE(%s))", table, strings.Join(tag_columndefs, ","), strings.Join(tag_columns, ","))) } else { // tags in measurement table @@ -243,12 +242,12 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { for i, column := range where_columns { where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) } - query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", quoteIdent(tablename+"_tag"), strings.Join(where_parts, " AND ")) + query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", quoteIdent(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { log.Printf("I! 
Foreign key reference not found %s: %v", tablename, err) - query := p.generateInsert(tablename+"_tag", where_columns) + " RETURNING tag_id" + query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { return err @@ -319,8 +318,9 @@ func init() { func newPostgresql() *Postgresql { return &Postgresql{ - TableTemplate: "CREATE TABLE {TABLE}({COLUMNS})", - TagsAsJsonb: true, - FieldsAsJsonb: true, + TableTemplate: "CREATE TABLE {TABLE}({COLUMNS})", + TagsAsJsonb: true, + TagTableSuffix: "_tag", + FieldsAsJsonb: true, } } From 7d7ee008f99a75e709e7c6410ef67b689f7a7cd9 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Mon, 5 Feb 2018 14:35:58 +0100 Subject: [PATCH 55/79] comment out noisy log messages when fk reference is not found handle text datatype and log unknown datatypes on table creation --- plugins/outputs/postgresql/postgresql.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 60036e9a209f4..2dc23f6c1dd4f 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -148,6 +148,11 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { datatype = "int8" case float64: datatype = "float8" + case string: + datatype = "text" + default: + datatype = "text" + log.Printf("E! Unknown column datatype %s: %v", column, v) } columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } @@ -246,7 +251,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { - log.Printf("I! Foreign key reference not found %s: %v", tablename, err) + // log.Printf("I! 
Foreign key reference not found %s: %v", tablename, err) query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { From 0b9142e23c8edf09648be79af2fcf782c3062f56 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Mon, 5 Feb 2018 22:28:08 +0100 Subject: [PATCH 56/79] handle missing columns --- plugins/outputs/postgresql/postgresql.go | 66 +++++++++++++++++++----- 1 file changed, 54 insertions(+), 12 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 2dc23f6c1dd4f..ff7d0d763e0e0 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -56,6 +56,23 @@ func quoteLiteral(name string) string { return "'" + strings.Replace(name, "'", "''", -1) + "'" } +func deriveDatatype(value interface{}) string { + var datatype string + + switch value.(type) { + case int64: + datatype = "int8" + case float64: + datatype = "float8" + case string: + datatype = "text" + default: + datatype = "text" + log.Printf("E! Unknown datatype %v", value) + } + return datatype +} + var sampleConfig = ` ## specify address via a url matching: ## postgres://[pqgotest[:password]]@localhost[/dbname]\ @@ -143,17 +160,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } else { var datatype string for column, v := range metric.Fields() { - switch v.(type) { - case int64: - datatype = "int8" - case float64: - datatype = "float8" - case string: - datatype = "text" - default: - datatype = "text" - log.Printf("E! Unknown column datatype %s: %v", column, v) - } + datatype = deriveDatatype(v) columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) } } @@ -310,7 +317,42 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { sql := p.generateInsert(tablename, columns) _, err := p.db.Exec(sql, values...) 
if err != nil { - fmt.Println("Error during insert", err) + // check if insert error was caused by column mismatch + if p.FieldsAsJsonb == false { + log.Printf("E! Error during insert: %v", err) + var quoted_columns []string + for _, column := range columns { + quoted_columns = append(quoted_columns, quoteLiteral(column)) + } + query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" + query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) + result, err := p.db.Query(query, "public", tablename) + defer result.Close() + if err != nil { + return err + } + // some columns are missing + + var column, datatype string + for result.Next() { + err := result.Scan(&column) + if err != nil { + log.Println(err) + } + for i, name := range columns { + if name == column { + datatype = deriveDatatype(values[i]) + } + } + query := "ALTER TABLE %s.%s ADD COLUMN %s %s;" + _, err = p.db.Exec(fmt.Sprintf(query, quoteIdent("public"), quoteIdent(tablename), quoteIdent(column), datatype)) + if err != nil { + return err + log.Println(err) + } + } + } + return err } } From 7ed8a01773d4a543239c7d56b53d4df2a29eb9bd Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 11 Feb 2018 22:50:56 +0100 Subject: [PATCH 57/79] remove dead code --- plugins/outputs/postgresql/postgresql.go | 1 - 1 file changed, 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index ff7d0d763e0e0..5aa239604aacd 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -348,7 +348,6 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { _, err = p.db.Exec(fmt.Sprintf(query, quoteIdent("public"), quoteIdent(tablename), quoteIdent(column), datatype)) if err != nil { return err - log.Println(err) } } } From db9991f0b144747a914f29d56e9c12f58b8060ba Mon Sep 17 00:00:00 2001 From: Oskari Saarenmaa 
Date: Mon, 16 Apr 2018 10:27:31 +0300 Subject: [PATCH 58/79] postgresql output: boolean columns --- plugins/outputs/postgresql/postgresql.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 5aa239604aacd..751ce9152191d 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -60,6 +60,8 @@ func deriveDatatype(value interface{}) string { var datatype string switch value.(type) { + case bool: + datatype = "boolean" case int64: datatype = "int8" case float64: From dc620f237a40fbb6ab050ceb05798252e62382be Mon Sep 17 00:00:00 2001 From: Oskari Saarenmaa Date: Tue, 17 Apr 2018 14:16:34 -0300 Subject: [PATCH 59/79] postgresql output: batch inserts to the same table/column set Batches are created for every (table, columns) set as we may have metrics with different set of tags & fields in the same batch. This speeds up inserts quite a bit by reducing the number of separate insert statements. Grouping inserts requires columns to appear in the same order, so we now sort them for both tags & fields. This code could use a bit of tidying. 
--- plugins/outputs/postgresql/postgresql.go | 50 +++++++++++++++++++++--- 1 file changed, 44 insertions(+), 6 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 751ce9152191d..73a2c607bdcf0 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -5,6 +5,7 @@ import ( "encoding/json" "fmt" "log" + "sort" "strings" "github.com/jackc/pgx" @@ -95,7 +96,7 @@ var sampleConfig = ` # tags_as_foreignkeys = false ## Template to use for generating tables - ## Available Variables: + ## Available Variables: ## {TABLE} - tablename as identifier ## {TABLELITERAL} - tablename as string literal ## {COLUMNS} - column definitions @@ -203,6 +204,11 @@ func (p *Postgresql) tableExists(tableName string) bool { } func (p *Postgresql) Write(metrics []telegraf.Metric) error { + batches := make(map[string][]interface{}) + params := make(map[string][]string) + colmap := make(map[string][]string) + tabmap := make(map[string]string) + for _, metric := range metrics { tablename := metric.Name() @@ -288,9 +294,15 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { values = append(values, d) } } else { - for column, value := range metric.Tags() { + var keys []string + fields := metric.Tags() + for column := range fields { + keys = append(keys, column) + } + sort.Strings(keys) + for _, column := range keys { columns = append(columns, column) - values = append(values, value) + values = append(values, fields[column]) } } } @@ -310,18 +322,44 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { columns = append(columns, "fields") values = append(values, d) } else { - for column, value := range metric.Fields() { + var keys []string + fields := metric.Fields() + for column := range fields { + keys = append(keys, column) + } + sort.Strings(keys) + for _, column := range keys { columns = append(columns, column) - values = append(values, value) + values = 
append(values, fields[column]) } } - sql := p.generateInsert(tablename, columns) + var table_and_cols string; + var placeholder, quoted_columns []string; + for _, column := range columns { + quoted_columns = append(quoted_columns, quoteIdent(column)) + } + table_and_cols = fmt.Sprintf("%s(%s)", quoteIdent(tablename), strings.Join(quoted_columns, ",")) + batches[table_and_cols] = append(batches[table_and_cols], values...) + for i, _ := range columns { + i += len(params[table_and_cols]) * len(columns) + placeholder = append(placeholder, fmt.Sprintf("$%d", i + 1)) + } + params[table_and_cols] = append(params[table_and_cols], strings.Join(placeholder, ",")) + colmap[table_and_cols] = columns + tabmap[table_and_cols] = tablename + } + + for table_and_cols, values := range batches { + // log.Printf("Writing %d metrics into %s", len(params[table_and_cols]), table_and_cols) + sql := fmt.Sprintf("INSERT INTO %s VALUES (%s)", table_and_cols, strings.Join(params[table_and_cols], "),(")) _, err := p.db.Exec(sql, values...) if err != nil { // check if insert error was caused by column mismatch if p.FieldsAsJsonb == false { log.Printf("E! 
Error during insert: %v", err) + tablename := tabmap[table_and_cols] + columns := colmap[table_and_cols] var quoted_columns []string for _, column := range columns { quoted_columns = append(quoted_columns, quoteLiteral(column)) From 9ad545f97510d76a28e796fb66e319c2173d9ae1 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sat, 12 May 2018 18:10:30 +0200 Subject: [PATCH 60/79] use timestamptz for time column --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 73a2c607bdcf0..e7e9f585b8668 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -124,7 +124,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { var sql []string pk = append(pk, quoteIdent("time")) - columns = append(columns, "time timestamp") + columns = append(columns, "time timestamptz") // handle tags if necessary if len(metric.Tags()) > 0 { From 0de9b92431d3fde54dbfe219e550773352a2a364 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Sun, 13 May 2018 19:15:03 +0200 Subject: [PATCH 61/79] adjust test to timestamptz change --- plugins/outputs/postgresql/postgresql_test.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 4180f9900ca7b..962f7808fc69e 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -26,22 +26,22 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamp,fields jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": 
"v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamp,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) p.TagsAsJsonb = false p.FieldsAsJsonb = false m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamp,"f" float8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamp,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamp,"k" text,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) } From 8a62338bfe58afc50e63cc8929ea0110af91b02d Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Mon, 11 Jun 2018 21:35:23 +0200 Subject: [PATCH 62/79] fix code formatting (gofmt) --- plugins/outputs/postgresql/postgresql.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index e7e9f585b8668..7b195c0bcbbc1 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -334,8 +334,8 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } } - var table_and_cols string; - var placeholder, quoted_columns []string; + var table_and_cols string + var placeholder, quoted_columns []string for _, column := range columns { quoted_columns = append(quoted_columns, quoteIdent(column)) } @@ -343,7 +343,7 @@ func (p 
*Postgresql) Write(metrics []telegraf.Metric) error { batches[table_and_cols] = append(batches[table_and_cols], values...) for i, _ := range columns { i += len(params[table_and_cols]) * len(columns) - placeholder = append(placeholder, fmt.Sprintf("$%d", i + 1)) + placeholder = append(placeholder, fmt.Sprintf("$%d", i+1)) } params[table_and_cols] = append(params[table_and_cols], strings.Join(placeholder, ",")) colmap[table_and_cols] = columns From 1d4a44280ebcb8e38880c4016538ff13c84d0939 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 5 Jul 2018 08:20:37 +0200 Subject: [PATCH 63/79] include type in error message about unknown type --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 7b195c0bcbbc1..a3091cb0f962d 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -71,7 +71,7 @@ func deriveDatatype(value interface{}) string { datatype = "text" default: datatype = "text" - log.Printf("E! Unknown datatype %v", value) + log.Printf("E! 
Unknown datatype %T(%v)", value) } return datatype } From 236cc8c626cfd6965484d74193745fc50e9c73a1 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 5 Jul 2018 08:59:46 +0200 Subject: [PATCH 64/79] handle uint64 as datatype --- plugins/outputs/postgresql/postgresql.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index a3091cb0f962d..5ce011268e52e 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -63,6 +63,8 @@ func deriveDatatype(value interface{}) string { switch value.(type) { case bool: datatype = "boolean" + case uint64: + datatype = "int8" case int64: datatype = "int8" case float64: From 3842a8679ffd653715ff027427fceeaf32c3f366 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 5 Jul 2018 16:45:26 +0200 Subject: [PATCH 65/79] fix Printf call --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 5ce011268e52e..8c75102b74f9f 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -73,7 +73,7 @@ func deriveDatatype(value interface{}) string { datatype = "text" default: datatype = "text" - log.Printf("E! Unknown datatype %T(%v)", value) + log.Printf("E! 
Unknown datatype %T(%v)", value, value) } return datatype } From 38d492f7662c866cf4f49a05a01d46f52a675007 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 17 Jul 2018 11:17:30 +0200 Subject: [PATCH 66/79] show all config parameters in readme --- plugins/outputs/postgresql/README.md | 19 +++++++++++++++++++ plugins/outputs/postgresql/postgresql.go | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 61d5ace8d307d..90324827d7e73 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -14,4 +14,23 @@ This output plugin writes all metrics to PostgreSQL. ## Store tags as foreign keys in the metrics table. Default is false. # tags_as_foreignkeys = false + + ## Template to use for generating tables + ## Available Variables: + ## {TABLE} - tablename as identifier + ## {TABLELITERAL} - tablename as string literal + ## {COLUMNS} - column definitions + ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) + + ## Default template + # table_template = "CREATE TABLE {TABLE}({COLUMNS})" + ## Example for timescaledb + # table_template = "CREATE TABLE {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval);" + + ## Use jsonb datatype for tags. Default is true. + # tags_as_jsonb = true + + ## Use jsonb datatype for fields. Default is true. 
+ # fields_as_jsonb = true + ``` diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 8c75102b74f9f..7a755bb5b675b 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -106,7 +106,7 @@ var sampleConfig = ` ## Default template # table_template = "CREATE TABLE {TABLE}({COLUMNS})" - ## Example for timescale + ## Example for timescaledb # table_template = "CREATE TABLE {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval);" ## Use jsonb datatype for tags From 401f39a99977977dc93f35ae15885f2e4f0f0322 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 16 Oct 2018 12:04:13 +0200 Subject: [PATCH 67/79] use CREATE TABLE IF NOT EXISTS --- plugins/outputs/postgresql/README.md | 4 ++-- plugins/outputs/postgresql/postgresql.go | 8 ++++---- plugins/outputs/postgresql/postgresql_test.go | 10 +++++----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 90324827d7e73..5a341a837230c 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -23,9 +23,9 @@ This output plugin writes all metrics to PostgreSQL. ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) ## Default template - # table_template = "CREATE TABLE {TABLE}({COLUMNS})" + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})" ## Example for timescaledb - # table_template = "CREATE TABLE {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval);" + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval, if_not_exists := true);" ## Use jsonb datatype for tags. Default is true. 
# tags_as_jsonb = true diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 7a755bb5b675b..f033360d8463e 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -105,9 +105,9 @@ var sampleConfig = ` ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) ## Default template - # table_template = "CREATE TABLE {TABLE}({COLUMNS})" + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})" ## Example for timescaledb - # table_template = "CREATE TABLE {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval);" + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval,if_not_exists := true);" ## Use jsonb datatype for tags # tags_as_jsonb = true @@ -386,7 +386,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { datatype = deriveDatatype(values[i]) } } - query := "ALTER TABLE %s.%s ADD COLUMN %s %s;" + query := "ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS %s %s;" _, err = p.db.Exec(fmt.Sprintf(query, quoteIdent("public"), quoteIdent(tablename), quoteIdent(column), datatype)) if err != nil { return err @@ -406,7 +406,7 @@ func init() { func newPostgresql() *Postgresql { return &Postgresql{ - TableTemplate: "CREATE TABLE {TABLE}({COLUMNS})", + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", TagsAsJsonb: true, TagTableSuffix: "_tag", FieldsAsJsonb: true, diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 962f7808fc69e..381153ddd5b08 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -26,22 +26,22 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - 
assert.Equal(t, `CREATE TABLE "m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) p.TagsAsJsonb = false p.FieldsAsJsonb = false m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE "m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) } From 79aa3c546575312c43732d3860d13828bb0ac062 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 25 Oct 2018 10:36:40 +0200 Subject: [PATCH 68/79] remove commented out code, initialize vars with values --- plugins/outputs/postgresql/README.md | 3 --- plugins/outputs/postgresql/postgresql.go | 13 +++---------- 2 files changed, 3 insertions(+), 13 deletions(-) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 5a341a837230c..14687f961dd9a 100644 --- a/plugins/outputs/postgresql/README.md +++ 
b/plugins/outputs/postgresql/README.md @@ -9,9 +9,6 @@ This output plugin writes all metrics to PostgreSQL. [[outputs.postgresql]] address = "host=localhost user=postgres sslmode=verify-full" - ## A list of tags to exclude from storing. If not specified, all tags are stored. - # ignored_tags = ["foo", "bar"] - ## Store tags as foreign keys in the metrics table. Default is false. # tags_as_foreignkeys = false diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index f033360d8463e..2db0b3a9f7556 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -180,15 +180,13 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } func (p *Postgresql) generateInsert(tablename string, columns []string) string { - var placeholder, quoted []string for i, column := range columns { placeholder = append(placeholder, fmt.Sprintf("$%d", i+1)) quoted = append(quoted, quoteIdent(column)) } - sql := fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", quoteIdent(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) - return sql + return fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", quoteIdent(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) } func (p *Postgresql) tableExists(tableName string) bool { @@ -224,13 +222,10 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { p.Tables[tablename] = true } - var columns []string - var values []interface{} + columns := []string{"time"} + values := []interface{}{metric.Time()} var js map[string]interface{} - columns = append(columns, "time") - values = append(values, metric.Time()) - if len(metric.Tags()) > 0 { if p.TagsAsForeignkeys { // tags in separate table @@ -268,7 +263,6 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { - // log.Printf("I! 
Foreign key reference not found %s: %v", tablename, err) query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { @@ -353,7 +347,6 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } for table_and_cols, values := range batches { - // log.Printf("Writing %d metrics into %s", len(params[table_and_cols]), table_and_cols) sql := fmt.Sprintf("INSERT INTO %s VALUES (%s)", table_and_cols, strings.Join(params[table_and_cols], "),(")) _, err := p.db.Exec(sql, values...) if err != nil { From 172e45d95b8f7cf5b2b6e0d5230ab0a84ccbefde Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Tue, 27 Nov 2018 14:33:34 +0100 Subject: [PATCH 69/79] fix TABLELITERAL quoting --- plugins/outputs/postgresql/postgresql.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 2db0b3a9f7556..b40f8a43a7818 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -171,7 +171,7 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } query := strings.Replace(p.TableTemplate, "{TABLE}", quoteIdent(metric.Name()), -1) - query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(metric.Name()), -1) + query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(quoteIdent(metric.Name())), -1) query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) From bb8821bc60d58ce82cb9f40d5626e616c66fd71f Mon Sep 17 00:00:00 2001 From: Rauli Ikonen Date: Sun, 25 Nov 2018 12:27:31 +0200 Subject: [PATCH 70/79] pg output: Support defining schema for metrics tables Make sure explicit schema is always used and allow changing that from public into something else. 
--- plugins/outputs/postgresql/postgresql.go | 30 ++++++++++++++++-------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index b40f8a43a7818..f8e54df1fdb23 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -17,6 +17,7 @@ import ( type Postgresql struct { db *sql.DB Address string + Schema string TagsAsForeignkeys bool TagsAsJsonb bool FieldsAsJsonb bool @@ -57,6 +58,10 @@ func quoteLiteral(name string) string { return "'" + strings.Replace(name, "'", "''", -1) + "'" } +func (p *Postgresql) fullTableName(name string) string { + return quoteIdent(p.Schema) + "." + quoteIdent(name) +} + func deriveDatatype(value interface{}) string { var datatype string @@ -109,6 +114,9 @@ var sampleConfig = ` ## Example for timescaledb # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval,if_not_exists := true);" + ## Schema to create the tables into + # schema = "public" + ## Use jsonb datatype for tags # tags_as_jsonb = true @@ -170,8 +178,8 @@ func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { } } - query := strings.Replace(p.TableTemplate, "{TABLE}", quoteIdent(metric.Name()), -1) - query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(quoteIdent(metric.Name())), -1) + query := strings.Replace(p.TableTemplate, "{TABLE}", p.fullTableName(metric.Name()), -1) + query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(p.fullTableName(metric.Name())), -1) query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) @@ -186,12 +194,12 @@ func (p *Postgresql) generateInsert(tablename string, columns []string) string { quoted = append(quoted, quoteIdent(column)) } - return fmt.Sprintf("INSERT INTO %s(%s) 
VALUES(%s)", quoteIdent(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) + return fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", p.fullTableName(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) } func (p *Postgresql) tableExists(tableName string) bool { - stmt := "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname NOT IN ('information_schema','pg_catalog');" - result, err := p.db.Exec(stmt, tableName) + stmt := "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;" + result, err := p.db.Exec(stmt, tableName, p.Schema) if err != nil { log.Printf("E! Error checking for existence of metric table %s: %v", tableName, err) return false @@ -217,6 +225,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { createStmt := p.generateCreateTable(metric) _, err := p.db.Exec(createStmt) if err != nil { + log.Printf("E! Creating table failed: statement: %v, error: %v", createStmt, err) return err } p.Tables[tablename] = true @@ -259,7 +268,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { for i, column := range where_columns { where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) } - query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", quoteIdent(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) + query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", p.fullTableName(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) err := p.db.QueryRow(query, where_values...).Scan(&tag_id) if err != nil { @@ -335,7 +344,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { for _, column := range columns { quoted_columns = append(quoted_columns, quoteIdent(column)) } - table_and_cols = fmt.Sprintf("%s(%s)", quoteIdent(tablename), strings.Join(quoted_columns, ",")) + table_and_cols = fmt.Sprintf("%s(%s)", p.fullTableName(tablename), strings.Join(quoted_columns, ",")) batches[table_and_cols] = 
append(batches[table_and_cols], values...) for i, _ := range columns { i += len(params[table_and_cols]) * len(columns) @@ -361,7 +370,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) - result, err := p.db.Query(query, "public", tablename) + result, err := p.db.Query(query, p.Schema, tablename) defer result.Close() if err != nil { return err @@ -379,8 +388,8 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { datatype = deriveDatatype(values[i]) } } - query := "ALTER TABLE %s.%s ADD COLUMN IF NOT EXISTS %s %s;" - _, err = p.db.Exec(fmt.Sprintf(query, quoteIdent("public"), quoteIdent(tablename), quoteIdent(column), datatype)) + query := "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" + _, err = p.db.Exec(fmt.Sprintf(query, p.fullTableName(tablename), quoteIdent(column), datatype)) if err != nil { return err } @@ -399,6 +408,7 @@ func init() { func newPostgresql() *Postgresql { return &Postgresql{ + Schema: "public", TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", TagsAsJsonb: true, TagTableSuffix: "_tag", From 87b111cc48725277cd490f3ff39b4c2e8152e9f6 Mon Sep 17 00:00:00 2001 From: Rauli Ikonen Date: Thu, 29 Nov 2018 12:04:24 +0200 Subject: [PATCH 71/79] pg output: Retry writing metrics after adding missing columns In some cases metrics datapoints only contain subset of all fields that may be present for that metric. When storing columns separately instead of using JSON getting new columns always resulted in the write failing on first metric that was missing any columns and depending on how often write was getting called and how many metrics had this behavior there was potentially very long delay in getting all metrics through. 
--- plugins/outputs/postgresql/postgresql.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index f8e54df1fdb23..12d09d8a11f10 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -360,6 +360,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { _, err := p.db.Exec(sql, values...) if err != nil { // check if insert error was caused by column mismatch + retry := false if p.FieldsAsJsonb == false { log.Printf("E! Error during insert: %v", err) tablename := tabmap[table_and_cols] @@ -393,10 +394,19 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { if err != nil { return err } + retry = true } } - return err + // We added some columns and insert might work now. Try again immediately to + // avoid long lead time in getting metrics when there are several columns missing + // from the original create statement and they get added in small drops. + if retry { + _, err = p.db.Exec(sql, values...) 
+ } + if err != nil { + return err + } } } return nil From 48299b2ed28ed3c7f95355b22b7a1fa950177faf Mon Sep 17 00:00:00 2001 From: Rauli Ikonen Date: Thu, 29 Nov 2018 15:49:19 +0200 Subject: [PATCH 72/79] pg output: Don't try closing nil rows Used to cause SIGSEGV when the operation failed --- plugins/outputs/postgresql/postgresql.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 12d09d8a11f10..3fe78eac193ce 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -372,12 +372,12 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) result, err := p.db.Query(query, p.Schema, tablename) - defer result.Close() if err != nil { return err } - // some columns are missing + defer result.Close() + // some columns are missing var column, datatype string for result.Next() { err := result.Scan(&column) From 7597dddffa1a1736e783a8c5bcc8e475c46afa46 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 29 Nov 2018 17:41:35 +0100 Subject: [PATCH 73/79] adjust test output --- plugins/outputs/postgresql/postgresql_test.go | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 381153ddd5b08..3f0863ce427c7 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -26,22 +26,22 @@ func TestPostgresqlCreateStatement(t *testing.T) { var m telegraf.Metric m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,fields 
jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) p.TagsAsJsonb = false p.FieldsAsJsonb = false m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) } @@ -52,17 +52,17 @@ func TestPostgresqlInsertStatement(t *testing.T) { p.FieldsAsJsonb = false sql := p.generateInsert("m", []string{"time", "f"}) - assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) + assert.Equal(t, `INSERT INTO "public"."m"("time","f") VALUES($1,$2)`, sql) sql = p.generateInsert("m", []string{"time", "i"}) - assert.Equal(t, `INSERT INTO "m"("time","i") VALUES($1,$2)`, sql) + assert.Equal(t, `INSERT INTO "public"."m"("time","i") VALUES($1,$2)`, sql) sql = 
p.generateInsert("m", []string{"time", "f", "i"}) - assert.Equal(t, `INSERT INTO "m"("time","f","i") VALUES($1,$2,$3)`, sql) + assert.Equal(t, `INSERT INTO "public"."m"("time","f","i") VALUES($1,$2,$3)`, sql) sql = p.generateInsert("m", []string{"time", "k", "i"}) - assert.Equal(t, `INSERT INTO "m"("time","k","i") VALUES($1,$2,$3)`, sql) + assert.Equal(t, `INSERT INTO "public"."m"("time","k","i") VALUES($1,$2,$3)`, sql) sql = p.generateInsert("m", []string{"time", "k1", "k2", "i"}) - assert.Equal(t, `INSERT INTO "m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) + assert.Equal(t, `INSERT INTO "public"."m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) } From f728b1e6aacffd7f461655b4645d2b84a17fc3b2 Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 29 Nov 2018 17:49:41 +0100 Subject: [PATCH 74/79] add schema config settting to README --- plugins/outputs/postgresql/README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 14687f961dd9a..3d3f623776dd8 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -24,6 +24,9 @@ This output plugin writes all metrics to PostgreSQL. ## Example for timescaledb # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval, if_not_exists := true);" + ## Schema to create the tables into + # schema = "public" + ## Use jsonb datatype for tags. Default is true. 
# tags_as_jsonb = true From 9ec0fc5fea36a46686ba9dc516b8c1e657e4529a Mon Sep 17 00:00:00 2001 From: Sven Klemm Date: Thu, 29 Nov 2018 18:47:03 +0100 Subject: [PATCH 75/79] Fix adding tags when using tags as foreign key --- plugins/outputs/postgresql/postgresql.go | 170 +++++++++++++++-------- 1 file changed, 114 insertions(+), 56 deletions(-) diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 3fe78eac193ce..4b8a588aa3da5 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -211,6 +211,112 @@ func (p *Postgresql) tableExists(tableName string) bool { return false } +func (p *Postgresql) getTagId(metric telegraf.Metric) (int, error) { + var tag_id int + var where_columns []string + var where_values []interface{} + tablename := metric.Name() + + if p.TagsAsJsonb { + if len(metric.Tags()) > 0 { + d, err := buildJsonbTags(metric.Tags()) + if err != nil { + return tag_id, err + } + + where_columns = append(where_columns, "tags") + where_values = append(where_values, d) + } + } else { + for column, value := range metric.Tags() { + where_columns = append(where_columns, column) + where_values = append(where_values, value) + } + } + + var where_parts []string + for i, column := range where_columns { + where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) + } + query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", p.fullTableName(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) + + err := p.db.QueryRow(query, where_values...).Scan(&tag_id) + if err != nil { + query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" + err := p.db.QueryRow(query, where_values...).Scan(&tag_id) + if err != nil { + // check if insert error was caused by column mismatch + retry := false + if p.TagsAsJsonb == false { + log.Printf("E! 
Error during insert: %v", err) + tablename := tablename + p.TagTableSuffix + columns := where_columns + var quoted_columns []string + for _, column := range columns { + quoted_columns = append(quoted_columns, quoteLiteral(column)) + } + query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" + query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) + result, err := p.db.Query(query, p.Schema, tablename) + if err != nil { + return tag_id, err + } + defer result.Close() + + // some columns are missing + var column, datatype string + for result.Next() { + err := result.Scan(&column) + if err != nil { + log.Println(err) + } + for i, name := range columns { + if name == column { + datatype = deriveDatatype(where_values[i]) + } + } + query := "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" + _, err = p.db.Exec(fmt.Sprintf(query, p.fullTableName(tablename), quoteIdent(column), datatype)) + if err != nil { + return tag_id, err + } + retry = true + } + } + + // We added some columns and insert might work now. Try again immediately to + // avoid long lead time in getting metrics when there are several columns missing + // from the original create statement and they get added in small drops. 
+ if retry { + err := p.db.QueryRow(query, where_values...).Scan(&tag_id) + if err != nil { + return tag_id, err + } + } + } + } + return tag_id, nil +} + +func buildJsonbTags(tags map[string]string) ([]byte, error) { + js := make(map[string]interface{}) + for column, value := range tags { + js[column] = value + } + + return buildJsonb(js) +} + +func buildJsonb(data map[string]interface{}) ([]byte, error) { + if len(data) > 0 { + d, err := json.Marshal(data) + if err != nil { + return d, err + } + } + return nil, nil +} + func (p *Postgresql) Write(metrics []telegraf.Metric) error { batches := make(map[string][]interface{}) params := make(map[string][]string) @@ -233,68 +339,25 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { columns := []string{"time"} values := []interface{}{metric.Time()} - var js map[string]interface{} if len(metric.Tags()) > 0 { if p.TagsAsForeignkeys { // tags in separate table - var tag_id int - var where_columns []string - var where_values []interface{} - - if p.TagsAsJsonb { - js = make(map[string]interface{}) - for column, value := range metric.Tags() { - js[column] = value - } - - if len(js) > 0 { - d, err := json.Marshal(js) - if err != nil { - return err - } - - where_columns = append(where_columns, "tags") - where_values = append(where_values, d) - } - } else { - for column, value := range metric.Tags() { - where_columns = append(where_columns, column) - where_values = append(where_values, value) - } - } - - var where_parts []string - for i, column := range where_columns { - where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) - } - query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", p.fullTableName(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) - - err := p.db.QueryRow(query, where_values...).Scan(&tag_id) + tag_id, err := p.getTagId(metric) if err != nil { - query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" - err := 
p.db.QueryRow(query, where_values...).Scan(&tag_id) - if err != nil { - return err - } + return err } - columns = append(columns, "tag_id") values = append(values, tag_id) } else { // tags in measurement table if p.TagsAsJsonb { - js = make(map[string]interface{}) - for column, value := range metric.Tags() { - js[column] = value + d, err := buildJsonbTags(metric.Tags()) + if err != nil { + return err } - if len(js) > 0 { - d, err := json.Marshal(js) - if err != nil { - return err - } - + if d != nil { columns = append(columns, "tags") values = append(values, d) } @@ -314,12 +377,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } if p.FieldsAsJsonb { - js = make(map[string]interface{}) - for column, value := range metric.Fields() { - js[column] = value - } - - d, err := json.Marshal(js) + d, err := buildJsonb(metric.Fields()) if err != nil { return err } @@ -398,7 +456,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } } - // We added some columns and insert might work now. Try again immediately to + // We added some columns and insert might work now. Try again immediately to // avoid long lead time in getting metrics when there are several columns missing // from the original create statement and they get added in small drops. if retry { From 4aeefcb5bf85441410fc775b78a4fe1f8230863c Mon Sep 17 00:00:00 2001 From: Blagoj Atanasovski Date: Wed, 29 May 2019 13:32:55 +0200 Subject: [PATCH 76/79] Refactor PostgreSQL output plugin code The PostgreSQL plugin code is split up in multiple files for better readability. Unit and Integration tests are written. Code complexity reduced a bit by reducing branching. 
--- plugins/outputs/postgresql/README.md | 4 +- .../outputs/postgresql/add_missing_columns.go | 72 +++ .../postgresql/add_missing_columns_test.go | 92 ++++ plugins/outputs/postgresql/create_table.go | 72 +++ .../outputs/postgresql/create_table_test.go | 48 ++ plugins/outputs/postgresql/db_wrapper.go | 43 ++ plugins/outputs/postgresql/generate_insert.go | 24 + .../postgresql/generate_insert_test.go | 39 ++ plugins/outputs/postgresql/get_tag_id.go | 87 ++++ plugins/outputs/postgresql/postgresql.go | 464 +++++------------- .../postgresql/postgresql_integration_test.go | 273 +++++++++++ plugins/outputs/postgresql/postgresql_test.go | 210 ++++++-- plugins/outputs/postgresql/table_keeper.go | 47 ++ .../outputs/postgresql/table_keeper_test.go | 71 +++ plugins/outputs/postgresql/utils.go | 68 +++ 15 files changed, 1215 insertions(+), 399 deletions(-) create mode 100644 plugins/outputs/postgresql/add_missing_columns.go create mode 100644 plugins/outputs/postgresql/add_missing_columns_test.go create mode 100644 plugins/outputs/postgresql/create_table.go create mode 100644 plugins/outputs/postgresql/create_table_test.go create mode 100644 plugins/outputs/postgresql/db_wrapper.go create mode 100644 plugins/outputs/postgresql/generate_insert.go create mode 100644 plugins/outputs/postgresql/generate_insert_test.go create mode 100644 plugins/outputs/postgresql/get_tag_id.go create mode 100644 plugins/outputs/postgresql/postgresql_integration_test.go create mode 100644 plugins/outputs/postgresql/table_keeper.go create mode 100644 plugins/outputs/postgresql/table_keeper_test.go create mode 100644 plugins/outputs/postgresql/utils.go diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 3d3f623776dd8..b9d0020682023 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -28,9 +28,9 @@ This output plugin writes all metrics to PostgreSQL. # schema = "public" ## Use jsonb datatype for tags. Default is true. 
- # tags_as_jsonb = true + # tags_as_jsonb = false ## Use jsonb datatype for fields. Default is true. - # fields_as_jsonb = true + # fields_as_jsonb = false ``` diff --git a/plugins/outputs/postgresql/add_missing_columns.go b/plugins/outputs/postgresql/add_missing_columns.go new file mode 100644 index 0000000000000..1d2be69bf3c55 --- /dev/null +++ b/plugins/outputs/postgresql/add_missing_columns.go @@ -0,0 +1,72 @@ +package postgresql + +import ( + "fmt" + "log" + "strings" +) + +func (p *Postgresql) addMissingColumns(tableName string, columns []string, values []interface{}) (bool, error) { + columnStatuses, err := p.whichColumnsAreMissing(columns, tableName) + if err != nil { + return false, err + } + + retry := false + for currentColumn, isMissing := range columnStatuses { + if !isMissing { + continue + } + + dataType := deriveDatatype(values[currentColumn]) + columnName := columns[currentColumn] + if err := p.addColumnToTable(columnName, dataType, tableName); err != nil { + return false, err + } + retry = true + } + + return retry, nil +} + +func prepareMissingColumnsQuery(columns []string) string { + var quotedColumns = make([]string, len(columns)) + for i, column := range columns { + quotedColumns[i] = quoteLiteral(column) + } + return fmt.Sprintf(missingColumnsTemplate, strings.Join(quotedColumns, ",")) +} + +// for a given array of columns x = [a, b, c ...] 
it returns an array of bools indicating +// if x[i] is missing +func (p *Postgresql) whichColumnsAreMissing(columns []string, tableName string) ([]bool, error) { + missingColumnsQuery := prepareMissingColumnsQuery(columns) + result, err := p.db.Query(missingColumnsQuery, p.Schema, tableName) + if err != nil { + return nil, err + } + defer result.Close() + columnStatus := make([]bool, len(columns)) + var isMissing bool + var columnName string + currentColumn := 0 + + for result.Next() { + err := result.Scan(&columnName, &isMissing) + if err != nil { + log.Println(err) + return nil, err + } + columnStatus[currentColumn] = isMissing + currentColumn++ + } + + return columnStatus, nil +} + +func (p *Postgresql) addColumnToTable(columnName, dataType, tableName string) error { + fullTableName := p.fullTableName(tableName) + addColumnQuery := fmt.Sprintf(addColumnTemplate, fullTableName, quoteIdent(columnName), dataType) + _, err := p.db.Exec(addColumnQuery) + return err +} diff --git a/plugins/outputs/postgresql/add_missing_columns_test.go b/plugins/outputs/postgresql/add_missing_columns_test.go new file mode 100644 index 0000000000000..7140847e03375 --- /dev/null +++ b/plugins/outputs/postgresql/add_missing_columns_test.go @@ -0,0 +1,92 @@ +package postgresql + +import ( + "database/sql" + "fmt" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +func prepareMissingColumnsQuery1(columns []string) string { + var quotedColumns = make([]string, len(columns)) + for i, column := range columns { + quotedColumns[i] = quoteLiteral(column) + } + return fmt.Sprintf(missingColumnsTemplate, strings.Join(quotedColumns, ",")) +} + +func TestPrepareMissingColumnsQuery(t *testing.T) { + columns := []string{} + assert.Equal(t, `WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2),`+ + `required AS (SELECT c FROM unnest(array []) AS c) `+ + `SELECT required.c, available.c IS NULL FROM required LEFT 
JOIN available ON required.c = available.c;`, + prepareMissingColumnsQuery(columns)) + columns = []string{"a", "b", "c"} + assert.Equal(t, `WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2),`+ + `required AS (SELECT c FROM unnest(array ['a','b','c']) AS c) `+ + `SELECT required.c, available.c IS NULL FROM required LEFT JOIN available ON required.c = available.c;`, + prepareMissingColumnsQuery(columns)) +} + +func TestWhichColumnsAreMissing(t *testing.T) { + mock := &mockWr{} + p := &Postgresql{db: mock} + + columns := []string{"col1"} + mock.queryErr = fmt.Errorf("error 1") + mock.expected = prepareMissingColumnsQuery(columns) + table := "tableName" + _, err := p.whichColumnsAreMissing(columns, table) + assert.Equal(t, err.Error(), "error 1") +} + +func TestAddColumnToTable(t *testing.T) { + mock := &mockWr{} + p := &Postgresql{db: mock, Schema: "pub"} + + column := "col1" + dataType := "text" + tableName := "table" + mock.execErr = fmt.Errorf("error 1") + mock.expected = `ALTER TABLE "pub"."table" ADD COLUMN IF NOT EXISTS "col1" text;` + err := p.addColumnToTable(column, dataType, tableName) + assert.EqualError(t, err, "error 1") + + mock.execErr = nil + assert.Nil(t, p.addColumnToTable(column, dataType, tableName)) + +} + +func (p *Postgresql) addColumnToTable1(columnName, dataType, tableName string) error { + fullTableName := p.fullTableName(tableName) + addColumnQuery := fmt.Sprintf(addColumnTemplate, fullTableName, quoteIdent(columnName), dataType) + _, err := p.db.Exec(addColumnQuery) + return err +} + +type mockWr struct { + expected string + exec sql.Result + execErr error + query *sql.Rows + queryErr error +} + +func (m *mockWr) Exec(query string, args ...interface{}) (sql.Result, error) { + if m.expected != query { + return nil, fmt.Errorf("unexpected query; exp: '%s'; got: '%s'", m.expected, query) + } + return m.exec, m.execErr +} +func (m *mockWr) Query(query string, args 
...interface{}) (*sql.Rows, error) { + if m.expected != query { + return nil, fmt.Errorf("unexpected query; exp: '%s'; got: '%s'", m.expected, query) + } + return m.query, m.queryErr +} +func (m *mockWr) QueryRow(query string, args ...interface{}) *sql.Row { + return nil +} +func (m *mockWr) Close() error { return nil } diff --git a/plugins/outputs/postgresql/create_table.go b/plugins/outputs/postgresql/create_table.go new file mode 100644 index 0000000000000..9b647e276b3ee --- /dev/null +++ b/plugins/outputs/postgresql/create_table.go @@ -0,0 +1,72 @@ +package postgresql + +import ( + "fmt" + "strings" + + "github.com/influxdata/telegraf" +) + +const ( + tagIDColumn = "tag_id" + createTagsTableTemplate = "CREATE TABLE IF NOT EXISTS %s(tag_id serial primary key,%s,UNIQUE(%s))" +) + +func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { + var columns []string + var pk []string + var sql []string + + pk = append(pk, quoteIdent("time")) + columns = append(columns, "time timestamptz") + + // handle tags if necessary + if len(metric.Tags()) > 0 { + if p.TagsAsForeignkeys { + // tags in separate table + var tagColumns []string + var tagColumndefs []string + columns = append(columns, "tag_id int") + + if p.TagsAsJsonb { + tagColumns = append(tagColumns, "tags") + tagColumndefs = append(tagColumndefs, "tags jsonb") + } else { + for column := range metric.Tags() { + tagColumns = append(tagColumns, quoteIdent(column)) + tagColumndefs = append(tagColumndefs, fmt.Sprintf("%s text", quoteIdent(column))) + } + } + table := p.fullTableName(metric.Name() + p.TagTableSuffix) + sql = append(sql, fmt.Sprintf(createTagsTableTemplate, table, strings.Join(tagColumndefs, ","), strings.Join(tagColumns, ","))) + } else { + // tags in measurement table + if p.TagsAsJsonb { + columns = append(columns, "tags jsonb") + } else { + for column := range metric.Tags() { + pk = append(pk, quoteIdent(column)) + columns = append(columns, fmt.Sprintf("%s text", 
quoteIdent(column))) + } + } + } + } + + if p.FieldsAsJsonb { + columns = append(columns, "fields jsonb") + } else { + var datatype string + for column, v := range metric.Fields() { + datatype = deriveDatatype(v) + columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) + } + } + + query := strings.Replace(p.TableTemplate, "{TABLE}", p.fullTableName(metric.Name()), -1) + query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(p.fullTableName(metric.Name())), -1) + query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) + query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) + + sql = append(sql, query) + return strings.Join(sql, ";") +} diff --git a/plugins/outputs/postgresql/create_table_test.go b/plugins/outputs/postgresql/create_table_test.go new file mode 100644 index 0000000000000..404e3fdbd4683 --- /dev/null +++ b/plugins/outputs/postgresql/create_table_test.go @@ -0,0 +1,48 @@ +package postgresql + +import ( + "testing" + "time" + + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/metric" + "github.com/stretchr/testify/assert" +) + +func TestGenerateCreateTable(t *testing.T) { + p := newPostgresql() + p.TagsAsJsonb = true + p.FieldsAsJsonb = true + timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) + + var m telegraf.Metric + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) + + m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) + + p.TagsAsJsonb = false + p.FieldsAsJsonb = false + + m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS 
"public"."m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) + + m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) + + m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) + assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) + + p.TagsAsForeignkeys = true + assert.Equal(t, + `CREATE TABLE IF NOT EXISTS "public"."m_tag"(tag_id serial primary key,"k" text,UNIQUE("k"));`+ + `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tag_id int,"i" int8)`, + p.generateCreateTable(m)) + + p.TagsAsJsonb = true + assert.Equal(t, + `CREATE TABLE IF NOT EXISTS "public"."m_tag"(tag_id serial primary key,tags jsonb,UNIQUE(tags));`+ + `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tag_id int,"i" int8)`, + p.generateCreateTable(m)) +} diff --git a/plugins/outputs/postgresql/db_wrapper.go b/plugins/outputs/postgresql/db_wrapper.go new file mode 100644 index 0000000000000..bb095429d985b --- /dev/null +++ b/plugins/outputs/postgresql/db_wrapper.go @@ -0,0 +1,43 @@ +package postgresql + +import ( + "database/sql" + // pgx driver for sql connections + _ "github.com/jackc/pgx/stdlib" +) + +type dbWrapper interface { + Exec(query string, args ...interface{}) (sql.Result, error) + Query(query string, args ...interface{}) (*sql.Rows, error) + QueryRow(query string, args ...interface{}) *sql.Row + Close() error +} + +type defaultDbWrapper struct { + db *sql.DB +} + +func newDbWrapper(address string) (dbWrapper, error) { + db, err := sql.Open("pgx", address) + if err != nil { + return nil, err + } + + return &defaultDbWrapper{ + db: db, + }, nil +} + +func (d *defaultDbWrapper) Exec(query string, args ...interface{}) (sql.Result, error) { + return d.db.Exec(query, args...) 
+} + +func (d *defaultDbWrapper) Close() error { return d.db.Close() } + +func (d *defaultDbWrapper) Query(query string, args ...interface{}) (*sql.Rows, error) { + return d.db.Query(query, args...) +} + +func (d *defaultDbWrapper) QueryRow(query string, args ...interface{}) *sql.Row { + return d.db.QueryRow(query, args...) +} diff --git a/plugins/outputs/postgresql/generate_insert.go b/plugins/outputs/postgresql/generate_insert.go new file mode 100644 index 0000000000000..c71c884845da3 --- /dev/null +++ b/plugins/outputs/postgresql/generate_insert.go @@ -0,0 +1,24 @@ +package postgresql + +import ( + "fmt" + "strings" +) + +const ( + insertIntoSQLTemplate = "INSERT INTO %s(%s) VALUES(%s)" +) + +func (p *Postgresql) generateInsert(tablename string, columns []string) string { + valuePlaceholders := make([]string, len(columns)) + quotedColumns := make([]string, len(columns)) + for i, column := range columns { + valuePlaceholders[i] = fmt.Sprintf("$%d", i+1) + quotedColumns[i] = quoteIdent(column) + } + + fullTableName := p.fullTableName(tablename) + columnNames := strings.Join(quotedColumns, ",") + values := strings.Join(valuePlaceholders, ",") + return fmt.Sprintf(insertIntoSQLTemplate, fullTableName, columnNames, values) +} diff --git a/plugins/outputs/postgresql/generate_insert_test.go b/plugins/outputs/postgresql/generate_insert_test.go new file mode 100644 index 0000000000000..28d2e023b9790 --- /dev/null +++ b/plugins/outputs/postgresql/generate_insert_test.go @@ -0,0 +1,39 @@ +package postgresql + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPostgresqlQuote(t *testing.T) { + assert.Equal(t, `"foo"`, quoteIdent("foo")) + assert.Equal(t, `"fo'o"`, quoteIdent("fo'o")) + assert.Equal(t, `"fo""o"`, quoteIdent("fo\"o")) + + assert.Equal(t, "'foo'", quoteLiteral("foo")) + assert.Equal(t, "'fo''o'", quoteLiteral("fo'o")) + assert.Equal(t, "'fo\"o'", quoteLiteral("fo\"o")) +} + +func TestPostgresqlInsertStatement(t *testing.T) { + p 
:= newPostgresql() + + p.TagsAsJsonb = false + p.FieldsAsJsonb = false + + sql := p.generateInsert("m", []string{"time", "f"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","f") VALUES($1,$2)`, sql) + + sql = p.generateInsert("m", []string{"time", "i"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","i") VALUES($1,$2)`, sql) + + sql = p.generateInsert("m", []string{"time", "f", "i"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","f","i") VALUES($1,$2,$3)`, sql) + + sql = p.generateInsert("m", []string{"time", "k", "i"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","k","i") VALUES($1,$2,$3)`, sql) + + sql = p.generateInsert("m", []string{"time", "k1", "k2", "i"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) +} diff --git a/plugins/outputs/postgresql/get_tag_id.go b/plugins/outputs/postgresql/get_tag_id.go new file mode 100644 index 0000000000000..c17e6a6ea978c --- /dev/null +++ b/plugins/outputs/postgresql/get_tag_id.go @@ -0,0 +1,87 @@ +package postgresql + +import ( + "fmt" + "log" + "strings" + + "github.com/influxdata/telegraf" +) + +const ( + selectTagIDTemplate = "SELECT tag_id FROM %s WHERE %s" + missingColumnsTemplate = "WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2)," + + "required AS (SELECT c FROM unnest(array [%s]) AS c) " + + "SELECT required.c, available.c IS NULL FROM required LEFT JOIN available ON required.c = available.c;" + + addColumnTemplate = "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" +) + +func (p *Postgresql) getTagID(metric telegraf.Metric) (int, error) { + var tagID int + var whereColumns []string + var whereValues []interface{} + tablename := metric.Name() + + if p.TagsAsJsonb && len(metric.Tags()) > 0 { + d, err := buildJsonbTags(metric.Tags()) + if err != nil { + return tagID, err + } + + whereColumns = append(whereColumns, "tags") + whereValues = append(whereValues, d) + } else { + for 
column, value := range metric.Tags() { + whereColumns = append(whereColumns, column) + whereValues = append(whereValues, value) + } + } + + whereParts := make([]string, len(whereColumns)) + for i, column := range whereColumns { + whereParts[i] = fmt.Sprintf("%s = $%d", quoteIdent(column), i+1) + } + + tagsTableName := tablename + p.TagTableSuffix + tagsTableFullName := p.fullTableName(tagsTableName) + query := fmt.Sprintf(selectTagIDTemplate, tagsTableFullName, strings.Join(whereParts, " AND ")) + + err := p.db.QueryRow(query, whereValues...).Scan(&tagID) + if err == nil { + return tagID, nil + } + query = p.generateInsert(tagsTableName, whereColumns) + " RETURNING tag_id" + err = p.db.QueryRow(query, whereValues...).Scan(&tagID) + if err == nil { + return tagID, nil + } + + // check if insert error was caused by column mismatch + + // if tags are jsonb, there shouldn't be a column mismatch + if p.TagsAsJsonb { + return tagID, err + } + + // check for missing columns + log.Printf("W! Possible column mismatch while inserting new tag-set: %v", err) + retry, err := p.addMissingColumns(tagsTableName, whereColumns, whereValues) + if err != nil { + // missing columns not properly added + log.Printf("E! Could not add missing columns: %v", err) + return tagID, err + } + + // We added some columns and insert might work now. Try again immediately to + // avoid long lead time in getting metrics when there are several columns missing + // from the original create statement and they get added in small drops. + if retry { + log.Printf("I! 
Retrying to insert new tag set") + err := p.db.QueryRow(query, whereValues...).Scan(&tagID) + if err != nil { + return tagID, err + } + } + return tagID, nil +} diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 4b8a588aa3da5..0e30ea6e507d3 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -1,21 +1,20 @@ package postgresql import ( - "database/sql" - "encoding/json" - "fmt" "log" "sort" - "strings" - - "github.com/jackc/pgx" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" ) +const ( + tagsJSONColumn = "tags" + fieldsJSONColumn = "fields" +) + type Postgresql struct { - db *sql.DB + db dbWrapper Address string Schema string TagsAsForeignkeys bool @@ -23,66 +22,41 @@ type Postgresql struct { FieldsAsJsonb bool TableTemplate string TagTableSuffix string - Tables map[string]bool + tables tableKeeper } +func init() { + outputs.Add("postgresql", func() telegraf.Output { return newPostgresql() }) +} + +func newPostgresql() *Postgresql { + return &Postgresql{ + Schema: "public", + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", + TagTableSuffix: "_tag", + } +} + +// Connect establishes a connection to the target database and prepares the cache func (p *Postgresql) Connect() error { - db, err := sql.Open("pgx", p.Address) + db, err := newDbWrapper(p.Address) if err != nil { return err } p.db = db - p.Tables = make(map[string]bool) - + p.tables = newTableKeeper(db) return nil } +// Close closes the connection to the database func (p *Postgresql) Close() error { return p.db.Close() } -func contains(haystack []string, needle string) bool { - for _, key := range haystack { - if key == needle { - return true - } - } - return false -} - -func quoteIdent(name string) string { - return pgx.Identifier{name}.Sanitize() -} - -func quoteLiteral(name string) string { - return "'" + strings.Replace(name, "'", "''", -1) + "'" 
-} - func (p *Postgresql) fullTableName(name string) string { return quoteIdent(p.Schema) + "." + quoteIdent(name) } -func deriveDatatype(value interface{}) string { - var datatype string - - switch value.(type) { - case bool: - datatype = "boolean" - case uint64: - datatype = "int8" - case int64: - datatype = "int8" - case float64: - datatype = "float8" - case string: - datatype = "text" - default: - datatype = "text" - log.Printf("E! Unknown datatype %T(%v)", value, value) - } - return datatype -} - var sampleConfig = ` ## specify address via a url matching: ## postgres://[pqgotest[:password]]@localhost[/dbname]\ @@ -118,262 +92,41 @@ var sampleConfig = ` # schema = "public" ## Use jsonb datatype for tags - # tags_as_jsonb = true + # tags_as_jsonb = false ## Use jsonb datatype for fields - # fields_as_jsonb = true + # fields_as_jsonb = false ` func (p *Postgresql) SampleConfig() string { return sampleConfig } func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } -func (p *Postgresql) generateCreateTable(metric telegraf.Metric) string { - var columns []string - var pk []string - var sql []string - - pk = append(pk, quoteIdent("time")) - columns = append(columns, "time timestamptz") - - // handle tags if necessary - if len(metric.Tags()) > 0 { - if p.TagsAsForeignkeys { - // tags in separate table - var tag_columns []string - var tag_columndefs []string - columns = append(columns, "tag_id int") - - if p.TagsAsJsonb { - tag_columns = append(tag_columns, "tags") - tag_columndefs = append(tag_columndefs, "tags jsonb") - } else { - for column, _ := range metric.Tags() { - tag_columns = append(tag_columns, quoteIdent(column)) - tag_columndefs = append(tag_columndefs, fmt.Sprintf("%s text", quoteIdent(column))) - } - } - table := quoteIdent(metric.Name() + p.TagTableSuffix) - sql = append(sql, fmt.Sprintf("CREATE TABLE IF NOT EXISTS %s(tag_id serial primary key,%s,UNIQUE(%s))", table, strings.Join(tag_columndefs, ","), 
strings.Join(tag_columns, ","))) - } else { - // tags in measurement table - if p.TagsAsJsonb { - columns = append(columns, "tags jsonb") - } else { - for column, _ := range metric.Tags() { - pk = append(pk, quoteIdent(column)) - columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) - } - } - } - } - - if p.FieldsAsJsonb { - columns = append(columns, "fields jsonb") - } else { - var datatype string - for column, v := range metric.Fields() { - datatype = deriveDatatype(v) - columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) - } - } - - query := strings.Replace(p.TableTemplate, "{TABLE}", p.fullTableName(metric.Name()), -1) - query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(p.fullTableName(metric.Name())), -1) - query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) - query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) - - sql = append(sql, query) - return strings.Join(sql, ";") -} - -func (p *Postgresql) generateInsert(tablename string, columns []string) string { - var placeholder, quoted []string - for i, column := range columns { - placeholder = append(placeholder, fmt.Sprintf("$%d", i+1)) - quoted = append(quoted, quoteIdent(column)) - } - - return fmt.Sprintf("INSERT INTO %s(%s) VALUES(%s)", p.fullTableName(tablename), strings.Join(quoted, ","), strings.Join(placeholder, ",")) -} - -func (p *Postgresql) tableExists(tableName string) bool { - stmt := "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;" - result, err := p.db.Exec(stmt, tableName, p.Schema) - if err != nil { - log.Printf("E! 
Error checking for existence of metric table %s: %v", tableName, err) - return false - } - if count, _ := result.RowsAffected(); count == 1 { - p.Tables[tableName] = true - return true - } - return false -} - -func (p *Postgresql) getTagId(metric telegraf.Metric) (int, error) { - var tag_id int - var where_columns []string - var where_values []interface{} - tablename := metric.Name() - - if p.TagsAsJsonb { - if len(metric.Tags()) > 0 { - d, err := buildJsonbTags(metric.Tags()) - if err != nil { - return tag_id, err - } - - where_columns = append(where_columns, "tags") - where_values = append(where_values, d) - } - } else { - for column, value := range metric.Tags() { - where_columns = append(where_columns, column) - where_values = append(where_values, value) - } - } - - var where_parts []string - for i, column := range where_columns { - where_parts = append(where_parts, fmt.Sprintf("%s = $%d", quoteIdent(column), i+1)) - } - query := fmt.Sprintf("SELECT tag_id FROM %s WHERE %s", p.fullTableName(tablename+p.TagTableSuffix), strings.Join(where_parts, " AND ")) - - err := p.db.QueryRow(query, where_values...).Scan(&tag_id) - if err != nil { - query := p.generateInsert(tablename+p.TagTableSuffix, where_columns) + " RETURNING tag_id" - err := p.db.QueryRow(query, where_values...).Scan(&tag_id) - if err != nil { - // check if insert error was caused by column mismatch - retry := false - if p.TagsAsJsonb == false { - log.Printf("E! 
Error during insert: %v", err) - tablename := tablename + p.TagTableSuffix - columns := where_columns - var quoted_columns []string - for _, column := range columns { - quoted_columns = append(quoted_columns, quoteLiteral(column)) - } - query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" - query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) - result, err := p.db.Query(query, p.Schema, tablename) - if err != nil { - return tag_id, err - } - defer result.Close() - - // some columns are missing - var column, datatype string - for result.Next() { - err := result.Scan(&column) - if err != nil { - log.Println(err) - } - for i, name := range columns { - if name == column { - datatype = deriveDatatype(where_values[i]) - } - } - query := "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" - _, err = p.db.Exec(fmt.Sprintf(query, p.fullTableName(tablename), quoteIdent(column), datatype)) - if err != nil { - return tag_id, err - } - retry = true - } - } - - // We added some columns and insert might work now. Try again immediately to - // avoid long lead time in getting metrics when there are several columns missing - // from the original create statement and they get added in small drops. 
- if retry { - err := p.db.QueryRow(query, where_values...).Scan(&tag_id) - if err != nil { - return tag_id, err - } - } - } - } - return tag_id, nil -} - -func buildJsonbTags(tags map[string]string) ([]byte, error) { - js := make(map[string]interface{}) - for column, value := range tags { - js[column] = value - } - - return buildJsonb(js) -} - -func buildJsonb(data map[string]interface{}) ([]byte, error) { - if len(data) > 0 { - d, err := json.Marshal(data) - if err != nil { - return d, err - } - } - return nil, nil -} - func (p *Postgresql) Write(metrics []telegraf.Metric) error { - batches := make(map[string][]interface{}) - params := make(map[string][]string) - colmap := make(map[string][]string) - tabmap := make(map[string]string) - + toInsert := make(map[string][]*colsAndValues) for _, metric := range metrics { tablename := metric.Name() // create table if needed - if p.Tables[tablename] == false && p.tableExists(tablename) == false { + if p.tables.exists(p.Schema, tablename) == false { createStmt := p.generateCreateTable(metric) _, err := p.db.Exec(createStmt) if err != nil { log.Printf("E! 
Creating table failed: statement: %v, error: %v", createStmt, err) return err } - p.Tables[tablename] = true + p.tables.add(tablename) } columns := []string{"time"} values := []interface{}{metric.Time()} - - if len(metric.Tags()) > 0 { - if p.TagsAsForeignkeys { - // tags in separate table - tag_id, err := p.getTagId(metric) - if err != nil { - return err - } - columns = append(columns, "tag_id") - values = append(values, tag_id) - } else { - // tags in measurement table - if p.TagsAsJsonb { - d, err := buildJsonbTags(metric.Tags()) - if err != nil { - return err - } - - if d != nil { - columns = append(columns, "tags") - values = append(values, d) - } - } else { - var keys []string - fields := metric.Tags() - for column := range fields { - keys = append(keys, column) - } - sort.Strings(keys) - for _, column := range keys { - columns = append(columns, column) - values = append(values, fields[column]) - } - } - } + tagColumns, tagValues, err := p.prepareTags(metric) + if err != nil { + return err + } + if tagColumns != nil { + columns = append(columns, tagColumns...) + values = append(values, tagValues...) } if p.FieldsAsJsonb { @@ -382,7 +135,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { return err } - columns = append(columns, "fields") + columns = append(columns, fieldsJSONColumn) values = append(values, d) } else { var keys []string @@ -397,89 +150,102 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error { } } - var table_and_cols string - var placeholder, quoted_columns []string - for _, column := range columns { - quoted_columns = append(quoted_columns, quoteIdent(column)) + newValues := &colsAndValues{ + cols: columns, + vals: values, } - table_and_cols = fmt.Sprintf("%s(%s)", p.fullTableName(tablename), strings.Join(quoted_columns, ",")) - batches[table_and_cols] = append(batches[table_and_cols], values...) 
- for i, _ := range columns { - i += len(params[table_and_cols]) * len(columns) - placeholder = append(placeholder, fmt.Sprintf("$%d", i+1)) - } - params[table_and_cols] = append(params[table_and_cols], strings.Join(placeholder, ",")) - colmap[table_and_cols] = columns - tabmap[table_and_cols] = tablename + toInsert[tablename] = append(toInsert[tablename], newValues) + } + + return p.insertBatches(toInsert) +} + +func (p *Postgresql) prepareTags(metric telegraf.Metric) ([]string, []interface{}, error) { + if len(metric.Tags()) == 0 { + return nil, nil, nil } - for table_and_cols, values := range batches { - sql := fmt.Sprintf("INSERT INTO %s VALUES (%s)", table_and_cols, strings.Join(params[table_and_cols], "),(")) - _, err := p.db.Exec(sql, values...) + if p.TagsAsForeignkeys { + // tags in separate table + tagID, err := p.getTagID(metric) + if err != nil { + return nil, nil, err + } + return []string{tagIDColumn}, []interface{}{tagID}, nil + } + // tags in measurement table + if p.TagsAsJsonb { + d, err := buildJsonbTags(metric.Tags()) if err != nil { + return nil, nil, err + } + + if d != nil { + return []string{tagsJSONColumn}, []interface{}{d}, nil + } + return nil, nil, nil + + } + + var keys []string + tags := metric.Tags() + for column := range tags { + keys = append(keys, column) + } + sort.Strings(keys) + numColumns := len(keys) + var columns = make([]string, numColumns) + var values = make([]interface{}, numColumns) + for i, column := range keys { + columns[i] = column + values[i] = tags[column] + } + return columns, values, nil +} + +type colsAndValues struct { + cols []string + vals []interface{} +} + +// insertBatches takes batches of data to be inserted. The batches are mapped +// by the target table, and each batch contains the columns and values for those +// columns that will generate the INSERT statement. +// On column mismatch an attempt is made to create the column and try to reinsert. 
+func (p *Postgresql) insertBatches(batches map[string][]*colsAndValues) error { + for tableName, colsAndValues := range batches { + for _, row := range colsAndValues { + sql := p.generateInsert(tableName, row.cols) + _, err := p.db.Exec(sql, row.vals...) + if err == nil { + continue + } + // check if insert error was caused by column mismatch + if p.FieldsAsJsonb { + return err + } + + log.Printf("W! Possible column mismatch while inserting new metrics: %v", err) + retry := false - if p.FieldsAsJsonb == false { - log.Printf("E! Error during insert: %v", err) - tablename := tabmap[table_and_cols] - columns := colmap[table_and_cols] - var quoted_columns []string - for _, column := range columns { - quoted_columns = append(quoted_columns, quoteLiteral(column)) - } - query := "SELECT c FROM unnest(array[%s]) AS c WHERE NOT EXISTS(SELECT 1 FROM information_schema.columns WHERE column_name=c AND table_schema=$1 AND table_name=$2)" - query = fmt.Sprintf(query, strings.Join(quoted_columns, ",")) - result, err := p.db.Query(query, p.Schema, tablename) - if err != nil { - return err - } - defer result.Close() - - // some columns are missing - var column, datatype string - for result.Next() { - err := result.Scan(&column) - if err != nil { - log.Println(err) - } - for i, name := range columns { - if name == column { - datatype = deriveDatatype(values[i]) - } - } - query := "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" - _, err = p.db.Exec(fmt.Sprintf(query, p.fullTableName(tablename), quoteIdent(column), datatype)) - if err != nil { - return err - } - retry = true - } + retry, err = p.addMissingColumns(tableName, row.cols, row.vals) + if err != nil { + log.Printf("E! Could not fix column mismatch: %v", err) + return err } // We added some columns and insert might work now. Try again immediately to // avoid long lead time in getting metrics when there are several columns missing // from the original create statement and they get added in small drops. 
if retry { - _, err = p.db.Exec(sql, values...) + _, err = p.db.Exec(sql, row.vals...) } if err != nil { return err } } } - return nil -} - -func init() { - outputs.Add("postgresql", func() telegraf.Output { return newPostgresql() }) -} -func newPostgresql() *Postgresql { - return &Postgresql{ - Schema: "public", - TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", - TagsAsJsonb: true, - TagTableSuffix: "_tag", - FieldsAsJsonb: true, - } + return nil } diff --git a/plugins/outputs/postgresql/postgresql_integration_test.go b/plugins/outputs/postgresql/postgresql_integration_test.go new file mode 100644 index 0000000000000..1fdbe0207ed33 --- /dev/null +++ b/plugins/outputs/postgresql/postgresql_integration_test.go @@ -0,0 +1,273 @@ +package postgresql + +import ( + "database/sql" + "fmt" + "testing" + "time" + + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/metric" + _ "github.com/jackc/pgx/stdlib" + "github.com/stretchr/testify/assert" +) + +func prepareAndConnect(t *testing.T, foreignTags, jsonTags, jsonFields bool) (telegraf.Metric, *sql.DB, *Postgresql) { + if testing.Short() { + t.Skip("Skipping integration test in short mode") + } + + testAddress := "postgres://postgres@localhost:5432/postgres?sslmode=disable" + + testMetric := testMetric("metric name", "tag1", int(1)) + + postgres := &Postgresql{ + Address: testAddress, + Schema: "public", + TagsAsForeignkeys: foreignTags, + TagsAsJsonb: jsonTags, + FieldsAsJsonb: jsonFields, + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", + TagTableSuffix: "_tags", + } + + // drop metric tables if exists + + db, err := sql.Open("pgx", testAddress) + assert.NoError(t, err, "Could not connect to test db") + + _, err = db.Exec(fmt.Sprintf(`DROP TABLE IF EXISTS "%s"`, testMetric.Name())) + assert.NoError(t, err, "Could not prepare db") + _, err = db.Exec(fmt.Sprintf(`DROP TABLE IF EXISTS "%s%s"`, testMetric.Name(), postgres.TagTableSuffix)) + assert.NoError(t, err, 
"Could not prepare db") + + err = postgres.Connect() + assert.NoError(t, err, "Could not connect") + return testMetric, db, postgres +} + +// testMetric Returns a simple test point: +// measurement -> name +// tags -> "tag":tag +// value -> "value": value +// time -> time.Now().UTC() +func testMetric(name string, tag string, value interface{}) telegraf.Metric { + if value == nil { + panic("Cannot use a nil value") + } + tags := map[string]string{"tag": tag} + pt, _ := metric.New( + name, + tags, + map[string]interface{}{"value": value}, + time.Now().UTC(), + ) + return pt +} + +func TestWriteToPostgres(t *testing.T) { + testMetric, dbConn, postgres := prepareAndConnect(t, false, false, false) + writeAndAssertSingleMetricNoJSON(t, testMetric, dbConn, postgres) +} + +func TestWriteToPostgresJsonTags(t *testing.T) { + tagsAsForeignKey := false + tagsAsJSON := true + fieldsAsJSON := false + testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + // insert first metric + err := postgres.Write([]telegraf.Metric{testMetric}) + assert.NoError(t, err, "Could not write") + + // should have created table, all columns in the same table + row := dbConn.QueryRow(fmt.Sprintf(`SELECT time, tags, value FROM "%s"`, testMetric.Name())) + var ts time.Time + var tags string + var value int64 + err = row.Scan(&ts, &tags, &value) + assert.NoError(t, err, "Could not check test results") + + sentTag, _ := testMetric.GetTag("tag") + sentTagJSON := fmt.Sprintf(`{"tag": "%s"}`, sentTag) + sentValue, _ := testMetric.GetField("value") + sentTs := testMetric.Time() + // postgres doesn't support nano seconds in timestamp + sentTsNanoSecondOffset := sentTs.Nanosecond() + nanoSeconds := sentTsNanoSecondOffset % 1000 + sentTs = sentTs.Add(time.Duration(-nanoSeconds) * time.Nanosecond) + if !ts.UTC().Equal(sentTs) || tags != sentTagJSON || value != sentValue.(int64) { + assert.Fail(t, fmt.Sprintf("Expected: %v, %v, %v; Received: %v, %v, %v", + sentTs, 
sentTagJSON, sentValue, + ts.UTC(), tags, value)) + } +} + +func TestWriteToPostgresJsonTagsAsForeignTable(t *testing.T) { + tagsAsForeignKey := true + tagsAsJSON := true + fieldsAsJSON := false + testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + // insert first metric + err := postgres.Write([]telegraf.Metric{testMetric}) + assert.NoError(t, err, "Could not write") + + // should have created table, all columns in the same table + row := dbConn.QueryRow(fmt.Sprintf(`SELECT time, tag_id, value FROM "%s"`, testMetric.Name())) + var ts time.Time + var tagID int64 + var value int64 + err = row.Scan(&ts, &tagID, &value) + assert.NoError(t, err, "Could not check test results") + + sentValue, _ := testMetric.GetField("value") + sentTs := testMetric.Time() + // postgres doesn't support nano seconds in timestamp + sentTsNanoSecondOffset := sentTs.Nanosecond() + nanoSeconds := sentTsNanoSecondOffset % 1000 + sentTs = sentTs.Add(time.Duration(-nanoSeconds) * time.Nanosecond) + if !ts.UTC().Equal(sentTs) || tagID != 1 || value != sentValue.(int64) { + assert.Fail(t, fmt.Sprintf("Expected: %v, %v, %v; Received: %v, %v, %v", + sentTs, 1, sentValue, + ts.UTC(), tagID, value)) + } + + sentTag, _ := testMetric.GetTag("tag") + sentTagJSON := fmt.Sprintf(`{"tag": "%s"}`, sentTag) + row = dbConn.QueryRow(fmt.Sprintf(`SELECT tag_id, tags FROM "%s%s"`, testMetric.Name(), postgres.TagTableSuffix)) + tagID = 0 + var tags string + err = row.Scan(&tagID, &tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, int64(1), tagID) + assert.Equal(t, sentTagJSON, tags) +} + +func TestWriteToPostgresMultipleRowsOneTag(t *testing.T) { + tagsAsForeignKey := true + tagsAsJSON := true + fieldsAsJSON := false + testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + // insert first metric + err := postgres.Write([]telegraf.Metric{testMetric, testMetric}) + assert.NoError(t, err, "Could 
not write") + + // should have two rows + row := dbConn.QueryRow(fmt.Sprintf(`SELECT count(*) FROM "%s"`, testMetric.Name())) + var count int64 + err = row.Scan(&count) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, int64(2), count) + + sentTag, _ := testMetric.GetTag("tag") + sentTagJSON := fmt.Sprintf(`{"tag": "%s"}`, sentTag) + row = dbConn.QueryRow(fmt.Sprintf(`SELECT tag_id, tags FROM "%s%s"`, testMetric.Name(), postgres.TagTableSuffix)) + var tagID int64 + var tags string + err = row.Scan(&tagID, &tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, int64(1), tagID) + assert.Equal(t, sentTagJSON, tags) +} + +func TestWriteToPostgresAddNewTag(t *testing.T) { + tagsAsForeignKey := true + tagsAsJSON := true + fieldsAsJSON := false + testMetricWithOneTag, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + testMetricWithOneMoreTag := testMetric("metric name", "tag1", int(2)) + testMetricWithOneMoreTag.AddTag("second_tag", "tag2") + // insert first two metric + err := postgres.Write([]telegraf.Metric{testMetricWithOneTag, testMetricWithOneMoreTag}) + assert.NoError(t, err, "Could not write") + + // should have two rows + row := dbConn.QueryRow(fmt.Sprintf(`SELECT count(*) FROM "%s"`, testMetricWithOneTag.Name())) + var count int64 + err = row.Scan(&count) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, int64(2), count) + + // and two tagsets + sentTag, _ := testMetricWithOneTag.GetTag("tag") + sentTagJSON := fmt.Sprintf(`{"tag": "%s"}`, sentTag) + row = dbConn.QueryRow(fmt.Sprintf(`SELECT tags FROM "%s%s" WHERE tag_id=1`, testMetricWithOneTag.Name(), postgres.TagTableSuffix)) + var tags string + err = row.Scan(&tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, sentTagJSON, tags) + + secondSentTagsJSON := `{"tag": "tag1", "second_tag": "tag2"}` + row = dbConn.QueryRow(fmt.Sprintf(`SELECT tags FROM "%s%s" WHERE 
tag_id=2`, testMetricWithOneMoreTag.Name(), postgres.TagTableSuffix)) + err = row.Scan(&tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, secondSentTagsJSON, tags) + + // insert new point with a third tagset + testMetricWithThirdTag := testMetric("metric name", "tag1", int(2)) + testMetricWithThirdTag.AddTag("third_tag", "tag3") + err = postgres.Write([]telegraf.Metric{testMetricWithThirdTag}) + assert.NoError(t, err, "Could not write") + thirdSentTagsJSON := `{"tag": "tag1", "third_tag": "tag3"}` + row = dbConn.QueryRow(fmt.Sprintf(`SELECT tags FROM "%s%s" WHERE tag_id=3`, testMetricWithThirdTag.Name(), postgres.TagTableSuffix)) + err = row.Scan(&tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, thirdSentTagsJSON, tags) +} + +func TestWriteToPostgresAddNewField(t *testing.T) { + testMetric, dbConn, postgres := prepareAndConnect(t, false, false, false) + // insert first metric + writeAndAssertSingleMetricNoJSON(t, testMetric, dbConn, postgres) + + //insert second metric with one more field + testMetric.AddField("field2", 1.0) + testMetric.SetTime(time.Now()) + err := postgres.Write([]telegraf.Metric{testMetric}) + assert.NoError(t, err, "Could not write") + + rows, err := dbConn.Query(fmt.Sprintf(`SELECT time, tag, value, field2 FROM "%s" ORDER BY time ASC`, testMetric.Name())) + assert.NoError(t, err, "Could not check written results") + var ts time.Time + var tag string + var value sql.NullInt64 + var field2 sql.NullFloat64 + rowNum := 1 + for rows.Next() { + rows.Scan(&ts, &tag, &value, &field2) + if rowNum == 1 { + assert.False(t, field2.Valid) + } else if rowNum == 2 { + assert.Equal(t, 1.0, field2.Float64) + } else { + assert.FailNow(t, "more rows than expected") + } + rowNum++ + } + +} + +func writeAndAssertSingleMetricNoJSON(t *testing.T, testMetric telegraf.Metric, dbConn *sql.DB, postgres *Postgresql) { + err := postgres.Write([]telegraf.Metric{testMetric}) + assert.NoError(t, err, "Could not 
write") + + // should have created table, all columns in the same table + row := dbConn.QueryRow(fmt.Sprintf(`SELECT time, tag, value FROM "%s"`, testMetric.Name())) + var ts time.Time + var tag string + var value int64 + err = row.Scan(&ts, &tag, &value) + assert.NoError(t, err, "Could not check test results") + + sentTag, _ := testMetric.GetTag("tag") + sentValue, _ := testMetric.GetField("value") + sentTs := testMetric.Time() + // postgres doesn't support nano seconds in timestamp + sentTsNanoSecondOffset := sentTs.Nanosecond() + nanoSeconds := sentTsNanoSecondOffset % 1000 + sentTs = sentTs.Add(time.Duration(-nanoSeconds) * time.Nanosecond) + if !ts.UTC().Equal(sentTs) || tag != sentTag || value != sentValue.(int64) { + assert.Fail(t, fmt.Sprintf("Expected: %v, %v, %v; Received: %v, %v, %v", + sentTs, sentTag, sentValue, + ts.UTC(), tag, value)) + } +} diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 3f0863ce427c7..4c4bf34450016 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -1,68 +1,182 @@ package postgresql import ( + "database/sql" + "fmt" "testing" "time" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/metric" - + _ "github.com/jackc/pgx/stdlib" "github.com/stretchr/testify/assert" ) -func TestPostgresqlQuote(t *testing.T) { - assert.Equal(t, `"foo"`, quoteIdent("foo")) - assert.Equal(t, `"fo'o"`, quoteIdent("fo'o")) - assert.Equal(t, `"fo""o"`, quoteIdent("fo\"o")) - - assert.Equal(t, "'foo'", quoteLiteral("foo")) - assert.Equal(t, "'fo''o'", quoteLiteral("fo'o")) - assert.Equal(t, "'fo\"o'", quoteLiteral("fo\"o")) -} - -func TestPostgresqlCreateStatement(t *testing.T) { - p := newPostgresql() +func TestWrite(t *testing.T) { timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) - - var m telegraf.Metric - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, 
timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) - - p.TagsAsJsonb = false - p.FieldsAsJsonb = false - - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) - + oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, timestamp) + noTags, _ := metric.New("m", nil, map[string]interface{}{"f": 1}, timestamp) + testCases := []struct { + desc string + input []telegraf.Metric + fieldsAsJSON bool + execs []sql.Result + expectedExecQueries []string + execErrs []error + expectErr string + }{ + { + desc: "no metrics, no error", + input: []telegraf.Metric{}, + expectErr: "", + }, { + desc: "metric table not cached, error on creating it", + input: []telegraf.Metric{oneMetric}, + execs: []sql.Result{nil}, + execErrs: []error{fmt.Errorf("error on first exec")}, + expectErr: "error on first exec", + }, { + desc: "metric table not cached, gets cached, no tags, fields as json, error on insert", + input: []telegraf.Metric{noTags}, + fieldsAsJSON: true, + execs: []sql.Result{nil, nil}, + execErrs: []error{nil, fmt.Errorf("error on batch insert")}, + expectErr: "error on batch 
insert", + }, { + desc: "metric table not cached, gets cached, has tags, json fields, all good", + input: []telegraf.Metric{oneMetric}, + fieldsAsJSON: true, + execs: []sql.Result{nil, nil}, + execErrs: []error{nil, nil}, + expectedExecQueries: []string{ + `CREATE TABLE IF NOT EXISTS "a"."m"(time timestamptz,"t" text,fields jsonb)`, + `INSERT INTO "a"."m"("time","t","fields") VALUES($1,$2,$3)`}, + }, { + desc: "metric table not cached, gets cached, has tags, all good", + input: []telegraf.Metric{oneMetric}, + execs: []sql.Result{nil, nil}, + execErrs: []error{nil, nil}, + expectedExecQueries: []string{ + `CREATE TABLE IF NOT EXISTS "a"."m"(time timestamptz,"t" text,"f" int8)`, + `INSERT INTO "a"."m"("time","t","f") VALUES($1,$2,$3)`}, + }, + } + + for _, testCase := range testCases { + p := &Postgresql{ + tables: &mockTk{tables: make(map[string]bool)}, + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", + Schema: "a", + FieldsAsJsonb: testCase.fieldsAsJSON, + db: &mockDb{ + exec: testCase.execs, + execErr: testCase.execErrs, + expectedQ: testCase.expectedExecQueries, + }} + err := p.Write(testCase.input) + if testCase.expectErr != "" { + assert.EqualError(t, err, testCase.expectErr, testCase.desc) + } else { + assert.Nil(t, err, testCase.desc) + } + } +} +func TestInsertBatches(t *testing.T) { + sampleData := map[string][]*colsAndValues{ + "tab": { + { + cols: []string{"a"}, + vals: []interface{}{1}, + }, + }, + } + + testCases := []struct { + input map[string][]*colsAndValues + desc string + resultsFromExec []sql.Result + errorsFromExec []error + errorOnQuery error + fieldsAsJSON bool + expectErr string + }{ + { + desc: "no batches, no errors", + input: make(map[string][]*colsAndValues), + errorsFromExec: []error{fmt.Errorf("should not have called exec")}, + }, { + desc: "error returned on first insert, fields as json", + input: sampleData, + resultsFromExec: []sql.Result{nil}, + errorsFromExec: []error{fmt.Errorf("error on first insert")}, + 
fieldsAsJSON: true, + expectErr: "error on first insert", + }, { + desc: "error returned on first insert, error on add column", + input: sampleData, + resultsFromExec: []sql.Result{nil}, + errorsFromExec: []error{fmt.Errorf("error on first insert")}, + errorOnQuery: fmt.Errorf("error on query"), + expectErr: "error on query", + }, { + desc: "no error on insert", + input: sampleData, + resultsFromExec: []sql.Result{nil}, + errorsFromExec: []error{nil}, + }, + } + + for _, testCase := range testCases { + m := &mockDb{exec: testCase.resultsFromExec, + execErr: testCase.errorsFromExec, + queryErr: testCase.errorOnQuery} + p := &Postgresql{ + db: m, + FieldsAsJsonb: testCase.fieldsAsJSON, + } + + err := p.insertBatches(testCase.input) + if testCase.expectErr != "" { + assert.EqualError(t, err, testCase.expectErr) + } else { + assert.Nil(t, err) + } + } } -func TestPostgresqlInsertStatement(t *testing.T) { - p := newPostgresql() - - p.TagsAsJsonb = false - p.FieldsAsJsonb = false +type mockDb struct { + currentExec int + exec []sql.Result + expectedQ []string + execErr []error + query *sql.Rows + queryErr error +} - sql := p.generateInsert("m", []string{"time", "f"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","f") VALUES($1,$2)`, sql) +func (m *mockDb) Exec(query string, args ...interface{}) (sql.Result, error) { + tmp := m.currentExec + m.currentExec++ + if m.expectedQ != nil && m.expectedQ[tmp] != query { + return nil, fmt.Errorf("unexpected query, got: '%s' expected: %s", query, m.expectedQ[tmp]) + } - sql = p.generateInsert("m", []string{"time", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","i") VALUES($1,$2)`, sql) + return m.exec[tmp], m.execErr[tmp] +} +func (m *mockDb) Query(query string, args ...interface{}) (*sql.Rows, error) { + return m.query, m.queryErr +} +func (m *mockDb) QueryRow(query string, args ...interface{}) *sql.Row { return nil } +func (m *mockDb) Close() error { return nil } - sql = p.generateInsert("m", []string{"time", "f", 
"i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","f","i") VALUES($1,$2,$3)`, sql) +type mockTk struct { + tables map[string]bool +} - sql = p.generateInsert("m", []string{"time", "k", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","k","i") VALUES($1,$2,$3)`, sql) +func (m *mockTk) add(tableName string) { + m.tables[tableName] = true +} - sql = p.generateInsert("m", []string{"time", "k1", "k2", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) +func (m *mockTk) exists(schema, table string) bool { + _, exists := m.tables[table] + return exists } diff --git a/plugins/outputs/postgresql/table_keeper.go b/plugins/outputs/postgresql/table_keeper.go new file mode 100644 index 0000000000000..3b0fd45ac481f --- /dev/null +++ b/plugins/outputs/postgresql/table_keeper.go @@ -0,0 +1,47 @@ +package postgresql + +import ( + "log" +) + +const ( + tableExistsTemplate = "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;" +) + +type tableKeeper interface { + exists(schema, tableName string) bool + add(tableName string) +} + +type defTableKeeper struct { + Tables map[string]bool + db dbWrapper +} + +func newTableKeeper(db dbWrapper) tableKeeper { + return &defTableKeeper{ + Tables: make(map[string]bool), + db: db, + } +} + +func (t *defTableKeeper) exists(schema, tableName string) bool { + if _, ok := t.Tables[tableName]; ok { + return true + } + + result, err := t.db.Exec(tableExistsTemplate, tableName, schema) + if err != nil { + log.Printf("E! 
Error checking for existence of metric table %s: %v", tableName, err) + return false + } + if count, _ := result.RowsAffected(); count == 1 { + t.Tables[tableName] = true + return true + } + return false +} + +func (t *defTableKeeper) add(tableName string) { + t.Tables[tableName] = true +} diff --git a/plugins/outputs/postgresql/table_keeper_test.go b/plugins/outputs/postgresql/table_keeper_test.go new file mode 100644 index 0000000000000..0d7bb77bec307 --- /dev/null +++ b/plugins/outputs/postgresql/table_keeper_test.go @@ -0,0 +1,71 @@ +package postgresql + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewTableKeeper(t *testing.T) { + mock := &mockWr{} + tk := newTableKeeper(mock).(*defTableKeeper) + assert.Equal(t, mock, tk.db) + assert.Empty(t, tk.Tables) +} + +func TestTableKeeperAdd(t *testing.T) { + tk := newTableKeeper(nil).(*defTableKeeper) + tk.add("table") + tk.add("table2") + assert.Equal(t, 2, len(tk.Tables)) + assert.True(t, tk.Tables["table"]) + assert.True(t, tk.Tables["table2"]) + assert.False(t, tk.Tables["table3"]) + tk.add("table2") + assert.Equal(t, 2, len(tk.Tables)) +} + +func TestTableKeeperExists(t *testing.T) { + mock := &mockWr{} + tk := newTableKeeper(mock).(*defTableKeeper) + table := "table name" + + // table cached + tk.Tables[table] = true + mock.execErr = fmt.Errorf("should not call execute") + assert.True(t, tk.exists("", table)) + + // error on table exists query + mock.execErr = fmt.Errorf("error on query execute") + mock.expected = tableExistsTemplate + delete(tk.Tables, table) + assert.False(t, tk.exists("", table)) + assert.Equal(t, 0, len(tk.Tables)) + + // fetch from db, doesn't exist + mock.execErr = nil + mock.exec = &mockResult{} + assert.False(t, tk.exists("", table)) + + // fetch from db, exists + mock.exec = &mockResult{rows: 1} + assert.True(t, tk.exists("", table)) + assert.Equal(t, 1, len(tk.Tables)) + assert.True(t, tk.Tables[table]) +} + +type mockResult struct { + rows 
int64 + rowErr error + last int64 + lastErr error +} + +func (m *mockResult) LastInsertId() (int64, error) { + return m.last, m.lastErr +} + +func (m *mockResult) RowsAffected() (int64, error) { + return m.rows, m.rowErr +} diff --git a/plugins/outputs/postgresql/utils.go b/plugins/outputs/postgresql/utils.go new file mode 100644 index 0000000000000..801a2b6aac4d5 --- /dev/null +++ b/plugins/outputs/postgresql/utils.go @@ -0,0 +1,68 @@ +package postgresql + +import ( + "encoding/json" + "log" + "strings" + + "github.com/jackc/pgx" +) + +func buildJsonbTags(tags map[string]string) ([]byte, error) { + js := make(map[string]interface{}) + for column, value := range tags { + js[column] = value + } + + return buildJsonb(js) +} + +func buildJsonb(data map[string]interface{}) ([]byte, error) { + if len(data) > 0 { + d, err := json.Marshal(data) + if err != nil { + return nil, err + } + return d, nil + } + + return nil, nil +} + +func quoteIdent(name string) string { + return pgx.Identifier{name}.Sanitize() +} + +func quoteLiteral(name string) string { + return "'" + strings.Replace(name, "'", "''", -1) + "'" +} + +func deriveDatatype(value interface{}) string { + var datatype string + + switch value.(type) { + case bool: + datatype = "boolean" + case uint64: + datatype = "int8" + case int64: + datatype = "int8" + case float64: + datatype = "float8" + case string: + datatype = "text" + default: + datatype = "text" + log.Printf("E! Unknown datatype %T(%v)", value, value) + } + return datatype +} + +func contains(haystack []string, needle string) bool { + for _, key := range haystack { + if key == needle { + return true + } + } + return false +} From b3f405da90be80dc893227096ce764dae81c0102 Mon Sep 17 00:00:00 2001 From: Blagoj Atanasovski Date: Sun, 14 Jul 2019 12:57:53 +0200 Subject: [PATCH 77/79] Optimize insert performance of metrics and tag_id lookup A refactor was done that optimized several things: 1. 
Metrics are organized by measurement and schema validation is done only once instead of every metric 2. Metric values are batched and inserted with binary COPY 3. Tag IDs are now cached with a LRU strategy --- Gopkg.lock | 12 + Gopkg.toml | 11 +- plugins/outputs/postgresql/README.md | 60 ++-- .../outputs/postgresql/add_missing_columns.go | 72 ----- .../postgresql/add_missing_columns_test.go | 92 ------ .../postgresql/columns/column_mapper.go | 68 +++++ .../postgresql/columns/columns_initializer.go | 139 +++++++++ .../outputs/postgresql/columns/map_fields.go | 18 ++ .../outputs/postgresql/columns/map_tags.go | 18 ++ .../postgresql/columns/standard_columns.go | 16 ++ plugins/outputs/postgresql/create_table.go | 72 ----- .../outputs/postgresql/create_table_test.go | 48 ---- plugins/outputs/postgresql/db/db_wrapper.go | 65 +++++ plugins/outputs/postgresql/db_wrapper.go | 43 --- plugins/outputs/postgresql/generate_insert.go | 24 -- .../postgresql/generate_insert_test.go | 39 --- plugins/outputs/postgresql/get_tag_id.go | 87 ------ plugins/outputs/postgresql/postgresql.go | 237 ++++++---------- .../postgresql/postgresql_integration_test.go | 150 ++++++++++ plugins/outputs/postgresql/postgresql_test.go | 263 ++++++++---------- plugins/outputs/postgresql/table_keeper.go | 47 ---- .../outputs/postgresql/table_keeper_test.go | 71 ----- plugins/outputs/postgresql/tables/manager.go | 208 ++++++++++++++ .../outputs/postgresql/tables/manager_test.go | 139 +++++++++ plugins/outputs/postgresql/tags_cache.go | 159 +++++++++++ plugins/outputs/postgresql/transformer.go | 72 +++++ plugins/outputs/postgresql/utils.go | 68 ----- plugins/outputs/postgresql/utils/types.go | 30 ++ plugins/outputs/postgresql/utils/utils.go | 168 +++++++++++ .../outputs/postgresql/utils/utils_test.go | 138 +++++++++ 30 files changed, 1646 insertions(+), 988 deletions(-) delete mode 100644 plugins/outputs/postgresql/add_missing_columns.go delete mode 100644 
plugins/outputs/postgresql/add_missing_columns_test.go create mode 100644 plugins/outputs/postgresql/columns/column_mapper.go create mode 100644 plugins/outputs/postgresql/columns/columns_initializer.go create mode 100644 plugins/outputs/postgresql/columns/map_fields.go create mode 100644 plugins/outputs/postgresql/columns/map_tags.go create mode 100644 plugins/outputs/postgresql/columns/standard_columns.go delete mode 100644 plugins/outputs/postgresql/create_table.go delete mode 100644 plugins/outputs/postgresql/create_table_test.go create mode 100644 plugins/outputs/postgresql/db/db_wrapper.go delete mode 100644 plugins/outputs/postgresql/db_wrapper.go delete mode 100644 plugins/outputs/postgresql/generate_insert.go delete mode 100644 plugins/outputs/postgresql/generate_insert_test.go delete mode 100644 plugins/outputs/postgresql/get_tag_id.go delete mode 100644 plugins/outputs/postgresql/table_keeper.go delete mode 100644 plugins/outputs/postgresql/table_keeper_test.go create mode 100644 plugins/outputs/postgresql/tables/manager.go create mode 100644 plugins/outputs/postgresql/tables/manager_test.go create mode 100644 plugins/outputs/postgresql/tags_cache.go create mode 100644 plugins/outputs/postgresql/transformer.go delete mode 100644 plugins/outputs/postgresql/utils.go create mode 100644 plugins/outputs/postgresql/utils/types.go create mode 100644 plugins/outputs/postgresql/utils/utils.go create mode 100644 plugins/outputs/postgresql/utils/utils_test.go diff --git a/Gopkg.lock b/Gopkg.lock index 3fabcfb77cfe9..a2de1f7655fd2 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -532,6 +532,14 @@ revision = "9fa652df1129bef0e734c9cf9bf6dbae9ef3b9fa" version = "1.3.1" +[[projects]] + branch = "master" + digest = "1:e1822d37be8e11e101357a27170527b1056c99182407f270e080f76409adbd9a" + name = "github.com/golang/groupcache" + packages = ["lru"] + pruneopts = "" + revision = "869f871628b6baa9cfbc11732cdf6546b17c1298" + [[projects]] digest = 
"1:f958a1c137db276e52f0b50efee41a1a389dcdded59a69711f3e872757dab34b" name = "github.com/golang/protobuf" @@ -1762,8 +1770,12 @@ "github.com/go-redis/redis", "github.com/go-sql-driver/mysql", "github.com/gobwas/glob", +<<<<<<< HEAD "github.com/gofrs/uuid", "github.com/gogo/protobuf/proto", +======= + "github.com/golang/groupcache/lru", +>>>>>>> Optimize insert performance of metrics and tag_id lookup "github.com/golang/protobuf/proto", "github.com/golang/protobuf/ptypes/duration", "github.com/golang/protobuf/ptypes/empty", diff --git a/Gopkg.toml b/Gopkg.toml index 5b0a2dba45ca8..0963de6713594 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -18,6 +18,10 @@ name = "github.com/aws/aws-sdk-go" version = "1.19.41" +[[constraint]] + name = "github.com/bsm/sarama-cluster" + version = "2.1.13" + [[constraint]] name = "github.com/couchbase/go-couchbase" branch = "master" @@ -36,7 +40,7 @@ [[constraint]] name = "github.com/eclipse/paho.mqtt.golang" - version = "1" + version = "~1.1.1" [[constraint]] name = "github.com/go-sql-driver/mysql" @@ -308,3 +312,8 @@ [[override]] name = "github.com/satori/go.uuid" revision = "b2ce2384e17bbe0c6d34077efa39dbab3e09123b" + +[[constraint]] + branch = "master" + name = "github.com/golang/groupcache" + diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index b9d0020682023..2a38b7a08ab5a 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -1,36 +1,44 @@ # PostgreSQL Output Plugin -This output plugin writes all metrics to PostgreSQL. +This output plugin writes all metrics to PostgreSQL. +The plugin manages the schema automatically updating missing columns, and checking if existing ones are of the proper type. ### Configuration: ```toml # Send metrics to postgres [[outputs.postgresql]] - address = "host=localhost user=postgres sslmode=verify-full" - - ## Store tags as foreign keys in the metrics table. 
Default is false. - # tags_as_foreignkeys = false - - ## Template to use for generating tables - ## Available Variables: - ## {TABLE} - tablename as identifier - ## {TABLELITERAL} - tablename as string literal - ## {COLUMNS} - column definitions - ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) - - ## Default template - # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})" - ## Example for timescaledb - # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval, if_not_exists := true);" - - ## Schema to create the tables into - # schema = "public" - - ## Use jsonb datatype for tags. Default is true. - # tags_as_jsonb = false - - ## Use jsonb datatype for fields. Default is true. - # fields_as_jsonb = false + address = "host=localhost user=postgres sslmode=verify-full" + + ## Update existing tables to match the incoming metrics. Default is true + # do_schema_updates = true + + ## Store tags as foreign keys in the metrics table. Default is false. + # tags_as_foreignkeys = false + + ## If tags_as_foreignkeys is set to true you can choose the number of tag sets to cache + ## per measurement (metric name). Default is 1000, if set to 0 => cache has no limit. 
+ # cached_tagsets_per_measurement = 1000 + + ## Template to use for generating tables + ## Available Variables: + ## {TABLE} - tablename as identifier + ## {TABLELITERAL} - tablename as string literal + ## {COLUMNS} - column definitions + ## {KEY_COLUMNS} - comma-separated list of key columns (time + tags) + + ## Default template + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})" + ## Example for timescaledb + # table_template = "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}); SELECT create_hypertable({TABLELITERAL},'time',chunk_time_interval := '1 week'::interval, if_not_exists := true);" + + ## Schema to create the tables into + # schema = "public" + + ## Use jsonb datatype for tags. Default is false. + # tags_as_jsonb = false + + ## Use jsonb datatype for fields. Default is false. + # fields_as_jsonb = false ``` diff --git a/plugins/outputs/postgresql/add_missing_columns.go b/plugins/outputs/postgresql/add_missing_columns.go deleted file mode 100644 index 1d2be69bf3c55..0000000000000 --- a/plugins/outputs/postgresql/add_missing_columns.go +++ /dev/null @@ -1,72 +0,0 @@ -package postgresql - -import ( - "fmt" - "log" - "strings" -) - -func (p *Postgresql) addMissingColumns(tableName string, columns []string, values []interface{}) (bool, error) { - columnStatuses, err := p.whichColumnsAreMissing(columns, tableName) - if err != nil { - return false, err - } - - retry := false - for currentColumn, isMissing := range columnStatuses { - if !isMissing { - continue - } - - dataType := deriveDatatype(values[currentColumn]) - columnName := columns[currentColumn] - if err := p.addColumnToTable(columnName, dataType, tableName); err != nil { - return false, err - } - retry = true - } - - return retry, nil -} - -func prepareMissingColumnsQuery(columns []string) string { - var quotedColumns = make([]string, len(columns)) - for i, column := range columns { - quotedColumns[i] = quoteLiteral(column) - } - return fmt.Sprintf(missingColumnsTemplate, 
strings.Join(quotedColumns, ",")) -} - -// for a given array of columns x = [a, b, c ...] it returns an array of bools indicating -// if x[i] is missing -func (p *Postgresql) whichColumnsAreMissing(columns []string, tableName string) ([]bool, error) { - missingColumnsQuery := prepareMissingColumnsQuery(columns) - result, err := p.db.Query(missingColumnsQuery, p.Schema, tableName) - if err != nil { - return nil, err - } - defer result.Close() - columnStatus := make([]bool, len(columns)) - var isMissing bool - var columnName string - currentColumn := 0 - - for result.Next() { - err := result.Scan(&columnName, &isMissing) - if err != nil { - log.Println(err) - return nil, err - } - columnStatus[currentColumn] = isMissing - currentColumn++ - } - - return columnStatus, nil -} - -func (p *Postgresql) addColumnToTable(columnName, dataType, tableName string) error { - fullTableName := p.fullTableName(tableName) - addColumnQuery := fmt.Sprintf(addColumnTemplate, fullTableName, quoteIdent(columnName), dataType) - _, err := p.db.Exec(addColumnQuery) - return err -} diff --git a/plugins/outputs/postgresql/add_missing_columns_test.go b/plugins/outputs/postgresql/add_missing_columns_test.go deleted file mode 100644 index 7140847e03375..0000000000000 --- a/plugins/outputs/postgresql/add_missing_columns_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package postgresql - -import ( - "database/sql" - "fmt" - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func prepareMissingColumnsQuery1(columns []string) string { - var quotedColumns = make([]string, len(columns)) - for i, column := range columns { - quotedColumns[i] = quoteLiteral(column) - } - return fmt.Sprintf(missingColumnsTemplate, strings.Join(quotedColumns, ",")) -} - -func TestPrepareMissingColumnsQuery(t *testing.T) { - columns := []string{} - assert.Equal(t, `WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2),`+ - `required AS (SELECT c 
FROM unnest(array []) AS c) `+ - `SELECT required.c, available.c IS NULL FROM required LEFT JOIN available ON required.c = available.c;`, - prepareMissingColumnsQuery(columns)) - columns = []string{"a", "b", "c"} - assert.Equal(t, `WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2),`+ - `required AS (SELECT c FROM unnest(array ['a','b','c']) AS c) `+ - `SELECT required.c, available.c IS NULL FROM required LEFT JOIN available ON required.c = available.c;`, - prepareMissingColumnsQuery(columns)) -} - -func TestWhichColumnsAreMissing(t *testing.T) { - mock := &mockWr{} - p := &Postgresql{db: mock} - - columns := []string{"col1"} - mock.queryErr = fmt.Errorf("error 1") - mock.expected = prepareMissingColumnsQuery(columns) - table := "tableName" - _, err := p.whichColumnsAreMissing(columns, table) - assert.Equal(t, err.Error(), "error 1") -} - -func TestAddColumnToTable(t *testing.T) { - mock := &mockWr{} - p := &Postgresql{db: mock, Schema: "pub"} - - column := "col1" - dataType := "text" - tableName := "table" - mock.execErr = fmt.Errorf("error 1") - mock.expected = `ALTER TABLE "pub"."table" ADD COLUMN IF NOT EXISTS "col1" text;` - err := p.addColumnToTable(column, dataType, tableName) - assert.EqualError(t, err, "error 1") - - mock.execErr = nil - assert.Nil(t, p.addColumnToTable(column, dataType, tableName)) - -} - -func (p *Postgresql) addColumnToTable1(columnName, dataType, tableName string) error { - fullTableName := p.fullTableName(tableName) - addColumnQuery := fmt.Sprintf(addColumnTemplate, fullTableName, quoteIdent(columnName), dataType) - _, err := p.db.Exec(addColumnQuery) - return err -} - -type mockWr struct { - expected string - exec sql.Result - execErr error - query *sql.Rows - queryErr error -} - -func (m *mockWr) Exec(query string, args ...interface{}) (sql.Result, error) { - if m.expected != query { - return nil, fmt.Errorf("unexpected query; exp: '%s'; got: '%s'", m.expected, 
query) - } - return m.exec, m.execErr -} -func (m *mockWr) Query(query string, args ...interface{}) (*sql.Rows, error) { - if m.expected != query { - return nil, fmt.Errorf("unexpected query; exp: '%s'; got: '%s'", m.expected, query) - } - return m.query, m.queryErr -} -func (m *mockWr) QueryRow(query string, args ...interface{}) *sql.Row { - return nil -} -func (m *mockWr) Close() error { return nil } diff --git a/plugins/outputs/postgresql/columns/column_mapper.go b/plugins/outputs/postgresql/columns/column_mapper.go new file mode 100644 index 0000000000000..99692997ab904 --- /dev/null +++ b/plugins/outputs/postgresql/columns/column_mapper.go @@ -0,0 +1,68 @@ +package columns + +import ( + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +// Mapper knows how to generate the column details for the main and tags table in the db +type Mapper interface { + // Iterate through an array of 'metrics' visiting only those indexed by 'indices' + // and depending on 'tagsAsFK', 'tagsAsJSON', and 'fieldsAsJSON' generate the + // desired columns (their name, type and which role they play) for both the + // main metrics table in the DB, and if tagsAsFK == true for the tags table. + Target(indices []int, metrics []telegraf.Metric) (*utils.TargetColumns, *utils.TargetColumns) +} + +type defMapper struct { + initTargetColumns targetColumnInitializer + tagsAsFK bool + tagsAsJSON bool + fieldsAsJSON bool +} + +// NewMapper returns a new implementation of the columns.Mapper interface. 
+func NewMapper(tagsAsFK, tagsAsJSON, fieldsAsJSON bool) Mapper { + initializer := getInitialColumnsGenerator(tagsAsFK, tagsAsJSON, fieldsAsJSON) + return &defMapper{ + tagsAsFK: tagsAsFK, + tagsAsJSON: tagsAsJSON, + fieldsAsJSON: fieldsAsJSON, + initTargetColumns: initializer, + } +} + +// Iterate through an array of 'metrics' visiting only those indexed by 'indices' +// and depending on 'tagsAsFK', 'tagsAsJSON', and 'fieldsAsJSON' generate the +// desired columns (their name, type and which role they play) for both the +// main metrics table in the DB, and if tagsAsFK == true for the tags table. +func (d *defMapper) Target(indices []int, metrics []telegraf.Metric) (*utils.TargetColumns, *utils.TargetColumns) { + columns, tagColumns := d.initTargetColumns() + if d.tagsAsJSON && d.fieldsAsJSON { + // if json is used for both, that's all the columns you need + return columns, tagColumns + } + + alreadyMapped := map[string]bool{} + // Iterate the metrics indexed by 'indices' and populate all the resulting required columns + // e.g. 
metric1(tags:[t1], fields:[f1,f2]), metric2(tags:[t2],fields:[f2, f3]) + // => columns = [time, t1, f1, f2, t2, f3], tagColumns = nil + // if tagsAsFK == true + // columns = [time, tagID, f1, f2, f3], tagColumns = [tagID, t1, t2] + // if tagsAsFK == true && fieldsAsJSON = true + // cols = [time, tagID, fields], tagCols = [tagID, t1, t2] + for _, index := range indices { + metric := metrics[index] + if !d.tagsAsJSON { + whichColumns := columns + if d.tagsAsFK { + whichColumns = tagColumns + } + mapTags(metric.TagList(), alreadyMapped, whichColumns) + } + + mapFields(metric.FieldList(), alreadyMapped, columns) + } + + return columns, tagColumns +} diff --git a/plugins/outputs/postgresql/columns/columns_initializer.go b/plugins/outputs/postgresql/columns/columns_initializer.go new file mode 100644 index 0000000000000..5391dabe93f27 --- /dev/null +++ b/plugins/outputs/postgresql/columns/columns_initializer.go @@ -0,0 +1,139 @@ +package columns + +import "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" + +// a function type that generates column details for the main, and tags table in the db +type targetColumnInitializer func() (*utils.TargetColumns, *utils.TargetColumns) + +// constants used for populating the 'targetColumnInit' map (for better readability) +const ( + cTagsAsFK = true + cTagsAsJSON = true + cFieldsAsJSON = true +) + +// Since some of the target columns for the tables in the database don't +// depend on the metrics received, but on the plugin config, we can have +// constant initializer functions. It is always known that the 'time' +// column goes first in the main table, then if the tags are kept in a +// separate table you need to add the 'tag_id' column... +// This map contains an initializer for all the combinations +// of (tagsAsFK, tagsAsJSON, fieldsAsJSON). 
+func getInitialColumnsGenerator(tagsAsFK, tagsAsJSON, fieldsAsJSON bool) targetColumnInitializer { + return standardColumns[tagsAsFK][tagsAsJSON][fieldsAsJSON] +} + +var standardColumns = map[bool]map[bool]map[bool]targetColumnInitializer{ + cTagsAsFK: { + cTagsAsJSON: { + cFieldsAsJSON: tagsAsFKAndJSONAndFieldsAsJSONInit, + !cFieldsAsJSON: tagsAsFKAndJSONInit, + }, + !cTagsAsJSON: { + cFieldsAsJSON: tagsAsFKFieldsAsJSONInit, + !cFieldsAsJSON: tagsAsFKInit, + }, + }, + !cTagsAsFK: { + cTagsAsJSON: { + cFieldsAsJSON: tagsAndFieldsAsJSONInit, + !cFieldsAsJSON: tagsAsJSONInit, + }, + !cTagsAsJSON: { + cFieldsAsJSON: fieldsAsJSONInit, + !cFieldsAsJSON: vanillaColumns, + }, + }, +} + +func tagsAsFKAndJSONAndFieldsAsJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, TagIDColumnName, FieldsJSONColumn}, + DataTypes: []utils.PgDataType{TimeColumnDataType, TagIDColumnDataType, JSONColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagIDColumnName: 1, FieldsJSONColumn: 2}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagsIDColType, utils.FieldColType}, + }, &utils.TargetColumns{ + Names: []string{TagIDColumnName, TagsJSONColumn}, + DataTypes: []utils.PgDataType{TagIDColumnDataTypeAsPK, JSONColumnDataType}, + Target: map[string]int{TagIDColumnName: 0, TagsJSONColumn: 1}, + Roles: []utils.ColumnRole{utils.TagsIDColType, utils.TagColType}, + } +} + +func tagsAsFKAndJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, TagIDColumnName}, + DataTypes: []utils.PgDataType{TimeColumnDataType, TagIDColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagIDColumnName: 1}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagsIDColType}, + }, &utils.TargetColumns{ + Names: []string{TagIDColumnName, TagsJSONColumn}, + DataTypes: []utils.PgDataType{TagIDColumnDataTypeAsPK, JSONColumnDataType}, + Target: 
map[string]int{TagIDColumnName: 0, TagsJSONColumn: 1}, + Roles: []utils.ColumnRole{utils.TagsIDColType, utils.FieldColType}, + } +} + +func tagsAsFKFieldsAsJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, TagIDColumnName, FieldsJSONColumn}, + DataTypes: []utils.PgDataType{TimeColumnDataType, TagIDColumnDataType, JSONColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagIDColumnName: 1, FieldsJSONColumn: 2}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagsIDColType, utils.FieldColType}, + }, &utils.TargetColumns{ + Names: []string{TagIDColumnName}, + DataTypes: []utils.PgDataType{TagIDColumnDataTypeAsPK}, + Target: map[string]int{TagIDColumnName: 0}, + Roles: []utils.ColumnRole{utils.TagsIDColType}, + } +} + +func tagsAsFKInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, TagIDColumnName}, + DataTypes: []utils.PgDataType{TimeColumnDataType, TagIDColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagIDColumnName: 1}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagsIDColType}, + }, &utils.TargetColumns{ + Names: []string{TagIDColumnName}, + DataTypes: []utils.PgDataType{TagIDColumnDataTypeAsPK}, + Target: map[string]int{TagIDColumnName: 0}, + Roles: []utils.ColumnRole{utils.TagsIDColType}, + } +} + +func tagsAndFieldsAsJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, TagsJSONColumn, FieldsJSONColumn}, + DataTypes: []utils.PgDataType{TimeColumnDataType, JSONColumnDataType, JSONColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagsJSONColumn: 1, FieldsJSONColumn: 2}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagColType, utils.FieldColType}, + }, nil +} + +func tagsAsJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, 
TagsJSONColumn}, + DataTypes: []utils.PgDataType{TimeColumnDataType, JSONColumnDataType}, + Target: map[string]int{TimeColumnName: 0, TagsJSONColumn: 1}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagColType}, + }, nil +} + +func fieldsAsJSONInit() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName, FieldsJSONColumn}, + DataTypes: []utils.PgDataType{TimeColumnDataType, JSONColumnDataType}, + Target: map[string]int{TimeColumnName: 0, FieldsJSONColumn: 1}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.FieldColType}, + }, nil +} + +func vanillaColumns() (*utils.TargetColumns, *utils.TargetColumns) { + return &utils.TargetColumns{ + Names: []string{TimeColumnName}, + DataTypes: []utils.PgDataType{TimeColumnDataType}, + Target: map[string]int{TimeColumnName: 0}, + Roles: []utils.ColumnRole{utils.TimeColType}, + }, nil +} diff --git a/plugins/outputs/postgresql/columns/map_fields.go b/plugins/outputs/postgresql/columns/map_fields.go new file mode 100644 index 0000000000000..e905d5ddf66a0 --- /dev/null +++ b/plugins/outputs/postgresql/columns/map_fields.go @@ -0,0 +1,18 @@ +package columns + +import ( + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +func mapFields(fieldList []*telegraf.Field, alreadyMapped map[string]bool, columns *utils.TargetColumns) { + for _, field := range fieldList { + if _, ok := alreadyMapped[field.Key]; !ok { + alreadyMapped[field.Key] = true + columns.Target[field.Key] = len(columns.Names) + columns.Names = append(columns.Names, field.Key) + columns.DataTypes = append(columns.DataTypes, utils.DerivePgDatatype(field.Value)) + columns.Roles = append(columns.Roles, utils.FieldColType) + } + } +} diff --git a/plugins/outputs/postgresql/columns/map_tags.go b/plugins/outputs/postgresql/columns/map_tags.go new file mode 100644 index 0000000000000..7bb575c0da768 --- /dev/null +++ 
b/plugins/outputs/postgresql/columns/map_tags.go @@ -0,0 +1,18 @@ +package columns + +import ( + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +func mapTags(tagList []*telegraf.Tag, alreadyMapped map[string]bool, columns *utils.TargetColumns) { + for _, tag := range tagList { + if _, ok := alreadyMapped[tag.Key]; !ok { + alreadyMapped[tag.Key] = true + columns.Target[tag.Key] = len(columns.Names) + columns.Names = append(columns.Names, tag.Key) + columns.DataTypes = append(columns.DataTypes, utils.PgText) + columns.Roles = append(columns.Roles, utils.TagColType) + } + } +} diff --git a/plugins/outputs/postgresql/columns/standard_columns.go b/plugins/outputs/postgresql/columns/standard_columns.go new file mode 100644 index 0000000000000..75abe2ec6e869 --- /dev/null +++ b/plugins/outputs/postgresql/columns/standard_columns.go @@ -0,0 +1,16 @@ +package columns + +import "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" + +// Column names and data types for standard fields (time, tag_id, tags, and fields) +const ( + TimeColumnName = "time" + TimeColumnDataType = utils.PgTimestamptz + TimeColumnDefinition = TimeColumnName + " " + utils.PgTimestamptz + TagIDColumnName = "tag_id" + TagIDColumnDataType = utils.PgInt4 + TagIDColumnDataTypeAsPK = utils.PgSerial + TagsJSONColumn = "tags" + FieldsJSONColumn = "fields" + JSONColumnDataType = utils.PgJSONb +) diff --git a/plugins/outputs/postgresql/create_table.go b/plugins/outputs/postgresql/create_table.go deleted file mode 100644 index 9b647e276b3ee..0000000000000 --- a/plugins/outputs/postgresql/create_table.go +++ /dev/null @@ -1,72 +0,0 @@ -package postgresql - -import ( - "fmt" - "strings" - - "github.com/influxdata/telegraf" -) - -const ( - tagIDColumn = "tag_id" - createTagsTableTemplate = "CREATE TABLE IF NOT EXISTS %s(tag_id serial primary key,%s,UNIQUE(%s))" -) - -func (p *Postgresql) generateCreateTable(metric telegraf.Metric) 
string { - var columns []string - var pk []string - var sql []string - - pk = append(pk, quoteIdent("time")) - columns = append(columns, "time timestamptz") - - // handle tags if necessary - if len(metric.Tags()) > 0 { - if p.TagsAsForeignkeys { - // tags in separate table - var tagColumns []string - var tagColumndefs []string - columns = append(columns, "tag_id int") - - if p.TagsAsJsonb { - tagColumns = append(tagColumns, "tags") - tagColumndefs = append(tagColumndefs, "tags jsonb") - } else { - for column := range metric.Tags() { - tagColumns = append(tagColumns, quoteIdent(column)) - tagColumndefs = append(tagColumndefs, fmt.Sprintf("%s text", quoteIdent(column))) - } - } - table := p.fullTableName(metric.Name() + p.TagTableSuffix) - sql = append(sql, fmt.Sprintf(createTagsTableTemplate, table, strings.Join(tagColumndefs, ","), strings.Join(tagColumns, ","))) - } else { - // tags in measurement table - if p.TagsAsJsonb { - columns = append(columns, "tags jsonb") - } else { - for column := range metric.Tags() { - pk = append(pk, quoteIdent(column)) - columns = append(columns, fmt.Sprintf("%s text", quoteIdent(column))) - } - } - } - } - - if p.FieldsAsJsonb { - columns = append(columns, "fields jsonb") - } else { - var datatype string - for column, v := range metric.Fields() { - datatype = deriveDatatype(v) - columns = append(columns, fmt.Sprintf("%s %s", quoteIdent(column), datatype)) - } - } - - query := strings.Replace(p.TableTemplate, "{TABLE}", p.fullTableName(metric.Name()), -1) - query = strings.Replace(query, "{TABLELITERAL}", quoteLiteral(p.fullTableName(metric.Name())), -1) - query = strings.Replace(query, "{COLUMNS}", strings.Join(columns, ","), -1) - query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) - - sql = append(sql, query) - return strings.Join(sql, ";") -} diff --git a/plugins/outputs/postgresql/create_table_test.go b/plugins/outputs/postgresql/create_table_test.go deleted file mode 100644 index 
404e3fdbd4683..0000000000000 --- a/plugins/outputs/postgresql/create_table_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package postgresql - -import ( - "testing" - "time" - - "github.com/influxdata/telegraf" - "github.com/influxdata/telegraf/metric" - "github.com/stretchr/testify/assert" -) - -func TestGenerateCreateTable(t *testing.T) { - p := newPostgresql() - p.TagsAsJsonb = true - p.FieldsAsJsonb = true - timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) - - var m telegraf.Metric - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,fields jsonb)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tags jsonb,fields jsonb)`, p.generateCreateTable(m)) - - p.TagsAsJsonb = false - p.FieldsAsJsonb = false - - m, _ = metric.New("m", nil, map[string]interface{}{"f": float64(3.14)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"f" float8)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", nil, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"i" int8)`, p.generateCreateTable(m)) - - m, _ = metric.New("m", map[string]string{"k": "v"}, map[string]interface{}{"i": int(3)}, timestamp) - assert.Equal(t, `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,"k" text,"i" int8)`, p.generateCreateTable(m)) - - p.TagsAsForeignkeys = true - assert.Equal(t, - `CREATE TABLE IF NOT EXISTS "public"."m_tag"(tag_id serial primary key,"k" text,UNIQUE("k"));`+ - `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tag_id int,"i" int8)`, - p.generateCreateTable(m)) - - p.TagsAsJsonb = true - assert.Equal(t, - `CREATE TABLE IF NOT EXISTS "public"."m_tag"(tag_id serial primary 
key,tags jsonb,UNIQUE(tags));`+ - `CREATE TABLE IF NOT EXISTS "public"."m"(time timestamptz,tag_id int,"i" int8)`, - p.generateCreateTable(m)) -} diff --git a/plugins/outputs/postgresql/db/db_wrapper.go b/plugins/outputs/postgresql/db/db_wrapper.go new file mode 100644 index 0000000000000..c6cf999b1bc0c --- /dev/null +++ b/plugins/outputs/postgresql/db/db_wrapper.go @@ -0,0 +1,65 @@ +package db + +import ( + "log" + + "github.com/jackc/pgx" + // pgx driver for sql connections + _ "github.com/jackc/pgx/stdlib" +) + +// Wrapper defines an interface that encapsulates communication with a DB. +type Wrapper interface { + Exec(query string, args ...interface{}) (pgx.CommandTag, error) + DoCopy(fullTableName *pgx.Identifier, colNames []string, batch [][]interface{}) error + Query(query string, args ...interface{}) (*pgx.Rows, error) + QueryRow(query string, args ...interface{}) *pgx.Row + Close() error +} + +type defaultDbWrapper struct { + db *pgx.Conn +} + +// NewWrapper returns an implementation of the db.Wrapper interface +// that issues queries to a PG database. +func NewWrapper(address string) (Wrapper, error) { + connConfig, err := pgx.ParseConnectionString(address) + if err != nil { + log.Printf("E! Couldn't parse connection address: %s\n%v", address, err) + return nil, err + } + db, err := pgx.Connect(connConfig) + if err != nil { + log.Printf("E! Couldn't connect to server\n%v", err) + return nil, err + } + + return &defaultDbWrapper{ + db: db, + }, nil +} + +func (d *defaultDbWrapper) Exec(query string, args ...interface{}) (pgx.CommandTag, error) { + return d.db.Exec(query, args...) +} + +func (d *defaultDbWrapper) DoCopy(fullTableName *pgx.Identifier, colNames []string, batch [][]interface{}) error { + source := pgx.CopyFromRows(batch) + _, err := d.db.CopyFrom(*fullTableName, colNames, source) + if err != nil { + log.Printf("E! 
Could not insert batch of rows in output db\n%v", err) + } + + return err +} + +func (d *defaultDbWrapper) Close() error { return d.db.Close() } + +func (d *defaultDbWrapper) Query(query string, args ...interface{}) (*pgx.Rows, error) { + return d.db.Query(query, args...) +} + +func (d *defaultDbWrapper) QueryRow(query string, args ...interface{}) *pgx.Row { + return d.db.QueryRow(query, args...) +} diff --git a/plugins/outputs/postgresql/db_wrapper.go b/plugins/outputs/postgresql/db_wrapper.go deleted file mode 100644 index bb095429d985b..0000000000000 --- a/plugins/outputs/postgresql/db_wrapper.go +++ /dev/null @@ -1,43 +0,0 @@ -package postgresql - -import ( - "database/sql" - // pgx driver for sql connections - _ "github.com/jackc/pgx/stdlib" -) - -type dbWrapper interface { - Exec(query string, args ...interface{}) (sql.Result, error) - Query(query string, args ...interface{}) (*sql.Rows, error) - QueryRow(query string, args ...interface{}) *sql.Row - Close() error -} - -type defaultDbWrapper struct { - db *sql.DB -} - -func newDbWrapper(address string) (dbWrapper, error) { - db, err := sql.Open("pgx", address) - if err != nil { - return nil, err - } - - return &defaultDbWrapper{ - db: db, - }, nil -} - -func (d *defaultDbWrapper) Exec(query string, args ...interface{}) (sql.Result, error) { - return d.db.Exec(query, args...) -} - -func (d *defaultDbWrapper) Close() error { return d.db.Close() } - -func (d *defaultDbWrapper) Query(query string, args ...interface{}) (*sql.Rows, error) { - return d.db.Query(query, args...) -} - -func (d *defaultDbWrapper) QueryRow(query string, args ...interface{}) *sql.Row { - return d.db.QueryRow(query, args...) 
-} diff --git a/plugins/outputs/postgresql/generate_insert.go b/plugins/outputs/postgresql/generate_insert.go deleted file mode 100644 index c71c884845da3..0000000000000 --- a/plugins/outputs/postgresql/generate_insert.go +++ /dev/null @@ -1,24 +0,0 @@ -package postgresql - -import ( - "fmt" - "strings" -) - -const ( - insertIntoSQLTemplate = "INSERT INTO %s(%s) VALUES(%s)" -) - -func (p *Postgresql) generateInsert(tablename string, columns []string) string { - valuePlaceholders := make([]string, len(columns)) - quotedColumns := make([]string, len(columns)) - for i, column := range columns { - valuePlaceholders[i] = fmt.Sprintf("$%d", i+1) - quotedColumns[i] = quoteIdent(column) - } - - fullTableName := p.fullTableName(tablename) - columnNames := strings.Join(quotedColumns, ",") - values := strings.Join(valuePlaceholders, ",") - return fmt.Sprintf(insertIntoSQLTemplate, fullTableName, columnNames, values) -} diff --git a/plugins/outputs/postgresql/generate_insert_test.go b/plugins/outputs/postgresql/generate_insert_test.go deleted file mode 100644 index 28d2e023b9790..0000000000000 --- a/plugins/outputs/postgresql/generate_insert_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package postgresql - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestPostgresqlQuote(t *testing.T) { - assert.Equal(t, `"foo"`, quoteIdent("foo")) - assert.Equal(t, `"fo'o"`, quoteIdent("fo'o")) - assert.Equal(t, `"fo""o"`, quoteIdent("fo\"o")) - - assert.Equal(t, "'foo'", quoteLiteral("foo")) - assert.Equal(t, "'fo''o'", quoteLiteral("fo'o")) - assert.Equal(t, "'fo\"o'", quoteLiteral("fo\"o")) -} - -func TestPostgresqlInsertStatement(t *testing.T) { - p := newPostgresql() - - p.TagsAsJsonb = false - p.FieldsAsJsonb = false - - sql := p.generateInsert("m", []string{"time", "f"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","f") VALUES($1,$2)`, sql) - - sql = p.generateInsert("m", []string{"time", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","i") 
VALUES($1,$2)`, sql) - - sql = p.generateInsert("m", []string{"time", "f", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","f","i") VALUES($1,$2,$3)`, sql) - - sql = p.generateInsert("m", []string{"time", "k", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","k","i") VALUES($1,$2,$3)`, sql) - - sql = p.generateInsert("m", []string{"time", "k1", "k2", "i"}) - assert.Equal(t, `INSERT INTO "public"."m"("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) -} diff --git a/plugins/outputs/postgresql/get_tag_id.go b/plugins/outputs/postgresql/get_tag_id.go deleted file mode 100644 index c17e6a6ea978c..0000000000000 --- a/plugins/outputs/postgresql/get_tag_id.go +++ /dev/null @@ -1,87 +0,0 @@ -package postgresql - -import ( - "fmt" - "log" - "strings" - - "github.com/influxdata/telegraf" -) - -const ( - selectTagIDTemplate = "SELECT tag_id FROM %s WHERE %s" - missingColumnsTemplate = "WITH available AS (SELECT column_name as c FROM information_schema.columns WHERE table_schema = $1 and table_name = $2)," + - "required AS (SELECT c FROM unnest(array [%s]) AS c) " + - "SELECT required.c, available.c IS NULL FROM required LEFT JOIN available ON required.c = available.c;" - - addColumnTemplate = "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" -) - -func (p *Postgresql) getTagID(metric telegraf.Metric) (int, error) { - var tagID int - var whereColumns []string - var whereValues []interface{} - tablename := metric.Name() - - if p.TagsAsJsonb && len(metric.Tags()) > 0 { - d, err := buildJsonbTags(metric.Tags()) - if err != nil { - return tagID, err - } - - whereColumns = append(whereColumns, "tags") - whereValues = append(whereValues, d) - } else { - for column, value := range metric.Tags() { - whereColumns = append(whereColumns, column) - whereValues = append(whereValues, value) - } - } - - whereParts := make([]string, len(whereColumns)) - for i, column := range whereColumns { - whereParts[i] = fmt.Sprintf("%s = $%d", quoteIdent(column), i+1) - } - - 
tagsTableName := tablename + p.TagTableSuffix - tagsTableFullName := p.fullTableName(tagsTableName) - query := fmt.Sprintf(selectTagIDTemplate, tagsTableFullName, strings.Join(whereParts, " AND ")) - - err := p.db.QueryRow(query, whereValues...).Scan(&tagID) - if err == nil { - return tagID, nil - } - query = p.generateInsert(tagsTableName, whereColumns) + " RETURNING tag_id" - err = p.db.QueryRow(query, whereValues...).Scan(&tagID) - if err == nil { - return tagID, nil - } - - // check if insert error was caused by column mismatch - - // if tags are jsonb, there shouldn't be a column mismatch - if p.TagsAsJsonb { - return tagID, err - } - - // check for missing columns - log.Printf("W! Possible column mismatch while inserting new tag-set: %v", err) - retry, err := p.addMissingColumns(tagsTableName, whereColumns, whereValues) - if err != nil { - // missing coulmns not properly added - log.Printf("E! Could not add missing columns: %v", err) - return tagID, err - } - - // We added some columns and insert might work now. Try again immediately to - // avoid long lead time in getting metrics when there are several columns missing - // from the original create statement and they get added in small drops. - if retry { - log.Printf("I! 
Retrying to insert new tag set") - err := p.db.QueryRow(query, whereValues...).Scan(&tagID) - if err != nil { - return tagID, err - } - } - return tagID, nil -} diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index 0e30ea6e507d3..c59f5cbb87cea 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -2,27 +2,30 @@ package postgresql import ( "log" - "sort" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" -) - -const ( - tagsJSONColumn = "tags" - fieldsJSONColumn = "fields" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/columns" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/db" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/tables" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" ) type Postgresql struct { - db dbWrapper - Address string - Schema string - TagsAsForeignkeys bool - TagsAsJsonb bool - FieldsAsJsonb bool - TableTemplate string - TagTableSuffix string - tables tableKeeper + db db.Wrapper + Address string + Schema string + DoSchemaUpdates bool + TagsAsForeignkeys bool + CachedTagsetsPerMeasurement int + TagsAsJsonb bool + FieldsAsJsonb bool + TableTemplate string + TagTableSuffix string + tables tables.Manager + tagCache tagsCache + rows transformer + columns columns.Mapper } func init() { @@ -31,32 +34,39 @@ func init() { func newPostgresql() *Postgresql { return &Postgresql{ - Schema: "public", - TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", - TagTableSuffix: "_tag", + Schema: "public", + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", + TagTableSuffix: "_tag", + CachedTagsetsPerMeasurement: 1000, + DoSchemaUpdates: true, } } // Connect establishes a connection to the target database and prepares the cache func (p *Postgresql) Connect() error { - db, err := newDbWrapper(p.Address) + db, err := 
db.NewWrapper(p.Address) if err != nil { return err } p.db = db - p.tables = newTableKeeper(db) + p.tables = tables.NewManager(db, p.Schema, p.TableTemplate) + + if p.TagsAsForeignkeys { + p.tagCache = newTagsCache(p.CachedTagsetsPerMeasurement, p.TagsAsJsonb, p.TagTableSuffix, p.Schema, p.db) + } + p.rows = newRowTransformer(p.TagsAsForeignkeys, p.TagsAsJsonb, p.FieldsAsJsonb, p.tagCache) + p.columns = columns.NewMapper(p.TagsAsForeignkeys, p.TagsAsJsonb, p.FieldsAsJsonb) return nil } // Close closes the connection to the database func (p *Postgresql) Close() error { + p.tagCache = nil + p.tagCache = nil + p.tables = nil return p.db.Close() } -func (p *Postgresql) fullTableName(name string) string { - return quoteIdent(p.Schema) + "." + quoteIdent(name) -} - var sampleConfig = ` ## specify address via a url matching: ## postgres://[pqgotest[:password]]@localhost[/dbname]\ @@ -73,8 +83,16 @@ var sampleConfig = ` ## address = "host=localhost user=postgres sslmode=verify-full" + ## Update existing tables to match the incoming metrics automatically. Default is true + # do_schema_updates = true + ## Store tags as foreign keys in the metrics table. Default is false. # tags_as_foreignkeys = false + + ## If tags_as_foreignkeys is set to true you can choose the number of tag sets to cache + ## per measurement (metric name). Default is 1000, if set to 0 => cache has no limit. 
+ ## Has no effect if tags_as_foreignkeys = false + # cached_tagsets_per_measurement = 1000 ## Template to use for generating tables ## Available Variables: @@ -103,149 +121,64 @@ func (p *Postgresql) SampleConfig() string { return sampleConfig } func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } func (p *Postgresql) Write(metrics []telegraf.Metric) error { - toInsert := make(map[string][]*colsAndValues) - for _, metric := range metrics { - tablename := metric.Name() - - // create table if needed - if p.tables.exists(p.Schema, tablename) == false { - createStmt := p.generateCreateTable(metric) - _, err := p.db.Exec(createStmt) - if err != nil { - log.Printf("E! Creating table failed: statement: %v, error: %v", createStmt, err) - return err - } - p.tables.add(tablename) - } - - columns := []string{"time"} - values := []interface{}{metric.Time()} - tagColumns, tagValues, err := p.prepareTags(metric) + metricsByMeasurement := utils.GroupMetricsByMeasurement(metrics) + for measureName, indices := range metricsByMeasurement { + err := p.writeMetricsFromMeasure(measureName, indices, metrics) if err != nil { return err } - if tagColumns != nil { - columns = append(columns, tagColumns...) - values = append(values, tagValues...) 
- } - - if p.FieldsAsJsonb { - d, err := buildJsonb(metric.Fields()) - if err != nil { - return err - } - - columns = append(columns, fieldsJSONColumn) - values = append(values, d) - } else { - var keys []string - fields := metric.Fields() - for column := range fields { - keys = append(keys, column) - } - sort.Strings(keys) - for _, column := range keys { - columns = append(columns, column) - values = append(values, fields[column]) - } - } - - newValues := &colsAndValues{ - cols: columns, - vals: values, - } - toInsert[tablename] = append(toInsert[tablename], newValues) } - - return p.insertBatches(toInsert) + return nil } -func (p *Postgresql) prepareTags(metric telegraf.Metric) ([]string, []interface{}, error) { - if len(metric.Tags()) == 0 { - return nil, nil, nil - } +// Writes only the metrics from a specified measure. 'metricIndices' is an array +// of the metrics that belong to the selected 'measureName' for faster lookup. +// If schema updates are enabled the target db tables are updated to be able +// to hold the new values. 
+func (p *Postgresql) writeMetricsFromMeasure(measureName string, metricIndices []int, metrics []telegraf.Metric) error { + targetColumns, targetTagColumns := p.columns.Target(metricIndices, metrics) - if p.TagsAsForeignkeys { - // tags in separate table - tagID, err := p.getTagID(metric) - if err != nil { - return nil, nil, err - } - return []string{tagIDColumn}, []interface{}{tagID}, nil - } - // tags in measurement table - if p.TagsAsJsonb { - d, err := buildJsonbTags(metric.Tags()) - if err != nil { - return nil, nil, err + if p.DoSchemaUpdates { + if err := p.prepareTable(measureName, targetColumns); err != nil { + return err } - - if d != nil { - return []string{tagsJSONColumn}, []interface{}{d}, nil + if p.TagsAsForeignkeys { + tagTableName := p.tagCache.tagsTableName(measureName) + if err := p.prepareTable(tagTableName, targetTagColumns); err != nil { + return err + } } - return nil, nil, nil - } - - var keys []string - tags := metric.Tags() - for column := range tags { - keys = append(keys, column) - } - sort.Strings(keys) - numColumns := len(keys) - var columns = make([]string, numColumns) - var values = make([]interface{}, numColumns) - for i, column := range keys { - columns[i] = column - values[i] = tags[column] + numColumns := len(targetColumns.Names) + values := make([][]interface{}, len(metricIndices)) + var rowTransformErr error + for rowNum, metricIndex := range metricIndices { + values[rowNum], rowTransformErr = p.rows.createRowFromMetric(numColumns, metrics[metricIndex], targetColumns, targetTagColumns) + if rowTransformErr != nil { + log.Printf("E! Could not transform metric to proper row\n%v", rowTransformErr) + return rowTransformErr + } } - return columns, values, nil -} -type colsAndValues struct { - cols []string - vals []interface{} + fullTableName := utils.FullTableName(p.Schema, measureName) + return p.db.DoCopy(fullTableName, targetColumns.Names, values) } -// insertBatches takes batches of data to be inserted. 
The batches are mapped -// by the target table, and each batch contains the columns and values for those -// columns that will generate the INSERT statement. -// On column mismatch an attempt is made to create the column and try to reinsert. -func (p *Postgresql) insertBatches(batches map[string][]*colsAndValues) error { - for tableName, colsAndValues := range batches { - for _, row := range colsAndValues { - sql := p.generateInsert(tableName, row.cols) - _, err := p.db.Exec(sql, row.vals...) - if err == nil { - continue - } - - // check if insert error was caused by column mismatch - if p.FieldsAsJsonb { - return err - } - - log.Printf("W! Possible column mismatch while inserting new metrics: %v", err) +// Checks if a table exists in the db, and then validates if all the required columns +// are present or some are missing (if metrics changed their field or tag sets). +func (p *Postgresql) prepareTable(tableName string, details *utils.TargetColumns) error { + tableExists := p.tables.Exists(tableName) - retry := false - retry, err = p.addMissingColumns(tableName, row.cols, row.vals) - if err != nil { - log.Printf("E! Could not fix column mismatch: %v", err) - return err - } - - // We added some columns and insert might work now. Try again immediately to - // avoid long lead time in getting metrics when there are several columns missing - // from the original create statement and they get added in small drops. - if retry { - _, err = p.db.Exec(sql, row.vals...) 
- } - if err != nil { - return err - } - } + if !tableExists { + return p.tables.CreateTable(tableName, details) } - return nil + missingColumns, err := p.tables.FindColumnMismatch(tableName, details) + if err != nil { + return err + } + if len(missingColumns) == 0 { + return nil + } + return p.tables.AddColumnsToTable(tableName, missingColumns, details) } diff --git a/plugins/outputs/postgresql/postgresql_integration_test.go b/plugins/outputs/postgresql/postgresql_integration_test.go index 1fdbe0207ed33..457665621c033 100644 --- a/plugins/outputs/postgresql/postgresql_integration_test.go +++ b/plugins/outputs/postgresql/postgresql_integration_test.go @@ -3,6 +3,8 @@ package postgresql import ( "database/sql" "fmt" + "math/rand" + "strconv" "testing" "time" @@ -27,6 +29,7 @@ func prepareAndConnect(t *testing.T, foreignTags, jsonTags, jsonFields bool) (te TagsAsForeignkeys: foreignTags, TagsAsJsonb: jsonTags, FieldsAsJsonb: jsonFields, + DoSchemaUpdates: true, TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", TagTableSuffix: "_tags", } @@ -75,6 +78,8 @@ func TestWriteToPostgresJsonTags(t *testing.T) { tagsAsJSON := true fieldsAsJSON := false testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + defer dbConn.Close() + // insert first metric err := postgres.Write([]telegraf.Metric{testMetric}) assert.NoError(t, err, "Could not write") @@ -107,6 +112,8 @@ func TestWriteToPostgresJsonTagsAsForeignTable(t *testing.T) { tagsAsJSON := true fieldsAsJSON := false testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + defer dbConn.Close() + // insert first metric err := postgres.Write([]telegraf.Metric{testMetric}) assert.NoError(t, err, "Could not write") @@ -147,6 +154,8 @@ func TestWriteToPostgresMultipleRowsOneTag(t *testing.T) { tagsAsJSON := true fieldsAsJSON := false testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + 
defer dbConn.Close() + // insert first metric err := postgres.Write([]telegraf.Metric{testMetric, testMetric}) assert.NoError(t, err, "Could not write") @@ -174,6 +183,8 @@ func TestWriteToPostgresAddNewTag(t *testing.T) { tagsAsJSON := true fieldsAsJSON := false testMetricWithOneTag, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + defer dbConn.Close() + testMetricWithOneMoreTag := testMetric("metric name", "tag1", int(2)) testMetricWithOneMoreTag.AddTag("second_tag", "tag2") // insert first two metric @@ -216,6 +227,8 @@ func TestWriteToPostgresAddNewTag(t *testing.T) { func TestWriteToPostgresAddNewField(t *testing.T) { testMetric, dbConn, postgres := prepareAndConnect(t, false, false, false) + defer dbConn.Close() + // insert first metric writeAndAssertSingleMetricNoJSON(t, testMetric, dbConn, postgres) @@ -271,3 +284,140 @@ func writeAndAssertSingleMetricNoJSON(t *testing.T, testMetric telegraf.Metric, ts.UTC(), tag, value)) } } + +func TestWriteToPostgresMultipleMetrics(t *testing.T) { + tagsAsForeignKey := true + tagsAsJSON := true + fieldsAsJSON := false + testMetric, dbConn, postgres := prepareAndConnect(t, tagsAsForeignKey, tagsAsJSON, fieldsAsJSON) + defer dbConn.Close() + dbConn.Exec(`DROP TABLE IF EXISTS "` + testMetric.Name() + `2"`) + dbConn.Exec(`DROP TABLE IF EXISTS "` + testMetric.Name() + `2_tag"`) + testMetricInSecondMeasurement, _ := metric.New(testMetric.Name()+"2", testMetric.Tags(), testMetric.Fields(), testMetric.Time().Add(time.Second)) + // insert first metric + err := postgres.Write([]telegraf.Metric{testMetric, testMetric, testMetricInSecondMeasurement}) + assert.NoError(t, err, "Could not write") + + // should have created table, all columns in the same table + rows, _ := dbConn.Query(fmt.Sprintf(`SELECT time, tag_id, value FROM "%s"`, testMetric.Name())) + // check results for testMetric if in db + for i := 0; i < 2; i++ { + var ts time.Time + var tagID int64 + var value int64 + rows.Next() + err 
= rows.Scan(&ts, &tagID, &value) + assert.NoError(t, err, "Could not check test results") + + sentValue, _ := testMetric.GetField("value") + sentTs := testMetric.Time() + // postgres doesn't support nano seconds in timestamp + sentTsNanoSecondOffset := sentTs.Nanosecond() + nanoSeconds := sentTsNanoSecondOffset % 1000 + sentTs = sentTs.Add(time.Duration(-nanoSeconds) * time.Nanosecond) + if !ts.UTC().Equal(sentTs.UTC()) { + assert.Fail(t, fmt.Sprintf("Expected: %v; Received: %v", sentTs, ts.UTC())) + } + + assert.Equal(t, int64(1), tagID) + assert.Equal(t, sentValue.(int64), value) + + sentTag, _ := testMetric.GetTag("tag") + sentTagJSON := fmt.Sprintf(`{"tag": "%s"}`, sentTag) + row := dbConn.QueryRow(fmt.Sprintf(`SELECT tag_id, tags FROM "%s%s"`, testMetric.Name(), postgres.TagTableSuffix)) + tagID = 0 + var tags string + err = row.Scan(&tagID, &tags) + assert.NoError(t, err, "Could not check test results") + assert.Equal(t, int64(1), tagID) + assert.Equal(t, sentTagJSON, tags) + } + // check results for second metric + row := dbConn.QueryRow(fmt.Sprintf(`SELECT time, tag_id, value FROM "%s"`, testMetricInSecondMeasurement.Name())) + var ts time.Time + var tagID int64 + var value int64 + err = row.Scan(&ts, &tagID, &value) + assert.NoError(t, err, "Could not check test results") + + sentValue, _ := testMetricInSecondMeasurement.GetField("value") + sentTs := testMetricInSecondMeasurement.Time() + // postgres doesn't support nano seconds in timestamp + sentTsNanoSecondOffset := sentTs.Nanosecond() + nanoSeconds := sentTsNanoSecondOffset % 1000 + sentTs = sentTs.Add(time.Duration(-nanoSeconds) * time.Nanosecond) + if !ts.UTC().Equal(sentTs.UTC()) { + assert.Fail(t, fmt.Sprintf("Expected: %v; Received: %v", sentTs, ts.UTC())) + } + + assert.Equal(t, int64(1), tagID) + assert.Equal(t, sentValue.(int64), value) +} + +func TestPerformanceIsAcceptable(t *testing.T) { + _, db, postgres := prepareAndConnect(t, false, false, false) + defer db.Close() + numMetricsPerMeasure 
:= 10000 + numTags := 5 + numDiffValuesForEachTag := 5 + numFields := 10 + numMeasures := 2 + metrics := make([]telegraf.Metric, numMeasures*numMetricsPerMeasure) + for measureInd := 0; measureInd < numMeasures; measureInd++ { + for numMetric := 0; numMetric < numMetricsPerMeasure; numMetric++ { + tags := map[string]string{} + for tag := 0; tag < numTags; tag++ { + randNum := rand.Intn(numDiffValuesForEachTag) + tags[fmt.Sprintf("tag_%d", tag)] = strconv.Itoa(randNum) + } + fields := map[string]interface{}{} + for field := 0; field < numFields; field++ { + fields[fmt.Sprintf("field_%d", field)] = rand.Float64() + } + metricName := "m_" + strconv.Itoa(measureInd) + m, _ := metric.New(metricName, tags, fields, time.Now()) + metrics[measureInd*numMetricsPerMeasure+numMetric] = m + } + } + + start := time.Now() + err := postgres.Write(metrics) + assert.NoError(t, err) + end := time.Since(start) + t.Log("Wrote " + strconv.Itoa(numMeasures*numMetricsPerMeasure) + " metrics in " + end.String()) +} + +func TestPostgresBatching(t *testing.T) { + _, db, postgres := prepareAndConnect(t, false, false, false) + defer db.Close() + numMetricsPerMeasure := 5 + numMeasures := 2 + metrics := make([]telegraf.Metric, numMeasures*numMetricsPerMeasure) + for measureInd := 0; measureInd < numMeasures; measureInd++ { + metricName := "m_" + strconv.Itoa(measureInd) + db.Exec(fmt.Sprintf(`DROP TABLE IF EXISTS ` + metricName)) + for numMetric := 0; numMetric < numMetricsPerMeasure; numMetric++ { + tags := map[string]string{} + fields := map[string]interface{}{"f": 1} + m, _ := metric.New(metricName, tags, fields, time.Now()) + metrics[measureInd*numMetricsPerMeasure+numMetric] = m + } + } + + err := postgres.Write(metrics) + assert.NoError(t, err) + err = postgres.Write(metrics) + assert.NoError(t, err) + // check num rows inserted by transaction id should be 'numMetricsPerMeasure' for + // both transactions, for all measures + for measureInd := 0; measureInd < numMeasures; measureInd++ { + 
metricName := "m_" + strconv.Itoa(measureInd) + rows, err := db.Query(`select count(*) from ` + metricName + ` group by xmin`) + assert.NoError(t, err) + var count int64 + rows.Next() + rows.Scan(&count) + assert.Equal(t, int64(numMetricsPerMeasure), count) + rows.Close() + } +} diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 4c4bf34450016..3c988bba800b2 100644 --- a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -1,182 +1,153 @@ package postgresql import ( - "database/sql" "fmt" "testing" "time" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/metric" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/columns" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" + "github.com/jackc/pgx" _ "github.com/jackc/pgx/stdlib" "github.com/stretchr/testify/assert" ) -func TestWrite(t *testing.T) { +func TestWriteAllInOnePlace(t *testing.T) { timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, timestamp) - noTags, _ := metric.New("m", nil, map[string]interface{}{"f": 1}, timestamp) - testCases := []struct { - desc string - input []telegraf.Metric - fieldsAsJSON bool - execs []sql.Result - expectedExecQueries []string - execErrs []error - expectErr string - }{ - { - desc: "no metrics, no error", - input: []telegraf.Metric{}, - expectErr: "", - }, { - desc: "metric table not cached, error on creating it", - input: []telegraf.Metric{oneMetric}, - execs: []sql.Result{nil}, - execErrs: []error{fmt.Errorf("error on first exec")}, - expectErr: "error on first exec", - }, { - desc: "metric table not cached, gets cached, no tags, fields as json, error on insert", - input: []telegraf.Metric{noTags}, - fieldsAsJSON: true, - execs: []sql.Result{nil, nil}, - execErrs: []error{nil, fmt.Errorf("error 
on batch insert")}, - expectErr: "error on batch insert", - }, { - desc: "metric table not cached, gets cached, has tags, json fields, all good", - input: []telegraf.Metric{oneMetric}, - fieldsAsJSON: true, - execs: []sql.Result{nil, nil}, - execErrs: []error{nil, nil}, - expectedExecQueries: []string{ - `CREATE TABLE IF NOT EXISTS "a"."m"(time timestamptz,"t" text,fields jsonb)`, - `INSERT INTO "a"."m"("time","t","fields") VALUES($1,$2,$3)`}, - }, { - desc: "metric table not cached, gets cached, has tags, all good", - input: []telegraf.Metric{oneMetric}, - execs: []sql.Result{nil, nil}, - execErrs: []error{nil, nil}, - expectedExecQueries: []string{ - `CREATE TABLE IF NOT EXISTS "a"."m"(time timestamptz,"t" text,"f" int8)`, - `INSERT INTO "a"."m"("time","t","f") VALUES($1,$2,$3)`}, - }, - } + twoMetric, _ := metric.New("m", map[string]string{"t2": "tv2"}, map[string]interface{}{"f2": 2}, timestamp) + threeMetric, _ := metric.New("m", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 3, "f2": 4}, timestamp) + fourMetric, _ := metric.New("m2", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 5, "f2": 6}, timestamp) - for _, testCase := range testCases { - p := &Postgresql{ - tables: &mockTk{tables: make(map[string]bool)}, - TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", - Schema: "a", - FieldsAsJsonb: testCase.fieldsAsJSON, - db: &mockDb{ - exec: testCase.execs, - execErr: testCase.execErrs, - expectedQ: testCase.expectedExecQueries, - }} - err := p.Write(testCase.input) - if testCase.expectErr != "" { - assert.EqualError(t, err, testCase.expectErr, testCase.desc) - } else { - assert.Nil(t, err, testCase.desc) - } + p := &Postgresql{ + Schema: "public", + TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", + TagTableSuffix: "_tag", + DoSchemaUpdates: true, + Address: "host=localhost user=postgres password=postgres sslmode=disable dbname=postgres", } -} -func TestInsertBatches(t *testing.T) { - 
sampleData := map[string][]*colsAndValues{ - "tab": { - { - cols: []string{"a"}, - vals: []interface{}{1}, - }, - }, + p.Connect() + err := p.Write([]telegraf.Metric{oneMetric, twoMetric, fourMetric, threeMetric}) + if err != nil { + fmt.Println(err.Error()) + t.Fail() } - - testCases := []struct { - input map[string][]*colsAndValues - desc string - resultsFromExec []sql.Result - errorsFromExec []error - errorOnQuery error - fieldsAsJSON bool - expectErr string - }{ - { - desc: "no batches, no errors", - input: make(map[string][]*colsAndValues), - errorsFromExec: []error{fmt.Errorf("should not have called exec")}, - }, { - desc: "error returned on first insert, fields as json", - input: sampleData, - resultsFromExec: []sql.Result{nil}, - errorsFromExec: []error{fmt.Errorf("error on first insert")}, - fieldsAsJSON: true, - expectErr: "error on first insert", - }, { - desc: "error returned on first insert, error on add column", - input: sampleData, - resultsFromExec: []sql.Result{nil}, - errorsFromExec: []error{fmt.Errorf("error on first insert")}, - errorOnQuery: fmt.Errorf("error on query"), - expectErr: "error on query", - }, { - desc: "no error on insert", - input: sampleData, - resultsFromExec: []sql.Result{nil}, - errorsFromExec: []error{nil}, - }, + fiveMetric, _ := metric.New("m", map[string]string{"t": "tv", "t3": "tv3"}, map[string]interface{}{"f": 7, "f3": 8}, timestamp) + err = p.Write([]telegraf.Metric{fiveMetric}) + if err != nil { + fmt.Println(err.Error()) + t.Fail() } +} + +func TestPostgresqlMetricsFromMeasure(t *testing.T) { + postgreSQL, metrics, metricIndices := prepareAllColumnsInOnePlaceNoJSON() + err := postgreSQL.writeMetricsFromMeasure(metrics[0].Name(), metricIndices["m"], metrics) + assert.NoError(t, err) + postgreSQL, metrics, metricIndices = prepareAllColumnsInOnePlaceTagsAndFieldsJSON() + err = postgreSQL.writeMetricsFromMeasure(metrics[0].Name(), metricIndices["m"], metrics) + assert.NoError(t, err) +} + +func 
prepareAllColumnsInOnePlaceNoJSON() (*Postgresql, []telegraf.Metric, map[string][]int) { + oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, time.Now()) + twoMetric, _ := metric.New("m", map[string]string{"t2": "tv2"}, map[string]interface{}{"f2": 2}, time.Now()) + threeMetric, _ := metric.New("m", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 3, "f2": 4}, time.Now()) - for _, testCase := range testCases { - m := &mockDb{exec: testCase.resultsFromExec, - execErr: testCase.errorsFromExec, - queryErr: testCase.errorOnQuery} - p := &Postgresql{ - db: m, - FieldsAsJsonb: testCase.fieldsAsJSON, + return &Postgresql{ + TagTableSuffix: "_tag", + DoSchemaUpdates: true, + tables: &mockTables{t: map[string]bool{"m": true}, missingCols: []int{}}, + rows: &mockTransformer{rows: [][]interface{}{nil, nil, nil}}, + columns: columns.NewMapper(false, false, false), + db: &mockDb{}, + }, []telegraf.Metric{ + oneMetric, twoMetric, threeMetric, + }, map[string][]int{ + "m": []int{0, 1, 2}, } +} - err := p.insertBatches(testCase.input) - if testCase.expectErr != "" { - assert.EqualError(t, err, testCase.expectErr) - } else { - assert.Nil(t, err) +func prepareAllColumnsInOnePlaceTagsAndFieldsJSON() (*Postgresql, []telegraf.Metric, map[string][]int) { + oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, time.Now()) + twoMetric, _ := metric.New("m", map[string]string{"t2": "tv2"}, map[string]interface{}{"f2": 2}, time.Now()) + threeMetric, _ := metric.New("m", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 3, "f2": 4}, time.Now()) + + return &Postgresql{ + TagTableSuffix: "_tag", + DoSchemaUpdates: true, + TagsAsForeignkeys: false, + TagsAsJsonb: true, + FieldsAsJsonb: true, + tables: &mockTables{t: map[string]bool{"m": true}, missingCols: []int{}}, + columns: columns.NewMapper(false, true, true), + rows: &mockTransformer{rows: [][]interface{}{nil, nil, 
nil}}, + db: &mockDb{}, + }, []telegraf.Metric{ + oneMetric, twoMetric, threeMetric, + }, map[string][]int{ + "m": []int{0, 1, 2}, } - } } -type mockDb struct { - currentExec int - exec []sql.Result - expectedQ []string - execErr []error - query *sql.Rows - queryErr error +type mockTables struct { + t map[string]bool + createErr error + missingCols []int + mismatchErr error + addColsErr error } -func (m *mockDb) Exec(query string, args ...interface{}) (sql.Result, error) { - tmp := m.currentExec - m.currentExec++ - if m.expectedQ != nil && m.expectedQ[tmp] != query { - return nil, fmt.Errorf("unexpected query, got: '%s' expected: %s", query, m.expectedQ[tmp]) +func (m *mockTables) Exists(tableName string) bool { + return m.t[tableName] +} +func (m *mockTables) CreateTable(tableName string, colDetails *utils.TargetColumns) error { + if m.createErr != nil { + return m.createErr } - - return m.exec[tmp], m.execErr[tmp] + m.t[tableName] = true + return nil +} +func (m *mockTables) FindColumnMismatch(tableName string, colDetails *utils.TargetColumns) ([]int, error) { + return m.missingCols, m.mismatchErr } -func (m *mockDb) Query(query string, args ...interface{}) (*sql.Rows, error) { - return m.query, m.queryErr +func (m *mockTables) AddColumnsToTable(tableName string, columnIndices []int, colDetails *utils.TargetColumns) error { + return m.addColsErr } -func (m *mockDb) QueryRow(query string, args ...interface{}) *sql.Row { return nil } -func (m *mockDb) Close() error { return nil } -type mockTk struct { - tables map[string]bool +type mockTransformer struct { + rows [][]interface{} + current int + rowErr error } -func (m *mockTk) add(tableName string) { - m.tables[tableName] = true +func (mt *mockTransformer) createRowFromMetric(numColumns int, metric telegraf.Metric, targetColumns, targetTagColumns *utils.TargetColumns) ([]interface{}, error) { + if mt.rowErr != nil { + return nil, mt.rowErr + } + row := mt.rows[mt.current] + mt.current++ + return row, nil } -func (m 
*mockTk) exists(schema, table string) bool { - _, exists := m.tables[table] - return exists +type mockDb struct { + doCopyErr error +} + +func (m *mockDb) Exec(query string, args ...interface{}) (pgx.CommandTag, error) { + return "", nil +} +func (m *mockDb) DoCopy(fullTableName *pgx.Identifier, colNames []string, batch [][]interface{}) error { + return m.doCopyErr +} +func (m *mockDb) Query(query string, args ...interface{}) (*pgx.Rows, error) { + return nil, nil +} +func (m *mockDb) QueryRow(query string, args ...interface{}) *pgx.Row { + return nil +} +func (m *mockDb) Close() error { + return nil } diff --git a/plugins/outputs/postgresql/table_keeper.go b/plugins/outputs/postgresql/table_keeper.go deleted file mode 100644 index 3b0fd45ac481f..0000000000000 --- a/plugins/outputs/postgresql/table_keeper.go +++ /dev/null @@ -1,47 +0,0 @@ -package postgresql - -import ( - "log" -) - -const ( - tableExistsTemplate = "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;" -) - -type tableKeeper interface { - exists(schema, tableName string) bool - add(tableName string) -} - -type defTableKeeper struct { - Tables map[string]bool - db dbWrapper -} - -func newTableKeeper(db dbWrapper) tableKeeper { - return &defTableKeeper{ - Tables: make(map[string]bool), - db: db, - } -} - -func (t *defTableKeeper) exists(schema, tableName string) bool { - if _, ok := t.Tables[tableName]; ok { - return true - } - - result, err := t.db.Exec(tableExistsTemplate, tableName, schema) - if err != nil { - log.Printf("E! 
Error checking for existence of metric table %s: %v", tableName, err) - return false - } - if count, _ := result.RowsAffected(); count == 1 { - t.Tables[tableName] = true - return true - } - return false -} - -func (t *defTableKeeper) add(tableName string) { - t.Tables[tableName] = true -} diff --git a/plugins/outputs/postgresql/table_keeper_test.go b/plugins/outputs/postgresql/table_keeper_test.go deleted file mode 100644 index 0d7bb77bec307..0000000000000 --- a/plugins/outputs/postgresql/table_keeper_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package postgresql - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestNewTableKeeper(t *testing.T) { - mock := &mockWr{} - tk := newTableKeeper(mock).(*defTableKeeper) - assert.Equal(t, mock, tk.db) - assert.Empty(t, tk.Tables) -} - -func TestTableKeeperAdd(t *testing.T) { - tk := newTableKeeper(nil).(*defTableKeeper) - tk.add("table") - tk.add("table2") - assert.Equal(t, 2, len(tk.Tables)) - assert.True(t, tk.Tables["table"]) - assert.True(t, tk.Tables["table2"]) - assert.False(t, tk.Tables["table3"]) - tk.add("table2") - assert.Equal(t, 2, len(tk.Tables)) -} - -func TestTableKeeperExists(t *testing.T) { - mock := &mockWr{} - tk := newTableKeeper(mock).(*defTableKeeper) - table := "table name" - - // table cached - tk.Tables[table] = true - mock.execErr = fmt.Errorf("should not call execute") - assert.True(t, tk.exists("", table)) - - // error on table exists query - mock.execErr = fmt.Errorf("error on query execute") - mock.expected = tableExistsTemplate - delete(tk.Tables, table) - assert.False(t, tk.exists("", table)) - assert.Equal(t, 0, len(tk.Tables)) - - // fetch from db, doesn't exist - mock.execErr = nil - mock.exec = &mockResult{} - assert.False(t, tk.exists("", table)) - - // fetch from db, exists - mock.exec = &mockResult{rows: 1} - assert.True(t, tk.exists("", table)) - assert.Equal(t, 1, len(tk.Tables)) - assert.True(t, tk.Tables[table]) -} - -type mockResult struct { - 
rows int64 - rowErr error - last int64 - lastErr error -} - -func (m *mockResult) LastInsertId() (int64, error) { - return m.last, m.lastErr -} - -func (m *mockResult) RowsAffected() (int64, error) { - return m.rows, m.rowErr -} diff --git a/plugins/outputs/postgresql/tables/manager.go b/plugins/outputs/postgresql/tables/manager.go new file mode 100644 index 0000000000000..f99ca0c5c72d0 --- /dev/null +++ b/plugins/outputs/postgresql/tables/manager.go @@ -0,0 +1,208 @@ +package tables + +import ( + "database/sql" + "fmt" + "log" + "strings" + + "github.com/influxdata/telegraf/plugins/outputs/postgresql/db" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +const ( + addColumnTemplate = "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;" + tableExistsTemplate = "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;" + findColumnPresenceTemplate = "WITH available AS (SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = $1 and table_name = $2)," + + "required AS (SELECT c FROM unnest(array [%s]) AS c) " + + "SELECT required.c as column_name, available.column_name IS NOT NULL as exists, available.data_type FROM required LEFT JOIN available ON required.c = available.column_name;" +) + +type columnInDbDef struct { + dataType utils.PgDataType + exists bool +} + +// Manager defines an abstraction that can check the state of tables in a PG +// database, create, and update them. +type Manager interface { + // Exists checks if a table with the given name already is present in the DB. + Exists(tableName string) bool + // Creates a table in the database with the column names and types specified in 'colDetails' + CreateTable(tableName string, colDetails *utils.TargetColumns) error + // This function queries a table in the DB if the required columns in 'colDetails' are present and what is their + // data type. 
For existing columns it checks if the data type in the DB can safely hold the data from the metrics. + // It returns: + // - the indices of the missing columns (from colDetails) + // - or an error if + // = it couldn't discover the columns of the table in the db + // = the existing column types are incompatible with the required column types + FindColumnMismatch(tableName string, colDetails *utils.TargetColumns) ([]int, error) + // From the column details (colDetails) of a given measurement, 'columnIndices' specifies which are missing in the DB. + // this function will add the new columns with the required data type. + AddColumnsToTable(tableName string, columnIndices []int, colDetails *utils.TargetColumns) error +} + +type defTableManager struct { + Tables map[string]bool + db db.Wrapper + schema string + tableTemplate string +} + +// NewManager returns an instance of the tables.Manager interface +// that can handle checking and updating the state of tables in the PG database. +func NewManager(db db.Wrapper, schema, tableTemplate string) Manager { + return &defTableManager{ + Tables: make(map[string]bool), + db: db, + tableTemplate: tableTemplate, + schema: schema, + } +} + +// Exists checks if a table with the given name already is present in the DB. +func (t *defTableManager) Exists(tableName string) bool { + if _, ok := t.Tables[tableName]; ok { + return true + } + + commandTag, err := t.db.Exec(tableExistsTemplate, tableName, t.schema) + if err != nil { + log.Printf("E! 
Error checking for existence of metric table: %s\nSQL: %s\n%v", tableName, tableExistsTemplate, err) + return false + } + + if commandTag.RowsAffected() == 1 { + t.Tables[tableName] = true + return true + } + + return false +} + +// Creates a table in the database with the column names and types specified in 'colDetails' +func (t *defTableManager) CreateTable(tableName string, colDetails *utils.TargetColumns) error { + sql := t.generateCreateTableSQL(tableName, colDetails) + if _, err := t.db.Exec(sql); err != nil { + log.Printf("E! Couldn't create table: %s\nSQL: %s\n%v", tableName, sql, err) + return err + } + + t.Tables[tableName] = true + return nil +} + +// This function queries a table in the DB if the required columns in 'colDetails' are present and what is their +// data type. For existing columns it checks if the data type in the DB can safely hold the data from the metrics. +// It returns: +// - the indices of the missing columns (from colDetails) +// - or an error if +// = it couldn't discover the columns of the table in the db +// = the existing column types are incompatible with the required column types +func (t *defTableManager) FindColumnMismatch(tableName string, colDetails *utils.TargetColumns) ([]int, error) { + columnPresence, err := t.findColumnPresence(tableName, colDetails.Names) + if err != nil { + return nil, err + } + + missingCols := []int{} + for colIndex := range colDetails.Names { + colStateInDb := columnPresence[colIndex] + if !colStateInDb.exists { + missingCols = append(missingCols, colIndex) + continue + } + typeInDb := colStateInDb.dataType + typeInMetric := colDetails.DataTypes[colIndex] + if !utils.PgTypeCanContain(typeInDb, typeInMetric) { + return nil, fmt.Errorf("E! A column exists in '%s' of type '%s' required type '%s'", tableName, typeInDb, typeInMetric) + } + } + + return missingCols, nil +} + +// From the column details (colDetails) of a given measurement, 'columnIndices' specifies which are missing in the DB. 
+// this function will add the new columns with the required data type. +func (t *defTableManager) AddColumnsToTable(tableName string, columnIndices []int, colDetails *utils.TargetColumns) error { + fullTableName := utils.FullTableName(t.schema, tableName).Sanitize() + for _, colIndex := range columnIndices { + name := colDetails.Names[colIndex] + dataType := colDetails.DataTypes[colIndex] + addColumnQuery := fmt.Sprintf(addColumnTemplate, fullTableName, utils.QuoteIdent(name), dataType) + _, err := t.db.Exec(addColumnQuery) + if err != nil { + log.Printf("E! Couldn't add missing columns to the table: %s\nError executing: %s\n%v", tableName, addColumnQuery, err) + return err + } + } + + return nil +} + +// Populate the 'tableTemplate' (supplied as config option to the plugin) with the details of +// the required columns for the measurement to create a 'CREATE TABLE' SQL statement. +// The order, column names and data types are given in 'colDetails'. +func (t *defTableManager) generateCreateTableSQL(tableName string, colDetails *utils.TargetColumns) string { + colDefs := make([]string, len(colDetails.Names)) + pk := []string{} + for colIndex, colName := range colDetails.Names { + colDefs[colIndex] = utils.QuoteIdent(colName) + " " + string(colDetails.DataTypes[colIndex]) + if colDetails.Roles[colIndex] != utils.FieldColType { + pk = append(pk, colName) + } + } + + fullTableName := utils.FullTableName(t.schema, tableName).Sanitize() + query := strings.Replace(t.tableTemplate, "{TABLE}", fullTableName, -1) + query = strings.Replace(query, "{TABLELITERAL}", utils.QuoteLiteral(fullTableName), -1) + query = strings.Replace(query, "{COLUMNS}", strings.Join(colDefs, ","), -1) + query = strings.Replace(query, "{KEY_COLUMNS}", strings.Join(pk, ","), -1) + + return query +} + +// For a given table and an array of column names it checks the database if those columns exist, +// and what's their data type. 
+func (t *defTableManager) findColumnPresence(tableName string, columns []string) ([]*columnInDbDef, error) { + columnPresenseQuery := prepareColumnPresenceQuery(columns) + result, err := t.db.Query(columnPresenseQuery, t.schema, tableName) + if err != nil { + log.Printf("E! Couldn't discover columns of table: %s\nQuery failed: %s\n%v", tableName, columnPresenseQuery, err) + return nil, err + } + defer result.Close() + columnStatus := make([]*columnInDbDef, len(columns)) + var exists bool + var columnName string + var pgLongType sql.NullString + currentColumn := 0 + + for result.Next() { + err := result.Scan(&columnName, &exists, &pgLongType) + if err != nil { + log.Printf("E! Couldn't discover columns of table: %s\n%v", tableName, err) + return nil, err + } + pgShortType := utils.PgDataType("") + if pgLongType.Valid { + pgShortType = utils.LongToShortPgType(pgLongType.String) + } + columnStatus[currentColumn] = &columnInDbDef{ + exists: exists, + dataType: pgShortType, + } + currentColumn++ + } + + return columnStatus, nil +} + +func prepareColumnPresenceQuery(columns []string) string { + quotedColumns := make([]string, len(columns)) + for i, column := range columns { + quotedColumns[i] = utils.QuoteLiteral(column) + } + return fmt.Sprintf(findColumnPresenceTemplate, strings.Join(quotedColumns, ",")) +} diff --git a/plugins/outputs/postgresql/tables/manager_test.go b/plugins/outputs/postgresql/tables/manager_test.go new file mode 100644 index 0000000000000..54a4fbbb39e3b --- /dev/null +++ b/plugins/outputs/postgresql/tables/manager_test.go @@ -0,0 +1,139 @@ +package tables + +import ( + "errors" + "testing" + + "github.com/influxdata/telegraf/plugins/outputs/postgresql/db" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" + "github.com/jackc/pgx" + "github.com/stretchr/testify/assert" +) + +type mockDb struct { + exec pgx.CommandTag + execErr error +} + +func (m *mockDb) Exec(query string, args ...interface{}) (pgx.CommandTag, error) 
{ + return m.exec, m.execErr +} +func (m *mockDb) DoCopy(fullTableName *pgx.Identifier, colNames []string, batch [][]interface{}) error { + return nil +} +func (m *mockDb) Query(query string, args ...interface{}) (*pgx.Rows, error) { + return nil, nil +} +func (m *mockDb) QueryRow(query string, args ...interface{}) *pgx.Row { + return nil +} +func (m *mockDb) Close() error { + return nil +} + +func TestNewManager(t *testing.T) { + db := &mockDb{} + res := NewManager(db, "schema", "table template").(*defTableManager) + assert.Equal(t, "table template", res.tableTemplate) + assert.Equal(t, "schema", res.schema) + assert.Equal(t, db, res.db) +} + +func TestExists(t *testing.T) { + testCases := []struct { + desc string + in string + out bool + db *mockDb + cache map[string]bool + }{ + { + desc: "table already cached", + in: "table", + db: &mockDb{execErr: errors.New("should not have called exec")}, + cache: map[string]bool{"table": true}, + out: true, + }, { + desc: "table not cached, error on check db", + cache: map[string]bool{}, + in: "table", + db: &mockDb{execErr: errors.New("error on exec")}, + }, { + desc: "table not cached, exists in db", + cache: map[string]bool{}, + in: "table", + db: &mockDb{exec: "0 1"}, + out: true, + }, { + desc: "table not cached, doesn't exist", + cache: map[string]bool{}, + in: "table", + db: &mockDb{exec: "0 0"}, + out: false, + }, + } + + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + manager := &defTableManager{ + Tables: tc.cache, + db: tc.db, + } + + got := manager.Exists(tc.in) + assert.Equal(t, tc.out, got) + }) + } +} + +func TestCreateTable(t *testing.T) { + testCases := []struct { + desc string + inT string + inCD *utils.TargetColumns + db db.Wrapper + template string + out error + }{ + { + desc: "error on exec, no table cached", + inT: "t", + inCD: &utils.TargetColumns{ + Names: []string{"time", "t", "f"}, + Target: map[string]int{"time": 0, "t": 1, "f": 2}, + DataTypes: 
[]utils.PgDataType{"timestamptz", "text", "float8"}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagColType, utils.FieldColType}, + }, + db: &mockDb{execErr: errors.New("error on exec")}, + template: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}) ", + out: errors.New("error on exec"), + }, { + desc: "all good, table is cached", + inT: "t", + inCD: &utils.TargetColumns{ + Names: []string{"time", "t", "f"}, + Target: map[string]int{"time": 0, "t": 1, "f": 2}, + DataTypes: []utils.PgDataType{"timestamptz", "text", "float8"}, + Roles: []utils.ColumnRole{utils.TimeColType, utils.TagColType, utils.FieldColType}, + }, + db: &mockDb{}, + template: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS}) ", + out: nil, + }, + } + + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + manager := &defTableManager{ + Tables: map[string]bool{}, + db: tc.db, + tableTemplate: tc.template, + } + got := manager.CreateTable(tc.inT, tc.inCD) + assert.Equal(t, tc.out, got) + if tc.out == nil { + assert.True(t, manager.Tables[tc.inT]) + } + }) + } +} diff --git a/plugins/outputs/postgresql/tags_cache.go b/plugins/outputs/postgresql/tags_cache.go new file mode 100644 index 0000000000000..6761a0d3ec43f --- /dev/null +++ b/plugins/outputs/postgresql/tags_cache.go @@ -0,0 +1,159 @@ +package postgresql + +import ( + "fmt" + "sort" + "strconv" + "strings" + + "github.com/golang/groupcache/lru" + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/columns" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/db" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +const ( + selectTagIDTemplate = "SELECT tag_id FROM %s WHERE %s" +) + +// TagsCache retrieves the appropriate tagID based on the tag values +// from the database (used only when TagsAsForeignKey property selected). 
+// Also caches the LRU tagIDs +type tagsCache interface { + getTagID(target *utils.TargetColumns, metric telegraf.Metric) (int, error) + tagsTableName(measureName string) string +} + +type defTagsCache struct { + cache map[string]*lru.Cache + tagsAsJSONb bool + tagTableSuffix string + schema string + db db.Wrapper + itemsToCache int +} + +// newTagsCache returns a new implementation of the tags cache interface with LRU memoization +func newTagsCache(numItemsInCachePerMetric int, tagsAsJSONb bool, tagTableSuffix, schema string, db db.Wrapper) tagsCache { + return &defTagsCache{ + cache: map[string]*lru.Cache{}, + tagsAsJSONb: tagsAsJSONb, + tagTableSuffix: tagTableSuffix, + schema: schema, + db: db, + itemsToCache: numItemsInCachePerMetric, + } +} + +// Checks the cache for the tag set of the metric, if present returns immediately. +// Otherwise asks the database if that tag set has already been recorded. +// If not recorded, inserts a new row to the tags table for the specific measurement. +// Re-caches the tagID after checking the DB. 
+func (c *defTagsCache) getTagID(target *utils.TargetColumns, metric telegraf.Metric) (int, error) { + measureName := metric.Name() + tags := metric.Tags() + cacheKey := constructCacheKey(tags) + tagID, isCached := c.checkTagCache(measureName, cacheKey) + if isCached { + return tagID, nil + } + + var whereParts []string + var whereValues []interface{} + if c.tagsAsJSONb { + whereParts = []string{utils.QuoteIdent(columns.TagsJSONColumn) + "= $1"} + numTags := len(tags) + if numTags > 0 { + d, err := utils.BuildJsonb(tags) + if err != nil { + return tagID, err + } + whereValues = []interface{}{d} + } else { + whereValues = []interface{}{nil} + } + } else { + whereParts = make([]string, len(target.Names)-1) + whereValues = make([]interface{}, len(target.Names)-1) + whereIndex := 1 + for columnIndex, tagName := range target.Names[1:] { + if val, ok := tags[tagName]; ok { + whereParts[columnIndex] = utils.QuoteIdent(tagName) + " = $" + strconv.Itoa(whereIndex) + whereValues[whereIndex-1] = val + } else { + whereParts[whereIndex-1] = tagName + " IS NULL" + } + whereIndex++ + } + } + + tagsTableName := c.tagsTableName(measureName) + tagsTableFullName := utils.FullTableName(c.schema, tagsTableName).Sanitize() + // SELECT tag_id FROM measure_tag WHERE t1 = v1 AND ... 
tN = vN + query := fmt.Sprintf(selectTagIDTemplate, tagsTableFullName, strings.Join(whereParts, " AND ")) + err := c.db.QueryRow(query, whereValues...).Scan(&tagID) + // tag set found in DB, cache it and return + if err == nil { + c.addToCache(measureName, cacheKey, tagID) + return tagID, nil + } + + // tag set is new, insert it, and cache the tagID + query = utils.GenerateInsert(tagsTableFullName, target.Names[1:]) + " RETURNING " + columns.TagIDColumnName + err = c.db.QueryRow(query, whereValues...).Scan(&tagID) + if err == nil { + c.addToCache(measureName, cacheKey, tagID) + } + return tagID, err +} + +func (c *defTagsCache) tagsTableName(measureName string) string { + return measureName + c.tagTableSuffix +} + +// check the cache for the given 'measure' if it contains the +// tagID value for a given tag-set key. If the cache for that measure +// doesn't exist, creates it. +func (c *defTagsCache) checkTagCache(measure, key string) (int, bool) { + if cacheForMeasure, ok := c.cache[measure]; ok { + tagID, exists := cacheForMeasure.Get(key) + if exists { + return tagID.(int), exists + } + return 0, exists + } + + c.cache[measure] = lru.New(c.itemsToCache) + return 0, false +} + +func (c *defTagsCache) addToCache(measure, key string, tagID int) { + c.cache[measure].Add(key, tagID) +} + +// cache key is constructed from the tag set as +// {tag_a:1, tag_c:2, tag_b:3}=>'tag_a 1;tag_b 3;tag_c 2;' +func constructCacheKey(tags map[string]string) string { + numTags := len(tags) + if numTags == 0 { + return "" + } + keys := make([]string, numTags) + i := 0 + for key := range tags { + keys[i] = key + i++ + } + + sort.Strings(keys) + var whereParts strings.Builder + for _, key := range keys { + val := tags[key] + whereParts.WriteString(key) + whereParts.WriteString(" ") + whereParts.WriteString(val) + whereParts.WriteString(";") + } + return whereParts.String() +} diff --git a/plugins/outputs/postgresql/transformer.go b/plugins/outputs/postgresql/transformer.go new file mode 
100644 index 0000000000000..1a843a264d40d --- /dev/null +++ b/plugins/outputs/postgresql/transformer.go @@ -0,0 +1,72 @@ +package postgresql + +import ( + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/columns" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" +) + +type transformer interface { + createRowFromMetric(numColumns int, metric telegraf.Metric, targetColumns, targetTagColumns *utils.TargetColumns) ([]interface{}, error) +} + +type defTransformer struct { + tagsAsFK bool + tagsAsJSONb bool + fieldsAsJSONb bool + tagsCache tagsCache +} + +func newRowTransformer(tagsAsFK, tagsAsJSONb, fieldsAsJSONb bool, tagsCache tagsCache) transformer { + return &defTransformer{ + tagsAsFK: tagsAsFK, + tagsAsJSONb: tagsAsJSONb, + fieldsAsJSONb: fieldsAsJSONb, + tagsCache: tagsCache, + } +} + +func (dt *defTransformer) createRowFromMetric(numColumns int, metric telegraf.Metric, targetColumns, targetTagColumns *utils.TargetColumns) ([]interface{}, error) { + row := make([]interface{}, numColumns) + // handle time + row[0] = metric.Time() + // handle tags and tag id + if dt.tagsAsFK { + tagID, err := dt.tagsCache.getTagID(targetTagColumns, metric) + if err != nil { + return nil, err + } + row[1] = tagID + } else { + if dt.tagsAsJSONb { + jsonVal, err := utils.BuildJsonb(metric.Tags()) + if err != nil { + return nil, err + } + targetIndex := targetColumns.Target[columns.TagsJSONColumn] + row[targetIndex] = jsonVal + } else { + for _, tag := range metric.TagList() { + targetIndex := targetColumns.Target[tag.Key] + row[targetIndex] = tag.Value + } + } + } + + // handle fields + if dt.fieldsAsJSONb { + jsonVal, err := utils.BuildJsonb(metric.Fields()) + if err != nil { + return nil, err + } + targetIndex := targetColumns.Target[columns.FieldsJSONColumn] + row[targetIndex] = jsonVal + } else { + for _, field := range metric.FieldList() { + targetIndex := targetColumns.Target[field.Key] + row[targetIndex] = 
field.Value + } + } + + return row, nil +} diff --git a/plugins/outputs/postgresql/utils.go b/plugins/outputs/postgresql/utils.go deleted file mode 100644 index 801a2b6aac4d5..0000000000000 --- a/plugins/outputs/postgresql/utils.go +++ /dev/null @@ -1,68 +0,0 @@ -package postgresql - -import ( - "encoding/json" - "log" - "strings" - - "github.com/jackc/pgx" -) - -func buildJsonbTags(tags map[string]string) ([]byte, error) { - js := make(map[string]interface{}) - for column, value := range tags { - js[column] = value - } - - return buildJsonb(js) -} - -func buildJsonb(data map[string]interface{}) ([]byte, error) { - if len(data) > 0 { - d, err := json.Marshal(data) - if err != nil { - return nil, err - } - return d, nil - } - - return nil, nil -} - -func quoteIdent(name string) string { - return pgx.Identifier{name}.Sanitize() -} - -func quoteLiteral(name string) string { - return "'" + strings.Replace(name, "'", "''", -1) + "'" -} - -func deriveDatatype(value interface{}) string { - var datatype string - - switch value.(type) { - case bool: - datatype = "boolean" - case uint64: - datatype = "int8" - case int64: - datatype = "int8" - case float64: - datatype = "float8" - case string: - datatype = "text" - default: - datatype = "text" - log.Printf("E! Unknown datatype %T(%v)", value, value) - } - return datatype -} - -func contains(haystack []string, needle string) bool { - for _, key := range haystack { - if key == needle { - return true - } - } - return false -} diff --git a/plugins/outputs/postgresql/utils/types.go b/plugins/outputs/postgresql/utils/types.go new file mode 100644 index 0000000000000..a44017399d601 --- /dev/null +++ b/plugins/outputs/postgresql/utils/types.go @@ -0,0 +1,30 @@ +package utils + +// ColumnRole specifies the role of a column in a metric. +// It helps map the columns to the DB. 
+type ColumnRole int + +const ( + TimeColType ColumnRole = iota + 1 + TagsIDColType + TagColType + FieldColType +) + +// PgDataType defines a string that represents a PostgreSQL data type. +type PgDataType string + +// TargetColumns contains all the information needed to map a collection of +// metrics who belong to the same Measurement. +type TargetColumns struct { + // the names the columns will have in the database + Names []string + // column name -> order number. where to place each column in rows + // batched to the db + Target map[string]int + // the data type of each column should have in the db. used when checking + // if the schema matches or it needs updates + DataTypes []PgDataType + // the role each column has, helps properly map the metric to the db + Roles []ColumnRole +} diff --git a/plugins/outputs/postgresql/utils/utils.go b/plugins/outputs/postgresql/utils/utils.go new file mode 100644 index 0000000000000..1fbd90cc05526 --- /dev/null +++ b/plugins/outputs/postgresql/utils/utils.go @@ -0,0 +1,168 @@ +package utils + +import ( + "encoding/json" + "fmt" + "log" + "strings" + "time" + + "github.com/influxdata/telegraf" + "github.com/jackc/pgx" +) + +const ( + insertIntoSQLTemplate = "INSERT INTO %s(%s) VALUES(%s)" +) + +// GroupMetricsByMeasurement groups the list of metrics by the measurement name. +// But the values are the index of the measure from the input list of measures. +// [m, m, m2, m2, m] => {m:[0,1,4], m2:[2,3]} +func GroupMetricsByMeasurement(m []telegraf.Metric) map[string][]int { + toReturn := make(map[string][]int) + for i, metric := range m { + var metricLocations []int + var ok bool + name := metric.Name() + if metricLocations, ok = toReturn[name]; !ok { + metricLocations = []int{} + toReturn[name] = metricLocations + } + toReturn[name] = append(metricLocations, i) + } + return toReturn +} + +// BuildJsonb returns a byte array of the json representation +// of the passed object. 
+func BuildJsonb(data interface{}) ([]byte, error) { + d, err := json.Marshal(data) + if err != nil { + return nil, err + } + return d, nil +} + +// QuoteIdent returns a sanitized string safe to use in SQL as an identifier +func QuoteIdent(name string) string { + return pgx.Identifier{name}.Sanitize() +} + +// QuoteLiteral returns a sanitized string safe to use in sql as a string literal +func QuoteLiteral(name string) string { + return "'" + strings.Replace(name, "'", "''", -1) + "'" +} + +// FullTableName returns a sanitized table name with it's schema (if supplied) +func FullTableName(schema, name string) *pgx.Identifier { + if schema != "" { + return &pgx.Identifier{schema, name} + } + + return &pgx.Identifier{name} +} + +const ( + PgBool = "boolean" + PgInt8 = "int8" + PgInt4 = "int4" + PgInteger = "integer" + PgBigInt = "bigint" + PgFloat8 = "float8" + PgDoublePrecision = "double precision" + PgText = "text" + PgTimestamptz = "timestamptz" + PgTimestampWithTimeZone = "timestamp with time zone" + PgTimestamp = "timestamp" + PgTimestampWithoutTimeZone = "timestamp without time zone" + PgSerial = "serial" + PgJSONb = "jsonb" +) + +// DerivePgDatatype returns the appropriate PostgreSQL data type +// that could hold the value. +func DerivePgDatatype(value interface{}) PgDataType { + switch value.(type) { + case bool: + return PgBool + case uint64: + return PgInt8 + case int64: + return PgInt8 + case int: + return PgInt8 + case uint: + return PgInt8 + case uint32: + return PgInt4 + case int32: + return PgInt4 + case float64: + return PgFloat8 + case float32: + return PgFloat8 + case string: + return PgText + case time.Time: + return PgTimestamptz + default: + log.Printf("E! Unknown datatype %T(%v)", value, value) + return PgText + } +} + +// LongToShortPgType returns a PostgreSQL datatype in it's short +// notation form. 
+func LongToShortPgType(longPgType string) PgDataType { + switch longPgType { + case PgInteger: + return PgInt4 + case PgBigInt: + return PgInt8 + case PgDoublePrecision: + return PgFloat8 + case PgTimestampWithTimeZone: + return PgTimestamptz + case PgTimestampWithoutTimeZone: + return PgTimestamp + default: + return PgDataType(longPgType) + } +} + +// PgTypeCanContain tells you if one PostgreSQL data type can contain +// the values of another without data loss. +func PgTypeCanContain(canThis PgDataType, containThis PgDataType) bool { + if canThis == containThis { + return true + } + if canThis == PgInt8 { + return containThis == PgInt4 + } + if canThis == PgInt4 { + return containThis == PgSerial + } + if canThis == PgFloat8 { + return containThis == PgInt4 + } + if canThis == PgTimestamptz { + return containThis == PgTimestamp + } + + return false +} + +// GenerateInsert returns a SQL statement to insert values in a table +// with $X placeholders for the values +func GenerateInsert(fullSanitizedTableName string, columns []string) string { + valuePlaceholders := make([]string, len(columns)) + quotedColumns := make([]string, len(columns)) + for i, column := range columns { + valuePlaceholders[i] = fmt.Sprintf("$%d", i+1) + quotedColumns[i] = QuoteIdent(column) + } + + columnNames := strings.Join(quotedColumns, ",") + values := strings.Join(valuePlaceholders, ",") + return fmt.Sprintf(insertIntoSQLTemplate, fullSanitizedTableName, columnNames, values) +} diff --git a/plugins/outputs/postgresql/utils/utils_test.go b/plugins/outputs/postgresql/utils/utils_test.go new file mode 100644 index 0000000000000..040a7202d5c67 --- /dev/null +++ b/plugins/outputs/postgresql/utils/utils_test.go @@ -0,0 +1,138 @@ +package utils + +import ( + "testing" + "time" + + "github.com/influxdata/telegraf" + "github.com/influxdata/telegraf/metric" + "github.com/stretchr/testify/assert" +) + +func TestPostgresqlQuote(t *testing.T) { + assert.Equal(t, `"foo"`, QuoteIdent("foo")) + 
assert.Equal(t, `"fo'o"`, QuoteIdent("fo'o")) + assert.Equal(t, `"fo""o"`, QuoteIdent("fo\"o")) + + assert.Equal(t, "'foo'", QuoteLiteral("foo")) + assert.Equal(t, "'fo''o'", QuoteLiteral("fo'o")) + assert.Equal(t, "'fo\"o'", QuoteLiteral("fo\"o")) +} + +func TestBuildJsonb(t *testing.T) { + testCases := []struct { + desc string + in interface{} + out string + }{ + { + desc: "simple map", + in: map[string]int{"a": 1}, + out: `{"a":1}`, + }, { + desc: "single number", + in: 1, + out: `1`, + }, { + desc: "interface map", + in: map[int]interface{}{1: "a"}, + out: `{"1":"a"}`, + }, + } + for _, tc := range testCases { + t.Run(tc.desc, func(t *testing.T) { + res, err := BuildJsonb(tc.in) + assert.Nil(t, err) + assert.Equal(t, tc.out, string(res)) + + }) + } +} + +func TestFullTableName(t *testing.T) { + assert.Equal(t, `"tableName"`, FullTableName("", "tableName").Sanitize()) + assert.Equal(t, `"table name"`, FullTableName("", "table name").Sanitize()) + assert.Equal(t, `"table.name"`, FullTableName("", "table.name").Sanitize()) + assert.Equal(t, `"table"."name"`, FullTableName("table", "name").Sanitize()) + assert.Equal(t, `"schema name"."table name"`, FullTableName("schema name", "table name").Sanitize()) +} + +func TestDerivePgDataType(t *testing.T) { + assert.Equal(t, PgDataType("boolean"), DerivePgDatatype(true)) + assert.Equal(t, PgDataType("int8"), DerivePgDatatype(uint64(1))) + assert.Equal(t, PgDataType("int8"), DerivePgDatatype(1)) + assert.Equal(t, PgDataType("int8"), DerivePgDatatype(uint(1))) + assert.Equal(t, PgDataType("int8"), DerivePgDatatype(int64(1))) + assert.Equal(t, PgDataType("int4"), DerivePgDatatype(uint32(1))) + assert.Equal(t, PgDataType("int4"), DerivePgDatatype(int32(1))) + assert.Equal(t, PgDataType("float8"), DerivePgDatatype(float64(1.0))) + assert.Equal(t, PgDataType("float8"), DerivePgDatatype(float32(1.0))) + assert.Equal(t, PgDataType("text"), DerivePgDatatype("")) + assert.Equal(t, PgDataType("timestamptz"), 
DerivePgDatatype(time.Now())) + assert.Equal(t, PgDataType("text"), DerivePgDatatype([]int{})) +} + +func TestLongToShortPgType(t *testing.T) { + assert.Equal(t, PgDataType("boolean"), LongToShortPgType("boolean")) + assert.Equal(t, PgDataType("int4"), LongToShortPgType("integer")) + assert.Equal(t, PgDataType("int8"), LongToShortPgType("bigint")) + assert.Equal(t, PgDataType("float8"), LongToShortPgType("double precision")) + assert.Equal(t, PgDataType("timestamptz"), LongToShortPgType("timestamp with time zone")) + assert.Equal(t, PgDataType("timestamp"), LongToShortPgType("timestamp without time zone")) + assert.Equal(t, PgDataType("jsonb"), LongToShortPgType("jsonb")) + assert.Equal(t, PgDataType("text"), LongToShortPgType("text")) + assert.Equal(t, PgDataType("unknown"), LongToShortPgType("unknown")) +} + +func TestPgTypeCanContain(t *testing.T) { + assert.True(t, PgTypeCanContain(PgDataType("bogus same"), PgDataType("bogus same"))) + assert.True(t, PgTypeCanContain(PgDataType("int8"), PgDataType("int4"))) + assert.False(t, PgTypeCanContain(PgDataType("int8"), PgDataType("float8"))) + assert.False(t, PgTypeCanContain(PgDataType("int8"), PgDataType("timestamptz"))) + + assert.True(t, PgTypeCanContain(PgDataType("int4"), PgDataType("serial"))) + assert.True(t, PgTypeCanContain(PgDataType("int8"), PgDataType("int4"))) + assert.False(t, PgTypeCanContain(PgDataType("int4"), PgDataType("int8"))) + + assert.False(t, PgTypeCanContain(PgDataType("float8"), PgDataType("int8"))) + assert.True(t, PgTypeCanContain(PgDataType("float8"), PgDataType("int4"))) + + assert.True(t, PgTypeCanContain(PgDataType("timestamptz"), PgDataType("timestamp"))) + + assert.False(t, PgTypeCanContain(PgDataType("text"), PgDataType("timestamp"))) +} + +func TestGroupMetricsByMeasurement(t *testing.T) { + m11, _ := metric.New("m", map[string]string{}, map[string]interface{}{}, time.Now()) + m12, _ := metric.New("m", map[string]string{"t1": "tv1"}, map[string]interface{}{"f1": 1}, time.Now()) + 
m13, _ := metric.New("m", map[string]string{}, map[string]interface{}{"f2": 2}, time.Now()) + + m21, _ := metric.New("m2", map[string]string{}, map[string]interface{}{}, time.Now()) + m22, _ := metric.New("m2", map[string]string{"t1": "tv1"}, map[string]interface{}{"f1": 1}, time.Now()) + m23, _ := metric.New("m2", map[string]string{}, map[string]interface{}{"f2": 2}, time.Now()) + in := []telegraf.Metric{m11, m12, m21, m22, m13, m23} + expected := map[string][]int{ + "m": {0, 1, 4}, + "m2": {2, 3, 5}, + } + got := GroupMetricsByMeasurement(in) + assert.Equal(t, expected, got) +} + +func TestGenerateInsert(t *testing.T) { + + sql := GenerateInsert(`"m"`, []string{"time", "f"}) + assert.Equal(t, `INSERT INTO "m"("time","f") VALUES($1,$2)`, sql) + + sql = GenerateInsert(`"m"`, []string{"time", "i"}) + assert.Equal(t, `INSERT INTO "m"("time","i") VALUES($1,$2)`, sql) + + sql = GenerateInsert(`"public"."m"`, []string{"time", "f", "i"}) + assert.Equal(t, `INSERT INTO "public"."m"("time","f","i") VALUES($1,$2,$3)`, sql) + + sql = GenerateInsert(`"public"."m n"`, []string{"time", "k", "i"}) + assert.Equal(t, `INSERT INTO "public"."m n"("time","k","i") VALUES($1,$2,$3)`, sql) + + sql = GenerateInsert("m", []string{"time", "k1", "k2", "i"}) + assert.Equal(t, `INSERT INTO m("time","k1","k2","i") VALUES($1,$2,$3,$4)`, sql) +} From dad46baf307125a19b304730f455c86ebdd76eb3 Mon Sep 17 00:00:00 2001 From: Blagoj Atanasovski Date: Thu, 18 Jul 2019 11:37:41 +0200 Subject: [PATCH 78/79] Attempt reconnect to db if conn is lost and support PG env variables --- plugins/outputs/postgresql/README.md | 11 ++- .../postgresql/columns/column_mapper.go | 4 +- plugins/outputs/postgresql/db/db_wrapper.go | 40 +++++++- .../outputs/postgresql/db/db_wrapper_test.go | 21 ++++ plugins/outputs/postgresql/postgresql.go | 61 +++++++++--- .../postgresql/postgresql_integration_test.go | 4 +- plugins/outputs/postgresql/postgresql_test.go | 95 +++++++++++++------ 
.../outputs/postgresql/tables/manager_test.go | 2 + .../tables/{manager.go => table_manager.go} | 8 ++ plugins/outputs/postgresql/tags_cache.go | 5 + plugins/outputs/postgresql/utils/utils.go | 18 +--- 11 files changed, 206 insertions(+), 63 deletions(-) create mode 100644 plugins/outputs/postgresql/db/db_wrapper_test.go rename plugins/outputs/postgresql/tables/{manager.go => table_manager.go} (96%) diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index 2a38b7a08ab5a..c3d40d3babd4c 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -8,7 +8,16 @@ The plugin manages the schema automatically updating missing columns, and checki ```toml # Send metrics to postgres [[outputs.postgresql]] - address = "host=localhost user=postgres sslmode=verify-full" + ## specify address via a url: + ## postgres://[pqgotest[:password]]@localhost[/dbname]\ + ## ?sslmode=[disable|verify-ca|verify-full] + ## or a simple string: + ## host=localhost user=pqotest password=... sslmode=... dbname=app_production + ## + ## All connection parameters are optional. Also supported are PG environment vars + ## e.g. PGPASSWORD, PGHOST, PGUSER, PGDATABASE + ## all supported vars here: https://www.postgresql.org/docs/current/libpq-envars.html + connection = "host=localhost user=postgres sslmode=verify-full" ## Update existing tables to match the incoming metrics. 
Default is true # do_schema_updates = true diff --git a/plugins/outputs/postgresql/columns/column_mapper.go b/plugins/outputs/postgresql/columns/column_mapper.go index 99692997ab904..78287998f5f08 100644 --- a/plugins/outputs/postgresql/columns/column_mapper.go +++ b/plugins/outputs/postgresql/columns/column_mapper.go @@ -7,7 +7,7 @@ import ( // Mapper knows how to generate the column details for the main and tags table in the db type Mapper interface { - // Iterate through an array of 'metrics' visiting only those indexed by 'indices' + // Target iterates through an array of 'metrics' visiting only those indexed by 'indices' // and depending on 'tagsAsFK', 'tagsAsJSON', and 'fieldsAsJSON' generate the // desired columns (their name, type and which role they play) for both the // main metrics table in the DB, and if tagsAsFK == true for the tags table. @@ -32,7 +32,7 @@ func NewMapper(tagsAsFK, tagsAsJSON, fieldsAsJSON bool) Mapper { } } -// Iterate through an array of 'metrics' visiting only those indexed by 'indices' +// Target iterates through an array of 'metrics' visiting only those indexed by 'indices' // and depending on 'tagsAsFK', 'tagsAsJSON', and 'fieldsAsJSON' generate the // desired columns (their name, type and which role they play) for both the // main metrics table in the DB, and if tagsAsFK == true for the tags table. diff --git a/plugins/outputs/postgresql/db/db_wrapper.go b/plugins/outputs/postgresql/db/db_wrapper.go index c6cf999b1bc0c..4a95d12b9cf09 100644 --- a/plugins/outputs/postgresql/db/db_wrapper.go +++ b/plugins/outputs/postgresql/db/db_wrapper.go @@ -8,6 +8,8 @@ import ( _ "github.com/jackc/pgx/stdlib" ) +const checkConnQuery = "SELECT 1" + // Wrapper defines an interface that encapsulates communication with a DB. 
type Wrapper interface { Exec(query string, args ...interface{}) (pgx.CommandTag, error) @@ -15,6 +17,7 @@ type Wrapper interface { Query(query string, args ...interface{}) (*pgx.Rows, error) QueryRow(query string, args ...interface{}) *pgx.Row Close() error + IsAlive() bool } type defaultDbWrapper struct { @@ -23,13 +26,12 @@ type defaultDbWrapper struct { // NewWrapper returns an implementation of the db.Wrapper interface // that issues queries to a PG database. -func NewWrapper(address string) (Wrapper, error) { - connConfig, err := pgx.ParseConnectionString(address) +func NewWrapper(connection string) (Wrapper, error) { + connConfig, err := parseConnectionString(connection) if err != nil { - log.Printf("E! Couldn't parse connection address: %s\n%v", address, err) return nil, err } - db, err := pgx.Connect(connConfig) + db, err := pgx.Connect(*connConfig) if err != nil { log.Printf("E! Couldn't connect to server\n%v", err) return nil, err @@ -63,3 +65,33 @@ func (d *defaultDbWrapper) Query(query string, args ...interface{}) (*pgx.Rows, func (d *defaultDbWrapper) QueryRow(query string, args ...interface{}) *pgx.Row { return d.db.QueryRow(query, args...) } + +func (d *defaultDbWrapper) IsAlive() bool { + if !d.db.IsAlive() { + return false + } + row := d.db.QueryRow(checkConnQuery) + var one int64 + if err := row.Scan(&one); err != nil { + log.Printf("W! Error given on 'is conn alive':\n%v", err) + return false + } + return true +} + +func parseConnectionString(connection string) (*pgx.ConnConfig, error) { + envConnConfig, err := pgx.ParseEnvLibpq() + if err != nil { + log.Println("E! couldn't check PG environment variables") + return nil, err + } + + connConfig, err := pgx.ParseConnectionString(connection) + if err != nil { + log.Printf("E! 
Couldn't parse connection string: %s\n%v", connection, err) + return nil, err + } + + connConfig = envConnConfig.Merge(connConfig) + return &connConfig, nil +} diff --git a/plugins/outputs/postgresql/db/db_wrapper_test.go b/plugins/outputs/postgresql/db/db_wrapper_test.go new file mode 100644 index 0000000000000..ca6865b7ea56c --- /dev/null +++ b/plugins/outputs/postgresql/db/db_wrapper_test.go @@ -0,0 +1,21 @@ +package db + +import ( + "os" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseConnectionStringPgEnvOverride(t *testing.T) { + config, err := parseConnectionString("dbname=test") + assert.NoError(t, err) + assert.Equal(t, "test", config.Database) + assert.Equal(t, "", config.Password) + + os.Setenv("PGPASSWORD", "pass") + config, err = parseConnectionString("dbname=test") + assert.NoError(t, err) + assert.Equal(t, "test", config.Database) + assert.Equal(t, "pass", config.Password) +} diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go index c59f5cbb87cea..cfaa407d8b24a 100644 --- a/plugins/outputs/postgresql/postgresql.go +++ b/plugins/outputs/postgresql/postgresql.go @@ -2,6 +2,7 @@ package postgresql import ( "log" + "sync" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/plugins/outputs" @@ -12,8 +13,7 @@ import ( ) type Postgresql struct { - db db.Wrapper - Address string + Connection string Schema string DoSchemaUpdates bool TagsAsForeignkeys bool @@ -22,10 +22,16 @@ type Postgresql struct { FieldsAsJsonb bool TableTemplate string TagTableSuffix string - tables tables.Manager - tagCache tagsCache - rows transformer - columns columns.Mapper + + // lock for the assignment of the dbWrapper, + // table manager and tags cache + dbConnLock sync.Mutex + db db.Wrapper + tables tables.Manager + tagCache tagsCache + + rows transformer + columns columns.Mapper } func init() { @@ -44,12 +50,16 @@ func newPostgresql() *Postgresql { // Connect establishes a connection to 
the target database and prepares the cache func (p *Postgresql) Connect() error { - db, err := db.NewWrapper(p.Address) + p.dbConnLock.Lock() + defer p.dbConnLock.Unlock() + + // set p.db with a lock + db, err := db.NewWrapper(p.Connection) if err != nil { return err } p.db = db - p.tables = tables.NewManager(db, p.Schema, p.TableTemplate) + p.tables = tables.NewManager(p.db, p.Schema, p.TableTemplate) if p.TagsAsForeignkeys { p.tagCache = newTagsCache(p.CachedTagsetsPerMeasurement, p.TagsAsJsonb, p.TagTableSuffix, p.Schema, p.db) @@ -61,7 +71,8 @@ func (p *Postgresql) Connect() error { // Close closes the connection to the database func (p *Postgresql) Close() error { - p.tagCache = nil + p.dbConnLock.Lock() + defer p.dbConnLock.Unlock() p.tagCache = nil p.tables = nil return p.db.Close() @@ -74,14 +85,16 @@ var sampleConfig = ` ## or a simple string: ## host=localhost user=pqotest password=... sslmode=... dbname=app_production ## - ## All connection parameters are optional. + ## All connection parameters are optional. Also supported are PG environment vars + ## e.g. PGPASSWORD, PGHOST, PGUSER, PGDATABASE + ## all supported vars here: https://www.postgresql.org/docs/current/libpq-envars.html ## ## Without the dbname parameter, the driver will default to a database ## with the same name as the user. This dbname is just for instantiating a ## connection with the server and doesn't restrict the databases we are trying ## to grab metrics for. ## - address = "host=localhost user=postgres sslmode=verify-full" + connection = "host=localhost user=postgres sslmode=verify-full" ## Update existing tables to match the incoming metrics automatically. Default is true # do_schema_updates = true @@ -121,6 +134,14 @@ func (p *Postgresql) SampleConfig() string { return sampleConfig } func (p *Postgresql) Description() string { return "Send metrics to PostgreSQL" } func (p *Postgresql) Write(metrics []telegraf.Metric) error { + if !p.checkConnection() { + log.Println("W! 
Connection is not alive, attempting reset") + if err := p.resetConnection(); err != nil { + log.Printf("E! Could not reset connection:\n%v", err) + return err + } + log.Println("I! Connection established again") + } metricsByMeasurement := utils.GroupMetricsByMeasurement(metrics) for measureName, indices := range metricsByMeasurement { err := p.writeMetricsFromMeasure(measureName, indices, metrics) @@ -182,3 +203,21 @@ func (p *Postgresql) prepareTable(tableName string, details *utils.TargetColumns } return p.tables.AddColumnsToTable(tableName, missingColumns, details) } + +func (p *Postgresql) checkConnection() bool { + p.dbConnLock.Lock() + defer p.dbConnLock.Unlock() + return p.db != nil && p.db.IsAlive() +} + +func (p *Postgresql) resetConnection() error { + p.dbConnLock.Lock() + defer p.dbConnLock.Unlock() + var err error + p.db, err = db.NewWrapper(p.Connection) + p.tables.SetConnection(p.db) + if p.tagCache != nil { + p.tagCache.setDb(p.db) + } + return err +} diff --git a/plugins/outputs/postgresql/postgresql_integration_test.go b/plugins/outputs/postgresql/postgresql_integration_test.go index 457665621c033..355cdc4cc352d 100644 --- a/plugins/outputs/postgresql/postgresql_integration_test.go +++ b/plugins/outputs/postgresql/postgresql_integration_test.go @@ -19,12 +19,12 @@ func prepareAndConnect(t *testing.T, foreignTags, jsonTags, jsonFields bool) (te t.Skip("Skipping integration test in short mode") } - testAddress := "postgres://postgres@localhost:5432/postgres?sslmode=disable" + testAddress := "postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable" testMetric := testMetric("metric name", "tag1", int(1)) postgres := &Postgresql{ - Address: testAddress, + Connection: testAddress, Schema: "public", TagsAsForeignkeys: foreignTags, TagsAsJsonb: jsonTags, diff --git a/plugins/outputs/postgresql/postgresql_test.go b/plugins/outputs/postgresql/postgresql_test.go index 3c988bba800b2..7efff2e1dcd77 100644 --- 
a/plugins/outputs/postgresql/postgresql_test.go +++ b/plugins/outputs/postgresql/postgresql_test.go @@ -1,47 +1,20 @@ package postgresql import ( - "fmt" + "sync" "testing" "time" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/metric" "github.com/influxdata/telegraf/plugins/outputs/postgresql/columns" + "github.com/influxdata/telegraf/plugins/outputs/postgresql/db" "github.com/influxdata/telegraf/plugins/outputs/postgresql/utils" "github.com/jackc/pgx" _ "github.com/jackc/pgx/stdlib" "github.com/stretchr/testify/assert" ) -func TestWriteAllInOnePlace(t *testing.T) { - timestamp := time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC) - oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, timestamp) - twoMetric, _ := metric.New("m", map[string]string{"t2": "tv2"}, map[string]interface{}{"f2": 2}, timestamp) - threeMetric, _ := metric.New("m", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 3, "f2": 4}, timestamp) - fourMetric, _ := metric.New("m2", map[string]string{"t": "tv", "t2": "tv2"}, map[string]interface{}{"f": 5, "f2": 6}, timestamp) - - p := &Postgresql{ - Schema: "public", - TableTemplate: "CREATE TABLE IF NOT EXISTS {TABLE}({COLUMNS})", - TagTableSuffix: "_tag", - DoSchemaUpdates: true, - Address: "host=localhost user=postgres password=postgres sslmode=disable dbname=postgres", - } - p.Connect() - err := p.Write([]telegraf.Metric{oneMetric, twoMetric, fourMetric, threeMetric}) - if err != nil { - fmt.Println(err.Error()) - t.Fail() - } - fiveMetric, _ := metric.New("m", map[string]string{"t": "tv", "t3": "tv3"}, map[string]interface{}{"f": 7, "f3": 8}, timestamp) - err = p.Write([]telegraf.Metric{fiveMetric}) - if err != nil { - fmt.Println(err.Error()) - t.Fail() - } -} - func TestPostgresqlMetricsFromMeasure(t *testing.T) { postgreSQL, metrics, metricIndices := prepareAllColumnsInOnePlaceNoJSON() err := 
postgreSQL.writeMetricsFromMeasure(metrics[0].Name(), metricIndices["m"], metrics) @@ -51,6 +24,46 @@ func TestPostgresqlMetricsFromMeasure(t *testing.T) { assert.NoError(t, err) } +func TestPostgresqlIsAliveCalledOnWrite(t *testing.T) { + postgreSQL, metrics, _ := prepareAllColumnsInOnePlaceNoJSON() + mockedDb := postgreSQL.db.(*mockDb) + mockedDb.isAliveResponses = []bool{true} + err := postgreSQL.Write(metrics[:1]) + assert.NoError(t, err) + assert.Equal(t, 1, mockedDb.currentIsAliveResponse) +} + +func TestPostgresqlDbAssignmentLock(t *testing.T) { + postgreSQL, metrics, _ := prepareAllColumnsInOnePlaceNoJSON() + mockedDb := postgreSQL.db.(*mockDb) + mockedDb.isAliveResponses = []bool{true} + mockedDb.secondsToSleepInIsAlive = 3 + var endOfWrite, startOfWrite, startOfReset, endOfReset time.Time + var wg sync.WaitGroup + wg.Add(2) + go func() { + startOfWrite = time.Now() + err := postgreSQL.Write(metrics[:1]) + assert.NoError(t, err) + endOfWrite = time.Now() + wg.Done() + }() + time.Sleep(time.Second) + + go func() { + startOfReset = time.Now() + postgreSQL.dbConnLock.Lock() + time.Sleep(time.Second) + postgreSQL.dbConnLock.Unlock() + endOfReset = time.Now() + wg.Done() + }() + wg.Wait() + assert.True(t, startOfWrite.Before(startOfReset)) + assert.True(t, startOfReset.Before(endOfWrite)) + assert.True(t, endOfWrite.Before(endOfReset)) +} + func prepareAllColumnsInOnePlaceNoJSON() (*Postgresql, []telegraf.Metric, map[string][]int) { oneMetric, _ := metric.New("m", map[string]string{"t": "tv"}, map[string]interface{}{"f": 1}, time.Now()) twoMetric, _ := metric.New("m", map[string]string{"t2": "tv2"}, map[string]interface{}{"f2": 2}, time.Now()) @@ -63,6 +76,7 @@ func prepareAllColumnsInOnePlaceNoJSON() (*Postgresql, []telegraf.Metric, map[st rows: &mockTransformer{rows: [][]interface{}{nil, nil, nil}}, columns: columns.NewMapper(false, false, false), db: &mockDb{}, + dbConnLock: sync.Mutex{}, }, []telegraf.Metric{ oneMetric, twoMetric, threeMetric, }, 
map[string][]int{
@@ -81,6 +95,7 @@ func prepareAllColumnsInOnePlaceTagsAndFieldsJSON() (*Postgresql, []telegraf.Met
 		TagsAsForeignkeys: false,
 		TagsAsJsonb:       true,
 		FieldsAsJsonb:     true,
+		dbConnLock:        sync.Mutex{},
 		tables:            &mockTables{t: map[string]bool{"m": true}, missingCols: []int{}},
 		columns:           columns.NewMapper(false, true, true),
 		rows:              &mockTransformer{rows: [][]interface{}{nil, nil, nil}},
@@ -116,6 +131,7 @@ func (m *mockTables) FindColumnMismatch(tableName string, colDetails *utils.Targ
 func (m *mockTables) AddColumnsToTable(tableName string, columnIndices []int, colDetails *utils.TargetColumns) error {
 	return m.addColsErr
 }
+func (m *mockTables) SetConnection(db db.Wrapper) {}
 
 type mockTransformer struct {
 	rows [][]interface{}
@@ -133,12 +149,16 @@ func (mt *mockTransformer) createRowFromMetric(numColumns int, metric telegraf.M
 }
 
 type mockDb struct {
-	doCopyErr error
+	doCopyErr               error
+	isAliveResponses        []bool
+	currentIsAliveResponse  int
+	secondsToSleepInIsAlive int64
 }
 
 func (m *mockDb) Exec(query string, args ...interface{}) (pgx.CommandTag, error) {
 	return "", nil
 }
+
 func (m *mockDb) DoCopy(fullTableName *pgx.Identifier, colNames []string, batch [][]interface{}) error {
 	return m.doCopyErr
 }
@@ -151,3 +171,18 @@ func (m *mockDb) QueryRow(query string, args ...interface{}) *pgx.Row {
 func (m *mockDb) Close() error {
 	return nil
 }
+
+func (m *mockDb) IsAlive() bool {
+	if m.secondsToSleepInIsAlive > 0 {
+		time.Sleep(time.Duration(m.secondsToSleepInIsAlive) * time.Second)
+	}
+	if m.isAliveResponses == nil {
+		return true
+	}
+	if m.currentIsAliveResponse >= len(m.isAliveResponses) {
+		return m.isAliveResponses[len(m.isAliveResponses)-1]
+	}
+	which := m.currentIsAliveResponse
+	m.currentIsAliveResponse++
+	return m.isAliveResponses[which]
+}
diff --git a/plugins/outputs/postgresql/tables/manager_test.go b/plugins/outputs/postgresql/tables/manager_test.go
index 54a4fbbb39e3b..cf17956adb247 100644
--- a/plugins/outputs/postgresql/tables/manager_test.go
+++ 
b/plugins/outputs/postgresql/tables/manager_test.go @@ -31,6 +31,8 @@ func (m *mockDb) Close() error { return nil } +func (m *mockDb) IsAlive() bool { return true } + func TestNewManager(t *testing.T) { db := &mockDb{} res := NewManager(db, "schema", "table template").(*defTableManager) diff --git a/plugins/outputs/postgresql/tables/manager.go b/plugins/outputs/postgresql/tables/table_manager.go similarity index 96% rename from plugins/outputs/postgresql/tables/manager.go rename to plugins/outputs/postgresql/tables/table_manager.go index f99ca0c5c72d0..eaf69c5ede742 100644 --- a/plugins/outputs/postgresql/tables/manager.go +++ b/plugins/outputs/postgresql/tables/table_manager.go @@ -41,6 +41,7 @@ type Manager interface { // From the column details (colDetails) of a given measurement, 'columnIndices' specifies which are missing in the DB. // this function will add the new columns with the required data type. AddColumnsToTable(tableName string, columnIndices []int, colDetails *utils.TargetColumns) error + SetConnection(db db.Wrapper) } type defTableManager struct { @@ -61,6 +62,13 @@ func NewManager(db db.Wrapper, schema, tableTemplate string) Manager { } } +// SetConnection to db, used only when previous was killed or restarted. +// It will also clear the local cache of which table exists. +func (t *defTableManager) SetConnection(db db.Wrapper) { + t.db = db + t.Tables = make(map[string]bool) +} + // Exists checks if a table with the given name already is present in the DB. 
func (t *defTableManager) Exists(tableName string) bool { if _, ok := t.Tables[tableName]; ok { diff --git a/plugins/outputs/postgresql/tags_cache.go b/plugins/outputs/postgresql/tags_cache.go index 6761a0d3ec43f..29640270d4d87 100644 --- a/plugins/outputs/postgresql/tags_cache.go +++ b/plugins/outputs/postgresql/tags_cache.go @@ -23,6 +23,7 @@ const ( type tagsCache interface { getTagID(target *utils.TargetColumns, metric telegraf.Metric) (int, error) tagsTableName(measureName string) string + setDb(db db.Wrapper) } type defTagsCache struct { @@ -46,6 +47,10 @@ func newTagsCache(numItemsInCachePerMetric int, tagsAsJSONb bool, tagTableSuffix } } +func (c *defTagsCache) setDb(db db.Wrapper) { + c.db = db +} + // Checks the cache for the tag set of the metric, if present returns immediately. // Otherwise asks the database if that tag set has already been recorded. // If not recorded, inserts a new row to the tags table for the specific measurement. diff --git a/plugins/outputs/postgresql/utils/utils.go b/plugins/outputs/postgresql/utils/utils.go index 1fbd90cc05526..649f4728460af 100644 --- a/plugins/outputs/postgresql/utils/utils.go +++ b/plugins/outputs/postgresql/utils/utils.go @@ -62,6 +62,8 @@ func FullTableName(schema, name string) *pgx.Identifier { return &pgx.Identifier{name} } +// Constants for naming PostgreSQL data types both in +// their short and long versions. 
const ( PgBool = "boolean" PgInt8 = "int8" @@ -85,21 +87,11 @@ func DerivePgDatatype(value interface{}) PgDataType { switch value.(type) { case bool: return PgBool - case uint64: + case uint64, int64, int, uint: return PgInt8 - case int64: - return PgInt8 - case int: - return PgInt8 - case uint: - return PgInt8 - case uint32: + case uint32, int32: return PgInt4 - case int32: - return PgInt4 - case float64: - return PgFloat8 - case float32: + case float64, float32: return PgFloat8 case string: return PgText From cf2026589a4603e4bcd70c025af9f1bbbf9c1b1d Mon Sep 17 00:00:00 2001 From: Blagoj Atanasovski Date: Mon, 23 Dec 2019 15:07:55 +0100 Subject: [PATCH 79/79] error thrown on insufficient permissions --- Gopkg.lock | 3 - plugins/outputs/postgresql/README.md | 5 ++ plugins/outputs/postgresql/postgresql.go | 1 + .../postgresql/tables/table_manager.go | 71 ++++++++++--------- 4 files changed, 43 insertions(+), 37 deletions(-) diff --git a/Gopkg.lock b/Gopkg.lock index a2de1f7655fd2..e74bee1f3000d 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1770,12 +1770,9 @@ "github.com/go-redis/redis", "github.com/go-sql-driver/mysql", "github.com/gobwas/glob", -<<<<<<< HEAD "github.com/gofrs/uuid", "github.com/gogo/protobuf/proto", -======= "github.com/golang/groupcache/lru", ->>>>>>> Optimize insert performance of metrics and tag_id lookup "github.com/golang/protobuf/proto", "github.com/golang/protobuf/ptypes/duration", "github.com/golang/protobuf/ptypes/empty", diff --git a/plugins/outputs/postgresql/README.md b/plugins/outputs/postgresql/README.md index c3d40d3babd4c..6e80c61d868cb 100644 --- a/plugins/outputs/postgresql/README.md +++ b/plugins/outputs/postgresql/README.md @@ -3,6 +3,11 @@ This output plugin writes all metrics to PostgreSQL. The plugin manages the schema automatically updating missing columns, and checking if existing ones are of the proper type. 
+**_WARNING_**: In order to enable automatic schema update, the connection to the database must
+be established with a user that has sufficient permissions. Either be an admin or an owner of the
+target schema.
+
+
 ### Configuration:
 
 ```toml
diff --git a/plugins/outputs/postgresql/postgresql.go b/plugins/outputs/postgresql/postgresql.go
index cfaa407d8b24a..569fd2cb73657 100644
--- a/plugins/outputs/postgresql/postgresql.go
+++ b/plugins/outputs/postgresql/postgresql.go
@@ -146,6 +146,7 @@ func (p *Postgresql) Write(metrics []telegraf.Metric) error {
 	for measureName, indices := range metricsByMeasurement {
 		err := p.writeMetricsFromMeasure(measureName, indices, metrics)
 		if err != nil {
+			log.Printf("copy error: %v", err)
 			return err
 		}
 	}
diff --git a/plugins/outputs/postgresql/tables/table_manager.go b/plugins/outputs/postgresql/tables/table_manager.go
index eaf69c5ede742..e7aba2efe7156 100644
--- a/plugins/outputs/postgresql/tables/table_manager.go
+++ b/plugins/outputs/postgresql/tables/table_manager.go
@@ -1,8 +1,8 @@
 package tables
 
 import (
-	"database/sql"
 	"fmt"
+	"github.com/pkg/errors"
 	"log"
 	"strings"
 
@@ -11,11 +11,9 @@ import (
 )
 
 const (
-	addColumnTemplate          = "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;"
-	tableExistsTemplate        = "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;"
-	findColumnPresenceTemplate = "WITH available AS (SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = $1 and table_name = $2)," +
-		"required AS (SELECT c FROM unnest(array [%s]) AS c) " +
-		"SELECT required.c as column_name, available.column_name IS NOT NULL as exists, available.data_type FROM required LEFT JOIN available ON required.c = available.column_name;"
+	addColumnTemplate           = "ALTER TABLE %s ADD COLUMN IF NOT EXISTS %s %s;"
+	tableExistsTemplate         = "SELECT tablename FROM pg_tables WHERE tablename = $1 AND schemaname = $2;"
+	findExistingColumnsTemplate = "SELECT column_name, data_type FROM information_schema.columns 
WHERE table_schema = $1 and table_name = $2" ) type columnInDbDef struct { @@ -114,7 +112,7 @@ func (t *defTableManager) FindColumnMismatch(tableName string, colDetails *utils return nil, err } - missingCols := []int{} + var missingCols []int for colIndex := range colDetails.Names { colStateInDb := columnPresence[colIndex] if !colStateInDb.exists { @@ -154,7 +152,7 @@ func (t *defTableManager) AddColumnsToTable(tableName string, columnIndices []in // The order, column names and data types are given in 'colDetails'. func (t *defTableManager) generateCreateTableSQL(tableName string, colDetails *utils.TargetColumns) string { colDefs := make([]string, len(colDetails.Names)) - pk := []string{} + var pk []string for colIndex, colName := range colDetails.Names { colDefs[colIndex] = utils.QuoteIdent(colName) + " " + string(colDetails.DataTypes[colIndex]) if colDetails.Roles[colIndex] != utils.FieldColType { @@ -174,43 +172,48 @@ func (t *defTableManager) generateCreateTableSQL(tableName string, colDetails *u // For a given table and an array of column names it checks the database if those columns exist, // and what's their data type. func (t *defTableManager) findColumnPresence(tableName string, columns []string) ([]*columnInDbDef, error) { - columnPresenseQuery := prepareColumnPresenceQuery(columns) - result, err := t.db.Query(columnPresenseQuery, t.schema, tableName) + existingCols, err := t.findExistingColumns(tableName) if err != nil { - log.Printf("E! Couldn't discover columns of table: %s\nQuery failed: %s\n%v", tableName, columnPresenseQuery, err) return nil, err } - defer result.Close() - columnStatus := make([]*columnInDbDef, len(columns)) - var exists bool - var columnName string - var pgLongType sql.NullString - currentColumn := 0 + if len(existingCols) == 0 { + log.Printf("E! 
Table exists, but no columns discovered, user doesn't have enough permissions") + return nil, errors.New("Table exists, but no columns discovered, user doesn't have enough permissions") + } - for result.Next() { - err := result.Scan(&columnName, &exists, &pgLongType) - if err != nil { - log.Printf("E! Couldn't discover columns of table: %s\n%v", tableName, err) - return nil, err - } - pgShortType := utils.PgDataType("") - if pgLongType.Valid { - pgShortType = utils.LongToShortPgType(pgLongType.String) + columnStatus := make([]*columnInDbDef, len(columns)) + for i := 0; i < len(columns); i++ { + currentColumn := columns[i] + colType, exists := existingCols[currentColumn] + if !exists { + colType = "" } - columnStatus[currentColumn] = &columnInDbDef{ + columnStatus[i] = &columnInDbDef{ exists: exists, - dataType: pgShortType, + dataType: colType, } - currentColumn++ } return columnStatus, nil } -func prepareColumnPresenceQuery(columns []string) string { - quotedColumns := make([]string, len(columns)) - for i, column := range columns { - quotedColumns[i] = utils.QuoteLiteral(column) +func (t *defTableManager) findExistingColumns(table string) (map[string]utils.PgDataType, error) { + rows, err := t.db.Query(findExistingColumnsTemplate, t.schema, table) + if err != nil { + log.Printf("E! Couldn't discover existing columns of table: %s\n%v", table, err) + return nil, errors.Wrap(err, "could not discover existing columns") + } + defer rows.Close() + cols := make(map[string]utils.PgDataType) + for rows.Next() { + var colName, colTypeStr string + err := rows.Scan(&colName, &colTypeStr) + if err != nil { + log.Printf("E! Couldn't discover columns of table: %s\n%v", table, err) + return nil, err + } + pgShortType := utils.LongToShortPgType(colTypeStr) + cols[colName] = pgShortType } - return fmt.Sprintf(findColumnPresenceTemplate, strings.Join(quotedColumns, ",")) + return cols, nil }