diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000..ad9f4fd0 --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,222 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec" + name = "github.com/davecgh/go-spew" + packages = ["spew"] + pruneopts = "UT" + revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" + version = "v1.1.1" + +[[projects]] + digest = "1:be408f349cae090a7c17a279633d6e62b00068e64af66a582cae0983de8890ea" + name = "github.com/golang/mock" + packages = ["gomock"] + pruneopts = "UT" + revision = "9fa652df1129bef0e734c9cf9bf6dbae9ef3b9fa" + version = "1.3.1" + +[[projects]] + branch = "master" + digest = "1:f5ce1529abc1204444ec73779f44f94e2fa8fcdb7aca3c355b0c95947e4005c6" + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/timestamp", + ] + pruneopts = "UT" + revision = "b285ee9cfc6c881bb20c0d8dc73370ea9b9ec90f" + +[[projects]] + branch = "master" + digest = "1:a21eb24449bd5250ba435ac4c857f654aa162b3e03f4da511a7363d521a5ed19" + name = "github.com/grafana/grafana_plugin_model" + packages = ["go/datasource"] + pruneopts = "UT" + revision = "84176c64269d8060f99e750ee8aba6f062753336" + +[[projects]] + digest = "1:cf6b61e1b4c26b0c7526cee4a0cee6d8302b17798af4b2a56a90eedac0aef11a" + name = "github.com/hashicorp/go-hclog" + packages = ["."] + pruneopts = "UT" + revision = "5ccdce08c75b6c7b37af61159f13f6a4f5e2e928" + version = "v0.9.2" + +[[projects]] + digest = "1:82320f8469d1524df337bc315a38c87644765cd89ec4cf3cbda249a3acdde671" + name = "github.com/hashicorp/go-plugin" + packages = ["."] + pruneopts = "UT" + revision = "e8d22c780116115ae5624720c9af0c97afe4f551" + +[[projects]] + branch = "master" + digest = "1:a4826c308e84f5f161b90b54a814f0be7d112b80164b9b884698a6903ea47ab3" + name = "github.com/hashicorp/yamux" + packages = ["."] + pruneopts = "UT" + revision = "2f1d1f20f75d5404f53b9edf6b53ed5505508675" + +[[projects]] + digest = "1:42eb1f52b84a06820cedc9baec2e710bfbda3ee6dac6cdb97f8b9a5066134ec6" + name = "github.com/mitchellh/go-testing-interface" + packages = ["."] + pruneopts = "UT" + revision = "6d0b8010fcc857872e42fc6c931227569016843c" + version = "v1.0.0" + +[[projects]] + digest = "1:9ec6cf1df5ad1d55cf41a43b6b1e7e118a91bade4f68ff4303379343e40c0e25" + name = "github.com/oklog/run" + packages = ["."] + pruneopts = "UT" + revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39" + version = "v1.0.0" + +[[projects]] + digest = "1:cf31692c14422fa27c83a05292eb5cbe0fb2775972e8f1f8446a71549bd8980b" + name = "github.com/pkg/errors" + packages = ["."] + pruneopts = "UT" + revision = "ba968bfe8b2f7e042a574c888954fccecfa385b4" + version = "v0.8.1" + +[[projects]] + digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe" + name = "github.com/pmezard/go-difflib" + packages = ["difflib"] + pruneopts = "UT" + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + digest = "1:972c2427413d41a1e06ca4897e8528e5a1622894050e2f527b38ddf0f343f759" + name = "github.com/stretchr/testify" + packages = ["assert"] + pruneopts = "UT" + revision = "ffdc059bfe9ce6a4e144ba849dbedead332c6053" + version = "v1.3.0" + +[[projects]] + branch = "master" + digest = "1:1b13e8770142a9251361b13a3b8b9b77296be6fa32856c937b346a45f93c845c" + name = "golang.org/x/net" + packages = [ + "context", + "context/ctxhttp", 
+ "http/httpguts", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "trace", + ] + pruneopts = "UT" + revision = "f3200d17e092c607f615320ecaad13d87ad9a2b3" + +[[projects]] + branch = "master" + digest = "1:668e8c66b8895d69391429b0f64a72c35603c94f364c94d4e5fab5053d57a0b6" + name = "golang.org/x/sys" + packages = ["unix"] + pruneopts = "UT" + revision = "ad28b68e88f12448a1685d038ffea87bbbb34148" + +[[projects]] + digest = "1:8d8faad6b12a3a4c819a3f9618cb6ee1fa1cfc33253abeeea8b55336721e3405" + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/language", + "internal/language/compact", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable", + ] + pruneopts = "UT" + revision = "342b2e1fbaa52c93f31447ad2c6abc048c63e475" + version = "v0.3.2" + +[[projects]] + branch = "master" + digest = "1:583a0c80f5e3a9343d33aea4aead1e1afcc0043db66fdf961ddd1fe8cd3a4faf" + name = "google.golang.org/genproto" + packages = ["googleapis/rpc/status"] + pruneopts = "UT" + revision = "fb225487d10142b5bcc35abfc6cb9a0609614976" + +[[projects]] + digest = "1:64657a7d01c4377b9456d8eaf6ad31b244e3a09a9d8d5a321eb0b1d4bd16a46c" + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "binarylog/grpc_binarylog_v1", + "codes", + "connectivity", + "credentials", + "credentials/internal", + "encoding", + "encoding/proto", + "grpclog", + "health", + "health/grpc_health_v1", + "internal", + "internal/backoff", + "internal/balancerload", + "internal/binarylog", + "internal/channelz", + "internal/envconfig", + "internal/grpcrand", + "internal/grpcsync", + "internal/syscall", + "internal/transport", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap", + ] + pruneopts = "UT" + revision = "869adfc8d5a43efc0d05780ad109106f457f51e4" + version = "v1.21.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = [ + "github.com/golang/mock/gomock", + "github.com/grafana/grafana_plugin_model/go/datasource", + "github.com/hashicorp/go-hclog", + "github.com/hashicorp/go-plugin", + "github.com/pkg/errors", + "github.com/stretchr/testify/assert", + "golang.org/x/net/context", + "golang.org/x/net/context/ctxhttp", + "google.golang.org/grpc/codes", + "google.golang.org/grpc/status", + ] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml new file mode 100644 index 00000000..02ed8705 --- /dev/null +++ b/Gopkg.toml @@ -0,0 +1,42 @@ +# Gopkg.toml example +# +# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html +# for detailed Gopkg.toml documentation. 
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+#   name = "github.com/user/project"
+#   version = "1.0.0"
+#
+# [[constraint]]
+#   name = "github.com/user/project2"
+#   branch = "dev"
+#   source = "github.com/myfork/project2"
+#
+# [[override]]
+#   name = "github.com/x/y"
+#   version = "2.4.0"
+#
+# [prune]
+#   non-go = false
+#   go-tests = true
+#   unused-packages = true
+
+
+[[constraint]]
+  branch = "master"
+  name = "github.com/grafana/grafana_plugin_model"
+
+[[constraint]]
+  name = "github.com/hashicorp/go-hclog"
+  version = "0.9.2"
+
+[[constraint]]
+  branch = "master"
+  name = "golang.org/x/net"
+
+[prune]
+  go-tests = true
+  unused-packages = true
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..5022c1ed
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,10 @@
+all: frontend backend
+
+frontend:
	grunt
+
+backend:
	go build -o ./dist/grafana-kairosdb-datasource_darwin_amd64 ./pkg
+
+clean:
	rm -r ./dist/*
diff --git a/README.md b/README.md
index b64a23a8..bdeb2484 100644
--- a/README.md
+++ b/README.md
@@ -2,26 +2,48 @@ Starting in Grafana 3.x the KairosDB data source is no longer included out of th
 But it is easy to install this plugin!
 
+## Overview
+This plugin consists of two components: a frontend and a backend.
+
+The backend component provides support for [alerts](https://grafana.com/docs/alerting/rules), but it is not required in order to use the frontend.
+
 ## Installation
 
-Either clone this repo into your grafana plugins directory (default /var/lib/grafana/plugins if your installing grafana with package). Then run grunt to compile typescript.
-Restart grafana-server and the plugin should be automatically detected and used.
+### Install to plugins directory
+
+If you only need the frontend component, you can clone the project directly into your Grafana plugins directory
+(defaults to /var/lib/grafana/plugins if you installed Grafana from a package).
+Then compile the code and restart Grafana:
 
 ```
 git clone https://github.com/grafana/kairosdb-datasource
+cd kairosdb-datasource
 npm install
-grunt
+make frontend
 sudo service grafana-server restart
 ```
 
-## Clone into a directory of your choice
+### Install with Alerts
+If you also wish to build the backend plugin, your project must be set up within a [Go workspace](https://golang.org/doc/code.html#Workspaces).
+
+Ensure your GOPATH environment variable points to your workspace:
+```
+export GOPATH=$HOME/go
+cd $GOPATH/src/github.com/grafana
+git clone https://github.com/grafana/kairosdb-datasource
+```
+
-Then edit your grafana.ini config file (Default location is at /etc/grafana/grafana.ini) and add this:
+Edit your grafana.ini config file (default location: /etc/grafana/grafana.ini) to include the path to your clone.
+Be aware that grafana-server needs read access to the project directory.
 ```ini
 [plugin.kairosdb]
-path = /home/your/clone/dir/datasource-plugin-kairosdb
+path = $GOPATH/src/github.com/grafana/kairosdb-datasource
 ```
-Note that if you clone it into the grafana plugins directory you do not need to add the above config option. That is only
-if you want to place the plugin in a directory outside the standard plugins directory. Be aware that grafana-server
-needs read access to the directory.
+Then compile the code and restart Grafana:
+```
+npm install
+make
+sudo service grafana-server restart
+```
diff --git a/dist/README.md b/dist/README.md
index b64a23a8..eb2bb604 100644
--- a/dist/README.md
+++ b/dist/README.md
@@ -2,26 +2,48 @@ Starting in Grafana 3.x the KairosDB data source is no longer included out of th
 But it is easy to install this plugin!
 
+## Overview
+This plugin consists of two components: a frontend and a backend.
+
+The backend component provides support for [alerts](https://grafana.com/docs/alerting/rules), but it is not required in order to use the frontend.
+
 ## Installation
 
-Either clone this repo into your grafana plugins directory (default /var/lib/grafana/plugins if your installing grafana with package). Then run grunt to compile typescript.
-Restart grafana-server and the plugin should be automatically detected and used.
+### Install to plugins directory
+
+If you only need the frontend component, you can clone the project directly into your Grafana plugins directory
+(defaults to /var/lib/grafana/plugins if you installed Grafana from a package).
+Then compile the code and restart Grafana:
 
 ```
 git clone https://github.com/grafana/kairosdb-datasource
+cd kairosdb-datasource
 npm install
-grunt
+make frontend
 sudo service grafana-server restart
 ```
 
-## Clone into a directory of your choice
+### Install with Alerts
+If you also wish to build the backend plugin, your project must be set up within a [Go workspace](https://golang.org/doc/code.html#Workspaces).
+
+Ensure your GOPATH environment variable points to your workspace:
+```
+export GOPATH=$HOME/go
+cd $GOPATH/src/github.com/grafana
+git clone https://github.com/grafana/kairosdb-datasource
+```
+
-Then edit your grafana.ini config file (Default location is at /etc/grafana/grafana.ini) and add this:
+Edit your grafana.ini config file (default location: /etc/grafana/grafana.ini) to include the path to your clone.
+Be aware that grafana-server needs read access to the project directory.
 ```ini
 [plugin.kairosdb]
-path = /home/your/clone/dir/datasource-plugin-kairosdb
+path = $GOPATH/src/github.com/grafana/kairosdb-datasource
 ```
-Note that if you clone it into the grafana plugins directory you do not need to add the above config option. That is only
-if you want to place the plugin in a directory outside the standard plugins directory. Be aware that grafana-server
-needs read access to the directory.
+Then compile the code and restart Grafana:
+```
+npm install
+make
+sudo service grafana-server restart
+```
diff --git a/dist/beans/aggregators/rate_aggregator.js b/dist/beans/aggregators/rate_aggregator.js
index e131941f..e1d2a3d9 100644
--- a/dist/beans/aggregators/rate_aggregator.js
+++ b/dist/beans/aggregators/rate_aggregator.js
@@ -1,19 +1,16 @@
-System.register(["./aggregator", "./parameters/alignment_aggregator_parameter", "./parameters/enum_aggregator_parameter", "./utils"], function(exports_1) {
+System.register(["./aggregator", "./parameters/enum_aggregator_parameter", "./utils"], function(exports_1) {
     var __extends = (this && this.__extends) || function (d, b) {
         for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
         function __() { this.constructor = d; }
         d.prototype = b === null ?
Object.create(b) : (__.prototype = b.prototype, new __()); }; - var aggregator_1, alignment_aggregator_parameter_1, enum_aggregator_parameter_1, utils_1; + var aggregator_1, enum_aggregator_parameter_1, utils_1; var RateAggregator; return { setters:[ function (aggregator_1_1) { aggregator_1 = aggregator_1_1; }, - function (alignment_aggregator_parameter_1_1) { - alignment_aggregator_parameter_1 = alignment_aggregator_parameter_1_1; - }, function (enum_aggregator_parameter_1_1) { enum_aggregator_parameter_1 = enum_aggregator_parameter_1_1; }, @@ -26,7 +23,6 @@ System.register(["./aggregator", "./parameters/alignment_aggregator_parameter", function RateAggregator() { _super.call(this, "rate"); this.parameters = this.parameters.concat([ - new alignment_aggregator_parameter_1.AlignmentAggregatorParameter(), new enum_aggregator_parameter_1.EnumAggregatorParameter("unit", utils_1.TimeUnit, "every") ]); } diff --git a/dist/beans/aggregators/rate_aggregator.js.map b/dist/beans/aggregators/rate_aggregator.js.map index 1eac4d63..882398eb 100644 --- a/dist/beans/aggregators/rate_aggregator.js.map +++ b/dist/beans/aggregators/rate_aggregator.js.map @@ -1 +1 @@ -{"version":3,"file":"rate_aggregator.js","sourceRoot":"","sources":["rate_aggregator.ts"],"names":["RateAggregator","RateAggregator.constructor"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;YAMA;gBAAoCA,kCAAUA;gBAC1CA;oBACIC,kBAAMA,MAAMA,CAACA,CAACA;oBACdA,IAAIA,CAACA,UAAUA,GAAGA,IAAIA,CAACA,UAAUA,CAACA,MAAMA,CAACA;wBACrCA,IAAIA,6DAA4BA,EAAEA;wBAClCA,IAAIA,mDAAuBA,CAACA,MAAMA,EAAEA,gBAAQA,EAAEA,OAAOA,CAACA;qBACzDA,CAACA,CAACA;gBACPA,CAACA;gBACLD,qBAACA;YAADA,CAACA,AARD,EAAoC,uBAAU,EAQ7C;YARD,2CAQC,CAAA"} \ No newline at end of file +{"version":3,"file":"rate_aggregator.js","sourceRoot":"","sources":["rate_aggregator.ts"],"names":["RateAggregator","RateAggregator.constructor"],"mappings":";;;;;;;;;;;;;;;;;;;;YAIA;gBAAoCA,kCAAUA;gBAC1CA;oBACIC,kBAAMA,MAAMA,CAACA,CAACA;oBACdA,IAAIA,CAACA,UAAUA,GAAGA,IAAIA,CAACA,UAAUA,CAACA,MAAMA,CAACA;wBACrCA,IAAIA,mDAAuBA,CAACA,MAAMA,EAAEA,gBAAQA,EAAEA,OAAOA,CAACA;qBACzDA,CAACA,CAACA;gBACPA,CAACA;gBACLD,qBAACA;YAADA,CAACA,AAPD,EAAoC,uBAAU,EAO7C;YAPD,2CAOC,CAAA"} \ No newline at end of file diff --git a/dist/beans/aggregators/rate_aggregator.ts b/dist/beans/aggregators/rate_aggregator.ts index b057db5b..e037d8f8 100644 --- a/dist/beans/aggregators/rate_aggregator.ts +++ b/dist/beans/aggregators/rate_aggregator.ts @@ -1,6 +1,4 @@ import {Aggregator} from "./aggregator"; - -import {AlignmentAggregatorParameter} from "./parameters/alignment_aggregator_parameter"; import {EnumAggregatorParameter} from "./parameters/enum_aggregator_parameter"; import {TimeUnit} from "./utils"; @@ -8,7 +6,6 @@ export class RateAggregator extends Aggregator { constructor() { super("rate"); this.parameters = this.parameters.concat([ - new AlignmentAggregatorParameter(), new EnumAggregatorParameter("unit", TimeUnit, "every") ]); } diff --git a/dist/beans/aggregators/sampler_aggregator.js b/dist/beans/aggregators/sampler_aggregator.js index 60eb2214..2fcfd365 100644 --- a/dist/beans/aggregators/sampler_aggregator.js +++ b/dist/beans/aggregators/sampler_aggregator.js @@ -1,25 +1,30 @@ -System.register(["./aggregator", "./parameters/any_aggregator_parameter"], function(exports_1) { +System.register(["./aggregator", "./parameters/enum_aggregator_parameter", "./utils"], function(exports_1) { var __extends = (this && this.__extends) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; function __() { this.constructor = d; } 
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; - var aggregator_1, any_aggregator_parameter_1; + var aggregator_1, enum_aggregator_parameter_1, utils_1; var SamplerAggregator; return { setters:[ function (aggregator_1_1) { aggregator_1 = aggregator_1_1; }, - function (any_aggregator_parameter_1_1) { - any_aggregator_parameter_1 = any_aggregator_parameter_1_1; + function (enum_aggregator_parameter_1_1) { + enum_aggregator_parameter_1 = enum_aggregator_parameter_1_1; + }, + function (utils_1_1) { + utils_1 = utils_1_1; }], execute: function() { SamplerAggregator = (function (_super) { __extends(SamplerAggregator, _super); function SamplerAggregator() { _super.call(this, "sampler"); - this.parameters = this.parameters.concat([new any_aggregator_parameter_1.AnyAggregatorParameter("samplingUnit", "every")]); + this.parameters = this.parameters.concat([ + new enum_aggregator_parameter_1.EnumAggregatorParameter("unit", utils_1.TimeUnit, "every") + ]); } return SamplerAggregator; })(aggregator_1.Aggregator); diff --git a/dist/beans/aggregators/sampler_aggregator.js.map b/dist/beans/aggregators/sampler_aggregator.js.map index d0807086..5a2bf607 100644 --- a/dist/beans/aggregators/sampler_aggregator.js.map +++ b/dist/beans/aggregators/sampler_aggregator.js.map @@ -1 +1 @@ -{"version":3,"file":"sampler_aggregator.js","sourceRoot":"","sources":["sampler_aggregator.ts"],"names":["SamplerAggregator","SamplerAggregator.constructor"],"mappings":";;;;;;;;;;;;;;;;;YAGA;gBAAuCA,qCAAUA;gBAC7CA;oBACIC,kBAAMA,SAASA,CAACA,CAACA;oBACjBA,IAAIA,CAACA,UAAUA,GAAGA,IAAIA,CAACA,UAAUA,CAACA,MAAMA,CAACA,CAACA,IAAIA,iDAAsBA,CAACA,cAAcA,EAAEA,OAAOA,CAACA,CAACA,CAACA,CAACA;gBACpGA,CAACA;gBACLD,wBAACA;YAADA,CAACA,AALD,EAAuC,uBAAU,EAKhD;YALD,iDAKC,CAAA"} \ No newline at end of file +{"version":3,"file":"sampler_aggregator.js","sourceRoot":"","sources":["sampler_aggregator.ts"],"names":["SamplerAggregator","SamplerAggregator.constructor"],"mappings":";;;;;;;;;;;;;;;;;;;;YAIA;gBAAuCA,qCAAUA;gBAC7CA;oBACIC,kBAAMA,SAASA,CAACA,CAACA;oBACjBA,IAAIA,CAACA,UAAUA,GAAGA,IAAIA,CAACA,UAAUA,CAACA,MAAMA,CAACA;wBACrCA,IAAIA,mDAAuBA,CAACA,MAAMA,EAAEA,gBAAQA,EAAEA,OAAOA,CAACA;qBACzDA,CAACA,CAACA;gBACPA,CAACA;gBACLD,wBAACA;YAADA,CAACA,AAPD,EAAuC,uBAAU,EAOhD;YAPD,iDAOC,CAAA"} \ No newline at end of file diff --git a/dist/beans/aggregators/sampler_aggregator.ts b/dist/beans/aggregators/sampler_aggregator.ts index 72d3e6c2..090772cc 100644 --- a/dist/beans/aggregators/sampler_aggregator.ts +++ b/dist/beans/aggregators/sampler_aggregator.ts @@ -1,9 +1,12 @@ import {Aggregator} from "./aggregator"; -import {AnyAggregatorParameter} from "./parameters/any_aggregator_parameter"; +import {EnumAggregatorParameter} from "./parameters/enum_aggregator_parameter"; +import {TimeUnit} from "./utils"; export class SamplerAggregator extends Aggregator { constructor() { super("sampler"); - this.parameters = this.parameters.concat([new AnyAggregatorParameter("samplingUnit", "every")]); + this.parameters = this.parameters.concat([ + new EnumAggregatorParameter("unit", TimeUnit, "every") + ]); } } diff --git a/dist/plugin.json b/dist/plugin.json index ba7915e5..96c5bbc8 100644 --- a/dist/plugin.json +++ b/dist/plugin.json @@ -4,6 +4,9 @@ "type": "datasource", "metrics": true, "annotations": false, + "backend": true, + "alerting": true, + "executable": "grafana-kairosdb-datasource", "staticRoot": ".", "info": { "description": "datasource plugin for KairosDB", diff --git a/pkg/datasource/_testdata/ModelJson.json 
b/pkg/datasource/_testdata/ModelJson.json new file mode 100644 index 00000000..a49d529b --- /dev/null +++ b/pkg/datasource/_testdata/ModelJson.json @@ -0,0 +1,91 @@ +{ + "query": { + "aggregators": [ + { + "autoValueSwitch": { + "dependentParameters": [ + { + "name": "value", + "text": "every", + "type": "sampling", + "value": "1" + }, + { + "allowedValues": { + "0": "MILLISECONDS", + "1": "SECONDS", + "2": "MINUTES", + "3": "HOURS", + "4": "DAYS", + "5": "WEEKS", + "6": "MONTHS", + "7": "YEARS" + }, + "name": "unit", + "text": "unit", + "type": "sampling_unit", + "value": "HOURS" + } + ], + "enabled": false + }, + "name": "sum", + "parameters": [ + { + "allowedValues": { + "0": "NONE", + "1": "START_TIME", + "2": "SAMPLING" + }, + "name": "sampling", + "text": "align by", + "type": "alignment", + "value": "NONE" + }, + { + "name": "value", + "text": "every", + "type": "sampling", + "value": "1" + }, + { + "allowedValues": { + "0": "MILLISECONDS", + "1": "SECONDS", + "2": "MINUTES", + "3": "HOURS", + "4": "DAYS", + "5": "WEEKS", + "6": "MONTHS", + "7": "YEARS" + }, + "name": "unit", + "text": "unit", + "type": "sampling_unit", + "value": "HOURS" + } + ] + } + ], + "groupBy": { + "tags": [ + "host", + "datacenter" + ], + "time": [], + "value": [] + }, + "metricName": "abc.123", + "tags": { + "datacenter": [], + "customer": [ + "bar" + ], + "host": [ + "foo", + "foo2" + ] + } + }, + "refId": "A" +} diff --git a/pkg/datasource/converters.go b/pkg/datasource/converters.go new file mode 100644 index 00000000..206d30f5 --- /dev/null +++ b/pkg/datasource/converters.go @@ -0,0 +1,191 @@ +package datasource + +import ( + "github.com/grafana/kairosdb-datasource/pkg/remote" + "github.com/pkg/errors" + "regexp" + "strconv" +) + +type MetricQueryConverter interface { + Convert(query *MetricQuery) (*remote.MetricQuery, error) +} + +type MetricQueryConverterImpl struct { + aggregatorConverter AggregatorConverter + groupByConverter GroupByConverter +} + +func NewMetricQueryConverterImpl(aggregatorConverter AggregatorConverter, groupByConverter GroupByConverter) *MetricQueryConverterImpl { + return &MetricQueryConverterImpl{ + aggregatorConverter: aggregatorConverter, + groupByConverter: groupByConverter, + } +} + +func (c *MetricQueryConverterImpl) Convert(query *MetricQuery) (*remote.MetricQuery, error) { + remoteQuery := &remote.MetricQuery{ + Name: query.Name, + Tags: query.Tags, + } + + for _, aggregator := range query.Aggregators { + result, err := c.aggregatorConverter.Convert(aggregator) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse query for metric:%s", query.Name) + } + remoteQuery.Aggregators = append(remoteQuery.Aggregators, result) + } + + if query.GroupBy != nil { + result, err := c.groupByConverter.Convert(query.GroupBy) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse query for metric:%s", query.Name) + } + remoteQuery.GroupBy = result + } + return remoteQuery, nil +} + +type GroupByConverter interface { + Convert(groupBy *GroupBy) ([]*remote.Grouper, error) +} + +type GroupByConverterImpl struct{} + +func (c *GroupByConverterImpl) Convert(groupBy *GroupBy) ([]*remote.Grouper, error) { + groupers := make([]*remote.Grouper, 0) + if len(groupBy.Tags) > 0 { + groupers = append(groupers, &remote.Grouper{ + Name: "tag", + Tags: groupBy.Tags, + }) + } + return groupers, nil +} + +type AggregatorConverter interface { + Convert(aggregator *Aggregator) (map[string]interface{}, error) +} + +type AggregatorConverterImpl struct { + parameterConverterMappings 
map[string]ParameterConverter +} + +func NewAggregatorConverterImpl(paramConverterMappings map[string]ParameterConverter) *AggregatorConverterImpl { + return &AggregatorConverterImpl{ + parameterConverterMappings: paramConverterMappings, + } +} + +func (c *AggregatorConverterImpl) Convert(aggregator *Aggregator) (map[string]interface{}, error) { + result := map[string]interface{}{} + result["name"] = aggregator.Name + + for _, param := range aggregator.Parameters { + converter := c.parameterConverterMappings[param.Type] + if converter == nil { + return nil, errors.Errorf("failed to parse aggregator: %s - unknown parameter type: %s", aggregator.Name, param.Type) + } + + object, err := converter.Convert(param) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse aggregator: %s", aggregator.Name) + } + + result = mergeMaps(result, object) + } + return result, nil +} + +func mergeMaps(a map[string]interface{}, b map[string]interface{}) map[string]interface{} { + result := map[string]interface{}{} + for k, v := range a { + result[k] = v + } + + for k, v := range b { + result[k] = v + } + return result +} + +type ParameterConverter interface { + Convert(param *AggregatorParameter) (map[string]interface{}, error) +} + +type StringParameterConverter struct{} + +func (c *StringParameterConverter) Convert(param *AggregatorParameter) (map[string]interface{}, error) { + return map[string]interface{}{ + param.Name: param.Value, + }, nil +} + +type AnyParameterConverter struct{} + +func (c *AnyParameterConverter) Convert(param *AggregatorParameter) (map[string]interface{}, error) { + var value interface{} + value, err := strconv.ParseFloat(param.Value, 64) + if err != nil { + value = param.Value + } + return map[string]interface{}{ + param.Name: value, + }, nil +} + +type AlignmentParameterConverter struct{} + +func (c *AlignmentParameterConverter) Convert(param *AggregatorParameter) (map[string]interface{}, error) { + return map[string]interface{}{ + "align_sampling": param.Value == "SAMPLING", + "align_start_time": param.Value == "START_TIME", + }, nil +} + +const ( + MILLISECONDS = "milliseconds" + SECONDS = "seconds" + MINUTES = "minutes" + HOURS = "hours" + DAYS = "days" + WEEKS = "weeks" + MONTHS = "months" + YEARS = "years" +) + +var unitNameMappings = map[string]string{ + "ms": MILLISECONDS, + "s": SECONDS, + "m": MINUTES, + "h": HOURS, + "d": DAYS, + "w": WEEKS, + "M": MONTHS, + "y": YEARS, +} + +type SamplingParameterConverter struct{} + +func (c *SamplingParameterConverter) Convert(param *AggregatorParameter) (map[string]interface{}, error) { + regex := regexp.MustCompile(`([0-9]+)([a-zA-Z]+)`) + matches := regex.FindStringSubmatch(param.Value) + + if len(matches) == 0 || matches[0] != param.Value { + return nil, errors.Errorf("failed to parse sampling - invalid format: '%s'", param.Value) + } + + value, _ := strconv.ParseInt(matches[1], 10, 64) + unit, ok := unitNameMappings[matches[2]] + if !ok { + return nil, errors.Errorf("failed to parse sampling - invalid unit: '%s'", matches[2]) + } + + return map[string]interface{}{ + "sampling": &remote.Sampling{ + Value: value, + Unit: unit, + }, + }, nil +} diff --git a/pkg/datasource/converters_test.go b/pkg/datasource/converters_test.go new file mode 100644 index 00000000..16ba150e --- /dev/null +++ b/pkg/datasource/converters_test.go @@ -0,0 +1,344 @@ +package datasource_test + +import ( + "github.com/golang/mock/gomock" + "github.com/grafana/kairosdb-datasource/pkg/datasource" + 
"github.com/grafana/kairosdb-datasource/pkg/datasource/internal/mock_datasource" + "github.com/grafana/kairosdb-datasource/pkg/remote" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestMetricQueryConverterImpl_Convert_minimalQuery(t *testing.T) { + converter := datasource.MetricQueryConverterImpl{} + + result, err := converter.Convert(&datasource.MetricQuery{ + Name: "MetricA", + }) + + assert.Nil(t, err) + assert.Equal(t, &remote.MetricQuery{ + Name: "MetricA", + }, result) +} + +func TestMetricQueryConverterImpl_Convert_withTags(t *testing.T) { + converter := datasource.MetricQueryConverterImpl{} + + result, err := converter.Convert(&datasource.MetricQuery{ + Name: "MetricA", + Tags: map[string][]string{ + "foo": {"bar", "baz"}, + "foo1": {}, + }, + }) + + assert.Nil(t, err) + assert.Equal(t, &remote.MetricQuery{ + Name: "MetricA", + Tags: map[string][]string{ + "foo": {"bar", "baz"}, + "foo1": {}, + }, + }, result) +} + +func TestMetricQueryConverterImpl_Convert_WithAggregators(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + + mockAggregatorConverter := mock_datasource.NewMockAggregatorConverter(ctrl) + mockGroupByConverter := mock_datasource.NewMockGroupByConverter(ctrl) + converter := datasource.NewMetricQueryConverterImpl(mockAggregatorConverter, mockGroupByConverter) + + aggregator := map[string]interface{}{ + "name": "foo", + "value": "baz", + } + + mockAggregatorConverter.EXPECT(). + Convert(gomock.Any()). + Return(aggregator, nil). + AnyTimes() + + result, err := converter.Convert(&datasource.MetricQuery{ + Name: "MetricA", + Aggregators: []*datasource.Aggregator{ + {}, + {}, + }, + }) + + assert.Nil(t, err) + assert.Equal(t, &remote.MetricQuery{ + Name: "MetricA", + Aggregators: []map[string]interface{}{ + aggregator, + aggregator, + }, + }, result) +} + +func TestMetricQueryConverterImpl_Convert_WithGroupBy(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + + mockAggregatorConverter := mock_datasource.NewMockAggregatorConverter(ctrl) + mockGroupByConverter := mock_datasource.NewMockGroupByConverter(ctrl) + converter := datasource.NewMetricQueryConverterImpl(mockAggregatorConverter, mockGroupByConverter) + + groupers := []*remote.Grouper{ + { + Name: "tag", + Tags: []string{"host", "pool"}, + }, + } + + mockGroupByConverter.EXPECT(). + Convert(gomock.Any()). + Return(groupers, nil). 
+ AnyTimes() + + result, err := converter.Convert(&datasource.MetricQuery{ + Name: "MetricA", + GroupBy: &datasource.GroupBy{}, + }) + + assert.Nil(t, err) + assert.Equal(t, &remote.MetricQuery{ + Name: "MetricA", + GroupBy: groupers, + }, result) +} + +func TestGroupByConverterImpl_Convert_noTags(t *testing.T) { + converter := datasource.GroupByConverterImpl{} + + result, err := converter.Convert(&datasource.GroupBy{ + Tags: nil, + }) + + assert.Nil(t, err) + assert.Equal(t, []*remote.Grouper{}, result) +} + +func TestGroupByConverterImpl_Convert(t *testing.T) { + converter := datasource.GroupByConverterImpl{} + + result, err := converter.Convert(&datasource.GroupBy{ + Tags: []string{"foo", "bar"}, + }) + + assert.Nil(t, err) + assert.Equal(t, []*remote.Grouper{ + { + Name: "tag", + Tags: []string{"foo", "bar"}, + }, + }, result) +} + +func TestAggregatorConverterImpl_Convert_singleParam(t *testing.T) { + converter := datasource.NewAggregatorConverterImpl(map[string]datasource.ParameterConverter{ + "foo": &datasource.StringParameterConverter{}, + }) + + result, err := converter.Convert(&datasource.Aggregator{ + Name: "test", + Parameters: []*datasource.AggregatorParameter{ + { + Type: "foo", + Name: "key", + Value: "value", + }, + }, + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "name": "test", + "key": "value", + }, result) +} + +func TestAggregatorConverterImpl_Convert_multipleParams(t *testing.T) { + converter := datasource.NewAggregatorConverterImpl(map[string]datasource.ParameterConverter{ + "foo": &datasource.StringParameterConverter{}, + }) + + result, err := converter.Convert(&datasource.Aggregator{ + Name: "test", + Parameters: []*datasource.AggregatorParameter{ + { + Type: "foo", + Name: "key1", + Value: "value1", + }, + { + Type: "foo", + Name: "key2", + Value: "value2", + }, + }, + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "name": "test", + "key1": "value1", + "key2": "value2", + }, result) +} + +func TestAggregatorConverterImpl_Convert_invalidParamType(t *testing.T) { + converter := datasource.AggregatorConverterImpl{} + result, err := converter.Convert(&datasource.Aggregator{ + Name: "test", + Parameters: []*datasource.AggregatorParameter{ + { + Type: "bogus", + }, + }, + }) + + assert.Nil(t, result) + assert.NotNil(t, err) +} + +func TestStringParameterConverter_Convert(t *testing.T) { + converter := datasource.StringParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Name: "unit", + Value: "MINUTES", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "unit": "MINUTES", + }, result) +} + +func TestAnyParameterConverter_Convert_float(t *testing.T) { + converter := datasource.AnyParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Name: "value", + Value: "1.5", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "value": 1.5, + }, result) +} + +func TestAnyParameterConverter_Convert_string(t *testing.T) { + converter := datasource.AnyParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Name: "value", + Value: "string", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "value": "string", + }, result) +} + +func TestAlignmentParameterConverter_Convert(t *testing.T) { + converter := datasource.AlignmentParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Value: "SAMPLING", + }) + + assert.Nil(t, err) + 
assert.Equal(t, map[string]interface{}{ + "align_sampling": true, + "align_start_time": false, + }, result) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "START_TIME", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "align_sampling": false, + "align_start_time": true, + }, result) +} + +func TestSamplingParameterConverter_Convert(t *testing.T) { + converter := datasource.SamplingParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Value: "1h", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "sampling": &remote.Sampling{ + Value: 1, + Unit: "hours", + }, + }, result) + + converter = datasource.SamplingParameterConverter{} + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "10ms", + }) + + assert.Nil(t, err) + assert.Equal(t, map[string]interface{}{ + "sampling": &remote.Sampling{ + Value: 10, + Unit: "milliseconds", + }, + }, result) +} + +func TestSamplingParameterConverter_Convert_invalidUnit(t *testing.T) { + converter := datasource.SamplingParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Value: "1x", + }) + assert.Nil(t, result) + assert.NotNil(t, err) +} + +func TestSamplingParameterConverter_Convert_invalidFormat(t *testing.T) { + converter := datasource.SamplingParameterConverter{} + result, err := converter.Convert(&datasource.AggregatorParameter{ + Value: "", + }) + assert.Nil(t, result) + assert.NotNil(t, err) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "h", + }) + assert.Nil(t, result) + assert.NotNil(t, err) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "1", + }) + assert.Nil(t, result) + assert.NotNil(t, err) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "h1", + }) + assert.Nil(t, result) + assert.NotNil(t, err) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "1h1h", + }) + assert.Nil(t, result) + assert.NotNil(t, err) + + result, err = converter.Convert(&datasource.AggregatorParameter{ + Value: "1.5h", + }) + assert.Nil(t, result) + assert.NotNil(t, err) +} diff --git a/pkg/datasource/datasource.go b/pkg/datasource/datasource.go new file mode 100644 index 00000000..a3a23eee --- /dev/null +++ b/pkg/datasource/datasource.go @@ -0,0 +1,106 @@ +package datasource + +import ( + "encoding/json" + "github.com/grafana/grafana_plugin_model/go/datasource" + "github.com/grafana/kairosdb-datasource/pkg/logging" + "github.com/grafana/kairosdb-datasource/pkg/remote" + "github.com/hashicorp/go-plugin" + "github.com/pkg/errors" + "golang.org/x/net/context" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" +) + +var logger = logging.Get("datasource").Named("KairosDBDatasource") + +type KairosDBClient interface { + QueryMetrics(ctx context.Context, dsInfo *datasource.DatasourceInfo, request *remote.MetricQueryRequest) ([]*remote.MetricQueryResults, error) +} + +type KairosDBDatasource struct { + plugin.NetRPCUnsupportedPlugin + kairosDBClient KairosDBClient + metricQueryConverter MetricQueryConverter +} + +func NewKairosDBDatasource(client KairosDBClient, converter MetricQueryConverter) *KairosDBDatasource { + return &KairosDBDatasource{ + kairosDBClient: client, + metricQueryConverter: converter, + } +} + +func (ds *KairosDBDatasource) Query(ctx context.Context, dsRequest *datasource.DatasourceRequest) (*datasource.DatasourceResponse, error) { + refIds := 
make([]string, 0) + var remoteQueries []*remote.MetricQuery + + for _, dsQuery := range dsRequest.Queries { + refIds = append(refIds, dsQuery.RefId) + remoteQuery, err := ds.createRemoteMetricQuery(dsQuery) + if err != nil { + return nil, status.Errorf(codes.InvalidArgument, "failed to parse metric query: %s", err) + } + remoteQueries = append(remoteQueries, remoteQuery) + } + + remoteRequest := &remote.MetricQueryRequest{ + StartAbsolute: dsRequest.TimeRange.FromEpochMs, + EndAbsolute: dsRequest.TimeRange.ToEpochMs, + Metrics: remoteQueries, + } + + results, err := ds.kairosDBClient.QueryMetrics(ctx, dsRequest.Datasource, remoteRequest) + if err != nil { + return nil, status.Errorf(codes.Internal, "remote metric query failed: %s", err) + } + + dsResults := make([]*datasource.QueryResult, 0) + + for i, result := range results { + qr := ds.ParseQueryResult(result) + qr.RefId = refIds[i] + dsResults = append(dsResults, qr) + } + + return &datasource.DatasourceResponse{ + Results: dsResults, + }, nil +} + +func (ds *KairosDBDatasource) createRemoteMetricQuery(dsQuery *datasource.Query) (*remote.MetricQuery, error) { + metricRequest := &MetricRequest{} + err := json.Unmarshal([]byte(dsQuery.ModelJson), metricRequest) + if err != nil { + logger.Debug("Failed to unmarshal JSON", "value", dsQuery.ModelJson) + return nil, errors.Wrap(err, "failed to unmarshal request model") + } + + return ds.metricQueryConverter.Convert(metricRequest.Query) +} + +func (ds *KairosDBDatasource) ParseQueryResult(results *remote.MetricQueryResults) *datasource.QueryResult { + + seriesSet := make([]*datasource.TimeSeries, 0) + + for _, result := range results.Results { + series := &datasource.TimeSeries{ + Name: result.Name, + Tags: result.GetTaggedGroup(), + } + + for _, dataPoint := range result.Values { + value := dataPoint[1] + + series.Points = append(series.Points, &datasource.Point{ + Timestamp: int64(dataPoint[0]), + Value: value, + }) + } + seriesSet = append(seriesSet, series) + } + + return &datasource.QueryResult{ + Series: seriesSet, + } +} diff --git a/pkg/datasource/datasource_test.go b/pkg/datasource/datasource_test.go new file mode 100644 index 00000000..69f2b579 --- /dev/null +++ b/pkg/datasource/datasource_test.go @@ -0,0 +1,258 @@ +package datasource_test + +import ( + "context" + "encoding/json" + "github.com/golang/mock/gomock" + grafana "github.com/grafana/grafana_plugin_model/go/datasource" + "github.com/grafana/kairosdb-datasource/pkg/datasource" + "github.com/grafana/kairosdb-datasource/pkg/datasource/internal/mock_datasource" + "github.com/grafana/kairosdb-datasource/pkg/remote" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestDatasource_ParseQueryResult_SingleSeries(t *testing.T) { + ds := &datasource.KairosDBDatasource{} + + kairosResults := &remote.MetricQueryResults{ + Results: []*remote.MetricQueryResult{ + { + Name: "MetricA", + Values: []*remote.DataPoint{ + { + 1564682818000, 10.5, + }, + { + 1564682819000, 8.0, + }, + }, + }, + }, + } + + expectedResults := &grafana.QueryResult{ + Series: []*grafana.TimeSeries{ + { + Name: "MetricA", + Tags: map[string]string{}, + Points: []*grafana.Point{ + { + Timestamp: 1564682818000, + Value: 10.5, + }, + { + Timestamp: 1564682819000, + Value: 8.0, + }, + }, + }, + }, + } + + actualResults := ds.ParseQueryResult(kairosResults) + assert.Equal(t, expectedResults, actualResults) +} + +func TestDatasource_ParseQueryResult_MultipleSeries(t *testing.T) { + ds := &datasource.KairosDBDatasource{} + + kairosResults := 
&remote.MetricQueryResults{ + Results: []*remote.MetricQueryResult{ + { + Name: "MetricA", + GroupInfo: []*remote.GroupInfo{ + { + Name: "tag", + Tags: []string{"host", "pool"}, + Group: map[string]string{ + "host": "server1", + "data_center": "dc1", + }, + }, + }, + Values: []*remote.DataPoint{ + { + 1564682818000, 10.5, + }, + }, + }, + { + Name: "MetricA", + GroupInfo: []*remote.GroupInfo{ + { + Name: "tag", + Tags: []string{"host", "pool"}, + Group: map[string]string{ + "host": "server2", + "data_center": "dc2", + }, + }, + }, + Values: []*remote.DataPoint{ + { + 1564682818000, 10.5, + }, + }, + }, + }, + } + + expectedResults := &grafana.QueryResult{ + Series: []*grafana.TimeSeries{ + { + Name: "MetricA", + Tags: map[string]string{ + "host": "server1", + "data_center": "dc1", + }, + Points: []*grafana.Point{ + { + Timestamp: 1564682818000, + Value: 10.5, + }, + }, + }, + { + Name: "MetricA", + Tags: map[string]string{ + "host": "server2", + "data_center": "dc2", + }, + Points: []*grafana.Point{ + { + Timestamp: 1564682818000, + Value: 10.5, + }, + }, + }, + }, + } + + actualResults := ds.ParseQueryResult(kairosResults) + assert.Equal(t, expectedResults, actualResults) +} + +func TestDatasource_Query(t *testing.T) { + ctrl := gomock.NewController(t) + defer ctrl.Finish() + + mockConverter := mock_datasource.NewMockMetricQueryConverter(ctrl) + mockClient := mock_datasource.NewMockKairosDBClient(ctrl) + + ds := datasource.NewKairosDBDatasource(mockClient, mockConverter) + + dsRequest := &grafana.DatasourceRequest{ + Datasource: &grafana.DatasourceInfo{}, + TimeRange: &grafana.TimeRange{ + FromEpochMs: 1564682808000, + ToEpochMs: 1564682828000, + }, + Queries: []*grafana.Query{ + { + RefId: "A", + ModelJson: toModelJson(&datasource.MetricQuery{ + Name: "MetricA", + }), + }, + { + RefId: "B", + ModelJson: toModelJson(&datasource.MetricQuery{ + Name: "MetricB", + }), + }, + }, + } + + mockConverter.EXPECT(). + Convert(gomock.Any()). + Return(&remote.MetricQuery{}, nil). + AnyTimes() + + expectedMetricRequest := &remote.MetricQueryRequest{ + StartAbsolute: 1564682808000, + EndAbsolute: 1564682828000, + Metrics: []*remote.MetricQuery{ + {}, {}, + }, + } + + mockClient.EXPECT(). + QueryMetrics(context.TODO(), dsRequest.Datasource, expectedMetricRequest). + Return([]*remote.MetricQueryResults{ + { + Results: []*remote.MetricQueryResult{ + { + Name: "MetricA", + Values: []*remote.DataPoint{ + { + 1564682814000, 5, + }, + }, + }, + }, + }, + { + Results: []*remote.MetricQueryResult{ + { + Name: "MetricB", + Values: []*remote.DataPoint{ + { + 1564682818000, 10.5, + }, + }, + }, + }, + }, + }, nil). 
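+		// one result set per metric query; Query correlates results back to RefIds A and B by index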
+ Times(1) + + response, err := ds.Query(context.TODO(), dsRequest) + + assert.Nil(t, err) + assert.Equal(t, &grafana.DatasourceResponse{ + Results: []*grafana.QueryResult{ + { + RefId: "A", + Series: []*grafana.TimeSeries{ + { + Name: "MetricA", + Tags: map[string]string{}, + Points: []*grafana.Point{ + { + Timestamp: 1564682814000, + Value: 5, + }, + }, + }, + }, + }, + { + RefId: "B", + Series: []*grafana.TimeSeries{ + { + Name: "MetricB", + Tags: map[string]string{}, + Points: []*grafana.Point{ + { + Timestamp: 1564682818000, + Value: 10.5, + }, + }, + }, + }, + }, + }, + }, response) +} + +func toModelJson(query *datasource.MetricQuery) string { + req := datasource.MetricRequest{ + Query: query, + } + rBytes, err := json.Marshal(req) + if err != nil { + panic("Failed to marshall metric request") + } + return string(rBytes) +} diff --git a/pkg/datasource/internal/mock_datasource/mock_converters.go b/pkg/datasource/internal/mock_datasource/mock_converters.go new file mode 100644 index 00000000..d9de2b92 --- /dev/null +++ b/pkg/datasource/internal/mock_datasource/mock_converters.go @@ -0,0 +1,164 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ./pkg/datasource/converters.go + +// Package mock_datasource is a generated GoMock package. +package mock_datasource + +import ( + gomock "github.com/golang/mock/gomock" + datasource "github.com/grafana/kairosdb-datasource/pkg/datasource" + remote "github.com/grafana/kairosdb-datasource/pkg/remote" + reflect "reflect" +) + +// MockMetricQueryConverter is a mock of MetricQueryConverter interface +type MockMetricQueryConverter struct { + ctrl *gomock.Controller + recorder *MockMetricQueryConverterMockRecorder +} + +// MockMetricQueryConverterMockRecorder is the mock recorder for MockMetricQueryConverter +type MockMetricQueryConverterMockRecorder struct { + mock *MockMetricQueryConverter +} + +// NewMockMetricQueryConverter creates a new mock instance +func NewMockMetricQueryConverter(ctrl *gomock.Controller) *MockMetricQueryConverter { + mock := &MockMetricQueryConverter{ctrl: ctrl} + mock.recorder = &MockMetricQueryConverterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockMetricQueryConverter) EXPECT() *MockMetricQueryConverterMockRecorder { + return m.recorder +} + +// Convert mocks base method +func (m *MockMetricQueryConverter) Convert(query *datasource.MetricQuery) (*remote.MetricQuery, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Convert", query) + ret0, _ := ret[0].(*remote.MetricQuery) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Convert indicates an expected call of Convert +func (mr *MockMetricQueryConverterMockRecorder) Convert(query interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Convert", reflect.TypeOf((*MockMetricQueryConverter)(nil).Convert), query) +} + +// MockGroupByConverter is a mock of GroupByConverter interface +type MockGroupByConverter struct { + ctrl *gomock.Controller + recorder *MockGroupByConverterMockRecorder +} + +// MockGroupByConverterMockRecorder is the mock recorder for MockGroupByConverter +type MockGroupByConverterMockRecorder struct { + mock *MockGroupByConverter +} + +// NewMockGroupByConverter creates a new mock instance +func NewMockGroupByConverter(ctrl *gomock.Controller) *MockGroupByConverter { + mock := &MockGroupByConverter{ctrl: ctrl} + mock.recorder = &MockGroupByConverterMockRecorder{mock} + return mock +} + +// EXPECT 
returns an object that allows the caller to indicate expected use +func (m *MockGroupByConverter) EXPECT() *MockGroupByConverterMockRecorder { + return m.recorder +} + +// Convert mocks base method +func (m *MockGroupByConverter) Convert(groupBy *datasource.GroupBy) ([]*remote.Grouper, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Convert", groupBy) + ret0, _ := ret[0].([]*remote.Grouper) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Convert indicates an expected call of Convert +func (mr *MockGroupByConverterMockRecorder) Convert(groupBy interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Convert", reflect.TypeOf((*MockGroupByConverter)(nil).Convert), groupBy) +} + +// MockAggregatorConverter is a mock of AggregatorConverter interface +type MockAggregatorConverter struct { + ctrl *gomock.Controller + recorder *MockAggregatorConverterMockRecorder +} + +// MockAggregatorConverterMockRecorder is the mock recorder for MockAggregatorConverter +type MockAggregatorConverterMockRecorder struct { + mock *MockAggregatorConverter +} + +// NewMockAggregatorConverter creates a new mock instance +func NewMockAggregatorConverter(ctrl *gomock.Controller) *MockAggregatorConverter { + mock := &MockAggregatorConverter{ctrl: ctrl} + mock.recorder = &MockAggregatorConverterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockAggregatorConverter) EXPECT() *MockAggregatorConverterMockRecorder { + return m.recorder +} + +// Convert mocks base method +func (m *MockAggregatorConverter) Convert(aggregator *datasource.Aggregator) (map[string]interface{}, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Convert", aggregator) + ret0, _ := ret[0].(map[string]interface{}) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Convert indicates an expected call of Convert +func (mr *MockAggregatorConverterMockRecorder) Convert(aggregator interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Convert", reflect.TypeOf((*MockAggregatorConverter)(nil).Convert), aggregator) +} + +// MockParameterConverter is a mock of ParameterConverter interface +type MockParameterConverter struct { + ctrl *gomock.Controller + recorder *MockParameterConverterMockRecorder +} + +// MockParameterConverterMockRecorder is the mock recorder for MockParameterConverter +type MockParameterConverterMockRecorder struct { + mock *MockParameterConverter +} + +// NewMockParameterConverter creates a new mock instance +func NewMockParameterConverter(ctrl *gomock.Controller) *MockParameterConverter { + mock := &MockParameterConverter{ctrl: ctrl} + mock.recorder = &MockParameterConverterMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockParameterConverter) EXPECT() *MockParameterConverterMockRecorder { + return m.recorder +} + +// Convert mocks base method +func (m *MockParameterConverter) Convert(param *datasource.AggregatorParameter) (map[string]interface{}, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Convert", param) + ret0, _ := ret[0].(map[string]interface{}) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Convert indicates an expected call of Convert +func (mr *MockParameterConverterMockRecorder) Convert(param interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Convert", 
reflect.TypeOf((*MockParameterConverter)(nil).Convert), param) +} diff --git a/pkg/datasource/internal/mock_datasource/mock_datasource.go b/pkg/datasource/internal/mock_datasource/mock_datasource.go new file mode 100644 index 00000000..79dc9d41 --- /dev/null +++ b/pkg/datasource/internal/mock_datasource/mock_datasource.go @@ -0,0 +1,51 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ./pkg/datasource/datasource.go + +// Package mock_datasource is a generated GoMock package. +package mock_datasource + +import ( + gomock "github.com/golang/mock/gomock" + datasource "github.com/grafana/grafana_plugin_model/go/datasource" + remote "github.com/grafana/kairosdb-datasource/pkg/remote" + context "golang.org/x/net/context" + reflect "reflect" +) + +// MockKairosDBClient is a mock of KairosDBClient interface +type MockKairosDBClient struct { + ctrl *gomock.Controller + recorder *MockKairosDBClientMockRecorder +} + +// MockKairosDBClientMockRecorder is the mock recorder for MockKairosDBClient +type MockKairosDBClientMockRecorder struct { + mock *MockKairosDBClient +} + +// NewMockKairosDBClient creates a new mock instance +func NewMockKairosDBClient(ctrl *gomock.Controller) *MockKairosDBClient { + mock := &MockKairosDBClient{ctrl: ctrl} + mock.recorder = &MockKairosDBClientMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockKairosDBClient) EXPECT() *MockKairosDBClientMockRecorder { + return m.recorder +} + +// QueryMetrics mocks base method +func (m *MockKairosDBClient) QueryMetrics(ctx context.Context, dsInfo *datasource.DatasourceInfo, request *remote.MetricQueryRequest) ([]*remote.MetricQueryResults, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "QueryMetrics", ctx, dsInfo, request) + ret0, _ := ret[0].([]*remote.MetricQueryResults) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// QueryMetrics indicates an expected call of QueryMetrics +func (mr *MockKairosDBClientMockRecorder) QueryMetrics(ctx, dsInfo, request interface{}) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "QueryMetrics", reflect.TypeOf((*MockKairosDBClient)(nil).QueryMetrics), ctx, dsInfo, request) +} diff --git a/pkg/datasource/models.go b/pkg/datasource/models.go new file mode 100644 index 00000000..34495c0c --- /dev/null +++ b/pkg/datasource/models.go @@ -0,0 +1,29 @@ +package datasource + +type MetricRequest struct { + Query *MetricQuery `json:"query"` + RefID string `json:"refId"` +} + +type MetricQuery struct { + Name string `json:"metricName"` + Aggregators []*Aggregator `json:"aggregators"` + GroupBy *GroupBy `json:"groupBy"` + Tags map[string][]string `json:"tags"` +} + +type GroupBy struct { + Tags []string `json:"tags"` +} + +type Aggregator struct { + Name string `json:"name"` + Parameters []*AggregatorParameter `json:"parameters"` +} + +type AggregatorParameter struct { + Name string `json:"name"` + Text string `json:"text"` + Type string `json:"type"` + Value string `json:"value"` +} diff --git a/pkg/datasource/models_test.go b/pkg/datasource/models_test.go new file mode 100644 index 00000000..223ac0af --- /dev/null +++ b/pkg/datasource/models_test.go @@ -0,0 +1,64 @@ +package datasource + +import ( + "encoding/json" + "github.com/stretchr/testify/assert" + "io/ioutil" + "testing" +) + +func TestMetricRequest(t *testing.T) { + expected := &MetricRequest{ + RefID: "A", + Query: &MetricQuery{ + Name: "abc.123", + Aggregators: []*Aggregator{ + { + Name: "sum", 
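+				// expected parameters mirror the "sum" aggregator defined in _testdata/ModelJson.json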
+ Parameters: []*AggregatorParameter{ + { + Name: "sampling", + Text: "align by", + Type: "alignment", + Value: "NONE", + }, + { + Name: "value", + Text: "every", + Type: "sampling", + Value: "1", + }, + { + Name: "unit", + Text: "unit", + Type: "sampling_unit", + Value: "HOURS", + }, + }, + }, + }, + Tags: map[string][]string{ + "datacenter": {}, + "host": {"foo", "foo2"}, + "customer": {"bar"}, + }, + GroupBy: &GroupBy{ + Tags: []string{ + "host", + "datacenter", + }, + }, + }, + } + + bytes, readError := ioutil.ReadFile("_testdata/ModelJson.json") + if readError != nil { + panic(readError) + } + + actual := &MetricRequest{} + parseError := json.Unmarshal(bytes, actual) + + assert.Nil(t, parseError, "Failed to unmarshal JSON: %v", parseError) + assert.Equal(t, expected, actual) +} diff --git a/pkg/logging/logger.go b/pkg/logging/logger.go new file mode 100644 index 00000000..1205960e --- /dev/null +++ b/pkg/logging/logger.go @@ -0,0 +1,13 @@ +package logging + +import "github.com/hashicorp/go-hclog" + +// Get returns a new hclog.Logger with the specified name. +// All logs will be sent to the Grafana server process. +func Get(name string) hclog.Logger { + return hclog.New(&hclog.LoggerOptions{ + Name: name, + Level: hclog.Trace, // Grafana server will filter logs + JSONFormat: true, + }) +} diff --git a/pkg/plugin.go b/pkg/plugin.go new file mode 100644 index 00000000..aed5a781 --- /dev/null +++ b/pkg/plugin.go @@ -0,0 +1,48 @@ +package main + +import ( + grafana "github.com/grafana/grafana_plugin_model/go/datasource" + "github.com/grafana/kairosdb-datasource/pkg/datasource" + "github.com/grafana/kairosdb-datasource/pkg/logging" + "github.com/grafana/kairosdb-datasource/pkg/remote" + "github.com/hashicorp/go-plugin" + "net/http" + "time" +) + +func main() { + logger := logging.Get("main") + + logger.Info("Running KairosDB backend datasource") + + // TODO support configuration of http client + kairosClient := remote.NewKairosDBClient(&http.Client{ + Timeout: time.Duration(time.Second * 30), + }) + + aggregatorConverter := datasource.NewAggregatorConverterImpl( + map[string]datasource.ParameterConverter{ + "alignment": &datasource.AlignmentParameterConverter{}, + "sampling": &datasource.SamplingParameterConverter{}, + "enum": &datasource.StringParameterConverter{}, + "any": &datasource.AnyParameterConverter{}, + }) + metricQueryConverter := datasource.NewMetricQueryConverterImpl(aggregatorConverter, &datasource.GroupByConverterImpl{}) + + plugin.Serve(&plugin.ServeConfig{ + + HandshakeConfig: plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "grafana_plugin_type", + MagicCookieValue: "datasource", + }, + Plugins: map[string]plugin.Plugin{ + "grafana-kairosdb-datasource": &grafana.DatasourcePluginImpl{ + Plugin: datasource.NewKairosDBDatasource(kairosClient, metricQueryConverter), + }, + }, + Logger: logger, + // A non-nil value here enables gRPC serving for this plugin... 
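+		// (the datasource embeds plugin.NetRPCUnsupportedPlugin, so gRPC is the only supported transport)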
+ GRPCServer: plugin.DefaultGRPCServer, + }) +} diff --git a/pkg/remote/_testdata/KairosDBErrorResponse.json b/pkg/remote/_testdata/KairosDBErrorResponse.json new file mode 100644 index 00000000..b76b7ffd --- /dev/null +++ b/pkg/remote/_testdata/KairosDBErrorResponse.json @@ -0,0 +1,6 @@ +{ + "errors": [ + "metrics[0].aggregate must be one of MIN,SUM,MAX,AVG,DEV", + "metrics[0].sampling.unit must be one of SECONDS,MINUTES,HOURS,DAYS,WEEKS,YEARS" + ] +} diff --git a/pkg/remote/_testdata/KairosDBRequest.json b/pkg/remote/_testdata/KairosDBRequest.json new file mode 100644 index 00000000..9467b054 --- /dev/null +++ b/pkg/remote/_testdata/KairosDBRequest.json @@ -0,0 +1,37 @@ +{ + "start_absolute": 1357023600000, + "end_absolute": 1357024600000, + "metrics": [ + { + "name": "abc.123", + "limit": 10000, + "tags": { + "host": [ + "foo", + "foo2" + ], + "customer": [ + "bar" + ] + }, + "aggregators": [ + { + "name": "sum", + "sampling": { + "value": 10, + "unit": "minutes" + } + } + ], + "group_by": [ + { + "name": "tag", + "tags": [ + "data_center", + "host" + ] + } + ] + } + ] +} diff --git a/pkg/remote/_testdata/KairosDBResponse.json b/pkg/remote/_testdata/KairosDBResponse.json new file mode 100644 index 00000000..05429788 --- /dev/null +++ b/pkg/remote/_testdata/KairosDBResponse.json @@ -0,0 +1,45 @@ +{ + "queries": [ + { + "sample_size": 14368, + "results": [ + { + "name": "abc.123", + "group_by": [ + { + "name": "type", + "type": "number" + }, + { + "name": "tag", + "tags": [ + "host" + ], + "group": { + "host": "server1" + } + } + ], + "tags": { + "host": [ + "server1" + ], + "customer": [ + "bar" + ] + }, + "values": [ + [ + 1364968800000, + 11019 + ], + [ + 1366351200000, + 2843 + ] + ] + } + ] + } + ] +} diff --git a/pkg/remote/client.go b/pkg/remote/client.go new file mode 100644 index 00000000..8eff1f43 --- /dev/null +++ b/pkg/remote/client.go @@ -0,0 +1,85 @@ +package remote + +import ( + "context" + "encoding/json" + "fmt" + "github.com/grafana/grafana_plugin_model/go/datasource" + "github.com/pkg/errors" + "golang.org/x/net/context/ctxhttp" + "io/ioutil" + "net/http" + "net/url" + "path" + "strings" +) + +type ResponseError struct { + Messages []string + Status int +} + +func (e *ResponseError) Error() string { + return fmt.Sprintf("KairosDB response error: status=%d, messages=[%v]", e.Status, strings.Join(e.Messages, ", ")) +} + +type KairosDBClient struct { + httpClient *http.Client +} + +func NewKairosDBClient(httpClient *http.Client) *KairosDBClient { + return &KairosDBClient{ + httpClient: httpClient, + } +} + +//TODO support authentication +func (client *KairosDBClient) QueryMetrics(ctx context.Context, dsInfo *datasource.DatasourceInfo, request *MetricQueryRequest) ([]*MetricQueryResults, error) { + httpRequest, err := client.buildHTTPRequest(dsInfo, request) + if err != nil { + return nil, err + } + + res, err := ctxhttp.Do(ctx, client.httpClient, httpRequest) + if err != nil { + return nil, errors.Wrap(err, "failed to execute HTTP request") + } + + defer res.Body.Close() + + resBody, err := ioutil.ReadAll(res.Body) + if err != nil { + return nil, errors.Wrap(err, "failed to read metric query response body") + } + + metricResponse := &MetricQueryResponse{} + err = json.Unmarshal(resBody, &metricResponse) + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return metricResponse.Queries, errors.Wrap(err, "failed to unmarshal metric query response") + } + + return nil, &ResponseError{ + Status: res.StatusCode, + Messages: metricResponse.Errors, + } +} + +func 
+func (client *KairosDBClient) buildHTTPRequest(dsInfo *datasource.DatasourceInfo, request *MetricQueryRequest) (*http.Request, error) {
+	reqBody, err := json.Marshal(request)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to marshal metric query request body")
+	}
+
+	kairosURL, err := url.Parse(dsInfo.Url)
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to parse datasource URL")
+	}
+	kairosURL.Path = path.Join(kairosURL.Path, "api/v1/datapoints/query")
+
+	httpRequest, err := http.NewRequest(http.MethodPost, kairosURL.String(), strings.NewReader(string(reqBody)))
+	if err != nil {
+		return nil, errors.Wrap(err, "failed to create HTTP request")
+	}
+
+	httpRequest.Header.Add("Content-Type", "application/json")
+
+	return httpRequest, nil
+}
diff --git a/pkg/remote/client_test.go b/pkg/remote/client_test.go
new file mode 100644
index 00000000..798ca42b
--- /dev/null
+++ b/pkg/remote/client_test.go
@@ -0,0 +1,156 @@
+package remote
+
+import (
+	"bytes"
+	"context"
+	"encoding/json"
+	"github.com/grafana/grafana_plugin_model/go/datasource"
+	"github.com/stretchr/testify/assert"
+	"io/ioutil"
+	"net/http"
+	"os"
+	"testing"
+)
+
+// MockTransport records the outgoing request and returns a canned response.
+type MockTransport struct {
+	response *http.Response
+	request  *http.Request
+}
+
+func (m *MockTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+	m.request = req
+	return m.response, nil
+}
+
+func TestMain(m *testing.M) {
+	setup()
+	code := m.Run()
+	os.Exit(code)
+}
+
+var kairosClient *KairosDBClient
+var mockTransport *MockTransport
+var dsInfo *datasource.DatasourceInfo
+
+var okResponsePayload []byte
+var errorResponsePayload []byte
+
+func init() {
+
+	var err error
+	okResponsePayload, err = ioutil.ReadFile("_testdata/KairosDBResponse.json")
+	if err != nil {
+		panic(err)
+	}
+
+	errorResponsePayload, err = ioutil.ReadFile("_testdata/KairosDBErrorResponse.json")
+	if err != nil {
+		panic(err)
+	}
+}
+
+func setup() {
+
+	mockTransport = new(MockTransport)
+	mockTransport.response = &http.Response{
+		StatusCode: 200,
+		Body:       ioutil.NopCloser(bytes.NewBuffer(okResponsePayload)),
+	}
+
+	httpClient := &http.Client{
+		Transport: mockTransport,
+	}
+
+	kairosClient = &KairosDBClient{
+		httpClient: httpClient,
+	}
+
+	dsInfo = &datasource.DatasourceInfo{
+		Url: "http://localhost/",
+	}
+}
+
+func TestQueryMetrics_urlWithTrailingSlash(t *testing.T) {
+
+	dsInfo.Url = "http://localhost/"
+
+	_, _ = kairosClient.QueryMetrics(context.TODO(), dsInfo, &MetricQueryRequest{})
+
+	request := mockTransport.request
+	assert.Equal(t, http.MethodPost, request.Method)
+	assert.Equal(t, "http://localhost/api/v1/datapoints/query", request.URL.String())
+}
+
+func TestQueryMetrics_urlWithNoTrailingSlash(t *testing.T) {
+
+	dsInfo.Url = "http://localhost"
+
+	_, _ = kairosClient.QueryMetrics(context.TODO(), dsInfo, &MetricQueryRequest{})
+
+	request := mockTransport.request
+	assert.Equal(t, http.MethodPost, request.Method)
+	assert.Equal(t, "http://localhost/api/v1/datapoints/query", request.URL.String())
+}
+
+func TestQueryMetrics_okResponse(t *testing.T) {
+
+	mockTransport.response = &http.Response{
+		StatusCode: 200,
+		Body:       ioutil.NopCloser(bytes.NewBuffer(okResponsePayload)),
+	}
+
+	expectedResponse := &MetricQueryResponse{}
+	parseErr := json.Unmarshal(okResponsePayload, expectedResponse)
+	assert.Nil(t, parseErr)
+
+	results, err := kairosClient.QueryMetrics(context.TODO(), dsInfo, &MetricQueryRequest{})
+
+	assert.Nil(t, err)
+	assert.Equal(t, expectedResponse.Queries, results)
+}
+
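+// KairosDB reports request validation failures with a non-2xx status; the client should surface them as a *ResponseError.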
+func TestQueryMetrics_errorResponse(t *testing.T) {
+
+	mockTransport.response = &http.Response{
+		StatusCode: 400,
+		Body:       ioutil.NopCloser(bytes.NewBuffer(errorResponsePayload)),
+	}
+
+	expectedError := &ResponseError{
+		Status: 400,
+		Messages: []string{
+			"metrics[0].aggregate must be one of MIN,SUM,MAX,AVG,DEV",
+			"metrics[0].sampling.unit must be one of SECONDS,MINUTES,HOURS,DAYS,WEEKS,YEARS",
+		},
+	}
+	results, err := kairosClient.QueryMetrics(context.TODO(), dsInfo, &MetricQueryRequest{})
+
+	assert.Nil(t, results)
+	assert.Equal(t, expectedError, err)
+}
+
+func TestResponseError_Error(t *testing.T) {
+	err := &ResponseError{
+		Status: 400,
+		Messages: []string{
+			"error1",
+			"error2",
+		},
+	}
+
+	expectedMsg := "KairosDB response error: status=400, messages=[error1, error2]"
+
+	assert.Equal(t, expectedMsg, err.Error())
+}
+
+func TestResponseError_Error_WithNoMessages(t *testing.T) {
+	err := &ResponseError{
+		Status: 400,
+	}
+
+	expectedMsg := "KairosDB response error: status=400, messages=[]"
+
+	assert.Equal(t, expectedMsg, err.Error())
+}
diff --git a/pkg/remote/models.go b/pkg/remote/models.go
new file mode 100644
index 00000000..30f0a257
--- /dev/null
+++ b/pkg/remote/models.go
@@ -0,0 +1,63 @@
+package remote
+
+type MetricQueryRequest struct {
+	StartAbsolute int64          `json:"start_absolute"`
+	EndAbsolute   int64          `json:"end_absolute"`
+	Metrics       []*MetricQuery `json:"metrics"`
+}
+
+type MetricQuery struct {
+	Name        string                   `json:"name"`
+	Aggregators []map[string]interface{} `json:"aggregators,omitempty"`
+	GroupBy     []*Grouper               `json:"group_by,omitempty"`
+	Tags        map[string][]string      `json:"tags,omitempty"`
+}
+
+type Sampling struct {
+	Value int64  `json:"value"`
+	Unit  string `json:"unit"`
+}
+
+//TODO support group by time and value
+type Grouper struct {
+	Name string   `json:"name"`
+	Tags []string `json:"tags"`
+}
+
+type MetricQueryResponse struct {
+	Queries []*MetricQueryResults `json:"queries,omitempty"`
+	Errors  []string              `json:"errors,omitempty"`
+}
+
+type MetricQueryResults struct {
+	Results []*MetricQueryResult `json:"results"`
+}
+
+type MetricQueryResult struct {
+	Name      string              `json:"name"`
+	GroupInfo []*GroupInfo        `json:"group_by,omitempty"`
+	Tags      map[string][]string `json:"tags,omitempty"`
+	Values    []*DataPoint        `json:"values"`
+}
+
+// GetTaggedGroup returns the tag-based group of this result, or an empty map when the result was not grouped by tag.
+func (r *MetricQueryResult) GetTaggedGroup() map[string]string {
+	if r.GroupInfo != nil {
+		for _, groupInfo := range r.GroupInfo {
+			if groupInfo.Name == "tag" {
+				return groupInfo.Group
+			}
+		}
+	}
+
+	return map[string]string{}
+}
+
+type GroupInfo struct {
+	Name  string            `json:"name"`
+	Tags  []string          `json:"tags,omitempty"`
+	Group map[string]string `json:"group,omitempty"`
+}
+
+// DataPoint is a [timestamp-in-milliseconds, value] pair as returned by KairosDB.
+type DataPoint [2]float64
diff --git a/pkg/remote/models_test.go b/pkg/remote/models_test.go
new file mode 100644
index 00000000..7b010a57
--- /dev/null
+++ b/pkg/remote/models_test.go
@@ -0,0 +1,159 @@
+package remote
+
+import (
+	"encoding/json"
+	"github.com/stretchr/testify/assert"
+	"io/ioutil"
+	"testing"
+)
+
+func TestKairosDBRequest(t *testing.T) {
+	expected := &MetricQueryRequest{
+		StartAbsolute: 1357023600000,
+		EndAbsolute:   1357024600000,
+		Metrics: []*MetricQuery{
+			{
+				Name: "abc.123",
+				Aggregators: []map[string]interface{}{
+					{
+						"name": "sum",
+						"sampling": map[string]interface{}{
+							"value": 10.0, // JSON numbers decode into float64 when the target is interface{}
+							"unit":  "minutes",
+						},
+					},
+				},
+				Tags: map[string][]string{
+					"host":     {"foo", "foo2"},
+					"customer": {"bar"},
+				},
+				GroupBy: []*Grouper{
+					{
+						Name: "tag",
+						Tags: []string{
+							"data_center",
+							"host",
+						},
+					},
+				},
+			},
+		},
+	}
+
+	bytes, readError := ioutil.ReadFile("_testdata/KairosDBRequest.json")
+	if readError != nil {
+		panic(readError)
+	}
+
+	actual := &MetricQueryRequest{}
+	parseError := json.Unmarshal(bytes, actual)
+
+	assert.Nil(t, parseError, "Failed to unmarshal JSON: %v", parseError)
+	assert.Equal(t, expected, actual)
+}
+
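+// Decoding the response fixture should reproduce the nested group_by information.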
+func TestKairosDBResponse(t *testing.T) {
+	expected := &MetricQueryResponse{
+		Queries: []*MetricQueryResults{
+			{
+				Results: []*MetricQueryResult{
+					{
+						Name: "abc.123",
+						GroupInfo: []*GroupInfo{
+							{
+								Name: "type",
+							},
+							{
+								Name: "tag",
+								Tags: []string{"host"},
+								Group: map[string]string{
+									"host": "server1",
+								},
+							},
+						},
+						Tags: map[string][]string{
+							"host":     {"server1"},
+							"customer": {"bar"},
+						},
+						Values: []*DataPoint{
+							{
+								1364968800000,
+								11019,
+							},
+							{
+								1366351200000,
+								2843,
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	bytes, readError := ioutil.ReadFile("_testdata/KairosDBResponse.json")
+	if readError != nil {
+		panic(readError)
+	}
+
+	actual := &MetricQueryResponse{}
+	parseError := json.Unmarshal(bytes, actual)
+
+	assert.Nil(t, parseError, "Failed to unmarshal JSON: %v", parseError)
+	assert.Equal(t, expected, actual)
+}
+
+func TestKairosDBErrorResponse(t *testing.T) {
+	expected := &MetricQueryResponse{
+		Errors: []string{
+			"metrics[0].aggregate must be one of MIN,SUM,MAX,AVG,DEV",
+			"metrics[0].sampling.unit must be one of SECONDS,MINUTES,HOURS,DAYS,WEEKS,YEARS",
+		},
+	}
+
+	bytes, readError := ioutil.ReadFile("_testdata/KairosDBErrorResponse.json")
+	if readError != nil {
+		panic(readError)
+	}
+
+	actual := &MetricQueryResponse{}
+	parseError := json.Unmarshal(bytes, actual)
+
+	assert.Nil(t, parseError, "Failed to unmarshal JSON: %v", parseError)
+	assert.Equal(t, expected, actual)
+}
+
+func TestMetricQueryResult_GetTaggedGroup_nilGroupInfo(t *testing.T) {
+	result := MetricQueryResult{
+		Name: "Foo",
+		Values: []*DataPoint{
+			{0, 0},
+		},
+	}
+
+	assert.Equal(t, map[string]string{}, result.GetTaggedGroup())
+}
+
+func TestMetricQueryResult_GetTaggedGroup_withTagGroup(t *testing.T) {
+	expectedGroup := map[string]string{
+		"host":     "server1",
+		"customer": "foo",
+	}
+
+	result := MetricQueryResult{
+		Name: "Foo",
+		Values: []*DataPoint{
+			{0, 0},
+		},
+		GroupInfo: []*GroupInfo{
+			{
+				Name:  "tag",
+				Tags:  []string{"host", "customer"},
+				Group: expectedGroup,
+			},
+		},
+	}
+
+	assert.Equal(t, expectedGroup, result.GetTaggedGroup())
+}
diff --git a/plugin.json b/plugin.json
index ba7915e5..96c5bbc8 100644
--- a/plugin.json
+++ b/plugin.json
@@ -4,6 +4,9 @@
   "type": "datasource",
   "metrics": true,
   "annotations": false,
+  "backend": true,
+  "alerting": true,
+  "executable": "grafana-kairosdb-datasource",
   "staticRoot": ".",
   "info": {
     "description": "datasource plugin for KairosDB",
diff --git a/src/beans/aggregators/rate_aggregator.ts b/src/beans/aggregators/rate_aggregator.ts
index b057db5b..e037d8f8 100644
--- a/src/beans/aggregators/rate_aggregator.ts
+++ b/src/beans/aggregators/rate_aggregator.ts
@@ -1,6 +1,4 @@
 import {Aggregator} from "./aggregator";
-
-import {AlignmentAggregatorParameter} from "./parameters/alignment_aggregator_parameter";
 import {EnumAggregatorParameter} from "./parameters/enum_aggregator_parameter";
 import {TimeUnit} from "./utils";
 
@@ -8,7 +6,6 @@ export class RateAggregator extends Aggregator {
     constructor() {
         super("rate");
         this.parameters = this.parameters.concat([
-            new AlignmentAggregatorParameter(),
             new EnumAggregatorParameter("unit", TimeUnit, "every")
         ]);
     }
 }
diff --git a/src/beans/aggregators/sampler_aggregator.ts b/src/beans/aggregators/sampler_aggregator.ts
index 72d3e6c2..090772cc 100644
--- a/src/beans/aggregators/sampler_aggregator.ts
+++ b/src/beans/aggregators/sampler_aggregator.ts
@@ -1,9 +1,12 @@
 import {Aggregator} from "./aggregator";
-import {AnyAggregatorParameter} from "./parameters/any_aggregator_parameter";
+import {EnumAggregatorParameter} from "./parameters/enum_aggregator_parameter";
+import {TimeUnit} from "./utils";
 
 export class SamplerAggregator extends Aggregator {
     constructor() {
         super("sampler");
-        this.parameters = this.parameters.concat([new AnyAggregatorParameter("samplingUnit", "every")]);
+        this.parameters = this.parameters.concat([
+            new EnumAggregatorParameter("unit", TimeUnit, "every")
+        ]);
     }
 }