From 735b3f313f78be8d6ca7fda2034dc4c5916835e0 Mon Sep 17 00:00:00 2001
From: Aaron Forster
Date: Sun, 26 Nov 2017 06:23:48 -0800
Subject: [PATCH] Restore data sources (#11)

Fixes #9, see it for the bug explanation.

Commits included in the changeset:

* Updated do-restore to switch on restore type like do-backup.
* Beginning of datasource restore. Changed the output file format for datasources to make them easier to match on importing.
* Undid the datasource filename change.
* Working import of datasources.
* Cleaned up a lot of garbage.
* Added some default output so that we know the command has completed.
* Removed the default "blah completed" messages. Added a space to separate native from third-party imports and sorted them.
* Fixed an issue where the restore wasn't actually happening. Updated some log messages for clarity.
* Started some unit tests with github.com/jarcoal/httpmock. Updated .gitignore so that JSON files within the testdata directory are no longer ignored.
* Updated order of imports.
---
 .gitignore                              |   5 +-
 do-restore.go                           | 137 +++++++-
 do-restore_test.go                      | 164 ++++++++++
 main.go                                 |   2 +
 testdata/app-team-roll-up-start.db.json | 405 ++++++++++++++++++++++++
 testdata/promartheus-test.ds.1.json     |  16 +
 testdata/prometheus-test.ds.1.json      |  16 +
 7 files changed, 740 insertions(+), 5 deletions(-)
 create mode 100644 do-restore_test.go
 create mode 100644 testdata/app-team-roll-up-start.db.json
 create mode 100644 testdata/promartheus-test.ds.1.json
 create mode 100644 testdata/prometheus-test.ds.1.json

diff --git a/.gitignore b/.gitignore
index 77e3f41..d069279 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,10 +25,13 @@ _testmain.go
 *.json
 /cmd/grafana-backup/grafana-backup
-
 # Ignored the compiled binary
 grafana-backup
 
+# Do not ignore the json files in the testdata directory
+!/testdata/*.json
+
+
 # Created by https://www.gitignore.io/api/go,eclipse,intellij+all
 
 ### Eclipse ###
diff --git a/do-restore.go b/do-restore.go
index 62b3dde..913f97e 100644
--- a/do-restore.go
+++ b/do-restore.go
@@ -19,25 +19,78 @@ package main
 
 import (
+	"encoding/json"
 	"fmt"
 	"io/ioutil"
 	"os"
+	"regexp"
 	"strings"
+
+	"github.com/grafana-tools/sdk"
 )
 
+// doRestore triggers a restore of the requested object types.
 func doRestore(opts ...option) {
 	var (
 		cmd = initCommand(opts...)
-		rawBoard []byte
-		err      error
 	)
+
+	// TODO: apply-for=auto doesn't make much sense in the context of restore yet.
+	// An actual hierarchical restore probably isn't feasible until we get better at parsing the JSON:
+	// there is no guarantee that a datasource filename is exactly what we expect.
+	if cmd.applyHierarchically {
+		restoreDashboards(cmd)
+		restoreDatasources(cmd)
+		restoreUsers(cmd)
+		return
+	}
+	if cmd.applyForBoards {
+		restoreDashboards(cmd)
+	}
+	if cmd.applyForDs {
+		restoreDatasources(cmd)
+	}
+	if cmd.applyForUsers {
+		restoreUsers(cmd)
+	}
+}
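As background for the hierarchical TODO above: one way restoreDashboards could discover which datasources a dashboard refers to is to decode only the panel "datasource" fields out of the raw board JSON, collect the names, and hand them to restoreDatasources when cmd.applyHierarchically is set. The helper below is an illustrative sketch, not part of this patch; the field layout (top-level "rows", each containing "panels") follows testdata/app-team-roll-up-start.db.json, the name datasourcesFromBoard is made up here, and the function only needs the encoding/json import added above.

// datasourcesFromBoard collects the names of the datasources referenced by the panels of a raw dashboard.
func datasourcesFromBoard(rawBoard []byte) (map[string]bool, error) {
	var board struct {
		Rows []struct {
			Panels []struct {
				Datasource *string `json:"datasource"`
			} `json:"panels"`
		} `json:"rows"`
	}
	if err := json.Unmarshal(rawBoard, &board); err != nil {
		return nil, err
	}
	found := make(map[string]bool)
	for _, row := range board.Rows {
		for _, panel := range row.Panels {
			if panel.Datasource != nil && *panel.Datasource != "" {
				found[*panel.Datasource] = true
			}
		}
	}
	return found, nil
}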
+
+// restoreDashboards restores all dashboard files; currently those are the files whose names end in .db.json.
+// Hierarchical restore of the datasources referenced by a dashboard is disabled until it can be implemented properly.
+func restoreDashboards(cmd *command) {
+	var (
+		rawBoard []byte
+		err      error
+	)
+
 	for _, filename := range cmd.filenames {
-		if strings.HasSuffix(filename, ".json") {
+		if strings.HasSuffix(filename, ".db.json") {
 			if rawBoard, err = ioutil.ReadFile(filename); err != nil {
 				fmt.Fprintf(os.Stderr, "error on read %s", filename)
 				continue
 			}
+			// TODO: apply the dashboard match filters (title, tags, starred) here.
+
 			if err = cmd.grafana.SetRawDashboard(rawBoard); err != nil {
 				fmt.Fprintf(os.Stderr, "error on importing dashboard from %s", filename)
 				continue
@@ -45,6 +98,82 @@ func doRestore(opts ...option) {
 			if cmd.verbose {
 				fmt.Printf("Dashboard restored from %s.\n", filename)
 			}
 		}
+	}
+
+	// Disabling the hierarchical functionality until it can be implemented properly.
+	//if cmd.applyHierarchically {
+	//	restoreDatasources(cmd)
+	//}
+}
+
+// restoreDatasources restores all datasource files; currently those are the files whose names match
+// \.ds\.[0-9]+\.json, for example prometheus-test.ds.1.json.
+func restoreDatasources(cmd *command) {
+	var (
+		rawDS []byte
+		err   error
+	)
+
+	pattern := regexp.MustCompile(`\.ds\.[0-9]+\.json$`)
+
+	for _, filename := range cmd.filenames {
+		if pattern.MatchString(filename) {
+			if rawDS, err = ioutil.ReadFile(filename); err != nil {
+				fmt.Fprintf(os.Stderr, "error on read %s", filename)
+				continue
+			}
+
+			// TODO: most of this should probably be pushed upstream into the Grafana SDK as a CreateRawDatasource function.
+			// Adapted from SetRawDashboard.
+			var (
+				resp  sdk.StatusMessage
+				plain sdk.Datasource
+			)
+
+			if err = json.Unmarshal(rawDS, &plain); err != nil {
+				fmt.Fprintf(os.Stderr, "Error unmarshalling datasource from file %s: %s\n", filename, err)
+				continue
+			}
+
+			// TODO: Check whether the datasource already exists and use the correct method, or throw an error on update unless --force is specified.
+			resp, err = cmd.grafana.CreateDatasource(plain)
+
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "Error importing datasource from %s: %s\n", filename, err)
+				continue
+			}
+
+			if *resp.Message == "Data source with same name already exists" {
+				//TODO: Pull the datasource name out of the file and report it in this message.
+				fmt.Fprintf(os.Stderr, "A datasource with the same name as the one in %s already exists.\n", filename)
+				continue
+			} else if *resp.Message != "Datasource added" {
+				fmt.Fprintf(os.Stderr, "Error importing datasource from %s: %s\n", filename, *resp.Message)
+				continue
+			}
+
+			if cmd.verbose {
+				fmt.Printf("Datasource restored from %s.\n", filename)
+			}
+		}
+	}
+}
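The TODO above about checking whether a datasource already exists could be handled roughly as sketched below. This is illustrative only, not part of this patch: it assumes the grafana-tools/sdk client exposes GetAllDatasources and UpdateDatasource alongside the CreateDatasource call used above, and cmd.force is a hypothetical field standing in for the --force flag.

// createOrUpdateDatasource creates the datasource, or updates it in place when a datasource
// with the same name already exists and force is requested.
func createOrUpdateDatasource(cmd *command, plain sdk.Datasource) (sdk.StatusMessage, error) {
	existing, err := cmd.grafana.GetAllDatasources()
	if err != nil {
		return sdk.StatusMessage{}, err
	}
	for _, ds := range existing {
		if ds.Name == plain.Name {
			if !cmd.force {
				return sdk.StatusMessage{}, fmt.Errorf("datasource %q already exists; use --force to overwrite it", plain.Name)
			}
			// Reuse the existing ID so the update targets the datasource that is already there.
			plain.ID = ds.ID
			return cmd.grafana.UpdateDatasource(plain)
		}
	}
	return cmd.grafana.CreateDatasource(plain)
}

restoreDatasources could then call such a helper instead of calling CreateDatasource directly and inspecting the status message.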
+
+// restoreUsers is not yet implemented.
+func restoreUsers(cmd *command) {
+	if cmd.verbose {
+		fmt.Fprintln(os.Stderr, "Restoring users is not yet implemented!")
 	}
 }
diff --git a/do-restore_test.go b/do-restore_test.go
new file mode 100644
index 0000000..3965932
--- /dev/null
+++ b/do-restore_test.go
@@ -0,0 +1,164 @@
+package main
+
+import (
+	"bytes"
+	"fmt"
+	"net/http"
+	"strings"
+	"testing"
+
+	"github.com/jarcoal/httpmock"
+)
+
+// testresults tracks which command options were exercised during a test run.
+type testresults struct {
+	serverInstanceCalled *bool
+	applyForCalled       *bool
+	matchFilenameCalled  *bool
+}
+
+// newTestresults returns a testresults whose fields each point at their own false value,
+// so flipping one flag cannot affect the others.
+func newTestresults() testresults {
+	return testresults{
+		serverInstanceCalled: retFalse(),
+		applyForCalled:       retFalse(),
+		matchFilenameCalled:  retFalse(),
+	}
+}
+
+// retFalse returns a pointer to a freshly allocated false.
+func retFalse() *bool {
+	fals := false
+	return &fals
+}
+
+// retTrue returns a pointer to a freshly allocated true.
+func retTrue() *bool {
+	tru := true
+	return &tru
+}
+
+func TestRestoreDashboards(t *testing.T) {
+	t.Log("TestRestoreDashboards not yet implemented!")
+}
+
+//TODO: Create multiple tests which cover things like sending multiple files.
+func TestRestoreDatasources(t *testing.T) {
+
+	// These flag globals are defined elsewhere in this package; only the ones relevant to this test are overridden here.
+	*flagServerURL = "http://noserver.nodomain.com:3123"
+	*flagServerKey = "thisisnotreallyanapikey"
+	*flagApplyFor = "datasources"
+
+	argCommand = "restore"
+	argPath = "testdata/prometheus-test.ds.1.json"
+
+	// For developing tests. Both of these cause this test to fail.
+	//argPath = "testdata/*.1.json"
+	//argPath = "testdata/promartheus-test.ds.1.json"
+
+	// Some variables to track the results of the test.
+
+	// Check the Accept header.
+	acceptCorrect := false
+	// Check for some expected text in the POST body.
+	bodyCorrect := false
+	// Track how many times the API was called.
+	numRequests := 0
+	// Were any requests made to other URIs?
+	wrongUriRequests := false
+
+	// Set up httpmock.
+	httpmock.Activate()
+	defer httpmock.DeactivateAndReset()
+
+	//TODO: Break this up into multiple functions so that the NoResponder doesn't cause us to fail the Accept header, body, etc.
+	// Create a responder which will respond with valid JSON and check what was posted to us for validity.
+	httpmock.RegisterResponder("POST", *flagServerURL+"/api/datasources",
+		func(req *http.Request) (*http.Response, error) {
+
+			numRequests++
+
+			if strings.Contains(req.Header.Get("Accept"), "application/json") {
+				acceptCorrect = true
+			}
+
+			//TODO: Expand this to unmarshal the JSON and check specific fields for specific values.
+ + // Get a string out of the io.ReadCloser + buf := new(bytes.Buffer) + buf.ReadFrom(req.Body) + postBody := buf.String() // Does a complete copy of the bytes in the buffer. + + if strings.Contains(postBody, "prometheus-test") { + bodyCorrect = true + } + + // Uncomment for troubleshooting. + //fmt.Printf("Request headers: \n%v\n", req.Header) + // + //fmt.Printf("Request body: \n%s\n", postBody) + + return httpmock.NewStringResponse(409, `{ "message": "This response is from the mocking framework!" }`), nil + + //TODO: Figure out how to make sure that do-restore is throwing an error when we return anything other than "Datasource added" + }, + ) + + httpmock.RegisterNoResponder( + func(req *http.Request) (*http.Response, error) { + + wrongUriRequests = true + + fmt.Printf("Unexpected Request: \n%v\n", req) + + //fmt.Printf("Request headers: \n%v\n", req.Header) + // + //fmt.Printf("Request body: \n%v\n", req.Body) + + return httpmock.NewStringResponse(500, `{ "message": "Unexpected request" }`), nil + }, + ) + + doRestore(serverInstance, applyFor, matchFilename) + + if acceptCorrect != true { + t.Error("Accept header was invalid.") + //t.Fail() + } + + if bodyCorrect != true { + t.Error("Expected text not found in the POST body.") + //t.Fail() + } + + if numRequests != 1 { + t.Errorf("The /api/datasources URI was called an incorrect number of times. Actual requests %d", numRequests) + } + + if wrongUriRequests != false { + t.Error("Request made to an unexpected URI. See the log for details.") + } +} + +//TODO: Change t.Log to t.Error when ready to implement this. +func TestRestoreUsers(t *testing.T) { + t.Log("Test Restore Users not yet implemented because restoring users is not yet implemented.") +} diff --git a/main.go b/main.go index dc7ae50..3915232 100644 --- a/main.go +++ b/main.go @@ -108,9 +108,11 @@ func main() { case "config-set": // TBD // doConfigSet() + fmt.Fprintln(os.Stderr, "Command config-set not yet implemented!") case "config-get": // TBD // doConfigGet() + fmt.Fprintln(os.Stderr, "Command config-get not yet implemented!") default: fmt.Fprintf(os.Stderr, fmt.Sprintf("unknown command: %s\n\n", args[0])) printUsage() diff --git a/testdata/app-team-roll-up-start.db.json b/testdata/app-team-roll-up-start.db.json new file mode 100644 index 0000000..b99f649 --- /dev/null +++ b/testdata/app-team-roll-up-start.db.json @@ -0,0 +1,405 @@ +{ + "annotations": { + "list": [] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "hideControls": false, + "id": 17, + "links": [], + "rows": [ + { + "collapse": false, + "height": 355, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "prometheus-test", + "fill": 1, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "( \t100 \t* \t( \t\t1 \t\t- \t\tavg( \t\t\tirate( \t\t\t\tnode_cpu{job=\"node\",mode=\"idle\",Team=~\"$Team\"}[5m] \t\t\t) \t\t) BY (instance) \t) )", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ instance }}", + "metric": "", + "refId": "A", + "step": 2 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + 
"title": "CPU Utilization", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "", + "logBase": 1, + "max": "100", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": false, + "title": "CPU Utilization All", + "titleSize": "h6" + }, + { + "collapse": false, + "height": 50, + "panels": [ + { + "content": "# CPU Utilization by Environment", + "id": 6, + "links": [], + "mode": "markdown", + "span": 12, + "title": "", + "type": "text" + } + ], + "repeat": null, + "repeatIteration": null, + "repeatRowId": null, + "showTitle": false, + "title": "CPU by Environment Title", + "titleSize": "h6" + }, + { + "collapse": false, + "height": 319, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "prometheus-test", + "fill": 1, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "scopedVars": { + "Environment": { + "selected": false, + "text": "Prod", + "value": "Prod" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "( \t100 \t* \t( \t\t1 \t\t- \t\tavg( \t\t\tirate( \t\t\t\tnode_cpu{job=\"node\",mode=\"idle\",Team=~\"$Team\", environment=~\"$Environment\"}[5m] \t\t\t) \t\t) BY (instance, environment) \t) )", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ instance }}", + "metric": "", + "refId": "A", + "step": 2 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "CPU Utilization", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "", + "logBase": 1, + "max": "100", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": "Environment", + "repeatIteration": null, + "repeatRowId": null, + "showTitle": true, + "title": "$Environment Servers CPU Utilization", + "titleSize": "h6" + }, + { + "collapse": false, + "height": 319, + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "prometheus-test", + "fill": 1, + "id": 7, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "scopedVars": { + "Environment": { + "selected": false, + "text": "Test", + "value": "Test" + } + }, + "seriesOverrides": [], + "spaceLength": 10, + "span": 12, + "stack": false, + "steppedLine": false, + "targets": [ + { + "expr": "( \t100 \t* \t( \t\t1 \t\t- \t\tavg( \t\t\tirate( 
\t\t\t\tnode_cpu{job=\"node\",mode=\"idle\",Team=~\"$Team\", environment=~\"$Environment\"}[5m] \t\t\t) \t\t) BY (instance, environment) \t) )", + "format": "time_series", + "interval": "", + "intervalFactor": 2, + "legendFormat": "{{ instance }}", + "metric": "", + "refId": "A", + "step": 2 + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "$Environment CPU Utilization", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "", + "logBase": 1, + "max": "100", + "min": "0", + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + } + ], + "repeat": null, + "repeatIteration": 1511203038099, + "repeatRowId": 3, + "showTitle": true, + "title": "$Environment Servers CPU Utilization", + "titleSize": "h6" + } + ], + "schemaVersion": 14, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "allValue": null, + "current": { + "text": "ISLab", + "value": "ISLab" + }, + "datasource": "prometheus-test", + "hide": 0, + "includeAll": false, + "label": "", + "multi": false, + "name": "Team", + "options": [], + "query": "label_values(node_boot_time, Team)", + "refresh": 2, + "regex": "", + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": { + "text": "All", + "value": [ + "$__all" + ] + }, + "datasource": "prometheus-test", + "hide": 0, + "includeAll": true, + "label": "", + "multi": true, + "name": "Environment", + "options": [], + "query": "label_values(node_boot_time, environment)", + "refresh": 2, + "regex": "", + "sort": 0, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + } + ] + }, + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "App Team Roll Up Start", + "version": 1 +} diff --git a/testdata/promartheus-test.ds.1.json b/testdata/promartheus-test.ds.1.json new file mode 100644 index 0000000..10ab80f --- /dev/null +++ b/testdata/promartheus-test.ds.1.json @@ -0,0 +1,16 @@ +{ + "id": 2, + "orgId": 1, + "name": "promartheus-test", + "type": "prometheus", + "access": "direct", + "url": "http://promarheus-test.example.com:9090", + "password": "", + "user": "", + "database": "", + "basicAuth": false, + "basicAuthUser": "", + "basicAuthPassword": "", + "isDefault": false, + "jsonData": {} +} diff --git a/testdata/prometheus-test.ds.1.json b/testdata/prometheus-test.ds.1.json new file mode 100644 index 0000000..efeca0c --- /dev/null +++ b/testdata/prometheus-test.ds.1.json @@ -0,0 +1,16 @@ +{ + "id": 2, + "orgId": 1, + "name": "prometheus-test", + "type": "prometheus", + "access": "direct", + "url": "http://prometheus-test.example.com:9090", + "password": "", + "user": "", + "database": "", + "basicAuth": false, + "basicAuthUser": "", + "basicAuthPassword": "", + "isDefault": false, + "jsonData": {} +}
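Following up on the TODOs in do-restore_test.go, a success-path test might look roughly like the sketch below. It is illustrative only, not part of this patch: it reuses the flag and argument globals exactly as TestRestoreDatasources does, the test name TestRestoreDatasourcesSuccess is made up, and the test file would additionally need "encoding/json" and the github.com/grafana-tools/sdk package in its imports.

func TestRestoreDatasourcesSuccess(t *testing.T) {
	*flagServerURL = "http://noserver.nodomain.com:3123"
	*flagServerKey = "thisisnotreallyanapikey"
	*flagApplyFor = "datasources"
	argCommand = "restore"
	argPath = "testdata/prometheus-test.ds.1.json"

	numRequests := 0
	postedName := ""

	httpmock.Activate()
	defer httpmock.DeactivateAndReset()

	httpmock.RegisterResponder("POST", *flagServerURL+"/api/datasources",
		func(req *http.Request) (*http.Response, error) {
			numRequests++
			// Decode the posted body so specific fields can be checked, as the TODO in
			// TestRestoreDatasources suggests.
			var posted sdk.Datasource
			if err := json.NewDecoder(req.Body).Decode(&posted); err != nil {
				return httpmock.NewStringResponse(400, `{ "message": "bad request" }`), nil
			}
			postedName = posted.Name
			// Answer the way a successful datasource creation does.
			return httpmock.NewStringResponse(200, `{ "message": "Datasource added" }`), nil
		},
	)

	doRestore(serverInstance, applyFor, matchFilename)

	if numRequests != 1 {
		t.Errorf("expected exactly one POST to /api/datasources, got %d", numRequests)
	}
	if postedName != "prometheus-test" {
		t.Errorf("expected the posted datasource to be named prometheus-test, got %q", postedName)
	}
}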