Commit

feat: add a test for the fastFromFlux parser and undo the changes from a previous PR (#768)
asalem1 authored Jun 17, 2022
1 parent 3f1b857 commit 302b5bf
Showing 4 changed files with 40 additions and 7 deletions.
2 changes: 1 addition & 1 deletion giraffe/package.json
@@ -1,6 +1,6 @@
{
"name": "@influxdata/giraffe",
"version": "2.28.0",
"version": "2.29.0",
"main": "dist/index.js",
"module": "dist/index.js",
"license": "MIT",
37 changes: 37 additions & 0 deletions giraffe/src/utils/fromFlux.test.ts
@@ -1339,4 +1339,41 @@ there",5

expect(table.getColumn('time')).toEqual([1610972402582])
})
it('should parse JSON data as part of the table body correctly', () => {
const CSV = `#group,false,false,true,true,false,false,true,true,true,true,true,true,true,true,true,true
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,string,string,string,string,string,string,string,string,string,string,string
#default,_result,,,,,,,,,,,,,,,
,result,table,_start,_stop,_time,_value,_field,_measurement,env,error,errorCode,errorType,orgID,ot_trace_sampled,role,source
,,0,2022-06-15T19:05:02.361293138Z,2022-06-15T20:05:02.361293138Z,2022-06-15T19:05:05.145623698Z,"{""request"":{""organization_id"":""fc0e1bf81e62ea27"",""jwttoken"":""REDACTED"",""compiler"":{""Now"":""2022-06-15T19:05:00Z"",""query"":""import ""influxdata/influxdb/monitor""\nimport ""experimental""\nimport ""influxdata/influxdb/v1""\n\ndata = from(bucket: ""Website Monitoring Bucket"")\n |\u003e range(start: -10m)\n |\u003e filter(fn: (r) =\u003e r[""_measurement""] == ""http_response"")\n |\u003e filter(fn: (r) =\u003e r[""_field""] == ""result_code"")\n |\u003e filter(fn: (r) =\u003e r[""method""] == ""HEAD"")\n |\u003e filter(fn: (r) =\u003e r[""result""] == ""success"")\n |\u003e filter(fn: (r) =\u003e r[""server""] == ""https://influxdata.com"")\n\noption task = {name: ""Name this Check"", every: 1m, offset: 0s}\n\ncheck = {_check_id: ""0854d93f9225d000"", _check_name: ""Name this Check"", _type: ""deadman"", tags: {}}\ncrit = (r) =\u003e r[""dead""]\nmessageFn = (r) =\u003e ""Check: $\{r._check_name} is: $\{r._level}""\n\ndata |\u003e v1[""fieldsAsCols""]() |\u003e monitor[""deadman""](t: experimental[""subDuration""](from: now(), d: 90s))\n |\u003e monitor[""check""](data: check, messageFn: messageFn, crit: crit)""},""source"":""tasks"",""parameters"":null,""UseIOx"":false,""compiler_type"":""flux""},""dialect"":{},""dialect_type"":""no-content""}",request,query_log,prod01-eu-central-1,"failed to initialize execute state: could not find bucket ""Website Monitoring Bucket""",not found,user,fc0e1bf81e62ea27,false,queryd-pull-internal,tasks\
`

const {table} = fastFromFlux(CSV)

const valueColumn = table.getColumn('_value')

const expectedValueColumn = [
'{"request":{"organization_id":"fc0e1bf81e62ea27","jwttoken":"REDACTED","compiler":{"Now":"2022-06-15T19:05:00Z","query":"import "influxdata/influxdb/monitor"\n' +
'import "experimental"\n' +
'import "influxdata/influxdb/v1"\n' +
'\n' +
'data = from(bucket: "Website Monitoring Bucket")\n' +
' |> range(start: -10m)\n' +
' |> filter(fn: (r) => r["_measurement"] == "http_response")\n' +
' |> filter(fn: (r) => r["_field"] == "result_code")\n' +
' |> filter(fn: (r) => r["method"] == "HEAD")\n' +
' |> filter(fn: (r) => r["result"] == "success")\n' +
' |> filter(fn: (r) => r["server"] == "https://influxdata.com")\n' +
'\n' +
'option task = {name: "Name this Check", every: 1m, offset: 0s}\n' +
'\n' +
'check = {_check_id: "0854d93f9225d000", _check_name: "Name this Check", _type: "deadman", tags: {}}\n' +
'crit = (r) => r["dead"]\n' +
'messageFn = (r) => "Check: ${r._check_name} is: ${r._level}"\n' +
'\n' +
'data |> v1["fieldsAsCols"]() |> monitor["deadman"](t: experimental["subDuration"](from: now(), d: 90s))\n' +
' |> monitor["check"](data: check, messageFn: messageFn, crit: crit)"},"source":"tasks","parameters":null,"UseIOx":false,"compiler_type":"flux"},"dialect":{},"dialect_type":"no-content"}',
]

expect(valueColumn).toEqual(expectedValueColumn)
})
})
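For readers of the new test: the `_value` cell in the CSV above is a quoted field, and annotated CSV (like RFC 4180) escapes a literal double quote by doubling it, which is why the raw cell is full of `""` pairs while the expected column strings contain single quotes. A minimal sketch of that unquoting rule, with a hypothetical helper name (this is not the parser's actual implementation):

```ts
// Illustrative only — fastFromFlux has its own chunked parser.
// Inside a quoted CSV field, a literal `"` is written as `""`; the parser is
// expected to collapse doubled quotes when it builds the `_value` column.
const unquoteCsvField = (field: string): string => {
  if (field.startsWith('"') && field.endsWith('"')) {
    // Drop the surrounding quotes, then collapse each `""` pair to `"`.
    return field.slice(1, -1).replace(/""/g, '"')
  }
  return field
}

// Example: the doubled quotes in the raw CSV cell above become single quotes,
// which is the shape the expectedValueColumn strings assert.
unquoteCsvField('"{""request"":{""source"":""tasks""}}"')
// => '{"request":{"source":"tasks"}}'
```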
6 changes: 1 addition & 5 deletions giraffe/src/utils/fromFlux.ts
@@ -4,7 +4,6 @@ import {Table, ColumnType, FluxDataType} from '../types'
import {assert} from './assert'
import {newTable} from './newTable'
import {RESULT} from '../constants/columnKeys'
-import {escapeCSVFieldWithSpecialCharacters} from './escapeCSVFieldWithSpecialCharacters'
export interface FromFluxResult {
error?: Error

@@ -142,10 +141,7 @@ export const fromFlux = (fluxCSV: string): FromFluxResult => {
* we want to add + 1 to the substring ending
*/
chunk = fluxCSV.substring(start, end + 1)
-const parsedChunkData = Papa.parse(chunk).data
-const splittedChunk: string[] = parsedChunkData.map(line =>
-  line.map(escapeCSVFieldWithSpecialCharacters).join(',')
-)
+const splittedChunk = chunk.split('\n')
const tableTexts = []
const annotationTexts = []

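The fromFlux.ts change above reverts the previous PR's Papa.parse round-trip and goes back to splitting each chunk on newlines. Either way, callers consume the parser as the new test does; a minimal usage sketch, assuming fastFromFlux is exported from fromFlux.ts alongside fromFlux (the sample CSV is made up for illustration):

```ts
import {fastFromFlux} from './fromFlux' // assumed export location, per the test file

// A tiny annotated-CSV response, structured like the one in the new test.
const csv = `#group,false,false,false,false
#datatype,string,long,dateTime:RFC3339,double
#default,_result,,,
,result,table,_time,_value
,,0,2022-06-15T19:05:05Z,42
`

const {table, error} = fastFromFlux(csv)

if (!error) {
  // Columns are read by key, as in the test above.
  const values = table.getColumn('_value') // e.g. [42]
  const times = table.getColumn('_time') // epoch-millisecond numbers
  console.log(values, times)
}
```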
2 changes: 1 addition & 1 deletion stories/package.json
@@ -1,6 +1,6 @@
{
"name": "@influxdata/giraffe-stories",
"version": "2.28.0",
"version": "2.29.0",
"license": "MIT",
"repository": {
"type": "git",
