Skip to content

Commit

Permalink
Merge pull request #440 from tigrisdata/main
Browse files Browse the repository at this point in the history
Alpha release
  • Loading branch information
adilansari authored Aug 18, 2022
2 parents 74a8a2c + d19ac5e commit 7dd268d
Show file tree
Hide file tree
Showing 20 changed files with 373 additions and 84 deletions.
64 changes: 64 additions & 0 deletions api/server/v1/marshaler.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import (
"encoding/json"
"io"
"net/url"
"strings"
"time"

"github.com/ajg/form"
Expand Down Expand Up @@ -386,6 +387,69 @@ func (x *CreateOrUpdateCollectionRequest) UnmarshalJSON(data []byte) error {
return nil
}

// UnmarshalJSON on QueryTimeSeriesMetricsRequest. Handles enum.
// UnmarshalJSON implements custom decoding for QueryTimeSeriesMetricsRequest.
// Plain fields are decoded directly; the enum-valued fields
// ("space_aggregation", "function") accept their names case-insensitively.
// Unknown keys and unknown enum names are ignored, leaving the target field
// unchanged (its zero/default value if never set).
func (x *QueryTimeSeriesMetricsRequest) UnmarshalJSON(data []byte) error {
	var fields map[string]jsoniter.RawMessage
	if err := jsoniter.Unmarshal(data, &fields); err != nil {
		return err
	}
	for name, raw := range fields {
		var err error
		switch name {
		case "db":
			err = jsoniter.Unmarshal(raw, &x.Db)
		case "collection":
			err = jsoniter.Unmarshal(raw, &x.Collection)
		case "from":
			err = jsoniter.Unmarshal(raw, &x.From)
		case "to":
			err = jsoniter.Unmarshal(raw, &x.To)
		case "metric_name":
			err = jsoniter.Unmarshal(raw, &x.MetricName)
		case "space_aggregated_by":
			err = jsoniter.Unmarshal(raw, &x.SpaceAggregatedBy)
		case "space_aggregation":
			var agg string
			if err = jsoniter.Unmarshal(raw, &agg); err == nil {
				// Map the (case-insensitive) name onto the enum constant.
				switch strings.ToUpper(agg) {
				case "AVG":
					x.SpaceAggregation = MetricQuerySpaceAggregation_AVG
				case "MIN":
					x.SpaceAggregation = MetricQuerySpaceAggregation_MIN
				case "MAX":
					x.SpaceAggregation = MetricQuerySpaceAggregation_MAX
				case "SUM":
					x.SpaceAggregation = MetricQuerySpaceAggregation_SUM
				}
			}
		case "function":
			var fn string
			if err = jsoniter.Unmarshal(raw, &fn); err == nil {
				// Same case-insensitive handling as space_aggregation.
				switch strings.ToUpper(fn) {
				case "RATE":
					x.Function = MetricQueryFunction_RATE
				case "COUNT":
					x.Function = MetricQueryFunction_COUNT
				}
			}
		}
		if err != nil {
			return err
		}
	}
	return nil
}

type collDesc struct {
Collection string `json:"collection"`
Metadata *CollectionMetadata `json:"metadata"`
Expand Down
16 changes: 14 additions & 2 deletions api/server/v1/validator.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,14 @@

package api

import (
	"regexp"
	"strings"

	"github.com/tigrisdata/tigris/util"
)

var validNamePattern = regexp.MustCompile("^[a-zA-Z]+[a-zA-Z0-9_]+$")

type Validator interface {
Validate() error
}
Expand Down Expand Up @@ -182,15 +190,19 @@ func isValidCollection(name string) error {
if len(name) == 0 {
return Errorf(Code_INVALID_ARGUMENT, "invalid collection name")
}

if !validNamePattern.MatchString(name) || util.LanguageKeywords.Contains(name) {
return Errorf(Code_INVALID_ARGUMENT, "invalid collection name")
}
return nil
}

// isValidDatabase reports (via a Code_INVALID_ARGUMENT error) whether name is
// acceptable as a database name: non-empty, matching validNamePattern, and not
// a programming-language keyword.
func isValidDatabase(name string) error {
	if len(name) == 0 {
		return Errorf(Code_INVALID_ARGUMENT, "invalid database name")
	}
	// Lowercase before the keyword lookup so mixed-case names like "Select"
	// are rejected too — consistent with the field-name check in
	// schema/fields.go (FieldBuilder.Build), which lowercases as well; the
	// keyword set itself is stored in lowercase.
	if !validNamePattern.MatchString(name) || util.LanguageKeywords.Contains(strings.ToLower(name)) {
		return Errorf(Code_INVALID_ARGUMENT, "invalid database name")
	}
	return nil
}

Expand Down
14 changes: 14 additions & 0 deletions lib/date/converter.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
package date

import (
"time"
)

// ToUnixNano converts a time to Unix nano seconds
func ToUnixNano(format string, dateStr string) (int64, error) {
t, err := time.Parse(format, dateStr)
if err != nil {
return 0, err
}
return t.UnixNano(), nil
}
45 changes: 45 additions & 0 deletions lib/date/converter_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
package date

import (
"testing"

"github.com/stretchr/testify/assert"
"time"
)

// TestToUnixNano covers RFC 3339 (with and without fractional seconds, in
// several zone offsets) plus one layout that must fail to parse.
func TestToUnixNano(t *testing.T) {
	cases := []struct {
		name    string
		input   string
		want    int64
		wantErr string // when non-empty, the parse must fail with this substring
	}{
		{name: "UTC RFC 3339", input: "2022-10-18T00:51:07+00:00", want: 1666054267000000000},
		{name: "UTC RFC 3339 Nano", input: "2022-10-18T00:51:07.528106+00:00", want: 1666054267528106000},
		{name: "IST RFC 3339", input: "2022-10-11T04:19:32+05:30", want: 1665442172000000000},
		{name: "IST RFC 3339 Nano", input: "2022-10-18T00:51:07.999999999+05:30", want: 1666034467999999999},
		{name: "No TZ RFC 3339", input: "2022-10-18T00:51:07Z", want: 1666054267000000000},
		{name: "RFC 1123", input: "Mon, 02 Jan 2006 15:04:05 MST", wantErr: "cannot parse"},
	}

	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got, err := ToUnixNano(time.RFC3339Nano, tc.input)
			if tc.wantErr != "" {
				assert.ErrorContains(t, err, tc.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tc.want, got)
		})
	}
}
7 changes: 7 additions & 0 deletions query/filter/selector.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import (

"github.com/tigrisdata/tigris/schema"
"github.com/tigrisdata/tigris/value"
"github.com/tigrisdata/tigris/lib/date"
)

// Selector is a condition defined inside a filter. It has a field which corresponding the field on which condition
Expand Down Expand Up @@ -88,6 +89,12 @@ func (s *Selector) ToSearchFilter() []string {
case schema.DoubleType:
// for double, we pass string in the filter to search backend
return []string{fmt.Sprintf(op, s.Field.Name(), v.String())}
case schema.DateTimeType:
// encode into int64
if nsec, err := date.ToUnixNano(schema.DateTimeFormat, v.String()); err == nil {
return []string{fmt.Sprintf(op, s.Field.Name(), nsec)}
}

}
return []string{fmt.Sprintf(op, s.Field.Name(), v.AsInterface())}
}
Expand Down
15 changes: 13 additions & 2 deletions schema/collection.go
Original file line number Diff line number Diff line change
Expand Up @@ -184,16 +184,27 @@ func GetSearchDeltaFields(existingFields []*QueryableField, incomingFields []*Fi
}

func buildSearchSchema(name string, queryableFields []*QueryableField) *tsApi.CollectionSchema {
var ptrTrue = true
var ptrTrue, ptrFalse = true, false
var tsFields []tsApi.Field
for _, s := range queryableFields {
tsFields = append(tsFields, tsApi.Field{
Name: s.FieldName,
Name: s.Name(),
Type: s.SearchType,
Facet: &s.Faceted,
Index: &s.Indexed,
Optional: &ptrTrue,
})
// Save original date as string to disk
if s.DataType == DateTimeType {
tsFields = append(tsFields, tsApi.Field{
Name: ToSearchDateKey(s.Name()),
Type: toSearchFieldType(StringType),
Facet: &ptrFalse,
Index: &ptrFalse,
Sort: &ptrFalse,
Optional: &ptrTrue,
})
}
}

return &tsApi.CollectionSchema{
Expand Down
2 changes: 1 addition & 1 deletion schema/collection_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ func TestCollection_SearchSchema(t *testing.T) {
schFactory, err := Build("t1", reqSchema)
require.NoError(t, err)

expFlattenedFields := []string{"id", "id_32", "product", "id_uuid", "ts", "price", "simple_items", "simple_object.name",
expFlattenedFields := []string{"id", "id_32", "product", "id_uuid", "ts", ToSearchDateKey("ts"), "price", "simple_items", "simple_object.name",
"simple_object.phone", "simple_object.address.street", "simple_object.details.nested_id", "simple_object.details.nested_obj.id",
"simple_object.details.nested_obj.name", "simple_object.details.nested_array", "simple_object.details.nested_string",
}
Expand Down
29 changes: 7 additions & 22 deletions schema/fields.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
jsoniter "github.com/json-iterator/go"
api "github.com/tigrisdata/tigris/api/server/v1"
"github.com/tigrisdata/tigris/lib/set"
"github.com/tigrisdata/tigris/util"
)

type FieldType int
Expand Down Expand Up @@ -65,25 +66,7 @@ var FieldNames = [...]string{
var (
MsgFieldNameAsLanguageKeyword = "Invalid collection field name, It contains language keyword for fieldName = '%s'"
MsgFieldNameInvalidPattern = "Invalid collection field name, field name can only contain [a-zA-Z0-9_$] and it can only start with [a-zA-Z_$] for fieldName = '%s'"
LanguageKeywords = set.New("abstract", "add", "alias", "and", "any", "args", "arguments", "array",
"as", "as?", "ascending", "assert", "async", "await", "base", "bool", "boolean", "break", "by", "byte",
"callable", "case", "catch", "chan", "char", "checked", "class", "clone", "const", "constructor", "continue",
"debugger", "decimal", "declare", "def", "default", "defer", "del", "delegate", "delete", "descending", "die",
"do", "double", "dynamic", "echo", "elif", "else", "elseif", "empty", "enddeclare", "endfor", "endforeach",
"endif", "endswitch", "endwhile", "enum", "equals", "eval", "event", "except", "exception", "exit", "explicit",
"export", "extends", "extern", "fallthrough", "false", "final", "finally", "fixed", "float", "fn", "for",
"foreach", "from", "fun", "func", "function", "get", "global", "go", "goto", "group", "if", "implements",
"implicit", "import", "in", "include", "include_once", "init", "instanceof", "insteadof", "int", "integer",
"interface", "internal", "into", "is", "isset", "join", "lambda", "let", "list", "lock", "long", "managed",
"map", "match", "module", "nameof", "namespace", "native", "new", "nint", "none", "nonlocal", "not", "notnull",
"nuint", "null", "number", "object", "of", "on", "operator", "or", "orderby", "out", "override", "package",
"params", "partial", "pass", "print", "private", "protected", "public", "raise", "range", "readonly", "record",
"ref", "remove", "require", "require_once", "return", "sbyte", "sealed", "select", "set", "short", "sizeof",
"stackalloc", "static", "strictfp", "string", "struct", "super", "switch", "symbol", "synchronized", "this",
"throw", "throws", "trait", "transient", "true", "try", "type", "typealias", "typeof", "uint", "ulong",
"unchecked", "unmanaged", "unsafe", "unset", "use", "ushort", "using", "val", "value", "var", "virtual", "void",
"volatile", "when", "where", "while", "with", "xor", "yield")
ValidFieldNamePattern = regexp.MustCompile(`^[a-zA-Z_$][a-zA-Z0-9_$]*$`)
ValidFieldNamePattern = regexp.MustCompile(`^[a-zA-Z_$][a-zA-Z0-9_$]*$`)
)

const (
Expand Down Expand Up @@ -202,8 +185,10 @@ func toSearchFieldType(fieldType FieldType) string {
return FieldNames[fieldType]
case Int32Type, Int64Type:
return FieldNames[fieldType]
case StringType, ByteType, UUIDType, DateTimeType:
case StringType, ByteType, UUIDType:
return FieldNames[StringType]
case DateTimeType:
return FieldNames[Int64Type]
case DoubleType:
return searchDoubleType
case ArrayType:
Expand Down Expand Up @@ -306,7 +291,7 @@ func (f *FieldBuilder) Build(isArrayElement bool) (*Field, error) {
}

// check for language keywords
if LanguageKeywords.Contains(strings.ToLower(f.FieldName)) {
if util.LanguageKeywords.Contains(strings.ToLower(f.FieldName)) {
return nil, api.Errorf(api.Code_INVALID_ARGUMENT, MsgFieldNameAsLanguageKeyword, f.FieldName)
}

Expand Down Expand Up @@ -410,7 +395,7 @@ func (q *QueryableField) Name() string {
}

// ShouldPack reports whether this field's values need packing before being
// handed to the search backend; true for array and date-time fields.
func (q *QueryableField) ShouldPack() bool {
	switch q.DataType {
	case ArrayType, DateTimeType:
		return true
	default:
		return false
	}
}

func buildQueryableFields(fields []*Field) []*QueryableField {
Expand Down
16 changes: 12 additions & 4 deletions schema/reserved.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,15 @@ const (
UpdatedAt
Metadata
IdToSearchKey
DateSearchKeyPrefix
)

var ReservedFields = [...]string{
CreatedAt: "created_at",
UpdatedAt: "updated_at",
Metadata: "metadata",
IdToSearchKey: "_tigris_id",
CreatedAt: "created_at",
UpdatedAt: "updated_at",
Metadata: "metadata",
IdToSearchKey: "_tigris_id",
DateSearchKeyPrefix: "_tigris_date_",
}

func IsReservedField(name string) bool {
Expand All @@ -39,3 +41,9 @@ func IsReservedField(name string) bool {

return false
}

// ToSearchDateKey returns the name of the companion search-backend field for
// the date field named key. The original date string is persisted unmodified
// under this prefixed field.
func ToSearchDateKey(key string) string {
	prefix := ReservedFields[DateSearchKeyPrefix]
	return prefix + key
}
4 changes: 4 additions & 0 deletions schema/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
package schema

import (
"time"

"github.com/buger/jsonparser"
jsoniter "github.com/json-iterator/go"
"github.com/pkg/errors"
Expand Down Expand Up @@ -66,6 +68,8 @@ const (
PrimaryKeyIndexName = "pkey"
AutoPrimaryKeyF = "id"
PrimaryKeySchemaK = "primary_key"
// DateTimeFormat represents the supported date time format
DateTimeFormat = time.RFC3339Nano
)

var (
Expand Down
24 changes: 14 additions & 10 deletions server/config/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -145,11 +145,13 @@ var DefaultConfig = Config{
EnableHeap: true,
},
Quota: QuotaConfig{
Enabled: false,
RateLimit: 1000, // requests per second both reads and writes
WriteThroughputLimit: 10000000, // bytes per second
ReadThroughputLimit: 10000000, // bytes per second
DataSizeLimit: 10000000000, // bytes
Enabled: false,
RateLimit: 1000, // requests per second both reads and writes
WriteThroughputLimit: 10000000, // bytes per second
ReadThroughputLimit: 10000000, // bytes per second
DataSizeLimit: 10000000000, // bytes
LimitUpdateInterval: 5, // seconds
TenantSizeRefreshInterval: 60, // seconds
},
Observability: ObservabilityConfig{
Enabled: false,
Expand All @@ -170,9 +172,11 @@ type SearchConfig struct {
}

// QuotaConfig configures per-tenant quota enforcement; defaults live in
// DefaultConfig.
type QuotaConfig struct {
	// Enabled toggles quota enforcement (false by default).
	// NOTE(review): no mapstructure/yaml/json tag, so it maps under the
	// default (lowercased) key — confirm that is intended.
	Enabled bool
	// RateLimit caps requests per second, reads and writes combined.
	RateLimit int `mapstructure:"rate_limit" yaml:"rate_limit" json:"rate_limit"`
	// WriteThroughputLimit caps write throughput in bytes per second.
	WriteThroughputLimit int `mapstructure:"write_throughput_limit" yaml:"write_throughput_limit" json:"write_throughput_limit"`
	// ReadThroughputLimit caps read throughput in bytes per second.
	ReadThroughputLimit int `mapstructure:"read_throughput_limit" yaml:"read_throughput_limit" json:"read_throughput_limit"`
	// DataSizeLimit caps stored data size in bytes.
	DataSizeLimit int64 `mapstructure:"data_size_limit" yaml:"data_size_limit" json:"data_size_limit"`
	// LimitUpdateInterval is expressed in seconds (per DefaultConfig).
	LimitUpdateInterval int64 `mapstructure:"limit_update_interval" yaml:"limit_update_interval" json:"limit_update_interval"`
	// TenantSizeRefreshInterval is expressed in seconds (per DefaultConfig).
	TenantSizeRefreshInterval int64 `mapstructure:"tenant_size_refresh_interval" yaml:"tenant_size_refresh_interval" json:"tenant_size_refresh_interval"`
}
Loading

0 comments on commit 7dd268d

Please sign in to comment.