diff --git a/api/handler_events_test.go b/api/handler_events_test.go
index 7db1caaec..a37159377 100644
--- a/api/handler_events_test.go
+++ b/api/handler_events_test.go
@@ -123,7 +123,9 @@ func TestHandler_Events(t *testing.T) {
 					},
 					"url": "http://localhost/foo",
 				},
-				"response": map[string]interface{}{"body": "", "size": float64(0), "statusCode": float64(0)}},
+				"response": map[string]interface{}{"body": "", "size": float64(0), "statusCode": float64(0)},
+				"clientIP": "192.0.2.1",
+			},
 				m[0]["data"])
 		}))
 	},
@@ -174,7 +176,9 @@ func TestHandler_Events(t *testing.T) {
 					},
 					"url": "http://localhost/foo",
 				},
-				"response": map[string]interface{}{"body": "", "size": float64(0), "statusCode": float64(0)}},
+				"response": map[string]interface{}{"body": "", "size": float64(0), "statusCode": float64(0)},
+				"clientIP": "192.0.2.1",
+			},
 				m[0]["data"])
 		}))
 	},
diff --git a/api/handler_schema_test.go b/api/handler_schema_test.go
index aa943c4e1..fae20612e 100644
--- a/api/handler_schema_test.go
+++ b/api/handler_schema_test.go
@@ -247,7 +247,7 @@ func TestHandler_Schema_Example(t *testing.T) {
 			},
 		},
 		{
-			name: "string or number",
+			name: "string or integer",
 			app: &runtime.App{
 				Monitor: monitor.New(),
 			},
@@ -256,7 +256,7 @@ func TestHandler_Schema_Example(t *testing.T) {
 				http.MethodGet,
 				"http://foo.api/api/schema/example",
 				nil,
-				`{"name": "", "schema": {"type": ["string","number"]}}`,
+				`{"name": "", "schema": {"type": ["string","integer"]}}`,
 				h,
 				try.HasStatusCode(200),
 				try.HasHeader("Content-Type", "application/json"),
@@ -266,10 +266,10 @@ func TestHandler_Schema_Example(t *testing.T) {
 					require.NoError(t, err)
 					b, err := base64.StdEncoding.DecodeString(data[0]["value"])
 					require.NoError(t, err)
-					require.Equal(t, "609859.0117483337", string(b))
+					require.Equal(t, "-168643", string(b))
 				}),
-				try.HasBody(`[{"contentType":"application/json","value":"NjA5ODU5LjAxMTc0ODMzMzc="}]`))
+				try.HasBody(`[{"contentType":"application/json","value":"LTE2ODY0Mw=="}]`))
 		},
 	},
 	{
diff --git a/config/static/static_config.go b/config/static/static_config.go
index ff2faf07c..d62a890af 100644
--- a/config/static/static_config.go
+++ b/config/static/static_config.go
@@ -4,11 +4,12 @@ import (
 	"bytes"
 	"encoding/json"
 	"fmt"
-	log "github.com/sirupsen/logrus"
 	"mokapi/config/tls"
 	"net/url"
 	"strconv"
 	"strings"
+
+	log "github.com/sirupsen/logrus"
 )
 
 type Config struct {
diff --git a/engine/common/host.go b/engine/common/host.go
index 9976e48f5..fefb8a44a 100644
--- a/engine/common/host.go
+++ b/engine/common/host.go
@@ -104,6 +104,7 @@ type HttpClient interface {
 
 type HttpClientOptions struct {
 	MaxRedirects int
+	Timeout      time.Duration
 }
 
 type Action struct {
diff --git a/engine/enginetest/host.go b/engine/enginetest/host.go
index a3a12612e..c79f7d00f 100644
--- a/engine/enginetest/host.go
+++ b/engine/enginetest/host.go
@@ -22,6 +22,7 @@ type Host struct {
 	DebugFunc          func(args ...interface{})
 	IsLevelEnabledFunc func(level string) bool
 	HttpClientTest     *HttpClient
+	HttpClientFunc     func(opts common.HttpClientOptions) common.HttpClient
 	KafkaClientTest    *KafkaClient
 	EveryFunc          func(every string, do func(), opt common.JobOptions)
 	CronFunc           func(every string, do func(), opt common.JobOptions)
@@ -128,7 +129,10 @@ func (h *Host) Unlock() {
 	h.m.Unlock()
 }
 
-func (h *Host) HttpClient(_ common.HttpClientOptions) common.HttpClient {
+func (h *Host) HttpClient(opts common.HttpClientOptions) common.HttpClient {
+	if h.HttpClientFunc != nil {
+		return h.HttpClientFunc(opts)
+	}
 	return h.HttpClientTest
 }
 
diff --git a/engine/host.go b/engine/host.go
index 39b2e3b8a..1f25ebf2f 100644
--- a/engine/host.go
+++ b/engine/host.go
@@ -310,7 +310,7 @@ func (sh *scriptHost) KafkaClient() common.KafkaClient {
 
 func (sh *scriptHost) HttpClient(opts common.HttpClientOptions) common.HttpClient {
 	return &http.Client{
-		Timeout: time.Second * 30,
+		Timeout: opts.Timeout,
 		CheckRedirect: func(req *http.Request, via []*http.Request) error {
 			if l := len(via); l > opts.MaxRedirects {
 				log.Warnf("Stopped after %d redirects, original URL was %s", opts.MaxRedirects, via[0].URL)
diff --git a/examples/mokapi/kafka.js b/examples/mokapi/kafka.js
index 0ce9f6bc7..d865f4421 100644
--- a/examples/mokapi/kafka.js
+++ b/examples/mokapi/kafka.js
@@ -345,7 +345,7 @@ export let events = [
                binary: base64.encode([0xa, 0x43, 0x61, 0x72, 0x6f, 0x6c, 0x3a].map(x => String.fromCharCode(x)).join(''))
            },
            partition: 0,
-           messageId: 'shopOrder'
+           messageId: 'avro'
        }
    }
 ]
diff --git a/examples/mokapi/services_http.js b/examples/mokapi/services_http.js
index 851b5ddec..e05708bdb 100644
--- a/examples/mokapi/services_http.js
+++ b/examples/mokapi/services_http.js
@@ -588,9 +588,14 @@ export let events = [
            url: "http://127.0.0.1:18080/pet",
            parameters: [
                {
-                   name: 'Acceot-Encoding',
+                   name: 'Accept-Encoding',
                    type: 'header',
                    raw: 'gzip, deflate'
+               },
+               {
+                   name: 'LongHeader',
+                   type: 'header',
+                   raw: 'Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet. Lorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam voluptua. At vero eos et accusam et justo duo dolores et ea rebum. Stet clita kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.'
                }
            ],
            contentType: "application/xml",
@@ -625,7 +630,7 @@ export let events = [
            url: "http://127.0.0.1:18080/pet",
            parameters: [
                {
-                   name: 'Acceot-Encoding',
+                   name: 'Accept-Encoding',
                    type: 'header',
                    raw: 'gzip, deflate'
                }
@@ -642,7 +647,8 @@ export let events = [
                })
            ]
        }
-   ]
+   ],
+   clientIP: '127.0.0.1'
 },
 },
 {
@@ -691,7 +697,8 @@ export let events = [
                    message: 'An example script error message'
                }
            }
-       ]
+       ],
+       clientIP: '127.0.0.1'
    }
 },
 {
@@ -735,7 +742,8 @@ export let events = [
                    event: "http"
                }
            }
-       ]
+       ],
+       clientIP: '127.0.0.1'
    }
 },
 {
@@ -773,7 +781,8 @@ export let events = [
                    event: "http"
                }
            }
-       ]
+       ],
+       clientIP: '127.0.0.1'
    }
 },
 {
@@ -819,6 +828,7 @@ export let events = [
                size: 512
            },
            duration: 133,
+           clientIP: '192.0.1.127'
        }
    }
 ]
\ No newline at end of file
diff --git a/go.mod b/go.mod
index 1f4cd7bbe..e62257edc 100644
--- a/go.mod
+++ b/go.mod
@@ -4,12 +4,12 @@ go 1.25.1
 
 require (
 	github.com/Masterminds/sprig v2.22.0+incompatible
-	github.com/blevesearch/bleve/v2 v2.5.3
+	github.com/blevesearch/bleve/v2 v2.5.4
 	github.com/blevesearch/bleve_index_api v1.2.10
 	github.com/bradleyfalzon/ghinstallation/v2 v2.17.0
 	github.com/brianvoe/gofakeit/v6 v6.28.0
 	github.com/dop251/goja v0.0.0-20250309171923-bcd7cc6bf64c
-	github.com/evanw/esbuild v0.25.11
+	github.com/evanw/esbuild v0.25.12
 	github.com/fsnotify/fsnotify v1.9.0
 	github.com/go-co-op/gocron v1.37.0
 	github.com/go-git/go-git/v5 v5.16.3
@@ -39,7 +39,7 @@ require (
 	github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
 	github.com/blevesearch/gtreap v0.1.1 // indirect
 	github.com/blevesearch/mmap-go v1.0.4 // indirect
-	github.com/blevesearch/scorch_segment_api/v2 v2.3.10 // indirect
+	github.com/blevesearch/scorch_segment_api/v2 v2.3.12 // indirect
 	github.com/blevesearch/segment v0.9.1 // indirect
 	github.com/blevesearch/snowballstem v0.9.0 // indirect
 	github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
@@ -49,7 +49,7 @@ require (
 	github.com/blevesearch/zapx/v13 v13.4.2 // indirect
 	github.com/blevesearch/zapx/v14 v14.4.2 // indirect
 	github.com/blevesearch/zapx/v15 v15.4.2 // indirect
-	github.com/blevesearch/zapx/v16 v16.2.4 // indirect
+	github.com/blevesearch/zapx/v16 v16.2.6 // indirect
 	github.com/cloudflare/circl v1.6.1 // indirect
 	github.com/cyphar/filepath-securejoin v0.4.1 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
@@ -60,7 +60,6 @@ require (
 	github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
 	github.com/golang-jwt/jwt/v4 v4.5.2 // indirect
 	github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
-	github.com/golang/protobuf v1.5.4 // indirect
 	github.com/golang/snappy v0.0.4 // indirect
 	github.com/google/go-github/v75 v75.0.0 // indirect
 	github.com/google/go-querystring v1.1.0 // indirect
@@ -83,6 +82,6 @@ require (
 	go.uber.org/atomic v1.9.0 // indirect
 	golang.org/x/crypto v0.43.0 // indirect
 	golang.org/x/sys v0.37.0 // indirect
-	google.golang.org/protobuf v1.33.0 // indirect
+	google.golang.org/protobuf v1.36.6 // indirect
 	gopkg.in/warnings.v0 v0.1.2 // indirect
 )
diff --git a/go.sum b/go.sum
index 2af47952c..ca866c990 100644
--- a/go.sum
+++ b/go.sum
@@ -22,8 +22,8 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkY
 github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
 github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4=
 github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
-github.com/blevesearch/bleve/v2 v2.5.3 h1:9l1xtKaETv64SZc1jc4Sy0N804laSa/LeMbYddq1YEM=
-github.com/blevesearch/bleve/v2 v2.5.3/go.mod h1:Z/e8aWjiq8HeX+nW8qROSxiE0830yQA071dwR3yoMzw=
+github.com/blevesearch/bleve/v2 v2.5.4 h1:1iur8e+PHsxtncV2xIVuqlQme/V8guEDO2uV6Wll3lQ=
+github.com/blevesearch/bleve/v2 v2.5.4/go.mod h1:yB4PnV4N2q5rTEpB2ndG8N2ISexBQEFIYgwx4ztfvoo=
 github.com/blevesearch/bleve_index_api v1.2.10 h1:FMFmZCmTX6PdoLLvwUnKF2RsmILFFwO3h0WPevXY9fE=
 github.com/blevesearch/bleve_index_api v1.2.10/go.mod h1:rKQDl4u51uwafZxFrPD1R7xFOwKnzZW7s/LSeK4lgo0=
 github.com/blevesearch/geo v0.2.4 h1:ECIGQhw+QALCZaDcogRTNSJYQXRtC8/m8IKiA706cqk=
@@ -36,8 +36,8 @@ github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZG
 github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
 github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
 github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
-github.com/blevesearch/scorch_segment_api/v2 v2.3.10 h1:Yqk0XD1mE0fDZAJXTjawJ8If/85JxnLd8v5vG/jWE/s=
-github.com/blevesearch/scorch_segment_api/v2 v2.3.10/go.mod h1:Z3e6ChN3qyN35yaQpl00MfI5s8AxUJbpTR/DL8QOQ+8=
+github.com/blevesearch/scorch_segment_api/v2 v2.3.12 h1:GGZc2qwbyRBwtckPPkHkLyXw64mmsLJxdturBI1cM+c=
+github.com/blevesearch/scorch_segment_api/v2 v2.3.12/go.mod h1:JBRGAneqgLSI2+jCNjtwMqp2B7EBF3/VUzgDPIU33MM=
 github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
 github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
 github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
@@ -56,8 +56,8 @@ github.com/blevesearch/zapx/v14 v14.4.2 h1:2SGHakVKd+TrtEqpfeq8X+So5PShQ5nW6GNxT
 github.com/blevesearch/zapx/v14 v14.4.2/go.mod h1:rz0XNb/OZSMjNorufDGSpFpjoFKhXmppH9Hi7a877D8=
 github.com/blevesearch/zapx/v15 v15.4.2 h1:sWxpDE0QQOTjyxYbAVjt3+0ieu8NCE0fDRaFxEsp31k=
 github.com/blevesearch/zapx/v15 v15.4.2/go.mod h1:1pssev/59FsuWcgSnTa0OeEpOzmhtmr/0/11H0Z8+Nw=
-github.com/blevesearch/zapx/v16 v16.2.4 h1:tGgfvleXTAkwsD5mEzgM3zCS/7pgocTCnO1oyAUjlww=
-github.com/blevesearch/zapx/v16 v16.2.4/go.mod h1:Rti/REtuuMmzwsI8/C/qIzRaEoSK/wiFYw5e5ctUKKs=
+github.com/blevesearch/zapx/v16 v16.2.6 h1:OHuUl2GhM+FpBq9RwNsJ4k/QodqbMMHoQEgn/IHYpu8=
+github.com/blevesearch/zapx/v16 v16.2.6/go.mod h1:cuAPB+YoIyRngNhno1S1GPr9SfMk+x/SgAHBLXSIq3k=
 github.com/bradleyfalzon/ghinstallation/v2 v2.17.0 h1:SmbUK/GxpAspRjSQbB6ARvH+ArzlNzTtHydNyXUQ6zg=
 github.com/bradleyfalzon/ghinstallation/v2 v2.17.0/go.mod h1:vuD/xvJT9Y+ZVZRv4HQ42cMyPFIYqpc7AbB4Gvt/DlY=
 github.com/brianvoe/gofakeit/v6 v6.28.0 h1:Xib46XXuQfmlLS2EXRuJpqcw8St6qSZz75OUo0tgAW4=
@@ -81,8 +81,8 @@ github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o
 github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
 github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
 github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
-github.com/evanw/esbuild v0.25.11 h1:NGtezc+xk+Mti4fgWaoD3dncZNCzcTA+r0BxMV3Koyw=
-github.com/evanw/esbuild v0.25.11/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48=
+github.com/evanw/esbuild v0.25.12 h1:7kIg7aG2++vhheW5YCzut1q1AjehYVQU752NcMuGVsw=
+github.com/evanw/esbuild v0.25.12/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48=
 github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
 github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
 github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c=
@@ -103,8 +103,6 @@ github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXe
 github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
 github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ=
 github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw=
-github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
-github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
 github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
 github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
 github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
@@ -219,8 +217,8 @@ golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k=
 golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
-google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
+google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY=
+google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/js/faker/faker_schema_test.go b/js/faker/faker_schema_test.go
index 55451835d..0490f2ff8 100644
--- a/js/faker/faker_schema_test.go
+++ b/js/faker/faker_schema_test.go
@@ -281,10 +281,10 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "minItems",
-			schema: "{ minItems: 3, }",
+			schema: "{ minItems: 3, items: { type: 'string' } }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
-				r.Equal(t, []any{true, int64(-354976), "ZuoWq vY5elXhlD", []interface{}{-743110.6375100765, "lYehCIA", map[string]interface{}{"caravan": true, "hail": -717855.7306413883, "mob": int64(-679449), "scale": true}, false}, false, []interface{}{true, int64(859972), -161661.97092417115}, false, "LWxKt"}, v.Export())
+				r.Equal(t, []interface{}{"", "idZ", "wYx?vY5elXhlD", "VhgPevuwyrNrL", "lVeCZKW1JKqG", "sffl", "tR", "h"}, v.Export())
 			},
 		},
 		{
@@ -326,10 +326,10 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "contains",
-			schema: "{ contains: { type: 'string' } }",
+			schema: "{ contains: { const: 'foo' }, items: { type: 'string' } }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
-				r.Equal(t, []any{[]any{true, int64(859972), -161661.97092417115}, "wYx?vY5elXhlD", []any{-743110.6375100765, "lYehCIA", map[string]any{"caravan": true, "hail": -717855.7306413883, "mob": int64(-679449), "scale": true}, false}, false, "idZ"}, v.Export())
+				r.Equal(t, []any{"VhgPevuwyrNrL", "foo", "idZ", "wYx?vY5elXhlD", "foo"}, v.Export())
 			},
 		},
 		{
@@ -356,10 +356,10 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "maxContains",
-			schema: "{ contains: { type: 'string' }, maxContains: 2 }",
+			schema: "{ contains: { type: 'string' }, maxContains: 2, items: { type: 'string' } }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
-				r.Equal(t, []any{" vY5elXhlD4ezl", -859791.1845789105, 326768.021588166, int64(628235), "idZ"}, v.Export())
+				r.Equal(t, []any{"wYx?vY5elXhlD", "idZ"}, v.Export())
 			},
 		},
 		{
@@ -401,10 +401,10 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "minProperties",
-			schema: "{ minProperties: 3 }",
+			schema: "{ minProperties: 3, additionalProperties: { type: 'string' } }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
-				r.Equal(t, map[string]any{"bunch": 326768.021588166, "gang": int64(628235), "growth": " vY5elXhlD4ezl", "woman": -859791.1845789105}, v.Export())
+				r.Equal(t, map[string]any{"bunch": "VhgPevuwyrNrL", "gang": "lVeCZKW1JKqG", "growth": "sffl", "woman": "wYx?vY5elXhlD"}, v.Export())
 			},
 		},
 		{
@@ -454,13 +454,13 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "properties, patternProperties and additionalProperties",
-			schema: "{ properties: { builtin: { type: 'number' } }, patternProperties: { '^S_': { type: 'string' }, '^I_': { type: 'integer' } }, additionalProperties: { type: 'string' } }",
+			schema: "{ properties: { builtin: { type: 'integer' } }, patternProperties: { '^S_': { type: 'string' }, '^I_': { type: 'integer' } }, additionalProperties: { type: 'string' } }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
 				r.Equal(t, map[string]any{
 					"I_4VX":    int64(-908662),
 					"S_kY9X3W": "m",
-					"builtin":  -383134.1033810867,
+					"builtin":  int64(-86829),
 					"group":    "CKu",
 					"ocean":    "LJgmr9arWgSfi",
 					"party":    "m",
@@ -584,12 +584,12 @@ func TestFaker_Schema(t *testing.T) {
 		},
 		{
 			name:   "required but not defined in properties",
-			schema: "{ properties: { foo: { type: 'string' } }, required: ['bar'] }",
+			schema: "{ properties: { foo: { type: 'string' }}, additionalProperties: { type: 'string' }, required: ['bar'] }",
 			test: func(t *testing.T, v goja.Value, err error) {
 				r.NoError(t, err)
 				m := v.Export()
 				r.Contains(t, m, "bar")
-				r.Equal(t, map[string]any{"bar": 256208.42538087885, "foo": "XidZuoWq "}, m)
+				r.Equal(t, map[string]any{"bar": "Pevuwy", "woman": "q vY5elXhlD4ez"}, m)
 			},
 		},
 		{
diff --git a/js/faker/faker_test.go b/js/faker/faker_test.go
index 35fc68ad3..b6dab2e3a 100644
--- a/js/faker/faker_test.go
+++ b/js/faker/faker_test.go
@@ -173,10 +173,10 @@ func TestModule(t *testing.T) {
 			test: func(t *testing.T, vm *goja.Runtime, _ *enginetest.Host) {
 				v, err := vm.RunString(`
					const m = require('faker')
-					m.fake({ type: 'object', properties: { foo: { type: 'string' }, bar: { type: 'string' }}, required: ['foo', 'bar','x', 'y', 'z'] } )
+					m.fake({ type: 'object', properties: { foo: { type: 'string' }, bar: { type: 'string' }}, additionalProperties: { type: 'string' }, required: ['foo', 'bar','x', 'y', 'z'] } )
				`)
 				r.NoError(t, err)
-				r.Equal(t, map[string]any{"bar": "", "foo": "XidZuoWq ", "x": int64(-117432), "y": int64(995706), "z": -383134.1033810867}, v.Export())
+				r.Equal(t, map[string]any{"bar": "vm", "bunch": "LJgmr9arWgSfi", "foo": "Yx?vY5", "gang": "jLWxKtR4", "growth": "m", "woman": "hlD4ezlYehCIA0O", "x": "CKu", "y": "gaqpsEbkw", "z": "LuvMVE6iIqk"}, v.Export())
 			},
 		},
 	}
diff --git a/js/http/http.go b/js/http/http.go
index fe9275f78..1457db1e7 100644
--- a/js/http/http.go
+++ b/js/http/http.go
@@ -4,13 +4,18 @@ import (
 	"bytes"
 	"encoding/json"
 	"fmt"
-	"github.com/dop251/goja"
 	"io"
 	"mokapi/engine/common"
 	"mokapi/js/eventloop"
+	"mokapi/js/util"
 	"mokapi/media"
 	"net/http"
+	"os"
+	"reflect"
 	"strings"
+	"time"
+
+	"github.com/dop251/goja"
 )
 
 type Client interface {
@@ -48,15 +53,15 @@ func Require(vm *goja.Runtime, module *goja.Object) {
 		loop: loop,
 	}
 	obj := module.Get("exports").(*goja.Object)
-	obj.Set("get", f.Get)
-	obj.Set("post", f.Post)
-	obj.Set("put", f.Put)
-	obj.Set("head", f.Head)
-	obj.Set("patch", f.Patch)
-	obj.Set("delete", f.Delete)
-	obj.Set("del", f.Delete)
-	obj.Set("options", f.Options)
-	obj.Set("fetch", f.Fetch)
+	_ = obj.Set("get", f.Get)
+	_ = obj.Set("post", f.Post)
+	_ = obj.Set("put", f.Put)
+	_ = obj.Set("head", f.Head)
+	_ = obj.Set("patch", f.Patch)
+	_ = obj.Set("delete", f.Delete)
+	_ = obj.Set("del", f.Delete)
+	_ = obj.Set("options", f.Options)
+	_ = obj.Set("fetch", f.Fetch)
 }
 
 func (m *Module) Get(url string, args goja.Value) interface{} {
@@ -94,7 +99,7 @@ func (m *Module) Fetch(url string, v goja.Value) *goja.Promise {
 			r := recover()
 			if r != nil {
 				m.loop.Run(func(vm *goja.Runtime) {
-					reject(r)
+					_ = reject(r)
 				})
 			}
 		}()
@@ -115,7 +120,7 @@ func (m *Module) Fetch(url string, v goja.Value) *goja.Promise {
 		res := m.doRequest(method, url, body, v)
 
 		m.loop.Run(func(vm *goja.Runtime) {
-			resolve(res)
+			_ = resolve(res)
 		})
 	}()
 	return p
@@ -127,7 +132,7 @@ func (m *Module) doRequest(method, url string, body interface{}, args goja.Value
 	}
 
 	rArgs := &RequestArgs{Headers: make(map[string]interface{})}
-	maxRedirects := 5
+	opts := common.HttpClientOptions{MaxRedirects: 5}
 	if args != nil && !goja.IsUndefined(args) && !goja.IsNull(args) {
 		params := args.ToObject(m.rt)
 		for _, k := range params.Keys() {
@@ -144,7 +149,21 @@ func (m *Module) doRequest(method, url string, body interface{}, args goja.Value
 					continue
 				}
 				if redirects, ok := v.Export().(int); ok {
-					maxRedirects = redirects
+					opts.MaxRedirects = redirects
+				}
+			case "timeout":
+				v := params.Get(k)
+				switch v.ExportType().Kind() {
+				case reflect.Int64:
+					opts.Timeout = time.Duration(v.ToInteger()) * time.Millisecond
+				case reflect.String:
+					d, err := time.ParseDuration(v.String())
+					if err != nil {
+						panic(m.rt.ToValue(fmt.Sprintf("expected duration for timeout: %s", err.Error())))
+					}
+					opts.Timeout = d
+				default:
+					panic(m.rt.ToValue(fmt.Sprintf("unexpected type for 'timeout': got %s, expected Number or String", util.JsType(v))))
 				}
 			}
 		}
@@ -156,9 +175,12 @@ func (m *Module) doRequest(method, url string, body interface{}, args goja.Value
 		panic(m.rt.ToValue(err.Error()))
 	}
 
-	client := m.host.HttpClient(common.HttpClientOptions{MaxRedirects: maxRedirects})
+	client := m.host.HttpClient(opts)
 	res, err := client.Do(req)
 	if err != nil {
+		if os.IsTimeout(err) {
+			panic(m.rt.ToValue(fmt.Errorf("request to %s %s timed out", method, url)))
+		}
 		panic(m.rt.ToValue(err.Error()))
 	}
 
diff --git a/js/http/http_test.go b/js/http/http_test.go
index 7596905de..2627de9cb 100644
--- a/js/http/http_test.go
+++ b/js/http/http_test.go
@@ -2,10 +2,9 @@ package http_test
 
 import (
 	"fmt"
-	"github.com/dop251/goja"
-	r "github.com/stretchr/testify/require"
 	"io"
 	"mokapi/config/dynamic"
+	"mokapi/engine/common"
 	"mokapi/engine/enginetest"
 	"mokapi/js"
 	"mokapi/js/eventloop"
@@ -15,12 +14,20 @@ import (
 	"strings"
 	"testing"
 	"time"
+
+	"github.com/dop251/goja"
+	r "github.com/stretchr/testify/require"
 )
 
+type timeoutErr struct{}
+
+func (timeoutErr) Timeout() bool { return true }
+func (timeoutErr) Error() string { return "timeout" }
+
 func TestHttp(t *testing.T) {
 	testcases := []struct {
 		name   string
-		client *enginetest.HttpClient
+		client func(options common.HttpClientOptions) common.HttpClient
 		test   func(t *testing.T, vm *goja.Runtime, host *enginetest.Host)
 	}{
 		{
@@ -35,10 +42,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "http client error",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return nil, fmt.Errorf("TEST")
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return nil, fmt.Errorf("TEST")
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -50,13 +59,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "request uses given url",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.URL.String() == "https://foo.bar" {
-						return &http.Response{}, nil
-					}
-					return nil, fmt.Errorf("TEST")
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.URL.String() == "https://foo.bar" {
+							return &http.Response{}, nil
+						}
+						return nil, fmt.Errorf("TEST")
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -68,10 +79,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP status code",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				v, err := vm.RunString(`
@@ -84,10 +97,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP status code",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				v, err := vm.RunString(`
@@ -100,10 +115,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP header",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{Header: map[string][]string{"foo": {"bar"}}}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{Header: map[string][]string{"foo": {"bar"}}}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				v, err := vm.RunString(`
@@ -116,10 +133,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP body",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{Body: io.NopCloser(strings.NewReader("foobar"))}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{Body: io.NopCloser(strings.NewReader("foobar"))}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				v, err := vm.RunString(`
@@ -132,10 +151,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP body to json",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{Body: io.NopCloser(strings.NewReader(`{"foo":"bar"}`))}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{Body: io.NopCloser(strings.NewReader(`{"foo":"bar"}`))}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				v, err := vm.RunString(`
@@ -148,10 +169,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP body to json but invalid format",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return &http.Response{Body: io.NopCloser(strings.NewReader(`{"foo":"bar"`))}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return &http.Response{Body: io.NopCloser(strings.NewReader(`{"foo":"bar"`))}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -163,19 +186,21 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP post, convert object to json",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPost {
-						return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
-					}
-					if s := request.Header["Content-Type"][0]; s != "application/json" {
-						return nil, fmt.Errorf("expected Content-Type application/json, but is %v", s)
-					}
-					if b, _ := io.ReadAll(request.Body); string(b) != `{"foo":"bar"}` {
-						return nil, fmt.Errorf("expected request body , but is %s", b)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPost {
+							return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
+						}
+						if s := request.Header["Content-Type"][0]; s != "application/json" {
+							return nil, fmt.Errorf("expected Content-Type application/json, but is %v", s)
+						}
+						if b, _ := io.ReadAll(request.Body); string(b) != `{"foo":"bar"}` {
+							return nil, fmt.Errorf("expected request body , but is %s", b)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -189,13 +214,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP post, unsupported content type",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPost {
-						return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPost {
+							return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -209,13 +236,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP put",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPut {
-						return nil, fmt.Errorf("expected HTTP method PUT, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPut {
+							return nil, fmt.Errorf("expected HTTP method PUT, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -227,13 +256,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP head",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodHead {
-						return nil, fmt.Errorf("expected HTTP method HEAD, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodHead {
+							return nil, fmt.Errorf("expected HTTP method HEAD, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -245,13 +276,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP patch",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPatch {
-						return nil, fmt.Errorf("expected HTTP method PATCH, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPatch {
+							return nil, fmt.Errorf("expected HTTP method PATCH, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -263,13 +296,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP delete",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodDelete {
-						return nil, fmt.Errorf("expected HTTP method DELETE, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodDelete {
+							return nil, fmt.Errorf("expected HTTP method DELETE, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -281,13 +316,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "HTTP options",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodOptions {
-						return nil, fmt.Errorf("expected HTTP method OPTIONS, but is %v", request.Method)
-					}
-					return &http.Response{}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodOptions {
+							return nil, fmt.Errorf("expected HTTP method OPTIONS, but is %v", request.Method)
+						}
+						return &http.Response{}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -297,15 +334,35 @@ func TestHttp(t *testing.T) {
 				r.NoError(t, err)
 			},
 		},
+		{
+			name: "timeout",
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				r.Equal(t, 500*time.Millisecond, options.Timeout)
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return nil, &timeoutErr{}
+					},
+				}
+			},
+			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
+				_, err := vm.RunString(`
					const m = require('mokapi/http')
					m.get('https://foo.bar', { timeout: '500ms' })
				`)
+				r.EqualError(t, err, "request to GET https://foo.bar timed out at mokapi/js/http.(*Module).Get-fm (native)")
+			},
+		},
 		{
 			name: "fetch get request",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodGet {
-						return nil, fmt.Errorf("expected HTTP method GET, but is %v", request.Method)
-					}
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodGet {
+							return nil, fmt.Errorf("expected HTTP method GET, but is %v", request.Method)
+						}
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -328,13 +385,15 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "fetch post request",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPost {
-						return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
-					}
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPost {
+							return nil, fmt.Errorf("expected HTTP method POST, but is %v", request.Method)
+						}
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -357,19 +416,21 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "fetch put with body",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodPut {
-						return nil, fmt.Errorf("expected HTTP method PUT, but is %v", request.Method)
-					}
-					b, err := io.ReadAll(request.Body)
-					if err != nil {
-						return nil, fmt.Errorf("cannot read body: %w", err)
-					} else if string(b) != `{"foo":"bar"}` {
-						return nil, fmt.Errorf("expected body to be '{\"foo\":\"bar\"}', but is %s", b)
-					}
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodPut {
+							return nil, fmt.Errorf("expected HTTP method PUT, but is %v", request.Method)
+						}
+						b, err := io.ReadAll(request.Body)
+						if err != nil {
+							return nil, fmt.Errorf("cannot read body: %w", err)
+						} else if string(b) != `{"foo":"bar"}` {
+							return nil, fmt.Errorf("expected body to be '{\"foo\":\"bar\"}', but is %s", b)
+						}
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -392,19 +453,21 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "fetch delete with header",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					if request.Method != http.MethodDelete {
-						return nil, fmt.Errorf("expected HTTP method DELETE, but is %v", request.Method)
-					}
-					if request.Header["foo"][0] != "bar" {
-						return nil, fmt.Errorf("expected header foo to contain 'bar', but is %v", request.Header["foo"])
-					}
-					if request.Header["bar"][0] != "f" || request.Header["bar"][1] != "o" || request.Header["bar"][2] != "o" {
-						return nil, fmt.Errorf("expected header foo to be [f o o], but is %v", request.Header["bar"])
-					}
-					return &http.Response{StatusCode: http.StatusOK}, nil
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						if request.Method != http.MethodDelete {
+							return nil, fmt.Errorf("expected HTTP method DELETE, but is %v", request.Method)
+						}
+						if request.Header["foo"][0] != "bar" {
+							return nil, fmt.Errorf("expected header foo to contain 'bar', but is %v", request.Header["foo"])
+						}
+						if request.Header["bar"][0] != "f" || request.Header["bar"][1] != "o" || request.Header["bar"][2] != "o" {
+							return nil, fmt.Errorf("expected header foo to be [f o o], but is %v", request.Header["bar"])
+						}
+						return &http.Response{StatusCode: http.StatusOK}, nil
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -427,10 +490,12 @@ func TestHttp(t *testing.T) {
 		},
 		{
 			name: "fetch error",
-			client: &enginetest.HttpClient{
-				DoFunc: func(request *http.Request) (*http.Response, error) {
-					return nil, fmt.Errorf("TEST ERROR")
-				},
+			client: func(options common.HttpClientOptions) common.HttpClient {
+				return &enginetest.HttpClient{
+					DoFunc: func(request *http.Request) (*http.Response, error) {
+						return nil, fmt.Errorf("TEST ERROR")
+					},
+				}
 			},
 			test: func(t *testing.T, vm *goja.Runtime, host *enginetest.Host) {
 				_, err := vm.RunString(`
@@ -454,7 +519,7 @@ func TestHttp(t *testing.T) {
 		t.Run(tc.name, func(t *testing.T) {
 			vm := goja.New()
 			vm.SetFieldNameMapper(goja.TagFieldNameMapper("json", true))
-			host := &enginetest.Host{HttpClientTest: tc.client}
+			host := &enginetest.Host{HttpClientFunc: tc.client}
 			js.EnableInternal(vm, host, &eventloop.EventLoop{}, &dynamic.Config{})
 			req, err := require.NewRegistry()
 			r.NoError(t, err)
diff --git a/js/mokapi/mokapi.go b/js/mokapi/mokapi.go
index f7c6112f3..82d1dc622 100644
--- a/js/mokapi/mokapi.go
+++ b/js/mokapi/mokapi.go
@@ -39,7 +39,7 @@ func Require(vm *goja.Runtime, module *goja.Object) {
 	_ = obj.Set("marshal", f.Marshal)
 	_ = obj.Set("patch", patch)
 	_ = obj.Set("Delete", Delete)
-	_ = obj.Set("shared", NewSharedMemory(host.Store()))
+	_ = obj.Set("shared", NewSharedMemory(host.Store(), vm))
 }
 
 func (m *Module) Sleep(i interface{}) {
diff --git a/js/mokapi/shared.go b/js/mokapi/shared.go
index f26efc55c..f73cb2836 100644
--- a/js/mokapi/shared.go
+++ b/js/mokapi/shared.go
@@ -2,14 +2,17 @@ package mokapi
 
 import (
 	"mokapi/engine/common"
+
+	"github.com/dop251/goja"
 )
 
 type SharedMemory struct {
 	store common.Store
+	vm    *goja.Runtime
 }
 
-func NewSharedMemory(store common.Store) *SharedMemory {
-	return &SharedMemory{store: store}
+func NewSharedMemory(store common.Store, vm *goja.Runtime) *SharedMemory {
+	return &SharedMemory{store: store, vm: vm}
 }
 
 func (m *SharedMemory) Get(key string) any {
@@ -33,7 +36,18 @@ func (m *SharedMemory) Clear() {
 }
 
 func (m *SharedMemory) Update(key string, fn func(v any) any) any {
-	return m.store.Update(key, fn)
+	r := m.store.Update(key, func(v any) any {
+		return fn(v)
+	})
+
+	switch val := r.(type) {
+	case map[string]any:
+		return m.vm.NewDynamicObject(&SharedObject{m: val, vm: m.vm})
+	case []any:
+		return m.vm.NewDynamicArray(&SharedArray{array: val, vm: m.vm})
+	default:
+		return val
+	}
 }
 
 func (m *SharedMemory) Keys() []string {
@@ -44,3 +58,154 @@ func (m *SharedMemory) Namespace(name string) *SharedMemory {
 	s := m.store.Namespace(name)
 	return &SharedMemory{store: s}
 }
+
+type SharedObject struct {
+	m  map[string]any
+	vm *goja.Runtime
+}
+
+func (v *SharedObject) Get(key string) goja.Value {
+	val, ok := v.m[key]
+	if !ok {
+		return goja.Undefined()
+	}
+	return toValue(val, v.vm, func(val any) {
+		v.m[key] = val
+	})
+}
+
+func (v *SharedObject) Set(key string, val goja.Value) bool {
+	v.m[key] = val.Export()
+	return true
+}
+
+func (v *SharedObject) Delete(key string) bool {
+	if _, ok := v.m[key]; ok {
+		delete(v.m, key)
+		return true
+	}
+	return false
+}
+
+func (v *SharedObject) Has(key string) bool {
+	if _, ok := v.m[key]; ok {
+		return true
+	}
+	return false
+}
+
+func (v *SharedObject) Keys() []string {
+	var keys []string
+	for k := range v.m {
+		keys = append(keys, k)
+	}
+	return keys
+}
+
+func (v *SharedObject) Export() any {
+	return v.m
+}
+
+type SharedArray struct {
+	array  []any
+	vm     *goja.Runtime
+	update func(v any)
+}
+
+func (s *SharedArray) Get(idx int) goja.Value {
+	if idx < 0 {
+		idx += len(s.array)
+	}
+	if idx >= 0 && idx < len(s.array) {
+		return toValue(s.array[idx], s.vm, nil)
+	}
+	return nil
+}
+
+func (s *SharedArray) Set(idx int, val goja.Value) bool {
+	if idx < 0 {
+		idx += len(s.array)
+	}
+	if idx < 0 {
+		return false
+	}
+	if idx >= len(s.array) {
+		s.expand(idx + 1)
+	}
+	s.array[idx] = val.Export()
+	return true
+}
+
+func (s *SharedArray) Len() int {
+	return len(s.array)
+}
+
+func (s *SharedArray) SetLen(n int) bool {
+	if n > len(s.array) {
+		s.expand(n)
+		return true
+	}
+	if n < 0 {
+		return false
+	}
+	if n < len(s.array) {
+		tail := s.array[n:len(s.array)]
+		for j := range tail {
+			tail[j] = nil
+		}
+		s.array = s.array[:n]
+		if s.update != nil {
+			s.update(s.array)
+		}
+	}
+	return true
+}
+
+func (s *SharedArray) expand(newLen int) {
+	if newLen > cap(s.array) {
+		a := make([]any, newLen)
+		copy(a, s.array)
+		s.array = a
+	} else {
+		s.array = s.array[:newLen]
+	}
+	if s.update != nil {
+		s.update(s.array)
+	}
+}
+
+func (s *SharedArray) Export() any {
+	return s.array
+}
+
+func toValue(value any, vm *goja.Runtime, update func(v any)) goja.Value {
+	switch v := value.(type) {
+	case map[string]any:
+		return vm.NewDynamicObject(&SharedObject{m: v, vm: vm})
+	case []any:
+		return vm.NewDynamicArray(&SharedArray{array: v, vm: vm, update: update})
+	default:
+		return vm.ToValue(value)
+	}
+}
+
+func Export(v any) any {
+	switch val := v.(type) {
+	case *SharedObject:
+		m := make(map[string]any)
+		for k, item := range val.m {
+			m[k] = Export(item)
+		}
+		return m
+	case *SharedArray:
+		arr := make([]any, len(val.array))
+		for i, item := range val.array {
+			arr[i] = Export(item)
+		}
+		return arr
+	case goja.Value:
+		return Export(val.Export())
+	default:
+		return v
+	}
+}
diff --git a/js/mokapi/shared_test.go b/js/mokapi/shared_test.go
index 834dca242..2d82f1463 100644
--- a/js/mokapi/shared_test.go
+++ b/js/mokapi/shared_test.go
@@ -52,7 +52,7 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, "hello world", v.Export())
+			r.Equal(t, "hello world", mokapi.Export(v))
 		},
 	},
 	{
@@ -77,7 +77,7 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, map[string]any{"bar": false, "test": true}, v.Export())
+			r.Equal(t, map[string]any{"bar": false, "test": true}, mokapi.Export(v))
 		},
 	},
 	{
@@ -102,7 +102,7 @@ func TestModule_Shared(t *testing.T) {
				m.shared.get('bar')
			`)
 			r.NoError(t, err)
-			r.Equal(t, nil, v.Export())
+			r.Equal(t, nil, mokapi.Export(v))
 
 			_, err = vm1.RunString(`
				m.shared.set('bar', 123)
@@ -113,7 +113,7 @@ func TestModule_Shared(t *testing.T) {
				m.shared.get('bar')
			`)
 			r.NoError(t, err)
-			r.Equal(t, int64(123), v.Export())
+			r.Equal(t, int64(123), mokapi.Export(v))
 		},
 	},
 	{
@@ -134,7 +134,7 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, int64(2), v.Export())
+			r.Equal(t, int64(2), mokapi.Export(v))
 		},
 	},
 	{
@@ -158,7 +158,7 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, []string{"1", "100", "bar", "foo"}, v.Export())
+			r.Equal(t, []string{"1", "100", "bar", "foo"}, mokapi.Export(v))
 		},
 	},
 	{
@@ -181,7 +181,7 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, int64(123), v.Export())
+			r.Equal(t, int64(123), mokapi.Export(v))
 		},
 	},
 	{
@@ -204,7 +204,72 @@ func TestModule_Shared(t *testing.T) {
			`)
 
 			r.NoError(t, err)
-			r.Equal(t, map[string]any{"bar": "123"}, v.Export())
+			r.Equal(t, map[string]any{"bar": "123"}, mokapi.Export(v))
+		},
+	},
+	{
+		name: "update with array",
+		test: func(t *testing.T, newVm func() *goja.Runtime) {
+			vm1 := newVm()
+
+			v, err := vm1.RunString(`
				const m = require('mokapi');
				const foo = m.shared.update('foo', (v) => v ?? { items: [] });
				foo.items.push(123)
				foo
			`)
+			r.NoError(t, err)
+			m := map[string]interface{}{}
+			err = vm1.ExportTo(v, &m)
+			r.Equal(t, map[string]any{"items": []any{int64(123)}}, mokapi.Export(v))
+		},
+	},
+	{
+		name: "enumerate object",
+		test: func(t *testing.T, newVm func() *goja.Runtime) {
+			vm1 := newVm()
+
+			v, err := vm1.RunString(`
				const m = require('mokapi');
				const foo = m.shared.update('foo', (v) => v ?? { foo: 'bar' });
				const result = []
				for (let k in foo) {
					result.push(k)
				}
				result
			`)
+			r.NoError(t, err)
+			r.Equal(t, []any{"foo"}, mokapi.Export(v))
+		},
+	},
+	{
+		name: "spread object",
+		test: func(t *testing.T, newVm func() *goja.Runtime) {
+			vm1 := newVm()
+
+			v, err := vm1.RunString(`
				const m = require('mokapi');
				const shared = m.shared.update('foo', (v) => v ?? { foo: 'bar' });
				const { foo } = shared
				foo
			`)
+			r.NoError(t, err)
+			r.Equal(t, "bar", mokapi.Export(v))
+		},
+	},
+	{
+		name: "splice array",
+		test: func(t *testing.T, newVm func() *goja.Runtime) {
+			vm1 := newVm()
+
+			v, err := vm1.RunString(`
				const m = require('mokapi');
				const shared = m.shared.update('foo', (v) => v ?? { items: [1,2,3] });
				shared.items.splice(1, 1)
				shared.items
			`)
+			r.NoError(t, err)
+			r.Equal(t, []any{int64(1), int64(3)}, mokapi.Export(v))
 		},
 	},
 }
diff --git a/js/script_faker_test.go b/js/script_faker_test.go
index 7b7b6a639..b38aa0abf 100644
--- a/js/script_faker_test.go
+++ b/js/script_faker_test.go
@@ -32,17 +32,17 @@ func TestScript_Faker(t *testing.T) {
 		},
 	},
 	{
-		name: "fake string or number",
+		name: "fake string or integer",
 		test: func(t *testing.T, host *enginetest.Host) {
 			s, err := jstest.New(jstest.WithSource(`import faker from 'mokapi/faker'
				export default function() {
-					return faker.fake({ type: ['string', 'number'] })
+					return faker.fake({ type: ['string', 'integer'] })
				}`),
 				js.WithHost(host))
 			r.NoError(t, err)
 			v, err := s.RunDefault()
 			r.NoError(t, err)
-			r.Equal(t, 609859.0117483337, v.Export())
+			r.Equal(t, int64(-168643), v.Export())
 		},
 	},
 	{
diff --git a/kafka/apiVersion/apiVersion.go b/kafka/apiVersion/apiVersion.go
index cbccf0d2d..a54d1b916 100644
--- a/kafka/apiVersion/apiVersion.go
+++ b/kafka/apiVersion/apiVersion.go
@@ -1,6 +1,7 @@
 package apiVersion
 
 import (
+	"math"
 	"mokapi/kafka"
 )
 
@@ -16,7 +17,7 @@ func init() {
 		// https://github.com/a0x8o/kafka/blob/master/clients/src/main/resources/common/message/ApiVersionsResponse.json
 		// Tagged fields are only supported in the body but
 		// not in the header
-		4,
+		math.MaxInt16,
 	)
 }
diff --git a/kafka/apiVersion/apiVersion_test.go b/kafka/apiVersion/apiVersion_test.go
index 52800621b..2ce7b22a2 100644
--- a/kafka/apiVersion/apiVersion_test.go
+++ b/kafka/apiVersion/apiVersion_test.go
@@ -1,10 +1,14 @@ package apiVersion_test
 
 import (
-	"github.com/stretchr/testify/require"
+	"bytes"
+	"encoding/binary"
 	"mokapi/kafka"
 	"mokapi/kafka/apiVersion"
+	"mokapi/kafka/kafkatest"
 	"testing"
+
+	"github.com/stretchr/testify/require"
 )
 
 func TestInit(t *testing.T) {
@@ -19,3 +23,86 @@ func TestNewApiKeyResponse(t *testing.T) {
 	require.Equal(t, int16(0), res.MinVersion)
 	require.Equal(t, int16(3), res.MaxVersion)
 }
+
+func TestRequest(t *testing.T) {
+	kafkatest.TestRequest(t, 2, &apiVersion.Request{})
+
+	kafkatest.TestRequest(t, 3, &apiVersion.Request{
+		ClientSwName:    "foo",
+		ClientSwVersion: "1.1",
+	})
+
+	b := kafkatest.WriteRequest(t, 3, 123, "me", &apiVersion.Request{
+		ClientSwName:    "foo",
+		ClientSwVersion: "1.1",
+	})
+	expected := new(bytes.Buffer)
+	// header
+	_ = binary.Write(expected, binary.BigEndian, int32(22))               // length
+	_ = binary.Write(expected, binary.BigEndian, int16(kafka.ApiVersions)) // ApiKey
+	_ = binary.Write(expected, binary.BigEndian, int16(3))                // ApiVersion
+	_ = binary.Write(expected, binary.BigEndian, int32(123))              // correlationId
+	_ = binary.Write(expected, binary.BigEndian, int16(2))                // ClientId length
+	_ = binary.Write(expected, binary.BigEndian, []byte("me"))            // ClientId
+	_ = binary.Write(expected, binary.BigEndian, int8(0))                 // tag buffer
+	// message
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Key length
+	_ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Key
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Key length
+	_ = binary.Write(expected, binary.BigEndian, []byte("1.1")) // Key
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+	require.Equal(t, expected.Bytes(), b)
+}
+
+func TestResponse(t *testing.T) {
+	kafkatest.TestResponse(t, 2, &apiVersion.Response{
+		ErrorCode: 0,
+		ApiKeys: []apiVersion.ApiKeyResponse{
+			{
+				ApiKey:     kafka.Produce,
+				MinVersion: 0,
+				MaxVersion: 3,
+			},
+		},
+		ThrottleTimeMs: 123,
+	})
+
+	kafkatest.TestResponse(t, 3, &apiVersion.Response{
+		ErrorCode: 0,
+		ApiKeys: []apiVersion.ApiKeyResponse{
+			{
+				ApiKey:     kafka.Produce,
+				MinVersion: 0,
+				MaxVersion: 3,
+			},
+		},
+		ThrottleTimeMs: 123,
+	})
+
+	b := kafkatest.WriteResponse(t, 3, 123, &apiVersion.Response{
+		ErrorCode: 0,
+		ApiKeys: []apiVersion.ApiKeyResponse{
+			{
+				ApiKey:     kafka.Produce,
+				MinVersion: 0,
+				MaxVersion: 3,
+			},
+		},
+		ThrottleTimeMs: 123,
+	})
+	expected := new(bytes.Buffer)
+	// header
+	_ = binary.Write(expected, binary.BigEndian, int32(19))  // length
+	_ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId
+	// no tag in header
+	// message
+	_ = binary.Write(expected, binary.BigEndian, int16(0))             // ErrorCode
+	_ = binary.Write(expected, binary.BigEndian, int8(2))              // Array length
+	_ = binary.Write(expected, binary.BigEndian, int16(kafka.Produce)) // ApiKey
+	_ = binary.Write(expected, binary.BigEndian, int16(0))             // MinVersion
+	_ = binary.Write(expected, binary.BigEndian, int16(3))             // MaxVersion
+	_ = binary.Write(expected, binary.BigEndian, int8(0))              // tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int32(123))           // ThrottleTimeMs
+	_ = binary.Write(expected, binary.BigEndian, int8(0))              // tag buffer
+	require.Equal(t, expected.Bytes(), b)
+}
diff --git a/kafka/createTopics/createTopic_test.go b/kafka/createTopics/createTopic_test.go
index 3c51c06e9..dba56b53b 100644
--- a/kafka/createTopics/createTopic_test.go
+++ b/kafka/createTopics/createTopic_test.go
@@ -1,9 +1,14 @@
 package createTopics_test
 
 import (
-	"github.com/stretchr/testify/require"
+	"bytes"
+	"encoding/binary"
 	"mokapi/kafka"
+	"mokapi/kafka/createTopics"
+	"mokapi/kafka/kafkatest"
 	"testing"
+
+	"github.com/stretchr/testify/require"
 )
 
 func TestInit(t *testing.T) {
@@ -11,3 +16,192 @@ func TestInit(t *testing.T) {
 	reg := kafka.ApiTypes[kafka.CreateTopics]
 	require.Equal(t, int16(0), reg.MinVersion)
 	require.Equal(t, int16(7), reg.MaxVersion)
 }
+
+func TestRequest(t *testing.T) {
+	kafkatest.TestRequest(t, 4, &createTopics.Request{
+		Topics: []createTopics.Topic{
+			{
+				Name:              "foo",
+				NumPartitions:     2,
+				ReplicationFactor: 1,
+				Assignments: []createTopics.Assignment{
+					{
+						Index:     0,
+						BrokerIds: []int32{0, 1},
+					},
+				},
+				Configs: []createTopics.Config{
+					{
+						Name:  "foo",
+						Value: "bar",
+					},
+				},
+			},
+		},
+		TimeoutMs:    123,
+		ValidateOnly: false,
+	})
+
+	kafkatest.TestRequest(t, 5, &createTopics.Request{
+		Topics: []createTopics.Topic{
+			{
+				Name:              "foo",
+				NumPartitions:     2,
+				ReplicationFactor: 1,
+				Assignments: []createTopics.Assignment{
+					{
+						Index:     0,
+						BrokerIds: []int32{0, 1},
+					},
+				},
+				Configs: []createTopics.Config{
+					{
+						Name:  "foo",
+						Value: "bar",
+					},
+				},
+			},
+		},
+		TimeoutMs:    123,
+		ValidateOnly: false,
+	})
+
+	b := kafkatest.WriteRequest(t, 5, 123, "me", &createTopics.Request{
+		Topics: []createTopics.Topic{
+			{
+				Name:              "foo",
+				NumPartitions:     2,
+				ReplicationFactor: 1,
+				Assignments: []createTopics.Assignment{
+					{
+						Index:     0,
+						BrokerIds: []int32{0, 1},
+					},
+				},
+				Configs: []createTopics.Config{
+					{
+						Name:  "foo",
+						Value: "bar",
+					},
+				},
+			},
+		},
+		TimeoutMs:    123,
+		ValidateOnly: false,
+	})
+	expected := new(bytes.Buffer)
+	// header
+	_ = binary.Write(expected, binary.BigEndian, int32(56))                 // length
+	_ = binary.Write(expected, binary.BigEndian, int16(kafka.CreateTopics)) // ApiKey
+	_ = binary.Write(expected, binary.BigEndian, int16(5))                  // ApiVersion
+	_ = binary.Write(expected, binary.BigEndian, int32(123))                // correlationId
+	_ = binary.Write(expected, binary.BigEndian, int16(2))                  // ClientId length
+	_ = binary.Write(expected, binary.BigEndian, []byte("me"))              // ClientId
+	_ = binary.Write(expected, binary.BigEndian, int8(0))                   // tag buffer
+	// message
+	_ = binary.Write(expected, binary.BigEndian, int8(2))       // Topics length
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Topic name length
+	_ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Topic name
+	_ = binary.Write(expected, binary.BigEndian, int32(2))      // NumPartitions
+	_ = binary.Write(expected, binary.BigEndian, int16(1))      // ReplicationFactor
+	_ = binary.Write(expected, binary.BigEndian, int8(2))       // Assignments length
+	_ = binary.Write(expected, binary.BigEndian, int32(0))      // Index
+	_ = binary.Write(expected, binary.BigEndian, int8(3))       // BrokerIds length
+	_ = binary.Write(expected, binary.BigEndian, int32(0))      // BrokerId 0
+	_ = binary.Write(expected, binary.BigEndian, int32(1))      // BrokerId 1
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int8(2))       // Configs length
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Name length
+	_ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Value length
+	_ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Value
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // Topics tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int32(123))    // TimeoutMs
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // ValidateOnly
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+	require.Equal(t, expected.Bytes(), b)
+}
+
+func TestResponse(t *testing.T) {
+	kafkatest.TestResponse(t, 4, &createTopics.Response{
+		ThrottleTimeMs: 123,
+		Topics: []createTopics.TopicResponse{
+			{
+				Name:         "foo",
+				ErrorCode:    0,
+				ErrorMessage: "",
+			},
+		},
+	})
+
+	kafkatest.TestResponse(t, 5, &createTopics.Response{
+		ThrottleTimeMs: 123,
+		Topics: []createTopics.TopicResponse{
+			{
+				Name:              "foo",
+				ErrorCode:         0,
+				ErrorMessage:      "",
+				NumPartitions:     2,
+				ReplicationFactor: 1,
+				Configs: []createTopics.ConfigResponse{
+					{
+						Name:         "foo",
+						Value:        "bar",
+						ReadOnly:     false,
+						ConfigSource: 0,
+						IsSensitive:  false,
+					},
+				},
+			},
+		},
+	})
+
+	b := kafkatest.WriteResponse(t, 5, 123, &createTopics.Response{
+		ThrottleTimeMs: 123,
+		Topics: []createTopics.TopicResponse{
+			{
+				Name:              "foo",
+				ErrorCode:         0,
+				ErrorMessage:      "",
+				NumPartitions:     2,
+				ReplicationFactor: 1,
+				Configs: []createTopics.ConfigResponse{
+					{
+						Name:         "foo",
+						Value:        "bar",
+						ReadOnly:     false,
+						ConfigSource: 0,
+						IsSensitive:  false,
+					},
+				},
+			},
+		},
+	})
+	expected := new(bytes.Buffer)
+	// header
+	_ = binary.Write(expected, binary.BigEndian, int32(38))  // length
+	_ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId
+	_ = binary.Write(expected, binary.BigEndian, int8(0))    // tag buffer
+	// message
+	_ = binary.Write(expected, binary.BigEndian, int32(123))    // ThrottleTimeMs
+	_ = binary.Write(expected, binary.BigEndian, int8(2))       // Topics length
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Topic name length
+	_ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Topic name
+	_ = binary.Write(expected, binary.BigEndian, int16(0))      // ErrorCode
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // ErrorMessage length
+	_ = binary.Write(expected, binary.BigEndian, int32(2))      // NumPartitions
+	_ = binary.Write(expected, binary.BigEndian, int16(1))      // ReplicationFactor
+	_ = binary.Write(expected, binary.BigEndian, int8(2))       // Configs length
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Name length
+	_ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name
+	_ = binary.Write(expected, binary.BigEndian, int8(4))       // Value length
+	_ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Value
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // ReadOnly
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // ConfigSource
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // IsSensitive
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // Configs tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // Topics tag buffer
+	_ = binary.Write(expected, binary.BigEndian, int8(0))       // tag buffer
+	require.Equal(t, expected.Bytes(), b)
+}
diff --git a/kafka/createTopics/createTopics.go b/kafka/createTopics/createTopics.go
index 447a5dd4d..a5469ae69 100644
--- a/kafka/createTopics/createTopics.go
+++ b/kafka/createTopics/createTopics.go
@@ -18,7 +18,7 @@ func init() {
 }
 
 type Request struct {
-	Topics       []Topic          `kafka:""`
+	Topics       []Topic          `kafka:"compact=5"`
 	TimeoutMs    int32            `kafka:""`
 	ValidateOnly bool             `kafka:"min=1"`
 	TagFields    map[int64]string `kafka:"type=TAG_BUFFER,min=5"`
@@ -28,14 +28,14 @@ type Topic struct {
 	Name              string           `kafka:"compact=5"`
 	NumPartitions     int32            `kafka:""`
 	ReplicationFactor int16            `kafka:""`
-	Assignments       []Assignment     `kafka:""`
-	Configs           []Config         `kafka:""`
+	Assignments       []Assignment     `kafka:"compact=5"`
+	Configs           []Config         `kafka:"compact=5"`
 	TagFields         map[int64]string `kafka:"type=TAG_BUFFER,min=5"`
 }
 
 type Assignment struct {
 	Index     int32            `kafka:""`
-	BrokerIds []int32          `kafka:""`
+	BrokerIds []int32          `kafka:"compact=5"`
 	TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=5"`
 }
 
@@ -47,7 +47,7 @@ type Config struct {
 
 type Response struct {
 	ThrottleTimeMs int32            `kafka:"min=2"`
-	Topics         []TopicResponse  `kafka:""`
+	Topics         []TopicResponse  `kafka:"compact=5"`
 	TagFields      map[int64]string `kafka:"type=TAG_BUFFER,min=5"`
 }
 
@@ -57,7 +57,7 @@ type TopicResponse struct {
 	ErrorMessage      string           `kafka:"min=1,compact=5,nullable"`
 	NumPartitions     int32            `kafka:"min=5"`
 	ReplicationFactor int16            `kafka:"min=5"`
-	Configs           []ConfigResponse `kafka:"min=5"`
+	Configs           []ConfigResponse `kafka:"min=5,compact=5"`
 	TagFields         map[int64]string `kafka:"type=TAG_BUFFER,min=5"`
 }
 
diff --git a/kafka/decode.go b/kafka/decode.go
index 3a9728915..f69f95aaf 100644
--- a/kafka/decode.go
+++ b/kafka/decode.go
@@ -2,9 +2,10 @@ package kafka
 
 import (
 	"encoding/binary"
-	"github.com/pkg/errors"
 	"io"
 	"reflect"
+
+	"github.com/pkg/errors"
 )
 
 type decodeFunc func(*Decoder, reflect.Value)
@@ -29,10 +30,10 @@ func NewDecoder(reader io.Reader, size int) *Decoder {
 }
 
 func newDecodeFunc(t reflect.Type, version int16, tag kafkaTag) decodeFunc {
-	if reflect.PtrTo(t).Implements(readerFrom) {
+	if reflect.PointerTo(t).Implements(readerFrom) {
 		return func(d *Decoder, v reflect.Value) {
 			i := v.Addr().Interface()
-			i.(ReaderFrom).ReadFrom(d, version, tag)
+			_ = i.(ReaderFrom).ReadFrom(d, version, tag)
 		}
 	}
 
@@ -259,7 +260,7 @@ func (d *Decoder) ReadCompactBytes() []byte {
 	if n := d.ReadUvarint(); n < 1 {
 		return nil
 	} else {
-		b := make([]byte, n)
+		b := make([]byte, n-1)
 		if d.ReadFull(b) {
 			return b
 		} else {
diff --git a/kafka/encode.go b/kafka/encode.go
index 27bb08c29..2ae51860c 100644
--- a/kafka/encode.go
+++ b/kafka/encode.go
@@ -45,11 +45,11 @@ func newEncodeFunc(t reflect.Type, version int16, tag kafkaTag) encodeFunc {
 	case reflect.Struct:
 		return newStructEncodeFunc(t, version, tag)
 	case reflect.String:
-		if version >= tag.compact && tag.nullable {
+		if version >= tag.compact && version >= tag.nullable {
 			return (*Encoder).encodeCompactNullString
 		} else if version >= tag.compact {
 			return (*Encoder).encodeCompactString
-		} else if tag.nullable {
+		} else if version >= tag.nullable {
 			return (*Encoder).encodeNullString
 		}
 		return (*Encoder).encodeString
@@ -80,11 +80,11 @@ func newBytesEncodeFunc(version int16, tag kafkaTag) encodeFunc {
 	switch {
-	case version >= tag.compact && tag.nullable:
+	case version >= tag.compact && version >= tag.nullable:
 		return (*Encoder).encodeCompactNullBytes
 	case version >= tag.compact:
 		return (*Encoder).encodeCompactBytes
-	case tag.nullable:
+	case version >= tag.nullable:
 		return (*Encoder).encodeNullBytes
 	default:
 		return (*Encoder).encodeBytes
 	}
 }
diff --git a/kafka/fetch/fetch_test.go b/kafka/fetch/fetch_test.go
index cb9dd6119..d0aece293 100644
--- a/kafka/fetch/fetch_test.go
+++ b/kafka/fetch/fetch_test.go
@@ -1,9 +1,14 @@
 package fetch_test
 
 import (
-	"github.com/stretchr/testify/require"
+	"bytes"
+	"encoding/binary"
 	"mokapi/kafka"
+	"mokapi/kafka/fetch"
+	"mokapi/kafka/kafkatest"
 	"testing"
+
+	"github.com/stretchr/testify/require"
 )
 
 func TestInit(t *testing.T) {
@@ -11,3 +16,348 @@ func TestInit(t *testing.T) {
 	reg := kafka.ApiTypes[kafka.Fetch]
 	require.Equal(t, int16(0), reg.MinVersion)
 	require.Equal(t, int16(12), reg.MaxVersion)
 }
+
+func TestRequest(t *testing.T) {
+	kafkatest.TestRequest(t, 11, &fetch.Request{
+		ReplicaId:      0,
+		MaxWaitMs:      123,
+		MinBytes:       456,
+		MaxBytes:       789,
+		IsolationLevel: 0,
+		SessionId:      0,
+		SessionEpoch:   0,
+		Topics: []fetch.Topic{
+			{
+				Name: "foo",
+				Partitions: []fetch.RequestPartition{
+					{
+						Index:              0,
+						CurrentLeaderEpoch: 0,
+						FetchOffset:        0,
+						LastFetchedEpoch:   0,
+						LogStartOffset:     0,
+						MaxBytes:           123,
+					},
+				},
+			},
+		},
+		ForgottenTopics: []fetch.Topic{},
+		RackId:          "bar",
+	})
+
+	kafkatest.TestRequest(t, 12, &fetch.Request{
+		ReplicaId:      0,
+		MaxWaitMs:      123,
+		MinBytes:       456,
+		MaxBytes:       789,
+		IsolationLevel: 0,
+		SessionId:      0,
+		SessionEpoch:   0,
+		Topics: []fetch.Topic{
+			{
+				Name: "foo",
+				Partitions: []fetch.RequestPartition{
+					{
+						Index:              0,
+						CurrentLeaderEpoch: 0,
+						FetchOffset:        0,
+						LastFetchedEpoch:   0,
+						LogStartOffset:     0,
+						MaxBytes:           123,
+					},
+				},
+			},
+		},
+		ForgottenTopics: []fetch.Topic{},
+		RackId:          "bar",
+	})
+
+	b := kafkatest.WriteRequest(t, 12, 123, "me", &fetch.Request{
+		ReplicaId:      0,
+		MaxWaitMs:      123,
+		MinBytes:       456,
+		MaxBytes:       789,
+		IsolationLevel: 0,
+		SessionId:      0,
+		SessionEpoch:   0,
+		Topics: []fetch.Topic{
+			{
+				Name: "foo",
+				Partitions: []fetch.RequestPartition{
+					{
+						Index:              0,
+						CurrentLeaderEpoch: 0,
+						FetchOffset:        0,
+						LastFetchedEpoch:   0,
+						LogStartOffset:     0,
+						MaxBytes:           123,
+					},
+				},
+			},
+		},
+		ForgottenTopics: []fetch.Topic{},
+		RackId:          "bar",
+	})
+	expected := new(bytes.Buffer)
+	// header
+	_ = binary.Write(expected, binary.BigEndian, int32(84))          // length
+	_ = binary.Write(expected, binary.BigEndian, int16(kafka.Fetch)) // ApiKey
+	_ = binary.Write(expected, binary.BigEndian, int16(12))          // ApiVersion
+	_ = binary.Write(expected, binary.BigEndian, int32(123))         // correlationId
+	_ = binary.Write(expected, binary.BigEndian, int16(2))           // ClientId length
+	_ = binary.Write(expected, binary.BigEndian, []byte("me"))       // ClientId
+	_ = binary.Write(expected, binary.BigEndian, int8(0))            // tag buffer
+	// message
+	_ = binary.Write(expected, binary.BigEndian, int32(0)) //
ReplicaId + _ = binary.Write(expected, binary.BigEndian, int32(123)) // MaxWaitMs + _ = binary.Write(expected, binary.BigEndian, int32(456)) // MinBytes + _ = binary.Write(expected, binary.BigEndian, int32(789)) // MaxBytes + _ = binary.Write(expected, binary.BigEndian, int8(0)) // IsolationLevel + _ = binary.Write(expected, binary.BigEndian, int32(0)) // SessionId + _ = binary.Write(expected, binary.BigEndian, int32(0)) // SessionEpoch + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // Index + _ = binary.Write(expected, binary.BigEndian, int32(0)) // CurrentLeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int64(0)) // FetchOffset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LastFetchedEpoch + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LogStartOffset + _ = binary.Write(expected, binary.BigEndian, int32(123)) // MaxBytes + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(1)) // ForgottenTopics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // RackId length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // RackId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 11, &fetch.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + SessionId: 0, + Topics: []fetch.ResponseTopic{ + { + Name: "foo", + Partitions: []fetch.ResponsePartition{ + { + Index: 0, + ErrorCode: 0, + HighWatermark: 0, + LastStableOffset: 0, + LogStartOffset: 0, + AbortedTransactions: []fetch.AbortedTransaction{}, + PreferredReadReplica: 0, + RecordSet: kafka.RecordBatch{Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + + kafkatest.TestResponse(t, 12, &fetch.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + SessionId: 0, + Topics: []fetch.ResponseTopic{ + { + Name: "foo", + Partitions: []fetch.ResponsePartition{ + { + Index: 0, + ErrorCode: 0, + HighWatermark: 0, + LastStableOffset: 0, + LogStartOffset: 0, + AbortedTransactions: []fetch.AbortedTransaction{}, + PreferredReadReplica: 0, + RecordSet: kafka.RecordBatch{Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + + b := kafkatest.WriteResponse(t, 11, 123, &fetch.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + SessionId: 0, + Topics: []fetch.ResponseTopic{ + { + Name: "foo", + Partitions: []fetch.ResponsePartition{ + { + Index: 0, + ErrorCode: 0, + HighWatermark: 0, + LastStableOffset: 0, + LogStartOffset: 0, + AbortedTransactions: []fetch.AbortedTransaction{}, + PreferredReadReplica: 0, + RecordSet: kafka.RecordBatch{Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, 
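+ // NOTE (editor): the expected bytes assembled below spell out Kafka's record batch v2 framing:
+ // base offset, batch length, partition leader epoch, magic (2), CRC, attributes, last offset delta,
+ // first/max timestamp, producer id/epoch, base sequence and record count, then the record itself.
+ // Record-level lengths are zigzag-encoded varints, which is why the 3-byte key "foo" is written
+ // as int8(6) and the 12-byte record as int8(24).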
}, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(143)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + // message + _ = binary.Write(expected, binary.BigEndian, int32(0)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int32(0)) // SessionId + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int16(3)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // Index + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int64(0)) // HighWatermark + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LastStableOffset + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LogStartOffset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // AbortedTransactions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // PreferredReadReplica + _ = binary.Write(expected, binary.BigEndian, int32(74)) // Records length + _ = binary.Write(expected, binary.BigEndian, int64(0)) // base offset + _ = binary.Write(expected, binary.BigEndian, int32(62)) // message size + _ = binary.Write(expected, binary.BigEndian, int32(0)) // leader epoch + _ = binary.Write(expected, binary.BigEndian, int8(2)) // magic + _ = binary.Write(expected, binary.BigEndian, []byte{119, 89, 114, 22}) // crc32 + _ = binary.Write(expected, binary.BigEndian, int16(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int32(0)) // last offset delta + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // first timestamp + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // max timestamp + + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255, 255, 255, 255, 255}) // producer id + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255}) // producer epoch + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255}) // base sequence + + _ = binary.Write(expected, binary.BigEndian, int32(1)) // number of records + _ = binary.Write(expected, binary.BigEndian, int8(24)) // record length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta timestamp + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta offset + _ = binary.Write(expected, binary.BigEndian, int8(6)) // key length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // key + _ = binary.Write(expected, binary.BigEndian, int8(6)) // value length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // value + _ = binary.Write(expected, binary.BigEndian, int8(0)) // header length + + require.Equal(t, expected.Bytes(), b) + + b = kafkatest.WriteResponse(t, 12, 123, &fetch.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + SessionId: 0, + Topics: []fetch.ResponseTopic{ + { + Name: "foo", + Partitions: []fetch.ResponsePartition{ + { + Index: 0, + ErrorCode: 0, + HighWatermark: 0, + LastStableOffset: 0, + LogStartOffset: 0, + AbortedTransactions: []fetch.AbortedTransaction{}, + PreferredReadReplica: 0, + RecordSet: kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Offset: 0, + Time: 
kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + expected = new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(134)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(0)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int32(0)) // SessionId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // Index + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int64(0)) // HighWatermark + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LastStableOffset + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LogStartOffset + _ = binary.Write(expected, binary.BigEndian, int8(1)) // AbortedTransactions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // PreferredReadReplica + _ = binary.Write(expected, binary.BigEndian, int8(75)) // Records length + _ = binary.Write(expected, binary.BigEndian, int64(0)) // base offset + _ = binary.Write(expected, binary.BigEndian, int32(62)) // message size + _ = binary.Write(expected, binary.BigEndian, int32(0)) // leader epoch + _ = binary.Write(expected, binary.BigEndian, int8(2)) // magic + _ = binary.Write(expected, binary.BigEndian, []byte{119, 89, 114, 22}) // crc32 + _ = binary.Write(expected, binary.BigEndian, int16(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int32(0)) // last offset delta + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // first timestamp + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // max timestamp + + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255, 255, 255, 255, 255}) // producer id + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255}) // producer epoch + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255}) // base sequence + + _ = binary.Write(expected, binary.BigEndian, int32(1)) // number of records + _ = binary.Write(expected, binary.BigEndian, int8(24)) // record length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta timestamp + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta offset + _ = binary.Write(expected, binary.BigEndian, int8(6)) // key length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // key + _ = binary.Write(expected, binary.BigEndian, int8(6)) // value length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // value + _ = binary.Write(expected, binary.BigEndian, int8(0)) // header length + + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + + require.Equal(t, expected.Bytes(), b) +} diff --git 
a/kafka/findCoordinator/findCoordinator.go b/kafka/findCoordinator/findCoordinator.go index 33f5c73b4..a565a13b2 100644 --- a/kafka/findCoordinator/findCoordinator.go +++ b/kafka/findCoordinator/findCoordinator.go @@ -1,7 +1,6 @@ package findCoordinator import ( - "math" "mokapi/kafka" ) @@ -14,7 +13,7 @@ func init() { &Request{}, &Response{}, 3, - math.MaxInt16, + 3, ) } diff --git a/kafka/findCoordinator/findCoordinator_test.go b/kafka/findCoordinator/findCoordinator_test.go index aa8bd6316..5b61bd77e 100644 --- a/kafka/findCoordinator/findCoordinator_test.go +++ b/kafka/findCoordinator/findCoordinator_test.go @@ -1,9 +1,14 @@ package findCoordinator_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/findCoordinator" + "mokapi/kafka/kafkatest" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,79 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(3), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 2, &findCoordinator.Request{ + Key: "foo-group", + KeyType: findCoordinator.KeyTypeGroup, + }) + + kafkatest.TestRequest(t, 3, &findCoordinator.Request{ + Key: "foo-group", + KeyType: findCoordinator.KeyTypeGroup, + }) + + b := kafkatest.WriteRequest(t, 3, 123, "me", &findCoordinator.Request{ + Key: "foo-group", + KeyType: findCoordinator.KeyTypeGroup, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(25)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.FindCoordinator)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(3)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(10)) // Key length + _ = binary.Write(expected, binary.BigEndian, []byte("foo-group")) // Key + _ = binary.Write(expected, binary.BigEndian, int8(findCoordinator.KeyTypeGroup)) // KeyType + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 2, &findCoordinator.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + ErrorMessage: "", + NodeId: 1, + Port: 1234, + }) + + kafkatest.TestResponse(t, 3, &findCoordinator.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + ErrorMessage: "", + NodeId: 1, + Host: "foo", + Port: 1234, + }) + + b := kafkatest.WriteResponse(t, 3, 123, &findCoordinator.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + ErrorMessage: "", + NodeId: 1, + Host: "foo", + Port: 1234, + }) + + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(25)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // ErrorMessage length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // NodeId + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Host 
length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Host + _ = binary.Write(expected, binary.BigEndian, int32(1234)) // Port + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/heartbeat/heartbeat.go b/kafka/heartbeat/heartbeat.go index 882b426cc..b46e331b8 100644 --- a/kafka/heartbeat/heartbeat.go +++ b/kafka/heartbeat/heartbeat.go @@ -1,7 +1,6 @@ package heartbeat import ( - "math" "mokapi/kafka" ) @@ -14,7 +13,7 @@ func init() { &Request{}, &Response{}, 4, - math.MaxInt16, + 4, ) } diff --git a/kafka/heartbeat/heartbeat_test.go b/kafka/heartbeat/heartbeat_test.go index c149a1c96..36aea9317 100644 --- a/kafka/heartbeat/heartbeat_test.go +++ b/kafka/heartbeat/heartbeat_test.go @@ -1,9 +1,14 @@ package heartbeat_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/heartbeat" + "mokapi/kafka/kafkatest" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,72 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(4), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 3, &heartbeat.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + }) + + kafkatest.TestRequest(t, 4, &heartbeat.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + }) + + b := kafkatest.WriteRequest(t, 4, 123, "me", &heartbeat.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(28)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.Heartbeat)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(4)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int32(1)) // GenerationId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // GroupInstanceId length + _ = binary.Write(expected, binary.BigEndian, []byte("g1")) // GroupInstanceId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 3, &heartbeat.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + }) + + kafkatest.TestResponse(t, 4, &heartbeat.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + }) + + b := kafkatest.WriteResponse(t, 4, 123, &heartbeat.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(12)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // 
ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/joinGroup/joinGroup.go b/kafka/joinGroup/joinGroup.go index 7b5444ef1..d464fba78 100644 --- a/kafka/joinGroup/joinGroup.go +++ b/kafka/joinGroup/joinGroup.go @@ -1,7 +1,6 @@ package joinGroup import ( - "math" "mokapi/kafka" ) @@ -14,7 +13,7 @@ func init() { &Request{}, &Response{}, 6, - math.MaxInt16, + 6, ) } @@ -39,10 +38,11 @@ type Response struct { ThrottleTimeMs int32 `kafka:"min=2"` ErrorCode kafka.ErrorCode `kafka:""` GenerationId int32 `kafka:""` - ProtocolName string `kafka:"compact=6"` + ProtocolType string `kafka:"min=7,compact=7,nullable=7"` + ProtocolName string `kafka:"compact=6,nullable=7"` Leader string `kafka:"compact=6"` MemberId string `kafka:"compact=6"` - Members []Member `kafka:""` + Members []Member `kafka:"compact=6"` TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` } diff --git a/kafka/joinGroup/joinGroup_test.go b/kafka/joinGroup/joinGroup_test.go index 93aa42371..32d8e3b58 100644 --- a/kafka/joinGroup/joinGroup_test.go +++ b/kafka/joinGroup/joinGroup_test.go @@ -1,9 +1,14 @@ package joinGroup_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/joinGroup" + "mokapi/kafka/kafkatest" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,156 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(7), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 5, &joinGroup.Request{ + GroupId: "foo", + SessionTimeoutMs: 0, + RebalanceTimeoutMs: 0, + MemberId: "m1", + GroupInstanceId: "g1", + ProtocolType: "proto", + Protocols: []joinGroup.Protocol{ + { + Name: "p1", + MetaData: []byte("metadata"), + }, + }, + }) + + kafkatest.TestRequest(t, 6, &joinGroup.Request{ + GroupId: "foo", + SessionTimeoutMs: 0, + RebalanceTimeoutMs: 0, + MemberId: "m1", + GroupInstanceId: "g1", + ProtocolType: "proto", + Protocols: []joinGroup.Protocol{ + { + Name: "p1", + MetaData: []byte("metadata"), + }, + }, + }) + + b := kafkatest.WriteRequest(t, 6, 123, "me", &joinGroup.Request{ + GroupId: "foo", + SessionTimeoutMs: 0, + RebalanceTimeoutMs: 0, + MemberId: "m1", + GroupInstanceId: "g1", + ProtocolType: "proto", + Protocols: []joinGroup.Protocol{ + { + Name: "p1", + MetaData: []byte("metadata"), + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(52)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.JoinGroup)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(6)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int32(0)) // SessionTimeoutMs + _ = binary.Write(expected, binary.BigEndian, int32(0)) // RebalanceTimeoutMs + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, 
[]byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // GroupInstanceId length + _ = binary.Write(expected, binary.BigEndian, []byte("g1")) // GroupInstanceId + _ = binary.Write(expected, binary.BigEndian, int8(6)) // ProtocolType length + _ = binary.Write(expected, binary.BigEndian, []byte("proto")) // ProtocolType + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Protocols length + _ = binary.Write(expected, binary.BigEndian, int8(3)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("p1")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(9)) // MetaData length + _ = binary.Write(expected, binary.BigEndian, []byte("metadata")) // MetaData + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Protocols tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 5, &joinGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + GenerationId: 1, + ProtocolName: "p1", + Leader: "m2", + MemberId: "m1", + Members: []joinGroup.Member{ + { + MemberId: "m1", + GroupInstanceId: "g1", + MetaData: []byte("metadata"), + }, + }, + }) + kafkatest.TestResponse(t, 7, &joinGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + GenerationId: 1, + ProtocolType: "proto", + ProtocolName: "p1", + Leader: "m2", + MemberId: "m1", + Members: []joinGroup.Member{ + { + MemberId: "m1", + GroupInstanceId: "g1", + MetaData: []byte("metadata"), + }, + }, + }) + + b := kafkatest.WriteResponse(t, 7, 123, &joinGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + GenerationId: 1, + ProtocolType: "proto", + ProtocolName: "p1", + Leader: "m2", + MemberId: "m1", + Members: []joinGroup.Member{ + { + MemberId: "m1", + GroupInstanceId: "g1", + MetaData: []byte("metadata"), + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(48)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int32(1)) // GenerationId + _ = binary.Write(expected, binary.BigEndian, int8(6)) // ProtocolType length + _ = binary.Write(expected, binary.BigEndian, []byte("proto")) // ProtocolType + _ = binary.Write(expected, binary.BigEndian, int8(3)) // ProtocolName length + _ = binary.Write(expected, binary.BigEndian, []byte("p1")) // ProtocolName + _ = binary.Write(expected, binary.BigEndian, int8(3)) // Leader length + _ = binary.Write(expected, binary.BigEndian, []byte("m2")) // Leader + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Members length + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // GroupInstanceId length + _ = binary.Write(expected, binary.BigEndian, []byte("g1")) // GroupInstanceId + _ = binary.Write(expected, binary.BigEndian, int8(9)) // MetaData length + _ = binary.Write(expected, binary.BigEndian, []byte("metadata")) // MetaData + _ = binary.Write(expected, 
binary.BigEndian, int8(0)) // Members tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/kafkatest/client.go b/kafka/kafkatest/client.go index d32ddbb1a..548a2d082 100644 --- a/kafka/kafkatest/client.go +++ b/kafka/kafkatest/client.go @@ -31,7 +31,7 @@ type Client struct { } func NewClient(addr, clientId string) *Client { - return &Client{Addr: addr, clientId: clientId, Timeout: time.Second * 10} + return &Client{Addr: addr, clientId: clientId, Timeout: time.Second * 30} } func (c *Client) Close() { @@ -69,12 +69,12 @@ func (c *Client) Send(r *kafka.Request) (*kafka.Response, error) { } res := kafka.NewResponse(r.Header.ApiKey, r.Header.ApiVersion, r.Header.CorrelationId) - c.conn.SetReadDeadline(time.Now().Add(c.Timeout)) + _ = c.conn.SetReadDeadline(time.Now().Add(c.Timeout)) err = res.Read(c.conn) return res, err } -func (c *Client) ApiVersion(version int, r *apiVersion.Request) (*apiVersion.Response, error) { +func (c *Client) ApiVersion(version int16, r *apiVersion.Request) (*apiVersion.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -85,7 +85,7 @@ func (c *Client) ApiVersion(version int, r *apiVersion.Request) (*apiVersion.Res return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) Metadata(version int, r *metaData.Request) (*metaData.Response, error) { +func (c *Client) Metadata(version int16, r *metaData.Request) (*metaData.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -96,7 +96,7 @@ func (c *Client) Metadata(version int, r *metaData.Request) (*metaData.Response, return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) Produce(version int, r *produce.Request) (*produce.Response, error) { +func (c *Client) Produce(version int16, r *produce.Request) (*produce.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -107,7 +107,7 @@ func (c *Client) Produce(version int, r *produce.Request) (*produce.Response, er return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) Fetch(version int, r *fetch.Request) (*fetch.Response, error) { +func (c *Client) Fetch(version int16, r *fetch.Request) (*fetch.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -118,7 +118,7 @@ func (c *Client) Fetch(version int, r *fetch.Request) (*fetch.Response, error) { return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) OffsetFetch(version int, r *offsetFetch.Request) (*offsetFetch.Response, error) { +func (c *Client) OffsetFetch(version int16, r *offsetFetch.Request) (*offsetFetch.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -129,7 +129,7 @@ func (c *Client) OffsetFetch(version int, r *offsetFetch.Request) (*offsetFetch. 
return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) Offset(version int, r *offset.Request) (*offset.Response, error) { +func (c *Client) Offset(version int16, r *offset.Request) (*offset.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -140,7 +140,7 @@ func (c *Client) Offset(version int, r *offset.Request) (*offset.Response, error return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) JoinGroup(version int, r *joinGroup.Request) (*joinGroup.Response, error) { +func (c *Client) JoinGroup(version int16, r *joinGroup.Request) (*joinGroup.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -151,7 +151,7 @@ func (c *Client) JoinGroup(version int, r *joinGroup.Request) (*joinGroup.Respon return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) SyncGroup(version int, r *syncGroup.Request) (*syncGroup.Response, error) { +func (c *Client) SyncGroup(version int16, r *syncGroup.Request) (*syncGroup.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -162,7 +162,7 @@ func (c *Client) SyncGroup(version int, r *syncGroup.Request) (*syncGroup.Respon return nil, fmt.Errorf("unexpected response message: %T", reflect.ValueOf(res.Message).Elem().Type()) } -func (c *Client) Heartbeat(version int, r *heartbeat.Request) (*heartbeat.Response, error) { +func (c *Client) Heartbeat(version int16, r *heartbeat.Request) (*heartbeat.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -173,7 +173,7 @@ func (c *Client) Heartbeat(version int, r *heartbeat.Request) (*heartbeat.Respon return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) FindCoordinator(version int, r *findCoordinator.Request) (*findCoordinator.Response, error) { +func (c *Client) FindCoordinator(version int16, r *findCoordinator.Request) (*findCoordinator.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -184,7 +184,7 @@ func (c *Client) FindCoordinator(version int, r *findCoordinator.Request) (*find return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) OffsetCommit(version int, r *offsetCommit.Request) (*offsetCommit.Response, error) { +func (c *Client) OffsetCommit(version int16, r *offsetCommit.Request) (*offsetCommit.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -195,7 +195,7 @@ func (c *Client) OffsetCommit(version int, r *offsetCommit.Request) (*offsetComm return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) Listgroup(version int, r *listgroup.Request) (*listgroup.Response, error) { +func (c *Client) Listgroup(version int16, r *listgroup.Request) (*listgroup.Response, error) { res, err := c.Send(NewRequest(c.clientId, version, r)) if err != nil { return nil, err @@ -206,7 +206,7 @@ func (c *Client) Listgroup(version int, r *listgroup.Request) (*listgroup.Respon return nil, fmt.Errorf("unexpected response message: %T", res.Message) } -func (c *Client) JoinSyncGroup(member, group string, joinVersion, syncVersion int) error { +func (c *Client) JoinSyncGroup(member, group string, joinVersion, syncVersion int16) error { join, err := c.JoinGroup(joinVersion, &joinGroup.Request{ GroupId: 
group, MemberId: member, diff --git a/kafka/kafkatest/kafkatest.go b/kafka/kafkatest/kafkatest.go index 746ce0424..b6481e1b1 100644 --- a/kafka/kafkatest/kafkatest.go +++ b/kafka/kafkatest/kafkatest.go @@ -20,11 +20,11 @@ import ( "mokapi/kafka/syncGroup" ) -func NewRequest(clientId string, version int, msg kafka.Message) *kafka.Request { +func NewRequest(clientId string, version int16, msg kafka.Message) *kafka.Request { r := &kafka.Request{ Header: &kafka.Header{ ApiKey: getApiKey(msg), - ApiVersion: int16(version), + ApiVersion: version, ClientId: clientId, }, Message: msg, diff --git a/kafka/kafkatest/protocol.go b/kafka/kafkatest/protocol.go new file mode 100644 index 000000000..7002cb473 --- /dev/null +++ b/kafka/kafkatest/protocol.go @@ -0,0 +1,153 @@ +package kafkatest + +import ( + "bytes" + "mokapi/kafka" + "reflect" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestRequest(t *testing.T, version int16, msg kafka.Message) { + r1 := kafka.Request{ + Header: &kafka.Header{ + ApiKey: getApiKey(msg), + ApiVersion: version, + ClientId: "me", + CorrelationId: 123, + }, + Message: msg, + } + + b := &bytes.Buffer{} + err := r1.Write(b) + require.NoError(t, err) + + r2 := &kafka.Request{} + err = r2.Read(b) + require.NoError(t, err) + + require.True(t, deepEqual(r1.Message, r2.Message)) +} + +func WriteRequest(t *testing.T, version int16, correlationId int32, clientId string, msg kafka.Message) []byte { + r := kafka.Request{ + Header: &kafka.Header{ + ApiKey: getApiKey(msg), + ApiVersion: version, + ClientId: clientId, + CorrelationId: correlationId, + }, + Message: msg, + } + + b := &bytes.Buffer{} + err := r.Write(b) + require.NoError(t, err) + return b.Bytes() +} + +func TestResponse(t *testing.T, version int16, msg kafka.Message) { + apiKey := getApiKey(msg) + r1 := kafka.Response{ + Header: &kafka.Header{ + ApiKey: apiKey, + ApiVersion: version, + CorrelationId: 123, + }, + Message: msg, + } + + b := &bytes.Buffer{} + err := r1.Write(b) + require.NoError(t, err) + + r2 := &kafka.Response{ + Header: &kafka.Header{ + ApiKey: apiKey, + ApiVersion: version, + CorrelationId: 123, + }, + } + err = r2.Read(b) + require.NoError(t, err) + + require.True(t, deepEqual(r1.Message, r2.Message)) +} + +func WriteResponse(t *testing.T, version int16, correlationId int32, msg kafka.Message) []byte { + r := kafka.Response{ + Header: &kafka.Header{ + ApiKey: getApiKey(msg), + ApiVersion: version, + CorrelationId: correlationId, + }, + Message: msg, + } + + b := &bytes.Buffer{} + err := r.Write(b) + require.NoError(t, err) + return b.Bytes() +} + +func deepEqual(i1, i2 any) bool { + if b1, ok := i1.(kafka.Bytes); ok { + if b2, ok := i2.(kafka.Bytes); ok { + if b1.Size() != b2.Size() { + return false + } + return bytes.Equal(kafka.Read(b1), kafka.Read(b2)) + } + return false + } + + v1 := reflect.ValueOf(i1) + v2 := reflect.ValueOf(i2) + + t1 := reflect.TypeOf(i1) + if v1.Type() != v2.Type() { + return false + } + + switch v1.Kind() { + case reflect.Struct: + for i := 0; i < v1.NumField(); i++ { + if !t1.Field(i).IsExported() { + continue + } + if !deepEqual(v1.Field(i).Interface(), v2.Field(i).Interface()) { + return false + } + } + return true + case reflect.Ptr: + if v1.IsNil() { + return v2.IsNil() + } + return deepEqual(v1.Elem().Interface(), v2.Elem().Interface()) + case reflect.Slice: + if v1.Len() != v2.Len() { + return false + } + for i := 0; i < v1.Len(); i++ { + if !deepEqual(v1.Index(i).Interface(), v2.Index(i).Interface()) { + return false + } + } + return true + case 
reflect.Map: + if v1.Len() != v2.Len() { + return false + } + for _, k := range v1.MapKeys() { + if !deepEqual(v1.MapIndex(k).Interface(), v2.MapIndex(k).Interface()) { + return false + } + } + return true + default: + return i1 == i2 + } +} diff --git a/kafka/listgroup/listgroup.go b/kafka/listgroup/listgroup.go index 8efdadedf..75c3071f9 100644 --- a/kafka/listgroup/listgroup.go +++ b/kafka/listgroup/listgroup.go @@ -25,7 +25,7 @@ type Request struct { type Response struct { ThrottleTimeMs int32 `kafka:"min=1"` ErrorCode kafka.ErrorCode `kafka:""` - Groups []Group `kafka:""` + Groups []Group `kafka:"compact=3"` TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=3"` } diff --git a/kafka/listgroup/listgroup_test.go b/kafka/listgroup/listgroup_test.go index 362ba9204..16300ddd2 100644 --- a/kafka/listgroup/listgroup_test.go +++ b/kafka/listgroup/listgroup_test.go @@ -1,9 +1,14 @@ package listgroup_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/listgroup" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,89 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(4), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 2, &listgroup.Request{}) + + kafkatest.TestRequest(t, 4, &listgroup.Request{ + StatesFilter: []string{ + "foo", + }, + }) + + b := kafkatest.WriteRequest(t, 4, 123, "me", &listgroup.Request{ + StatesFilter: []string{ + "foo", + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(19)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.ListGroup)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(4)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(2)) // StatesFilters length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // StatesFilter length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // StatesFilter + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 2, &listgroup.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + Groups: []listgroup.Group{ + { + GroupId: "g-1", + ProtocolType: "proto", + }, + }, + }) + + kafkatest.TestResponse(t, 4, &listgroup.Response{ + ThrottleTimeMs: 0, + ErrorCode: 0, + Groups: []listgroup.Group{ + { + GroupId: "g-1", + ProtocolType: "proto", + GroupState: "state", + }, + }, + }) + + b := kafkatest.WriteResponse(t, 4, 123, &listgroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + Groups: []listgroup.Group{ + { + GroupId: "g-1", + ProtocolType: "proto", + GroupState: "state", + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(30)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // 
ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Groups length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("g-1")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int8(6)) // ProtocolType length + _ = binary.Write(expected, binary.BigEndian, []byte("proto")) // ProtocolType + _ = binary.Write(expected, binary.BigEndian, int8(6)) // GroupState length + _ = binary.Write(expected, binary.BigEndian, []byte("state")) // GroupState + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Groups tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/metaData/metaData_test.go b/kafka/metaData/metaData_test.go index 5824541ee..82cacab77 100644 --- a/kafka/metaData/metaData_test.go +++ b/kafka/metaData/metaData_test.go @@ -1,9 +1,14 @@ package metaData_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/metaData" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,193 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(9), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 8, &metaData.Request{ + Topics: []metaData.TopicName{ + {Name: "foo"}, + }, + AllowAutoTopicCreation: true, + IncludeClusterAuthorizedOperations: false, + IncludeTopicAuthorizedOperations: false, + }) + + kafkatest.TestRequest(t, 9, &metaData.Request{ + Topics: []metaData.TopicName{ + {Name: "foo"}, + }, + AllowAutoTopicCreation: true, + IncludeClusterAuthorizedOperations: false, + IncludeTopicAuthorizedOperations: false, + }) + + b := kafkatest.WriteRequest(t, 9, 123, "me", &metaData.Request{ + Topics: []metaData.TopicName{ + {Name: "foo"}, + }, + AllowAutoTopicCreation: true, + IncludeClusterAuthorizedOperations: false, + IncludeTopicAuthorizedOperations: false, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(23)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.Metadata)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(9)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(1)) // AllowAutoTopicCreation + _ = binary.Write(expected, binary.BigEndian, int8(0)) // IncludeClusterAuthorizedOperations + _ = binary.Write(expected, binary.BigEndian, int8(0)) // IncludeTopicAuthorizedOperations + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 8, &metaData.Response{ + ThrottleTimeMs: 123, + Brokers: []metaData.ResponseBroker{ + { + NodeId: 1, + Host: "localhost", + Port: 9092, + Rack: "", + }, + }, + ClusterId: 
"foo", + ControllerId: 1, + Topics: []metaData.ResponseTopic{ + { + ErrorCode: 0, + Name: "bar", + IsInternal: false, + Partitions: []metaData.ResponsePartition{ + { + ErrorCode: 0, + PartitionIndex: 1, + LeaderId: 0, + LeaderEpoch: 0, + ReplicaNodes: []int32{0, 1}, + IsrNodes: []int32{}, + OfflineReplicas: []int32{}, + }, + }, + TopicAuthorizedOperations: 0, + }, + }, + ClusterAuthorizedOperations: 0, + }) + + kafkatest.TestResponse(t, 9, &metaData.Response{ + ThrottleTimeMs: 123, + Brokers: []metaData.ResponseBroker{ + { + NodeId: 1, + Host: "localhost", + Port: 9092, + Rack: "", + }, + }, + ClusterId: "foo", + ControllerId: 1, + Topics: []metaData.ResponseTopic{ + { + ErrorCode: 0, + Name: "bar", + IsInternal: false, + Partitions: []metaData.ResponsePartition{ + { + ErrorCode: 0, + PartitionIndex: 1, + LeaderId: 0, + LeaderEpoch: 0, + ReplicaNodes: []int32{0, 1}, + IsrNodes: []int32{}, + OfflineReplicas: []int32{}, + }, + }, + TopicAuthorizedOperations: 0, + }, + }, + ClusterAuthorizedOperations: 0, + }) + + b := kafkatest.WriteResponse(t, 9, 123, &metaData.Response{ + ThrottleTimeMs: 123, + Brokers: []metaData.ResponseBroker{ + { + NodeId: 1, + Host: "localhost", + Port: 9092, + Rack: "", + }, + }, + ClusterId: "foo", + ControllerId: 1, + Topics: []metaData.ResponseTopic{ + { + ErrorCode: 0, + Name: "bar", + IsInternal: false, + Partitions: []metaData.ResponsePartition{ + { + ErrorCode: 0, + PartitionIndex: 1, + LeaderId: 0, + LeaderEpoch: 0, + ReplicaNodes: []int32{0, 1}, + IsrNodes: []int32{}, + OfflineReplicas: []int32{}, + }, + }, + TopicAuthorizedOperations: 0, + }, + }, + ClusterAuthorizedOperations: 0, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(83)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Brokers length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // NodeId + _ = binary.Write(expected, binary.BigEndian, int8(10)) // Host length + _ = binary.Write(expected, binary.BigEndian, []byte("localhost")) // Host + _ = binary.Write(expected, binary.BigEndian, int32(9092)) // Port + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Rack length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Brokers tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(4)) // ClusterId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // ClusterId + _ = binary.Write(expected, binary.BigEndian, int32(1)) // ControllerId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(0)) // IsInternal + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int32(1)) // PartitionIndex + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LeaderId + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int8(3)) // ReplicaNodes length + _ = binary.Write(expected, 
binary.BigEndian, int32(0)) // ReplicaNode 0 + _ = binary.Write(expected, binary.BigEndian, int32(1)) // ReplicaNode 1 + _ = binary.Write(expected, binary.BigEndian, int8(1)) // IsrNodes length + _ = binary.Write(expected, binary.BigEndian, int8(1)) // OfflineReplicas length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int32(0)) // TopicAuthorizedOperations + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int32(0)) // ClusterAuthorizedOperations + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/offset/offsets_test.go b/kafka/offset/offsets_test.go index 0805d9232..5b608ed73 100644 --- a/kafka/offset/offsets_test.go +++ b/kafka/offset/offsets_test.go @@ -1,9 +1,14 @@ package offset_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/offset" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,156 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(8), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 5, &offset.Request{ + ReplicaId: 1, + IsolationLevel: 0, + Topics: []offset.RequestTopic{ + { + Name: "foo", + Partitions: []offset.RequestPartition{ + { + Index: 1, + LeaderEpoch: 0, + Timestamp: 1657010762684, + }, + }, + }, + }, + }) + + kafkatest.TestRequest(t, 8, &offset.Request{ + ReplicaId: 1, + IsolationLevel: 0, + Topics: []offset.RequestTopic{ + { + Name: "foo", + Partitions: []offset.RequestPartition{ + { + Index: 1, + LeaderEpoch: 0, + Timestamp: 1657010762684, + }, + }, + }, + }, + }) + + b := kafkatest.WriteRequest(t, 8, 123, "me", &offset.Request{ + ReplicaId: 1, + IsolationLevel: 0, + Topics: []offset.RequestTopic{ + { + Name: "foo", + Partitions: []offset.RequestPartition{ + { + Index: 1, + LeaderEpoch: 0, + Timestamp: 1657010762684, + }, + }, + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(43)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.Offset)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(8)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(1)) // ReplicaId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // IsolationLevel + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Index + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // Timestamp + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = 
binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 5, &offset.Response{ + ThrottleTimeMs: 0, + Topics: []offset.ResponseTopic{ + { + Name: "foo", + Partitions: []offset.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + Timestamp: 1657010762684, + Offset: 0, + LeaderEpoch: 0, + }, + }, + }, + }, + }) + + kafkatest.TestResponse(t, 8, &offset.Response{ + ThrottleTimeMs: 0, + Topics: []offset.ResponseTopic{ + { + Name: "foo", + Partitions: []offset.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + Timestamp: 1657010762684, + Offset: 0, + LeaderEpoch: 0, + }, + }, + }, + }, + }) + + b := kafkatest.WriteResponse(t, 8, 123, &offset.Response{ + ThrottleTimeMs: 123, + Topics: []offset.ResponseTopic{ + { + Name: "foo", + Partitions: []offset.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + Timestamp: 1657010762684, + Offset: 0, + LeaderEpoch: 0, + }, + }, + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(44)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Index + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // Timestamp + _ = binary.Write(expected, binary.BigEndian, int64(0)) // Offset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partition tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topic tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/offsetCommit/offsetCommit.go b/kafka/offsetCommit/offsetCommit.go index cada320fe..682d332b8 100644 --- a/kafka/offsetCommit/offsetCommit.go +++ b/kafka/offsetCommit/offsetCommit.go @@ -1,7 +1,6 @@ package offsetCommit import ( - "math" "mokapi/kafka" ) @@ -10,44 +9,53 @@ func init() { kafka.ApiReg{ ApiKey: kafka.OffsetCommit, MinVersion: 0, - MaxVersion: 2}, + MaxVersion: 9}, &Request{}, &Response{}, - math.MaxInt16, - math.MaxInt16, + 8, + 8, ) } type Request struct { - GroupId string `kafka:""` - GenerationId int32 `kafka:"min=1"` - MemberId string `kafka:"min=1"` - RetentionTime int64 `kafka:"min=2"` - Topics []Topic `kafka:""` + GroupId string `kafka:"compact=8"` + GenerationId int32 `kafka:"min=1"` + MemberId string `kafka:"min=1,compact=8"` + GroupInstanceId string `kafka:"min=7,compact=8,nullable"` + RetentionTime int64 `kafka:"min=2,max=4"` + Topics []Topic `kafka:"compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } type Topic struct { - Name string `kafka:""` - Partitions []Partition `kafka:""` + Name string `kafka:"compact=8"` + Partitions []Partition `kafka:"compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } type Partition struct { - Index int32 `kafka:""` - Offset int64 `kafka:""` - 
Timestamp int64 `kafka:"min=1,max=1"` - Metadata string `kafka:"nullable"` + Index int32 `kafka:""` + Offset int64 `kafka:""` + LeaderEpoch int32 `kafka:"min=6"` + Timestamp int64 `kafka:"min=1,max=1"` + Metadata string `kafka:"nullable,compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } type Response struct { - Topics []ResponseTopic `kafka:""` + ThrottleTimeMs int32 `kafka:"min=3"` + Topics []ResponseTopic `kafka:"compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } type ResponseTopic struct { - Name string `kafka:""` - Partitions []ResponsePartition `kafka:""` + Name string `kafka:"compact=8"` + Partitions []ResponsePartition `kafka:"compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } type ResponsePartition struct { - Index int32 `kafka:""` - ErrorCode kafka.ErrorCode `kafka:""` + Index int32 `kafka:""` + ErrorCode kafka.ErrorCode `kafka:""` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` } diff --git a/kafka/offsetCommit/offsetCommit_test.go b/kafka/offsetCommit/offsetCommit_test.go index 0bef1041d..bcfa2d970 100644 --- a/kafka/offsetCommit/offsetCommit_test.go +++ b/kafka/offsetCommit/offsetCommit_test.go @@ -1,13 +1,174 @@ package offsetCommit_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/offsetCommit" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { reg := kafka.ApiTypes[kafka.OffsetCommit] require.Equal(t, int16(0), reg.MinVersion) - require.Equal(t, int16(2), reg.MaxVersion) + require.Equal(t, int16(9), reg.MaxVersion) +} + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 7, &offsetCommit.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + Topics: []offsetCommit.Topic{ + { + Name: "bar", + Partitions: []offsetCommit.Partition{ + { + Index: 1, + Offset: 12, + LeaderEpoch: 0, + Metadata: "metadata", + }, + }, + }, + }, + }) + + kafkatest.TestRequest(t, 9, &offsetCommit.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + Topics: []offsetCommit.Topic{ + { + Name: "bar", + Partitions: []offsetCommit.Partition{ + { + Index: 1, + Offset: 12, + LeaderEpoch: 0, + Metadata: "metadata", + }, + }, + }, + }, + }) + + b := kafkatest.WriteRequest(t, 9, 123, "me", &offsetCommit.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + Topics: []offsetCommit.Topic{ + { + Name: "bar", + Partitions: []offsetCommit.Partition{ + { + Index: 1, + Offset: 12, + LeaderEpoch: 0, + Metadata: "metadata", + }, + }, + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(61)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.OffsetCommit)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(9)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int32(1)) // GenerationId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId 
length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // GroupInstanceId length + _ = binary.Write(expected, binary.BigEndian, []byte("g1")) // GroupInstanceId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partition Index + _ = binary.Write(expected, binary.BigEndian, int64(12)) // Offset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // LeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int8(9)) // Metadata length + _ = binary.Write(expected, binary.BigEndian, []byte("metadata")) // Metadata + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 7, &offsetCommit.Response{ + ThrottleTimeMs: 123, + Topics: []offsetCommit.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetCommit.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + }, + }, + }, + }, + }) + + kafkatest.TestResponse(t, 9, &offsetCommit.Response{ + ThrottleTimeMs: 123, + Topics: []offsetCommit.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetCommit.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + }, + }, + }, + }, + }) + + b := kafkatest.WriteResponse(t, 9, 123, &offsetCommit.Response{ + ThrottleTimeMs: 123, + Topics: []offsetCommit.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetCommit.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + }, + }, + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(24)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partitions Index + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) } diff --git a/kafka/offsetFetch/offsetFetch.go b/kafka/offsetFetch/offsetFetch.go index aaf4eb733..7e721f8eb 100644 --- a/kafka/offsetFetch/offsetFetch.go +++ b/kafka/offsetFetch/offsetFetch.go @@ -9,17 +9,18 @@ func init() { kafka.ApiReg{ ApiKey: kafka.OffsetFetch, MinVersion: 0, - MaxVersion: 7}, + MaxVersion: 9}, &Request{}, &Response{}, 6, - 7, + 6, ) } type Request struct { - GroupId string `kafka:"compact=6"` - Topics []RequestTopic `kafka:"compact=6"` + GroupId string `kafka:"compact=6,max=7"` + Topics []RequestTopic 
`kafka:"compact=6,max=7"` + Groups []RequestGroup `kafka:"min=8,compact=8"` RequireStable bool `kafka:"min=7"` TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` } @@ -30,9 +31,18 @@ type RequestTopic struct { TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` } +type RequestGroup struct { + GroupId string `kafka:"min=8,compact=8"` + MemberId string `kafka:"min=9,compact=9,nullable"` + MemberEpoch int32 `kafka:"min=9"` + Topics []RequestTopic `kafka:"min=8,compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=8"` +} + type Response struct { ThrottleTimeMs int32 `kafka:"min=3"` - Topics []ResponseTopic `kafka:"compact=6"` + Topics []ResponseTopic `kafka:"compact=6,max=7"` + Groups []ResponseGroup `kafka:"min=8,compact=8"` ErrorCode kafka.ErrorCode `kafka:"min=2"` TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` } @@ -43,6 +53,12 @@ type ResponseTopic struct { TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` } +type ResponseGroup struct { + GroupId string `kafka:"min=8,compact=8"` + Topics []ResponseTopic `kafka:"min=8,compact=8"` + TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=6"` +} + type Partition struct { Index int32 `kafka:""` CommittedOffset int64 `kafka:""` diff --git a/kafka/offsetFetch/offsetFetch_test.go b/kafka/offsetFetch/offsetFetch_test.go index f650a210e..1758c0bf4 100644 --- a/kafka/offsetFetch/offsetFetch_test.go +++ b/kafka/offsetFetch/offsetFetch_test.go @@ -1,13 +1,251 @@ package offsetFetch_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/offsetFetch" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { reg := kafka.ApiTypes[kafka.OffsetFetch] require.Equal(t, int16(0), reg.MinVersion) - require.Equal(t, int16(7), reg.MaxVersion) + require.Equal(t, int16(9), reg.MaxVersion) +} + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 5, &offsetFetch.Request{ + GroupId: "foo", + Topics: []offsetFetch.RequestTopic{ + { + Name: "foo", + PartitionIndexes: []int32{0, 1}, + }, + }, + RequireStable: false, + }) + + kafkatest.TestRequest(t, 7, &offsetFetch.Request{ + GroupId: "foo", + Topics: []offsetFetch.RequestTopic{ + { + Name: "foo", + PartitionIndexes: []int32{0, 1}, + }, + }, + RequireStable: false, + }) + + b := kafkatest.WriteRequest(t, 7, 123, "me", &offsetFetch.Request{ + GroupId: "foo", + Topics: []offsetFetch.RequestTopic{ + { + Name: "bar", + PartitionIndexes: []int32{0, 1}, + }, + }, + RequireStable: false, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(34)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.OffsetFetch)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(7)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Name + _ = binary.Write(expected, binary.BigEndian, 
int8(3)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // Partition 0 + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partition 1 + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // RequireStable + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) + + b = kafkatest.WriteRequest(t, 9, 123, "me", &offsetFetch.Request{ + Groups: []offsetFetch.RequestGroup{ + { + GroupId: "foo", + MemberId: "m1", + MemberEpoch: 0, + Topics: []offsetFetch.RequestTopic{ + { + Name: "bar", + PartitionIndexes: []int32{0, 1}, + }, + }, + TagFields: nil, + }, + }, + RequireStable: false, + }) + expected = new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(43)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.OffsetFetch)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(9)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Groups length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int32(0)) // MemberEpoch + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(3)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(0)) // Partition 0 + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partition 1 + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Groups tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // RequireStable + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 5, &offsetFetch.Response{ + ThrottleTimeMs: 123, + Topics: []offsetFetch.ResponseTopic{ + { + Name: "", + Partitions: []offsetFetch.Partition{ + { + Index: 1, + CommittedOffset: 12, + CommittedLeaderEpoch: 0, + Metadata: "metadata", + ErrorCode: 0, + }, + }, + }, + }, + ErrorCode: 0, + }) + + kafkatest.TestResponse(t, 7, &offsetFetch.Response{ + ThrottleTimeMs: 123, + Topics: []offsetFetch.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetFetch.Partition{ + { + Index: 1, + CommittedOffset: 12, + CommittedLeaderEpoch: 0, + Metadata: "metadata", + ErrorCode: 0, + }, + }, + }, + }, + ErrorCode: 0, + }) + + b := kafkatest.WriteResponse(t, 7, 123, &offsetFetch.Response{ + ThrottleTimeMs: 123, + Topics: []offsetFetch.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetFetch.Partition{ + { + Index: 1, + CommittedOffset: 12, + CommittedLeaderEpoch: 0, + Metadata: "metadata", + ErrorCode: 0, + }, + }, + }, + }, + ErrorCode: 0, 
+ }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(47)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partition Index + _ = binary.Write(expected, binary.BigEndian, int64(12)) // CommittedOffset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // CommittedLeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int8(9)) // Metadata length + _ = binary.Write(expected, binary.BigEndian, []byte("metadata")) // Metadata + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) + + b = kafkatest.WriteResponse(t, 9, 123, &offsetFetch.Response{ + ThrottleTimeMs: 123, + Groups: []offsetFetch.ResponseGroup{ + { + GroupId: "foo", + Topics: []offsetFetch.ResponseTopic{ + { + Name: "foo", + Partitions: []offsetFetch.Partition{ + { + Index: 1, + CommittedOffset: 12, + CommittedLeaderEpoch: 0, + Metadata: "metadata", + ErrorCode: 0, + }, + }, + }, + }, + }, + }, + ErrorCode: 0, + }) + expected = new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(53)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Groups length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Partition Index + _ = binary.Write(expected, binary.BigEndian, int64(12)) // CommittedOffset + _ = binary.Write(expected, binary.BigEndian, int32(0)) // CommittedLeaderEpoch + _ = binary.Write(expected, binary.BigEndian, int8(9)) // Metadata length + _ = binary.Write(expected, binary.BigEndian, []byte("metadata")) // Metadata + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Groups tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, 
expected.Bytes(), b) } diff --git a/kafka/produce/produce_test.go b/kafka/produce/produce_test.go index 76918f359..e9378ea04 100644 --- a/kafka/produce/produce_test.go +++ b/kafka/produce/produce_test.go @@ -1,9 +1,14 @@ package produce_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/produce" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,219 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(9), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 8, &produce.Request{ + TransactionalId: "", + Acks: 0, + TimeoutMs: 12, + Topics: []produce.RequestTopic{ + { + Name: "foo", + Partitions: []produce.RequestPartition{ + { + Index: 0, + Record: kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + + kafkatest.TestRequest(t, 9, &produce.Request{ + TransactionalId: "", + Acks: 0, + TimeoutMs: 12, + Topics: []produce.RequestTopic{ + { + Name: "foo", + Partitions: []produce.RequestPartition{ + { + Index: 1, + Record: kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + + b := kafkatest.WriteRequest(t, 9, 123, "me", &produce.Request{ + TransactionalId: "", + Acks: 0, + TimeoutMs: 12, + Topics: []produce.RequestTopic{ + { + Name: "foo", + Partitions: []produce.RequestPartition{ + { + Index: 1, + Record: kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Offset: 0, + Time: kafka.ToTime(1657010762684), + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + Headers: nil, + }, + }, + }, + }, + }, + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(108)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.Produce)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(9)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(0)) // TransactionalId length + _ = binary.Write(expected, binary.BigEndian, int16(0)) // Acks + _ = binary.Write(expected, binary.BigEndian, int32(12)) // TimeoutMs + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Index + + _ = binary.Write(expected, binary.BigEndian, int8(75)) // Records length + _ = binary.Write(expected, binary.BigEndian, int64(0)) // base offset + _ = binary.Write(expected, binary.BigEndian, int32(62)) // message size + _ = binary.Write(expected, binary.BigEndian, int32(0)) // leader epoch + _ = binary.Write(expected, binary.BigEndian, int8(2)) // magic + _ = 
binary.Write(expected, binary.BigEndian, []byte{119, 89, 114, 22}) // crc32 + _ = binary.Write(expected, binary.BigEndian, int16(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int32(0)) // last offset delta + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // first timestamp + _ = binary.Write(expected, binary.BigEndian, int64(1657010762684)) // max timestamp + + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255, 255, 255, 255, 255}) // producer id + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255}) // producer epoch + _ = binary.Write(expected, binary.BigEndian, []byte{255, 255, 255, 255}) // base sequence + + _ = binary.Write(expected, binary.BigEndian, int32(1)) // number of records + _ = binary.Write(expected, binary.BigEndian, int8(24)) // record length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // attributes + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta timestamp + _ = binary.Write(expected, binary.BigEndian, int8(0)) // delta offset + _ = binary.Write(expected, binary.BigEndian, int8(6)) // key length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // key + _ = binary.Write(expected, binary.BigEndian, int8(6)) // value length + _ = binary.Write(expected, binary.BigEndian, []byte("bar")) // value + _ = binary.Write(expected, binary.BigEndian, int8(0)) // header length + + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 8, &produce.Response{ + Topics: []produce.ResponseTopic{ + { + Name: "foo", + Partitions: []produce.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + BaseOffset: 1, + LogAppendTime: 0, + LogStartOffset: 0, + RecordErrors: nil, + ErrorMessage: "", + }, + }, + }, + }, + ThrottleTimeMs: 0, + }) + + kafkatest.TestResponse(t, 9, &produce.Response{ + Topics: []produce.ResponseTopic{ + { + Name: "foo", + Partitions: []produce.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + BaseOffset: 1, + LogAppendTime: 0, + LogStartOffset: 0, + RecordErrors: nil, + ErrorMessage: "", + }, + }, + }, + }, + ThrottleTimeMs: 0, + }) + + b := kafkatest.WriteResponse(t, 9, 123, &produce.Response{ + Topics: []produce.ResponseTopic{ + { + Name: "foo", + Partitions: []produce.ResponsePartition{ + { + Index: 1, + ErrorCode: 0, + BaseOffset: 1, + LogAppendTime: 0, + LogStartOffset: 0, + RecordErrors: nil, + ErrorMessage: "", + }, + }, + }, + }, + ThrottleTimeMs: 0, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(50)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Topics length + _ = binary.Write(expected, binary.BigEndian, int8(4)) // Name length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // Name + _ = binary.Write(expected, binary.BigEndian, int8(2)) // Partitions length + _ = binary.Write(expected, binary.BigEndian, int32(1)) // Index + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int64(1)) // BaseOffset + _ = binary.Write(expected, binary.BigEndian, int64(0)) // LogAppendTime + _
= binary.Write(expected, binary.BigEndian, int64(0)) // LogStartOffset + _ = binary.Write(expected, binary.BigEndian, int8(1)) // RecordErrors length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // ErrorMessage length + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Partitions tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // Topics tag buffer + _ = binary.Write(expected, binary.BigEndian, int32(0)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/kafka/protocol.go b/kafka/protocol.go index ce03a8cea..280ffbfbb 100644 --- a/kafka/protocol.go +++ b/kafka/protocol.go @@ -79,9 +79,11 @@ type kafkaTag struct { minVersion int16 maxVersion int16 // version to switch to compact mode (inclusive) + // When a struct is marked as “flexible”, all fields of type string, array, or bytes use the “compact” encoding. + // Since there are exceptions to that rule (e.g. ApiVersion), this field carries the redundant information explicitly. compact int16 protoType string - nullable bool + nullable int16 } func (t kafkaTag) isValid(version int16) bool { @@ -185,8 +187,10 @@ func getTag(f reflect.StructField) kafkaTag { t.protoType = kv[1] case "nullable": if len(kv) == 1 { - t.nullable = true - } // else: parse bool value + t.nullable = 0 + } else if i, err := strconv.Atoi(kv[1]); err == nil { + t.nullable = int16(i) + } } } return t diff --git a/kafka/record_test.go b/kafka/record_test.go index 354397fe9..ba6118e7e 100644 --- a/kafka/record_test.go +++ b/kafka/record_test.go @@ -3,11 +3,12 @@ package kafka import ( "bufio" "bytes" - "github.com/stretchr/testify/require" "mokapi/buffer" "strings" "testing" "time" + + "github.com/stretchr/testify/require" ) func bytesToString(bytes Bytes) string { diff --git a/kafka/response.go b/kafka/response.go index 0f5371e33..ca0220a60 100644 --- a/kafka/response.go +++ b/kafka/response.go @@ -31,7 +31,7 @@ func (r *Response) Read(reader io.Reader) error { if r.Header.Size == 0 { return io.EOF } - d.leftSize = int(r.Header.Size) - 4 + d.leftSize = int(r.Header.Size) correlationId := d.ReadInt32() if correlationId != r.Header.CorrelationId { diff --git a/kafka/server.go b/kafka/server.go index baf61d78b..960c47a69 100644 --- a/kafka/server.go +++ b/kafka/server.go @@ -3,7 +3,6 @@ package kafka import ( "context" "errors" - log "github.com/sirupsen/logrus" "io" "mokapi/safe" "net" @@ -11,6 +10,8 @@ import ( "sync" "syscall" "time" + + log "github.com/sirupsen/logrus" ) var ErrServerClosed = errors.New("kafka: Server closed") @@ -96,11 +97,11 @@ func (s *Server) Close() { } if s.listener != nil { - s.listener.Close() + _ = s.listener.Close() } for conn, ctx := range s.activeConn { ctx.Done() - conn.Close() + _ = conn.Close() delete(s.activeConn, conn) } } @@ -133,7 +134,9 @@ func (s *Server) serve(conn net.Conn, ctx context.Context) { continue } - go func() { + // The server guarantees that on a single TCP connection, requests will be processed in the order they are + // sent and responses will return in that order as well.
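This hunk drops the `go` keyword (`go func()` becomes `func()`), so each request on a connection is handled synchronously instead of on its own goroutine. A minimal sketch of the resulting pattern, with illustrative `read`/`handle` callbacks rather than mokapi's actual types:

```go
package sketch

import "net"

// request stands in for a decoded Kafka request; all names here are
// illustrative assumptions, not mokapi's actual API.
type request struct{}

// serveConn shows the sequential pattern the hunk switches to: because
// handle runs synchronously, the next request is not read until the current
// response has been written, which preserves Kafka's per-connection
// ordering guarantee.
func serveConn(conn net.Conn, read func(net.Conn) (*request, error), handle func(net.Conn, *request)) {
	for {
		req, err := read(conn)
		if err != nil {
			return // connection closed or read failed
		}
		handle(conn, req) // no `go` here: responses leave in request order
	}
}
```

The trade-off is throughput for correctness: a slow handler blocks the connection, but Kafka clients may pipeline several requests on one connection and rely on responses coming back in order.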
+ func() { defer func() { err := recover() if err != nil { @@ -155,7 +158,7 @@ func (s *Server) closeConn(conn net.Conn) { return } ctx.Done() - conn.Close() + _ = conn.Close() delete(s.activeConn, conn) } diff --git a/kafka/syncGroup/syncGroup.go b/kafka/syncGroup/syncGroup.go index d3e9049f9..e9df53a29 100644 --- a/kafka/syncGroup/syncGroup.go +++ b/kafka/syncGroup/syncGroup.go @@ -1,7 +1,6 @@ package syncGroup import ( - "math" "mokapi/kafka" ) @@ -14,7 +13,7 @@ func init() { &Request{}, &Response{}, 4, - math.MaxInt16, + 4, ) } @@ -25,7 +24,7 @@ type Request struct { GroupInstanceId string `kafka:"min=3,compact=4,nullable"` ProtocolType string `kafka:"min=5,compact=5,nullable"` ProtocolName string `kafka:"min=5,compact=5,nullable"` - GroupAssignments []GroupAssignment `kafka:""` + GroupAssignments []GroupAssignment `kafka:"compact=4"` TagFields map[int64]string `kafka:"type=TAG_BUFFER,min=4"` } diff --git a/kafka/syncGroup/syncGroup_test.go b/kafka/syncGroup/syncGroup_test.go index a90ae9b50..54560bba1 100644 --- a/kafka/syncGroup/syncGroup_test.go +++ b/kafka/syncGroup/syncGroup_test.go @@ -1,9 +1,14 @@ package syncGroup_test import ( - "github.com/stretchr/testify/require" + "bytes" + "encoding/binary" "mokapi/kafka" + "mokapi/kafka/kafkatest" + "mokapi/kafka/syncGroup" "testing" + + "github.com/stretchr/testify/require" ) func TestInit(t *testing.T) { @@ -11,3 +16,117 @@ func TestInit(t *testing.T) { require.Equal(t, int16(0), reg.MinVersion) require.Equal(t, int16(5), reg.MaxVersion) } + +func TestRequest(t *testing.T) { + kafkatest.TestRequest(t, 3, &syncGroup.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + GroupAssignments: []syncGroup.GroupAssignment{ + { + MemberId: "m2", + Assignment: []byte("assign"), + }, + }, + }) + + kafkatest.TestRequest(t, 5, &syncGroup.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + ProtocolType: "proto", + ProtocolName: "p1", + GroupAssignments: []syncGroup.GroupAssignment{ + { + MemberId: "m2", + Assignment: []byte("assign"), + }, + }, + }) + + b := kafkatest.WriteRequest(t, 5, 123, "me", &syncGroup.Request{ + GroupId: "foo", + GenerationId: 1, + MemberId: "m1", + GroupInstanceId: "g1", + ProtocolType: "proto", + ProtocolName: "p1", + GroupAssignments: []syncGroup.GroupAssignment{ + { + MemberId: "m2", + Assignment: []byte("assign"), + }, + }, + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(49)) // length + _ = binary.Write(expected, binary.BigEndian, int16(kafka.SyncGroup)) // ApiKey + _ = binary.Write(expected, binary.BigEndian, int16(5)) // ApiVersion + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int16(2)) // ClientId length + _ = binary.Write(expected, binary.BigEndian, []byte("me")) // ClientId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int8(4)) // GroupId length + _ = binary.Write(expected, binary.BigEndian, []byte("foo")) // GroupId + _ = binary.Write(expected, binary.BigEndian, int32(1)) // GenerationId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m1")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(3)) // GroupInstanceId length + _ = binary.Write(expected, binary.BigEndian, []byte("g1")) // GroupInstanceId + _ = binary.Write(expected, 
binary.BigEndian, int8(6)) // ProtocolType length + _ = binary.Write(expected, binary.BigEndian, []byte("proto")) // ProtocolType + _ = binary.Write(expected, binary.BigEndian, int8(3)) // ProtocolName length + _ = binary.Write(expected, binary.BigEndian, []byte("p1")) // ProtocolName + _ = binary.Write(expected, binary.BigEndian, int8(2)) // GroupAssignments length + _ = binary.Write(expected, binary.BigEndian, int8(3)) // MemberId length + _ = binary.Write(expected, binary.BigEndian, []byte("m2")) // MemberId + _ = binary.Write(expected, binary.BigEndian, int8(7)) // Assignment length + _ = binary.Write(expected, binary.BigEndian, []byte("assign")) // Assignment + _ = binary.Write(expected, binary.BigEndian, int8(0)) // GroupAssignments tag buffer + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} + +func TestResponse(t *testing.T) { + kafkatest.TestResponse(t, 3, &syncGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + Assignment: []byte("assign"), + }) + + kafkatest.TestResponse(t, 5, &syncGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + ProtocolType: "proto", + ProtocolName: "p1", + Assignment: []byte("assign"), + }) + + b := kafkatest.WriteResponse(t, 5, 123, &syncGroup.Response{ + ThrottleTimeMs: 123, + ErrorCode: 0, + ProtocolType: "proto", + ProtocolName: "p1", + Assignment: []byte("assign"), + }) + expected := new(bytes.Buffer) + // header + _ = binary.Write(expected, binary.BigEndian, int32(28)) // length + _ = binary.Write(expected, binary.BigEndian, int32(123)) // correlationId + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + // message + _ = binary.Write(expected, binary.BigEndian, int32(123)) // ThrottleTimeMs + _ = binary.Write(expected, binary.BigEndian, int16(0)) // ErrorCode + _ = binary.Write(expected, binary.BigEndian, int8(6)) // ProtocolType length + _ = binary.Write(expected, binary.BigEndian, []byte("proto")) // ProtocolType + _ = binary.Write(expected, binary.BigEndian, int8(3)) // ProtocolName length + _ = binary.Write(expected, binary.BigEndian, []byte("p1")) // ProtocolName + _ = binary.Write(expected, binary.BigEndian, int8(7)) // Assignment length + _ = binary.Write(expected, binary.BigEndian, []byte("assign")) // Assignment + _ = binary.Write(expected, binary.BigEndian, int8(0)) // tag buffer + require.Equal(t, expected.Bytes(), b) +} diff --git a/npm/types/http.d.ts b/npm/types/http.d.ts index f6451261d..57ffbc1ff 100644 --- a/npm/types/http.d.ts +++ b/npm/types/http.d.ts @@ -105,6 +105,14 @@ export interface Args { * @default 5 **/ maxRedirects: number + /** + * Maximum time to wait for the request to complete. Default + * timeout is 60 seconds ("60s").
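As this doc comment describes, `timeout` accepts either a duration string (e.g. "60s", "5m") or a bare number of milliseconds. A hedged Go sketch of how such a value could be normalized; the helper name and exact parsing rules are assumptions, not mokapi's implementation:

```go
package sketch

import (
	"fmt"
	"time"
)

// normalizeTimeout converts a user-supplied timeout into a time.Duration.
// Hypothetical helper: strings are parsed as Go durations ("60s", "5m"),
// bare numbers are interpreted as milliseconds.
func normalizeTimeout(v interface{}) (time.Duration, error) {
	switch t := v.(type) {
	case string:
		return time.ParseDuration(t)
	case float64: // numbers coming from JavaScript usually arrive as float64
		return time.Duration(t * float64(time.Millisecond)), nil
	case int64:
		return time.Duration(t) * time.Millisecond, nil
	default:
		return 0, fmt.Errorf("unsupported timeout type %T", v)
	}
}
```

Under that rule, `get(url, { timeout: '5m' })` and `get(url, { timeout: 300000 })` would request the same five-minute timeout.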
The type can also be a number, in which + * case Mokapi interprets it as milliseconds + * @example + * const res = get(url, { timeout: '5m' }) + */ + timeout: number | string } /** diff --git a/pkg/cli/bind.go b/pkg/cli/bind.go index 8d5dbbac5..505dfffbd 100644 --- a/pkg/cli/bind.go +++ b/pkg/cli/bind.go @@ -188,9 +188,6 @@ func (f *flagConfigBinder) setMap(ctx *context) error { m.Set(reflect.MakeMap(ctx.element.Type())) } - i := m.Interface() - _ = i - var key reflect.Value if len(ctx.paths) >= 1 { key = reflect.ValueOf(ctx.paths[0]) @@ -358,6 +355,8 @@ func (f *flagConfigBinder) setJson(element reflect.Value, i interface{}) error { case int64, string, bool: element.Set(reflect.ValueOf(i)) case []interface{}: + // reset array + element.Set(reflect.MakeSlice(element.Type(), 0, len(o))) for _, item := range o { ptr := reflect.New(element.Type().Elem()) err := f.setJson(ptr.Elem(), item) @@ -475,8 +474,13 @@ func (c *context) Next(element reflect.Value) *context { func getSplitCharsForList(s string) []rune { s = strings.Trim(s, "") - if strings.Contains(s, ",") && strings.Contains(s, " ") { + if strings.Contains(s, ",") && strings.Contains(s, " ") || isJsonValue(s) { return []rune{' '} } return []rune{' ', ','} } + +func isJsonValue(s string) bool { + return (strings.HasPrefix(s, "[") && strings.HasSuffix(s, "]")) || + (strings.HasPrefix(s, "{") && strings.HasSuffix(s, "}")) +} diff --git a/pkg/cli/command.go b/pkg/cli/command.go index dcf1a0b8c..1c558ad8c 100644 --- a/pkg/cli/command.go +++ b/pkg/cli/command.go @@ -2,24 +2,24 @@ package cli import ( "fmt" - "github.com/pkg/errors" "os" "reflect" "strings" ) type Command struct { - Name string - Short string - Long string - Example string - Config any - Commands []*Command - Run func(cmd *Command, args []string) error - - envPrefix string - args []string - flags *FlagSet + Name string + Short string + Long string + Example string + Config any + Commands []*Command + Run func(cmd *Command, args []string) error + EnvPrefix string + + configFile string + args []string + flags *FlagSet } func (c *Command) Execute() error { @@ -29,21 +29,21 @@ func (c *Command) Execute() error { } cmd := c - envPrefix := c.envPrefix + envPrefix := c.EnvPrefix if len(args) > 0 { for _, child := range c.Commands { if child.Name == args[0] { cmd = child args = args[1:] - if cmd.envPrefix != "" { - envPrefix = cmd.envPrefix + if cmd.EnvPrefix != "" { + envPrefix = cmd.EnvPrefix } } } } - m, err := parseFlags(args, envPrefix) + m, err := parseFlags(args, envPrefix, c.Flags().IsValidFlag) if err != nil { return err } @@ -81,16 +81,28 @@ func (c *Command) Execute() error { } } - for k, v := range cmd.flags.flags { - if _, ok := m[k]; !ok && v.DefaultValue != "" { - m[k] = []string{v.DefaultValue} + defaultValues := map[string][]string{} + for _, f := range cmd.flags.flags { + if f.DefaultValue == "" { + continue } + if _, ok := m[f.Name]; ok { + continue + } + if _, ok := m[f.Shorthand]; ok { + continue + } + defaultValues[f.Name] = []string{f.DefaultValue} } if cmd.Config != nil { - // reset configs, because values or now in flag set + // reset configs, because values are now in the flag set clearConfig(cmd.Config) b := flagConfigBinder{} + err = b.Decode(defaultValues, cmd.Config) + if err != nil { + return fmt.Errorf("failed to bind flags to config: %w", err) + } err = b.Decode(m, cmd.Config) if err != nil { return fmt.Errorf("failed to bind flags to config: %w", err) @@ -115,13 +127,8 @@ func (c *Command) Flags() *FlagSet { return c.flags } -// SetEnvPrefix defines 
prefix of the environment variables to be considered -// With the prefix "mokapi", only environment variables with MOKAPI_ are considered. -func (c *Command) SetEnvPrefix(in string) { - if in != "" { - in = in + "_" - } - c.envPrefix = strings.ToUpper(in) +func (c *Command) SetConfigFile(file string) { + c.configFile = file } func getMapFromConfig(cfg any, flags *FlagSet) (map[string][]string, error) { @@ -170,18 +177,41 @@ func getMapFrom(v reflect.Value, key string, flags *FlagSet) (map[string][]strin } return result, nil case reflect.Slice: - if _, err := flags.GetValue(key); err != nil { - var notFound *FlagNotFound - if errors.As(err, ¬Found) { - return nil, nil + if _, err := flags.GetValue(key); err == nil { + var values []string + for i := 0; i < v.Len(); i++ { + values = append(values, fmt.Sprintf("%v", v.Index(i))) } - return nil, err + return map[string][]string{key: values}, nil } - var values []string + result := map[string][]string{} for i := 0; i < v.Len(); i++ { - values = append(values, fmt.Sprintf("%v", v.Index(i))) + m, err := getMapFrom(v.Index(i), fmt.Sprintf("%s[%v]", key, i), flags) + if err != nil { + return nil, err + } else if m == nil { + continue + } + for k, val := range m { + result[k] = val + } + } + return result, nil + case reflect.Map: + result := map[string][]string{} + for _, k := range v.MapKeys() { + m, err := getMapFrom(v.MapIndex(k), fmt.Sprintf("%s-%v", key, k.Interface()), flags) + if err != nil { + return nil, err + } else if m == nil { + continue + } + for k, val := range m { + result[k] = val + } } - return map[string][]string{key: values}, nil + + return result, nil default: if canBeNil(v) && v.IsNil() { return nil, nil diff --git a/pkg/cli/command_test.go b/pkg/cli/command_test.go index ef8604ce1..f9bfff8ce 100644 --- a/pkg/cli/command_test.go +++ b/pkg/cli/command_test.go @@ -21,7 +21,7 @@ func TestCommand(t *testing.T) { { name: "--help", cmd: func() *Command { - c := &Command{Name: "foo", envPrefix: "Mokapi"} + c := &Command{Name: "foo", EnvPrefix: "Mokapi"} c.Flags().Bool("help", false, "") return c }, @@ -35,7 +35,7 @@ func TestCommand(t *testing.T) { { name: "-h", cmd: func() *Command { - c := &Command{Name: "foo", envPrefix: "Mokapi"} + c := &Command{Name: "foo", EnvPrefix: "Mokapi"} c.Flags().BoolShort("help", "h", false, "") return c }, @@ -49,7 +49,7 @@ func TestCommand(t *testing.T) { { name: "bind to config", cmd: func() *Command { - c := &Command{Config: cfg, envPrefix: "Mokapi"} + c := &Command{Config: cfg, EnvPrefix: "Mokapi"} c.Flags().Bool("flag", false, "") return c }, @@ -63,7 +63,7 @@ func TestCommand(t *testing.T) { { name: "--count", cmd: func() *Command { - c := &Command{Config: cfg, envPrefix: "Mokapi"} + c := &Command{Config: cfg, EnvPrefix: "Mokapi"} c.Flags().Int("count", 12, "") return c }, @@ -76,7 +76,7 @@ func TestCommand(t *testing.T) { { name: "--count default", cmd: func() *Command { - c := &Command{Config: cfg, envPrefix: "Mokapi"} + c := &Command{Config: cfg, EnvPrefix: "Mokapi"} c.Flags().Int("count", 12, "") return c }, @@ -89,7 +89,7 @@ func TestCommand(t *testing.T) { { name: "--skip-prefix", cmd: func() *Command { - c := &Command{Config: cfg, envPrefix: "Mokapi"} + c := &Command{Config: cfg, EnvPrefix: "Mokapi"} c.Flags().StringSlice("skip-prefix", []string{"_"}, "", false) return c }, @@ -103,7 +103,7 @@ func TestCommand(t *testing.T) { { name: "--skip-prefix default", cmd: func() *Command { - c := &Command{Config: cfg, envPrefix: "Mokapi"} + c := &Command{Config: cfg, EnvPrefix: "Mokapi"} 
c.Flags().StringSlice("skip-prefix", []string{"_"}, "", false) return c }, diff --git a/pkg/cli/dynamic.go b/pkg/cli/dynamic.go index 1f9be3098..512d9f0ab 100644 --- a/pkg/cli/dynamic.go +++ b/pkg/cli/dynamic.go @@ -6,7 +6,7 @@ import ( ) var regexIndex = regexp.MustCompile(`\[<.*>]`) -var regexString = regexp.MustCompile(`<.*>`) +var regexKey = regexp.MustCompile(`<.*>`) func (fs *FlagSet) DynamicInt(name string, defaultValue int, usage string) { v := &intFlag{value: defaultValue} @@ -49,7 +49,7 @@ func (fs *FlagSet) DynamicStringSlice(name string, defaultValue []string, usage func convertToPattern(s string) *regexp.Regexp { pattern := regexIndex.ReplaceAllString(s, "\\[[0-9]+]") - pattern = regexString.ReplaceAllString(pattern, "[a-zA-Z]+") + pattern = regexKey.ReplaceAllString(pattern, "[a-zA-Z]+") regex, err := regexp.Compile(fmt.Sprintf("^%s$", pattern)) if err != nil { panic(fmt.Errorf("invalid regex pattern: %s", pattern)) diff --git a/pkg/cli/dynamic_test.go b/pkg/cli/dynamic_test.go new file mode 100644 index 000000000..8452010ce --- /dev/null +++ b/pkg/cli/dynamic_test.go @@ -0,0 +1,65 @@ +package cli_test + +import ( + "mokapi/pkg/cli" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestDynamic(t *testing.T) { + newCmd := func(args []string, cfg any) *cli.Command { + c := &cli.Command{EnvPrefix: "Mokapi_"} + c.SetArgs(args) + c.Config = cfg + c.Run = func(cmd *cli.Command, args []string) error { + return nil + } + return c + } + + testcases := []struct { + name string + test func(t *testing.T) + }{ + { + name: "array index 0", + test: func(t *testing.T) { + s := &struct{ Foo []string }{} + c := newCmd([]string{"--foo[0]", "bar"}, &s) + c.Flags().DynamicString("foo[]", "", "") + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []string{"bar"}, s.Foo) + }, + }, + { + name: "array index 1", + test: func(t *testing.T) { + s := &struct{ Foo []string }{} + c := newCmd([]string{"--foo[1]", "bar"}, &s) + c.Flags().DynamicString("foo[]", "", "") + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []string{"", "bar"}, s.Foo) + }, + }, + { + name: "map", + test: func(t *testing.T) { + s := &struct{ Foo map[string]string }{} + c := newCmd([]string{"--foo-bar", "yuh"}, &s) + c.Flags().DynamicString("foo-", "", "") + err := c.Execute() + require.NoError(t, err) + require.Equal(t, map[string]string{"bar": "yuh"}, s.Foo) + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + tc.test(t) + }) + } +} diff --git a/pkg/cli/file.go b/pkg/cli/file.go index 1cbd25956..b35f8fc64 100644 --- a/pkg/cli/file.go +++ b/pkg/cli/file.go @@ -20,6 +20,10 @@ type ReadFileFS func(path string) ([]byte, error) var readFile ReadFileFS = os.ReadFile +func SetReadFileFS(f ReadFileFS) { + readFile = f +} + func readConfigFileFromFlags(flags map[string][]string, element interface{}) (string, error) { var filename string if len(filename) == 0 { @@ -44,9 +48,9 @@ func readConfigFileFromFlags(flags map[string][]string, element interface{}) (st for _, name := range fileNames { path := filepath.Join(dir, name) if err := readConfigFile(path, element); err == nil { - return filename, nil + return path, nil } else if !os.IsNotExist(err) { - return filename, err + return path, err } } } diff --git a/pkg/cli/flag_bool.go b/pkg/cli/flag_bool.go index c7b54d836..f2c7c2abd 100644 --- a/pkg/cli/flag_bool.go +++ b/pkg/cli/flag_bool.go @@ -38,11 +38,8 @@ func (fs *FlagSet) Bool(name string, defaultValue bool, usage string) { func (fs *FlagSet) BoolShort(name 
string, short string, defaultValue bool, usage string) { v := &boolFlag{value: defaultValue} - f := &Flag{Value: &boolFlag{}, Usage: usage, DefaultValue: v.String()} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } + f := &Flag{Value: v, Name: name, Shorthand: short, Usage: usage, DefaultValue: v.String()} + fs.setFlag(f) } func (fs *FlagSet) GetBool(name string) bool { diff --git a/pkg/cli/flag_file.go b/pkg/cli/flag_file.go new file mode 100644 index 000000000..12ddf5d90 --- /dev/null +++ b/pkg/cli/flag_file.go @@ -0,0 +1,40 @@ +package cli + +import "fmt" + +type fileFlag struct { + value string +} + +func (f *fileFlag) Set(values []string) error { + if len(values) > 0 { + f.value = values[0] + } + return nil +} + +func (f *fileFlag) Value() any { + return f.value +} + +func (f *fileFlag) String() string { + return fmt.Sprintf("%v", f.value) +} + +func (fs *FlagSet) File(name string, defaultValue string, usage string) { + v := &fileFlag{value: defaultValue} + f := &Flag{Name: name, Value: v, Usage: usage, DefaultValue: v.String()} + fs.setFlag(f) +} + +func (fs *FlagSet) GetFile(name string) string { + v, err := fs.GetValue(name) + if err != nil { + panic(err) + } + s, ok := v.(string) + if !ok { + panic(fmt.Sprintf("flag '%s' is not a file", name)) + } + return s +} diff --git a/pkg/cli/flag_file_test.go b/pkg/cli/flag_file_test.go new file mode 100644 index 000000000..fd01b6560 --- /dev/null +++ b/pkg/cli/flag_file_test.go @@ -0,0 +1,227 @@ +package cli_test + +import ( + "fmt" + "io/fs" + "mokapi/pkg/cli" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" ) + +func TestFileDecoder_Decode(t *testing.T) { + newCmd := func(args []string, cfg any) *cli.Command { + c := &cli.Command{EnvPrefix: "Mokapi_"} + c.SetArgs(args) + c.Config = cfg + c.Run = func(cmd *cli.Command, args []string) error { + return nil + } + return c + } + + testcases := []struct { + name string + test func(t *testing.T) + }{ + { + name: "file in folder mokapi in etc", + test: func(t *testing.T) { + s := &struct{ Name string }{} + f := func(path string) ([]byte, error) { + // if the test is executed on Windows we get the second path + if path == "/etc/mokapi/mokapi.yaml" || path == "\\etc\\mokapi\\mokapi.yaml" { + return []byte("name: foobar"), nil + } + return nil, fs.ErrNotExist + } + cli.SetReadFileFS(f) + c := newCmd([]string{}, &s) + c.Flags().String("name", "", "") + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "foobar", s.Name) + }, + }, + { + name: "file does not exist", + test: func(t *testing.T) { + s := &struct{ Name string }{} + f := func(path string) ([]byte, error) { return []byte(""), fmt.Errorf("file not found") } + cli.SetReadFileFS(f) + c := newCmd([]string{}, &s) + err := c.Execute() + require.Error(t, err) + }, + }, + { + name: "empty file", + test: func(t *testing.T) { + s := &struct{ Name string }{} + f := func(path string) ([]byte, error) { return []byte(""), nil } + cli.SetReadFileFS(f) + c := newCmd([]string{}, &s) + c.Flags().String("name", "", "") + err := c.Execute() + require.NoError(t, err) + }, + }, + { + name: "yaml schema error", + test: func(t *testing.T) { + s := &struct{ Name int }{} + f := func(path string) ([]byte, error) { return []byte("name: {}"), nil } + cli.SetReadFileFS(f) + c := newCmd([]string{}, &s) + c.Flags().String("name", "", "") + err := c.Execute() + require.EqualError(t, err, "parse file 'mokapi.yaml' failed: cannot unmarshal object into int") + }, + }, + { + name: "temp file with data",
+ test: func(t *testing.T) { + s := &struct{ Name string }{} + path := createTempFile(t, "test.yml", "name: foobar") + + c := newCmd([]string{"--config-file", path}, s) + c.Flags().String("name", "", "") + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "foobar", s.Name) + }, + }, + { + name: "pascal case", + test: func(t *testing.T) { + s := &struct { + InstallDir string `yaml:"installDir" flag:"install-dir"` + }{} + path := createTempFile(t, "test.yml", "installDir: foobar") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().String("install-dir", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "foobar", s.InstallDir) + }, + }, + { + name: "map", + test: func(t *testing.T) { + s := &struct { + Values map[string]string + }{} + path := createTempFile(t, "test.yml", "values: {foo: bar}") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().DynamicString("values-", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, map[string]string{"foo": "bar"}, s.Values) + }, + }, + { + name: "array", + test: func(t *testing.T) { + s := &struct { + Key []string + }{} + path := createTempFile(t, "test.yml", "key: [bar]") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().DynamicString("key[]", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []string{"bar"}, s.Key) + }, + }, + { + name: "map with array", + test: func(t *testing.T) { + s := &struct { + Values map[string][]string + }{} + path := createTempFile(t, "test.yml", "values: {foo: [bar]}") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().DynamicString("values-[]", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, map[string][]string{"foo": {"bar"}}, s.Values) + }, + }, + { + name: "map pointer struct", + test: func(t *testing.T) { + type test struct { + Name string + Foo string + } + s := &struct { + Values map[string]*test + }{} + path := createTempFile(t, "test.yml", "values: {foo: {name: Bob, foo: bar}}") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().DynamicString("values--name", "", "") + c.Flags().DynamicString("values--foo", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "Bob", s.Values["foo"].Name) + require.Equal(t, "bar", s.Values["foo"].Foo) + }, + }, + { + name: "map struct", + test: func(t *testing.T) { + type test struct { + Name string + Foo string + } + s := &struct { + Values map[string]test + }{} + path := createTempFile(t, "test.yml", "values: {foo: {name: Bob, foo: bar}}") + c := newCmd([]string{"--config-file", path}, s) + c.Flags().DynamicString("values--name", "", "") + c.Flags().DynamicString("values--foo", "", "") + + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "Bob", s.Values["foo"].Name) + require.Equal(t, "bar", s.Values["foo"].Foo) + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + defer func() { + cli.SetReadFileFS(os.ReadFile) + }() + + tc.test(t) + }) + } +} + +func createTempFile(t *testing.T, filename, data string) string { + path := filepath.Join(t.TempDir(), filename) + file, err := os.Create(path) + if err != nil { + t.Fatal(err) + } + defer func() { + _ = file.Close() + }() + + _, err = file.Write([]byte(data)) + if err != nil { + t.Fatal(err) + } + + return path +} diff --git a/pkg/cli/flag_float.go b/pkg/cli/flag_float.go index 74259c5f7..9b3bd7421 100644 --- a/pkg/cli/flag_float.go +++ b/pkg/cli/flag_float.go @@ -37,11 +37,8 @@ func (fs *FlagSet) 
Float(name string, defaultValue float64, usage string) { func (fs *FlagSet) FloatShort(name string, short string, defaultValue float64, usage string) { v := &floatFlag{value: defaultValue} - f := &Flag{Value: v, Usage: usage, DefaultValue: v.String()} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } + f := &Flag{Name: name, Shorthand: short, Value: v, Usage: usage, DefaultValue: v.String()} + fs.setFlag(f) } func (fs *FlagSet) GetFloat(name string) float64 { diff --git a/pkg/cli/flag_int.go b/pkg/cli/flag_int.go index df4d05ff7..60fbb823b 100644 --- a/pkg/cli/flag_int.go +++ b/pkg/cli/flag_int.go @@ -40,11 +40,8 @@ func (fs *FlagSet) Int(name string, defaultValue int, usage string) { func (fs *FlagSet) IntShort(name string, short string, defaultValue int, usage string) { v := &intFlag{value: defaultValue} - f := &Flag{Value: v, Usage: usage, DefaultValue: v.String()} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } + f := &Flag{Name: name, Shorthand: short, Value: v, Usage: usage, DefaultValue: v.String()} + fs.setFlag(f) } func (fs *FlagSet) GetInt(name string) int { diff --git a/pkg/cli/flag_slice.go b/pkg/cli/flag_slice.go index 9867ffda2..408f41611 100644 --- a/pkg/cli/flag_slice.go +++ b/pkg/cli/flag_slice.go @@ -33,11 +33,8 @@ func (fs *FlagSet) StringSlice(name string, defaultValue []string, usage string, func (fs *FlagSet) StringSliceShort(name string, short string, defaultValue []string, usage string, explode bool) { v := &stringSliceFlag{value: defaultValue, explode: explode} - f := &Flag{Value: v, Usage: usage, DefaultValue: v.String()} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } + f := &Flag{Name: name, Shorthand: short, Value: v, Usage: usage, DefaultValue: v.String()} + fs.setFlag(f) } func (fs *FlagSet) GetStringSlice(name string) []string { @@ -51,29 +48,3 @@ func (fs *FlagSet) GetStringSlice(name string) []string { } return s } - -type sliceFlag struct { - value []string - explode bool -} - -func (f *sliceFlag) Set(values []string) error { - return nil -} - -func (f *sliceFlag) Value() any { - return f.value -} - -func (f *sliceFlag) String() string { - return "" -} - -func (fs *FlagSet) SliceShort(name string, short string, defaultValue []string, usage string, explode bool) { - v := &sliceFlag{value: defaultValue, explode: explode} - f := &Flag{Value: v, Usage: usage, DefaultValue: ""} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } -} diff --git a/pkg/cli/flag_string.go b/pkg/cli/flag_string.go index 86999d5da..dc4ab8cb3 100644 --- a/pkg/cli/flag_string.go +++ b/pkg/cli/flag_string.go @@ -31,11 +31,8 @@ func (fs *FlagSet) String(name string, defaultValue string, usage string) { func (fs *FlagSet) StringShort(name string, short string, defaultValue string, usage string) { v := &stringFlag{value: defaultValue} - f := &Flag{Value: v, Usage: usage, DefaultValue: defaultValue} - fs.setFlag(name, f) - if short != "" { - fs.setFlag(short, f) - } + f := &Flag{Name: name, Shorthand: short, Value: v, Usage: usage, DefaultValue: defaultValue} + fs.setFlag(f) } func (fs *FlagSet) GetString(name string) string { diff --git a/pkg/cli/flags.go b/pkg/cli/flags.go index 8dd50c085..2a9b822c3 100644 --- a/pkg/cli/flags.go +++ b/pkg/cli/flags.go @@ -3,6 +3,7 @@ package cli import ( "fmt" "regexp" + "strings" ) type FlagSet struct { @@ -11,6 +12,8 @@ type FlagSet struct { } type Flag struct { + Name string + Shorthand string Usage string Value Value DefaultValue string @@ -31,28 +34,48 @@ type Value interface { 
String() string } -func (fs *FlagSet) setFlag(name string, f *Flag) { +func (fs *FlagSet) setFlag(f *Flag) { if fs.flags == nil { fs.flags = make(map[string]*Flag) } - fs.flags[name] = f + fs.flags[f.Name] = f + if f.Shorthand != "" { + fs.flags[f.Shorthand] = f + } } func (fs *FlagSet) setValue(name string, value []string) error { + // backwards compatibility + name = strings.ReplaceAll(name, ".", "-") if fs.flags != nil { f, ok := fs.flags[name] if ok { return f.Value.Set(value) } - for _, flag := range fs.dynamic { - if flag.isValidFlag(name) { - return flag.Value.Set(value) - } + } + for _, flag := range fs.dynamic { + if flag.isValidFlag(name) { + return flag.Value.Set(value) } } return fmt.Errorf("unknown flag '%v'", name) } +func (fs *FlagSet) IsValidFlag(name string) bool { + if fs.flags == nil { + return false + } + if _, ok := fs.flags[name]; ok { + return true + } + for _, flag := range fs.dynamic { + if flag.isValidFlag(name) { + return true + } + } + return false +} + func (fs *FlagSet) GetValue(name string) (any, error) { f, ok := fs.flags[name] if !ok { diff --git a/pkg/cli/parse.go b/pkg/cli/parse.go index faa5bde1f..c816d0cd8 100644 --- a/pkg/cli/parse.go +++ b/pkg/cli/parse.go @@ -6,7 +6,7 @@ import ( "strings" ) -func parseFlags(args []string, envNamePrefix string) (map[string][]string, error) { +func parseFlags(args []string, envNamePrefix string, isValidFlag func(name string) bool) (map[string][]string, error) { flags, err := parseArgs(args) if err != nil { return nil, err @@ -16,6 +16,9 @@ func parseFlags(args []string, envNamePrefix string) (map[string][]string, error // merge maps. env flags does not overwrite cli flags for k, v := range envs { if _, ok := flags[k]; !ok { + if !isValidFlag(k) { + return nil, fmt.Errorf("unknown environment variable '%s' (value '%s')", k, v) + } flags[k] = []string{v} } } diff --git a/pkg/cmd/mokapi/mokapi.go b/pkg/cmd/mokapi/mokapi.go index ae269f0a6..2bb75f1f3 100644 --- a/pkg/cmd/mokapi/mokapi.go +++ b/pkg/cmd/mokapi/mokapi.go @@ -3,7 +3,6 @@ package mokapi import ( "context" "fmt" - log "github.com/sirupsen/logrus" stdlog "log" "mokapi/api" "mokapi/config/dynamic" @@ -25,6 +24,8 @@ import ( "mokapi/server/cert" "mokapi/version" "strings" + + log "github.com/sirupsen/logrus" ) const logo = "888b d888 888 d8888 d8b \n8888b d8888 888 d88888 Y8P \n88888b.d88888 888 d88P888 \n888Y88888P888 .d88b. 888 888 d88P 888 88888b. 
888 \n888 Y888P 888 d88\"\"88b 888 .88P d88P 888 888 \"88b 888 \n888 Y8P 888 888 888 888888K d88P 888 888 888 888 \n888 \" 888 Y88..88P 888 \"88b d8888888888 888 d88P 888 \n888 888 \"Y88P\" 888 888 d88P 888 88888P\" 888 \n v%s by Marcel Lehmann%s 888 \n https://mokapi.io 888 \n 888 \n" @@ -41,10 +42,9 @@ func NewCmdMokapi(ctx context.Context) *cli.Command { Commands: []*cli.Command{ NewCmdSampleData(), }, + EnvPrefix: "MOKAPI_", } - cmd.SetEnvPrefix("mokapi") - cmd.Flags().BoolShort("version", "v", false, "Show version information and exit") cmd.Flags().Bool("generate-cli-skeleton", false, "Generates the skeleton configuration file") @@ -58,14 +58,17 @@ func NewCmdMokapi(ctx context.Context) *cli.Command { cmd.Flags().String("log-format", "text", "Mokapi log format: json|text (default is text)") // file provider + cmd.Flags().String("providers-file", "", "") cmd.Flags().StringSlice("providers-file-filename", []string{}, "Load the dynamic configuration from files", true) cmd.Flags().StringSlice("providers-file-filenames", []string{}, "Load the dynamic configuration from files", false) cmd.Flags().StringSlice("providers-file-directory", []string{}, "Load the dynamic configuration from directories", true) cmd.Flags().StringSlice("providers-file-directories", []string{}, "Load the dynamic configuration from directories", false) cmd.Flags().StringSlice("providers-file-skip-prefix", []string{"_"}, "", false) cmd.Flags().StringSlice("providers-file-include", []string{}, "", false) + cmd.Flags().DynamicStringSlice("providers-file-include[]", []string{}, "", false) // git provider + cmd.Flags().String("providers-git", "", "") cmd.Flags().StringSlice("providers-git-url", []string{}, "", true) cmd.Flags().StringSlice("providers-git-urls", []string{}, "", false) cmd.Flags().String("providers-git-pull-interval", "3m", "") @@ -82,6 +85,7 @@ func NewCmdMokapi(ctx context.Context) *cli.Command { cmd.Flags().DynamicString("providers-git-repositories[]-pull-interval", "", "Specifies an array of filenames or pattern to include in mokapi") // http provider + cmd.Flags().String("providers-http", "", "") cmd.Flags().StringSlice("providers-http-url", []string{}, "", true) cmd.Flags().StringSlice("providers-http-urls", []string{}, "", false) cmd.Flags().String("providers-http-poll-interval", "3m", "") @@ -91,6 +95,7 @@ func NewCmdMokapi(ctx context.Context) *cli.Command { cmd.Flags().String("providers-http-ca", "", "Certificate authority") // npm provider + cmd.Flags().String("providers-npm", "", "") cmd.Flags().StringSlice("providers-npm-global-folder", []string{}, "", true) cmd.Flags().StringSlice("providers-npm-global-folders", []string{}, "", false) // npm package diff --git a/pkg/cmd/mokapi/mokapi_test.go b/pkg/cmd/mokapi/mokapi_test.go index 4ce36b5e5..11186039a 100644 --- a/pkg/cmd/mokapi/mokapi_test.go +++ b/pkg/cmd/mokapi/mokapi_test.go @@ -2,11 +2,16 @@ package mokapi_test import ( "context" - "github.com/stretchr/testify/require" + "encoding/json" + "fmt" "mokapi/config/static" "mokapi/pkg/cli" "mokapi/pkg/cmd/mokapi" + "os" + "path/filepath" "testing" + + "github.com/stretchr/testify/require" ) func TestMain_Flags(t *testing.T) { @@ -89,3 +94,581 @@ func TestMain_Flags(t *testing.T) { }) } } + +// Tests from old design +func TestStaticConfig(t *testing.T) { + testcases := []struct { + name string + args []string + test func(t *testing.T, cfg *static.Config) + }{ + { + name: "assign with =", + args: []string{"--log-level=debug"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, 
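// How environment variables appear to map onto these flags once EnvPrefix is
// set (inferred from the tests below; illustrative, not exhaustive): the
// MOKAPI_ prefix is stripped and the remainder is matched case-insensitively,
// with "_" treated like "-" and index suffixes kept:
//
//	MOKAPI_LOG_LEVEL=debug                         -> --log-level=debug
//	MOKAPI_Providers_GIT_Repositories[0]_Url=<url> -> --providers-git-repositories[0]-url=<url>
//	MOKAPI_NOT_SUPPORTED=foo                       -> error: unknown environment variable 'not-supported'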
"debug", cfg.Log.Level) + }, + }, + { + name: "assign without =", + args: []string{"--log-level", "debug"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "debug", cfg.Log.Level) + }, + }, + { + name: "--help", + args: []string{"--help"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, true, cfg.Help) + }, + }, + { + name: "-h", + args: []string{"-h"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, true, cfg.Help) + }, + }, + { + name: "--version", + args: []string{"--version"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, true, cfg.Version) + }, + }, + { + name: "-v", + args: []string{"-v"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, true, cfg.Version) + }, + }, + { + name: "json", + args: []string{`--providers-file={"filename":"foo.yaml","directory":"foo", "skipPrefix":["_"]}`}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo.yaml"}, cfg.Providers.File.Filenames) + require.Equal(t, []string{"foo"}, cfg.Providers.File.Directories) + require.Equal(t, []string{"_"}, cfg.Providers.File.SkipPrefix) + }, + }, + { + name: "shorthand object", + args: []string{"--providers-file", "filename=foo.yaml"}, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo.yaml"}, cfg.Providers.File.Filenames) + }, + }, + { + name: "args", + args: []string{ + "--providers-git-repositories[0]-url=https://github.com/PATH-TO/REPOSITORY?ref=branch-name", + "--providers-git-repositories[0]-pull-interval=5m", + "--providers-git-repositories[1]-pull-interval=5h", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "https://github.com/PATH-TO/REPOSITORY?ref=branch-name", cfg.Providers.Git.Repositories[0].Url) + require.Equal(t, "5m", cfg.Providers.Git.Repositories[0].PullInterval) + require.Equal(t, "", cfg.Providers.Git.Repositories[1].Url) + require.Equal(t, "5h", cfg.Providers.Git.Repositories[1].PullInterval) + }, + }, + { + name: "shorthand array", + args: []string{ + "--providers-git-repositories", + "url=foo,pullInterval=5m url=bar,pullInterval=5h", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Len(t, cfg.Providers.Git.Repositories, 2) + require.Equal(t, "foo", cfg.Providers.Git.Repositories[0].Url) + require.Equal(t, "5m", cfg.Providers.Git.Repositories[0].PullInterval) + require.Equal(t, "bar", cfg.Providers.Git.Repositories[1].Url) + require.Equal(t, "5h", cfg.Providers.Git.Repositories[1].PullInterval) + }, + }, + { + name: "explode with json", + args: []string{ + `--providers-git-repository={"url":"https://github.com/PATH-TO/REPOSITORY?ref=branch-name","pullInterval":"5m"}`, + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "https://github.com/PATH-TO/REPOSITORY?ref=branch-name", cfg.Providers.Git.Repositories[0].Url) + require.Equal(t, "5m", cfg.Providers.Git.Repositories[0].PullInterval) + }, + }, + { + name: "file provider include", + args: []string{ + "--providers-file-include", + `mokapi/**/*.json mokapi/**/*.yaml "foo bar/**/*.yaml`, + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"mokapi/**/*.json", "mokapi/**/*.yaml", "foo bar/**/*.yaml"}, cfg.Providers.File.Include) + }, + }, + { + name: "file provider include with space", + args: []string{ + "--Providers.file.include", + `"C:\Documents and Settings\" C:\Work"`, + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"C:\\Documents and Settings\\", "C:\\Work"}, 
cfg.Providers.File.Include) + }, + }, + { + name: "file provider include twice", + args: []string{ + "--providers-file-include", "foo", + "--providers-file-include", "bar", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo", "bar"}, cfg.Providers.File.Include) + }, + }, + { + name: "file provider include overwrite", + args: []string{ + "--providers-file-include", "foo", + "--Providers-file-include[0]", "bar", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"bar"}, cfg.Providers.File.Include) + }, + }, + { + name: "git provider set url", + args: []string{ + "--providers-git-url", "foo", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo"}, cfg.Providers.Git.Urls) + }, + }, + { + name: "git provider set urls", + args: []string{ + "--providers-git-urls", "foo", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo"}, cfg.Providers.Git.Urls) + }, + }, + { + name: "http provider set url", + args: []string{ + "--providers-http-url", "foo", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo"}, cfg.Providers.Http.Urls) + }, + }, + { + name: "http provider set urls using explode", + args: []string{ + "--providers-http-url", "foo", + "--providers-http-url", "bar", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo", "bar"}, cfg.Providers.Http.Urls) + }, + }, + { + name: "http provider set urls", + args: []string{ + "--providers-http-urls", "foo bar", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo", "bar"}, cfg.Providers.Http.Urls) + }, + }, + { + name: "http provider", + args: []string{ + "--providers-http", `urls=foo bar,pollInterval=5s,pollTimeout=30s,proxy=bar,tlsSkipVerify=true`, + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo", "bar"}, cfg.Providers.Http.Urls) + require.Equal(t, "5s", cfg.Providers.Http.PollInterval) + require.Equal(t, "30s", cfg.Providers.Http.PollTimeout) + require.Equal(t, true, cfg.Providers.Http.TlsSkipVerify) + require.Equal(t, "bar", cfg.Providers.Http.Proxy) + }, + }, + { + name: "npm provider global folders", + args: []string{ + "--providers-npm-global-folders", "/etc/foo", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "/etc/foo", cfg.Providers.Npm.GlobalFolders[0]) + }, + }, + { + name: "config", + args: []string{ + "--config", `{"openapi": "3.0"}`, + }, + test: func(t *testing.T, cfg *static.Config) { + require.Len(t, cfg.Configs, 1) + require.Equal(t, "{\"openapi\": \"3.0\"}", cfg.Configs[0]) + }, + }, + { + name: "positional parameter file", + args: []string{ + "foo.json", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, []string{"foo.json"}, cfg.Args) + // next check requires to run cfg.Parse + require.Equal(t, "foo.json", cfg.Providers.File.Filenames[0]) + }, + }, + { + name: "positional parameter http", + args: []string{ + "http://foo.io/foo.json", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "http://foo.io/foo.json", cfg.Providers.Http.Urls[0]) + }, + }, + { + name: "positional parameter https", + args: []string{ + "https://foo.io/foo.json", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "https://foo.io/foo.json", cfg.Providers.Http.Urls[0]) + }, + }, + { + name: "positional parameter git with https", + args: []string{ + "git+https://foo.io/foo.json", + }, + test: func(t 
*testing.T, cfg *static.Config) { + require.Equal(t, "https://foo.io/foo.json", cfg.Providers.Git.Urls[0]) + }, + }, + { + name: "positional parameter npm", + args: []string{ + "npm://bar/foo.txt?scope=@foo", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "npm://bar/foo.txt?scope=@foo", cfg.Providers.Npm.Packages[0].Name) + }, + }, + { + name: "positional parameter Windows path", + args: []string{ + "C:\\bar", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, "C:\\bar", cfg.Providers.File.Filenames[0]) + }, + }, + { + name: "data-gen optional properties", + args: []string{ + "--data-gen-optional-properties", "often", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, 0.85, cfg.DataGen.OptionalPropertiesProbability()) + }, + }, + { + name: "data-gen optional properties always", + args: []string{ + "--data-gen-optional-properties", "always", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, 1.0, cfg.DataGen.OptionalPropertiesProbability()) + }, + }, + { + name: "data-gen optional properties sometimes", + args: []string{ + "--data-gen-optional-properties", "sometimes", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, 0.5, cfg.DataGen.OptionalPropertiesProbability()) + }, + }, + { + name: "data-gen optional properties 0.3", + args: []string{ + "--data-gen-optional-properties", "0.3", + }, + test: func(t *testing.T, cfg *static.Config) { + require.Equal(t, 0.3, cfg.DataGen.OptionalPropertiesProbability()) + }, + }, + } + + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + cmd := mokapi.NewCmdMokapi(context.Background()) + cmd.SetArgs(tc.args) + + cfg := static.NewConfig() + cmd.Run = func(cmd *cli.Command, args []string) error { + cfg = cmd.Config.(*static.Config) + return cfg.Parse() + } + err := cmd.Execute() + require.NoError(t, err) + + tc.test(t, cfg) + }) + } +} + +func TestMokapi_Env(t *testing.T) { + testcases := []struct { + name string + env map[string]string + test func(t *testing.T, cfg *static.Config, err error) + }{ + { + name: "env var", + env: map[string]string{ + "MOKAPI_Providers_GIT_Repositories[0]_Url": "https://github.com/PATH-TO/REPOSITORY", + "MOKAPI_Providers_GIT_Repositories[0]_Pull_Interval": "3m", + }, + test: func(t *testing.T, cfg *static.Config, err error) { + require.NoError(t, err) + require.Len(t, cfg.Providers.Git.Repositories, 1) + require.Equal(t, "https://github.com/PATH-TO/REPOSITORY", cfg.Providers.Git.Repositories[0].Url) + require.Equal(t, "3m", cfg.Providers.Git.Repositories[0].PullInterval) + }, + }, + { + name: "data-gen env var", + env: map[string]string{ + "MOKAPI_DATA_GEN_OPTIONAL_PROPERTIES": "sometimes", + }, + test: func(t *testing.T, cfg *static.Config, err error) { + require.NoError(t, err) + require.Equal(t, 0.5, cfg.DataGen.OptionalPropertiesProbability()) + }, + }, + { + name: "not supported env var", + env: map[string]string{ + "MOKAPI_NOT_SUPPORTED": "foo", + }, + test: func(t *testing.T, cfg *static.Config, err error) { + require.EqualError(t, err, "unknown environment variable 'not-supported' (value 'foo')") + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + defer func() { + for k := range tc.env { + _ = os.Unsetenv(k) + } + }() + for k, v := range tc.env { + err := os.Setenv(k, v) + require.NoError(t, err) + } + + cmd := mokapi.NewCmdMokapi(context.Background()) + cmd.SetArgs([]string{}) + + cfg := static.NewConfig() + cmd.Run = func(cmd *cli.Command, 
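// Summary of the positional-argument routing asserted above (what cfg.Parse
// appears to do with each bare argument):
//
//	foo.json, C:\bar        -> Providers.File.Filenames
//	http(s)://...           -> Providers.Http.Urls
//	git+https://...         -> Providers.Git.Urls, with the "git+" prefix stripped
//	npm://bar/foo.txt?...   -> Providers.Npm.Packages
//	any other scheme        -> error "positional argument is not supported: ..."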
args []string) error { + cfg = cmd.Config.(*static.Config) + return cfg.Parse() + } + err := cmd.Execute() + tc.test(t, cfg, err) + }) + } +} + +func TestMokapi_File(t *testing.T) { + newCmd := func(args []string) (*cli.Command, *static.Config) { + c := mokapi.NewCmdMokapi(context.Background()) + c.SetArgs(args) + c.Run = func(cmd *cli.Command, args []string) error { + return nil + } + return c, c.Config.(*static.Config) + } + + testcases := []struct { + name string + test func(t *testing.T) + }{ + { + name: "config file://", + test: func(t *testing.T) { + path := createTempFile(t, "test.json", `{"openapi": "3.0"}`) + c, cfg := newCmd([]string{"--config", fmt.Sprintf("file://%s", path)}) + + err := c.Execute() + require.NoError(t, err) + require.Len(t, cfg.Configs, 1) + require.Equal(t, "{\"openapi\": \"3.0\"}", cfg.Configs[0]) + }, + }, + { + name: "configfile json", + test: func(t *testing.T) { + path := createTempFile(t, "test.json", `{"configs": [ { "openapi": "3.0", "info": { "name": "foo" } } ]}`) + c, cfg := newCmd([]string{"--configfile", path}) + err := c.Execute() + require.NoError(t, err) + + require.Len(t, cfg.Configs, 1) + + actual := map[string]interface{}{} + err = json.Unmarshal([]byte(cfg.Configs[0]), &actual) + require.NoError(t, err) + expected := map[string]interface{}{ + "openapi": "3.0", + "info": map[string]interface{}{ + "name": "foo", + }, + } + require.Equal(t, expected, actual) + }, + }, + { + name: "configfile yaml", + test: func(t *testing.T) { + path := createTempFile(t, "foo.yaml", ` +configs: + - openapi: "3.0" + info: + name: foo +`) + c, cfg := newCmd([]string{"--configfile", path}) + err := c.Execute() + require.NoError(t, err) + + actual := map[string]interface{}{} + err = json.Unmarshal([]byte(cfg.Configs[0]), &actual) + require.NoError(t, err) + expected := map[string]interface{}{ + "openapi": "3.0", + "info": map[string]interface{}{ + "name": "foo", + }, + } + + require.Len(t, cfg.Configs, 1) + require.Equal(t, expected, actual) + }, + }, + { + name: "config-file", + test: func(t *testing.T) { + path := createTempFile(t, "foo.json", `{"log": { "level": "error" } }`) + c, cfg := newCmd([]string{"--config-file", path}) + err := c.Execute() + require.NoError(t, err) + + require.Equal(t, "error", cfg.Log.Level) + }, + }, + { + name: "cli-input", + test: func(t *testing.T) { + path := createTempFile(t, "foo.json", `{"log": { "level": "error" } }`) + c, cfg := newCmd([]string{"--cli-input", path}) + err := c.Execute() + require.NoError(t, err) + require.Equal(t, "error", cfg.Log.Level) + }, + }, + { + name: "cli-input file provider directories", + test: func(t *testing.T) { + path := createTempFile(t, "foo.yaml", ` +providers: + file: + directory: foo +`) + c, cfg := newCmd([]string{"--cli-input", path}) + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []string{"foo"}, cfg.Providers.File.Directories) + }, + }, + { + name: "cli-input file provider directories", + test: func(t *testing.T) { + path := createTempFile(t, "foo.yaml", ` +providers: + file: + directories: ["/foo", "/bar"] +`) + c, cfg := newCmd([]string{"--cli-input", path}) + err := c.Execute() + require.NoError(t, err) + require.Equal(t, []string{"/foo", "/bar"}, cfg.Providers.File.Directories) + }, + }, + { + name: "cli-input file provider directory", + test: func(t *testing.T) { + path := createTempFile(t, "foo.json", `{"providers":{"file":{"directory":"foo"}}}`) + c, cfg := newCmd([]string{"--cli-input", path}) + err := c.Execute() + require.NoError(t, err) + require.Equal(t, 
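// The file-based inputs in this test, in sketch form (all assumed to land in
// the same static config):
//
//	--config '{"openapi":"3.0"}'        // inline document, appended to cfg.Configs
//	--config file://<path>              // same content, read from disk first
//	--configfile <path>                 // JSON/YAML whose "configs" list feeds cfg.Configs
//	--config-file / --cli-input <path>  // JSON/YAML merged into the static config fields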
[]string{"foo"}, cfg.Providers.File.Directories) + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + defer func() { + cli.SetReadFileFS(os.ReadFile) + }() + + tc.test(t) + }) + } +} + +func TestPositionalArg_Error(t *testing.T) { + cmd := mokapi.NewCmdMokapi(context.Background()) + cmd.SetArgs([]string{"foo://bar"}) + + cfg := static.NewConfig() + cmd.Run = func(cmd *cli.Command, args []string) error { + cfg = cmd.Config.(*static.Config) + return cfg.Parse() + } + err := cmd.Execute() + require.EqualError(t, err, "positional argument is not supported: foo://bar") +} + +func createTempFile(t *testing.T, filename, data string) string { + path := filepath.Join(t.TempDir(), filename) + file, err := os.Create(path) + if err != nil { + t.Fatal(err) + } + defer func() { + _ = file.Close() + }() + + _, err = file.Write([]byte(data)) + if err != nil { + t.Fatal(err) + } + + return path +} diff --git a/providers/asyncapi3/kafka/store/broker.go b/providers/asyncapi3/kafka/store/broker.go index ffd875b64..1732aaa48 100644 --- a/providers/asyncapi3/kafka/store/broker.go +++ b/providers/asyncapi3/kafka/store/broker.go @@ -16,11 +16,13 @@ type Broker struct { Host string Port int + config *asyncapi3.Server kafkaConfig asyncapi3.BrokerBindings stopCleanerChan chan bool + topics map[string]*Topic } -func newBroker(id int, name string, config asyncapi3.Server) *Broker { +func newBroker(id int, name string, config *asyncapi3.Server) *Broker { h, p := parseHostAndPort(config.Host) return &Broker{ @@ -28,6 +30,7 @@ func newBroker(id int, name string, config asyncapi3.Server) *Broker { Name: name, Host: h, Port: p, + config: config, kafkaConfig: config.Bindings.Kafka, stopCleanerChan: make(chan bool, 1), } diff --git a/providers/asyncapi3/kafka/store/find_coordinator.go b/providers/asyncapi3/kafka/store/find_coordinator.go index 295ba2e75..8d76ef05d 100644 --- a/providers/asyncapi3/kafka/store/find_coordinator.go +++ b/providers/asyncapi3/kafka/store/find_coordinator.go @@ -2,10 +2,11 @@ package store import ( "fmt" - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/findCoordinator" "net" + + log "github.com/sirupsen/logrus" ) func (s *Store) findCoordinator(rw kafka.ResponseWriter, req *kafka.Request) error { @@ -29,7 +30,7 @@ func (s *Store) findCoordinator(rw kafka.ResponseWriter, req *kafka.Request) err if g.Coordinator == nil { return writeError(kafka.CoordinatorNotAvailable, fmt.Sprintf("no coordinator for group %v available", r.Key)) } else { - host := b.Host + host := g.Coordinator.Host if len(host) == 0 { var err error host, _, err = net.SplitHostPort(req.Host) @@ -38,9 +39,9 @@ func (s *Store) findCoordinator(rw kafka.ResponseWriter, req *kafka.Request) err } } - res.NodeId = int32(b.Id) + res.NodeId = int32(g.Coordinator.Id) res.Host = host - res.Port = int32(b.Port) + res.Port = int32(g.Coordinator.Port) } default: res.ErrorCode = kafka.UnknownServerError diff --git a/providers/asyncapi3/kafka/store/find_coordinator_test.go b/providers/asyncapi3/kafka/store/find_coordinator_test.go index 520943c98..1425c4a57 100644 --- a/providers/asyncapi3/kafka/store/find_coordinator_test.go +++ b/providers/asyncapi3/kafka/store/find_coordinator_test.go @@ -1,7 +1,6 @@ package store_test import ( - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/findCoordinator" @@ -10,6 +9,8 @@ import ( "mokapi/providers/asyncapi3/kafka/store" "mokapi/runtime/events/eventstest" "testing" + + "github.com/stretchr/testify/require" ) func 
TestFindCoordinator(t *testing.T) { diff --git a/providers/asyncapi3/kafka/store/group_balancer.go b/providers/asyncapi3/kafka/store/group_balancer.go index 7a1dfe5f1..f05abe2e0 100644 --- a/providers/asyncapi3/kafka/store/group_balancer.go +++ b/providers/asyncapi3/kafka/store/group_balancer.go @@ -3,12 +3,13 @@ package store import ( "bufio" "bytes" - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/joinGroup" "mokapi/kafka/syncGroup" "mokapi/providers/asyncapi3" "time" + + log "github.com/sirupsen/logrus" ) type groupBalancer struct { @@ -24,6 +25,7 @@ type groupBalancer struct { type joindata struct { client *kafka.ClientContext writer kafka.ResponseWriter + protocolType string protocols []joinGroup.Protocol rebalanceTimeout int sessionTimeout int @@ -221,6 +223,7 @@ StopWaitingForConsumers: GenerationId: int32(generation.Id), Leader: generation.LeaderId, MemberId: memberId, + ProtocolType: j.protocolType, ProtocolName: protocol, }) } @@ -229,6 +232,7 @@ StopWaitingForConsumers: GenerationId: int32(generation.Id), Leader: generation.LeaderId, MemberId: generation.LeaderId, + ProtocolType: leader.protocolType, ProtocolName: protocol, Members: members, }) diff --git a/providers/asyncapi3/kafka/store/joingroup.go b/providers/asyncapi3/kafka/store/joingroup.go index 420b0907c..b40d7e73d 100644 --- a/providers/asyncapi3/kafka/store/joingroup.go +++ b/providers/asyncapi3/kafka/store/joingroup.go @@ -27,6 +27,7 @@ func (s *Store) joingroup(rw kafka.ResponseWriter, req *kafka.Request) error { data := joindata{ client: ctx, writer: rw, + protocolType: r.ProtocolType, protocols: r.Protocols, rebalanceTimeout: int(r.RebalanceTimeoutMs), sessionTimeout: int(r.SessionTimeoutMs), diff --git a/providers/asyncapi3/kafka/store/offset_fetch.go b/providers/asyncapi3/kafka/store/offset_fetch.go index 34d407fd4..60f57b576 100644 --- a/providers/asyncapi3/kafka/store/offset_fetch.go +++ b/providers/asyncapi3/kafka/store/offset_fetch.go @@ -2,21 +2,36 @@ package store import ( "fmt" - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/offsetFetch" "mokapi/schema/json/parser" + + log "github.com/sirupsen/logrus" ) func (s *Store) offsetFetch(rw kafka.ResponseWriter, req *kafka.Request) error { r := req.Message.(*offsetFetch.Request) - res := &offsetFetch.Response{ - Topics: make([]offsetFetch.ResponseTopic, 0, len(r.Topics)), - } + res := &offsetFetch.Response{} ctx := kafka.ClientFromContext(req) - for _, rt := range r.Topics { + if req.Header.ApiVersion >= 8 { + for _, g := range r.Groups { + res.Groups = append(res.Groups, offsetFetch.ResponseGroup{ + GroupId: g.GroupId, + Topics: s.fetchTopicOffsets(g.GroupId, g.Topics, ctx), + }) + } + } else { + res.Topics = s.fetchTopicOffsets(r.GroupId, r.Topics, ctx) + } + + return rw.Write(res) +} + +func (s *Store) fetchTopicOffsets(groupId string, topics []offsetFetch.RequestTopic, ctx *kafka.ClientContext) []offsetFetch.ResponseTopic { + result := make([]offsetFetch.ResponseTopic, 0, len(topics)) + for _, rt := range topics { topic := s.Topic(rt.Name) resTopic := offsetFetch.ResponseTopic{Name: rt.Name, Partitions: make([]offsetFetch.Partition, 0, len(rt.PartitionIndexes))} @@ -31,14 +46,14 @@ func (s *Store) offsetFetch(rw kafka.ResponseWriter, req *kafka.Request) error { if p == nil { log.Errorf("kafka OffsetFetch: unknown partition %v, topic=%v, client=%v", index, rt.Name, ctx.ClientId) resPartition.ErrorCode = kafka.UnknownTopicOrPartition - } else if _, ok := ctx.Member[r.GroupId]; !ok { + } else if _, ok := ctx.Member[groupId]; !ok { 
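// Note on the joinGroup/group_balancer change above: newer JoinGroup response
// versions also return the group's protocol type (this appears to correspond
// to KIP-559), so the balancer now threads the client's requested
// protocolType through joindata into both the member and leader responses:
//
//	ProtocolType: leader.protocolType, // echoed back from the JoinGroup request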
log.Errorf("kafka OffsetFetch: unknown member topic=%v, client=%v", rt.Name, ctx.ClientId) resPartition.ErrorCode = kafka.UnknownMemberId } else { // todo check partition is assigned to member - g, ok := s.Group(r.GroupId) + g, ok := s.Group(groupId) if !ok { - log.Errorf("kafka OffsetFetch: unkown group name %v, topic=%v, client=%v", r.GroupId, rt.Name, ctx.ClientId) + log.Errorf("kafka OffsetFetch: unkown group name %v, topic=%v, client=%v", groupId, rt.Name, ctx.ClientId) resPartition.ErrorCode = kafka.GroupIdNotFound } else { if err, code := validateConsumer(topic, ctx.ClientId, g.Name); err != nil { @@ -54,10 +69,9 @@ func (s *Store) offsetFetch(rw kafka.ResponseWriter, req *kafka.Request) error { resTopic.Partitions = append(resTopic.Partitions, *resPartition) } - res.Topics = append(res.Topics, resTopic) + result = append(result, resTopic) } - - return rw.Write(res) + return result } func validateConsumer(t *Topic, clientId, groupId string) (error, kafka.ErrorCode) { diff --git a/providers/asyncapi3/kafka/store/offset_fetch_test.go b/providers/asyncapi3/kafka/store/offset_fetch_test.go index d66a1bcdd..9196e7d5b 100644 --- a/providers/asyncapi3/kafka/store/offset_fetch_test.go +++ b/providers/asyncapi3/kafka/store/offset_fetch_test.go @@ -1,9 +1,6 @@ package store_test import ( - "github.com/sirupsen/logrus" - "github.com/sirupsen/logrus/hooks/test" - "github.com/stretchr/testify/require" "mokapi/engine/enginetest" "mokapi/kafka" "mokapi/kafka/kafkatest" @@ -15,6 +12,10 @@ import ( "mokapi/runtime/events/eventstest" "mokapi/schema/json/schema/schematest" "testing" + + "github.com/sirupsen/logrus" + "github.com/sirupsen/logrus/hooks/test" + "github.com/stretchr/testify/require" ) func TestOffsetFetch(t *testing.T) { @@ -200,7 +201,7 @@ func TestOffsetFetch(t *testing.T) { asyncapi3test.WithServer("", "kafka", b.Addr), asyncapi3test.WithChannel("foo"), )) - s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + _, _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ Records: []*kafka.Record{ { Key: kafka.NewBytes([]byte("foo")), @@ -242,6 +243,63 @@ func TestOffsetFetch(t *testing.T) { require.Equal(t, int64(0), p.CommittedOffset) }, }, + { + "offset fetch with new version fetching offsets for multiple groups", + func(t *testing.T, s *store.Store) { + b := kafkatest.NewBroker(kafkatest.WithHandler(s)) + defer b.Close() + s.Update(asyncapi3test.NewConfig( + asyncapi3test.WithServer("", "kafka", b.Addr), + asyncapi3test.WithChannel("foo"), + )) + _, _, _ = s.Topic("foo").Partition(0).Write(kafka.RecordBatch{ + Records: []*kafka.Record{ + { + Key: kafka.NewBytes([]byte("foo")), + Value: kafka.NewBytes([]byte("bar")), + }, + }, + }) + + err := b.Client().JoinSyncGroup("foo", "bar", 3, 3) + require.NoError(t, err) + + _, err = b.Client().OffsetCommit(2, &offsetCommit.Request{ + GroupId: "bar", + MemberId: "foo", + Topics: []offsetCommit.Topic{ + { + Name: "foo", + Partitions: []offsetCommit.Partition{{}}, + }, + }, + }) + require.NoError(t, err) + + r, err := b.Client().OffsetFetch(9, &offsetFetch.Request{ + Groups: []offsetFetch.RequestGroup{ + { + GroupId: "bar", + Topics: []offsetFetch.RequestTopic{ + { + Name: "foo", + PartitionIndexes: []int32{0}, + }, + }, + }, + }, + }) + require.NoError(t, err) + require.Equal(t, kafka.None, r.ErrorCode) + require.Len(t, r.Groups, 1) + require.Len(t, r.Groups[0].Topics, 1) + require.Len(t, r.Groups[0].Topics[0].Partitions, 1) + + p := r.Groups[0].Topics[0].Partitions[0] + require.Equal(t, kafka.None, p.ErrorCode) + require.Equal(t, int64(0), 
p.CommittedOffset) + }, + }, } t.Parallel() diff --git a/providers/asyncapi3/kafka/store/partition.go b/providers/asyncapi3/kafka/store/partition.go index fee3d0d9f..a596427aa 100644 --- a/providers/asyncapi3/kafka/store/partition.go +++ b/providers/asyncapi3/kafka/store/partition.go @@ -2,13 +2,16 @@ package store import ( "fmt" - log "github.com/sirupsen/logrus" "mokapi/kafka" "mokapi/kafka/produce" + "mokapi/providers/asyncapi3" "mokapi/runtime/events" + "slices" "strconv" "sync" "time" + + log "github.com/sirupsen/logrus" ) type Partition struct { @@ -52,8 +55,16 @@ type WriteArgs struct { func newPartition(index int, brokers Brokers, logger LogRecord, trigger Trigger, topic *Topic) *Partition { brokerList := make([]int, 0, len(brokers)) - for i := range brokers { - brokerList = append(brokerList, i) + for i, b := range brokers { + if topic.Config != nil && len(topic.Config.Servers) > 0 { + if slices.ContainsFunc(topic.Config.Servers, func(s *asyncapi3.ServerRef) bool { + return s.Value == b.config + }) { + brokerList = append(brokerList, i) + } + } else { + brokerList = append(brokerList, i) + } } p := &Partition{ Index: index, diff --git a/providers/asyncapi3/kafka/store/store.go b/providers/asyncapi3/kafka/store/store.go index 5ef98fefd..04e060f53 100644 --- a/providers/asyncapi3/kafka/store/store.go +++ b/providers/asyncapi3/kafka/store/store.go @@ -2,8 +2,6 @@ package store import ( "fmt" - "github.com/pkg/errors" - log "github.com/sirupsen/logrus" "mokapi/engine/common" "mokapi/kafka" "mokapi/kafka/apiVersion" @@ -27,6 +25,9 @@ import ( "strconv" "sync" "time" + + "github.com/pkg/errors" + log "github.com/sirupsen/logrus" ) type Store struct { @@ -147,7 +148,7 @@ func (s *Store) Update(c *asyncapi3.Config) { b.Host = host b.Port = port } else { - s.addBroker(n, *server.Value) + s.addBroker(n, server.Value) } } for _, b := range s.brokers { @@ -255,7 +256,7 @@ func (s *Store) deleteTopic(name string) { delete(s.topics, name) } -func (s *Store) addBroker(name string, config asyncapi3.Server) { +func (s *Store) addBroker(name string, config *asyncapi3.Server) { s.m.Lock() defer s.m.Unlock() @@ -302,7 +303,7 @@ func (s *Store) getBrokerByHost(addr string) *Broker { func (s *Store) log(log *KafkaLog, traits events.Traits) { log.Api = s.cluster - s.eh.Push( + _ = s.eh.Push( log, traits.WithNamespace("kafka").WithName(s.cluster), ) @@ -332,8 +333,8 @@ func (s *Store) trigger(record *kafka.Record, schemaId int) bool { return false } - record.Key.Close() - record.Value.Close() + _ = record.Key.Close() + _ = record.Value.Close() record.Key = kafka.NewBytes([]byte(r.Key)) record.Value = kafka.NewBytes([]byte(r.Value)) diff --git a/providers/openapi/handler.go b/providers/openapi/handler.go index dd23abdda..59dfdc507 100644 --- a/providers/openapi/handler.go +++ b/providers/openapi/handler.go @@ -106,7 +106,7 @@ func (h *responseHandler) ServeHTTP(rw http.ResponseWriter, r *http.Request) { // Not reading the request body can cause a couple of problems. // Go’s HTTP server uses connection pooling by default. // If you don’t read and fully consume (or close) the request body, the remaining unread bytes will stay in the TCP buffer. 
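// The unconditional io.Copy below can block for as long as a client keeps the
// body stream open, which is presumably why it is replaced by the bounded
// drainRequestBody further down; the core of that pattern (assumed 10s budget):
//
//	select {
//	case <-done: // drain goroutine finished reading the body
//	case <-time.After(10 * time.Second): // stop waiting on stalled clients
//	}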
- _, _ = io.Copy(io.Discard, r.Body) + drainRequestBody(r) } if len(op.Security) > 0 { @@ -153,9 +153,6 @@ func (h *responseHandler) ServeHTTP(rw http.ResponseWriter, r *http.Request) { // todo: only specified headers should be written for k, v := range response.Headers { rw.Header().Add(k, v) - if logHttp != nil { - logHttp.Response.Headers[k] = v - } } if len(res.Content) == 0 { @@ -448,3 +445,17 @@ func extractPathParams(route, path string) (map[string]string, error) { return params, nil } + +func drainRequestBody(r *http.Request) { + done := make(chan struct{}) + go func() { + _, _ = io.Copy(io.Discard, r.Body) + close(done) + }() + + select { + case <-done: + case <-time.After(10 * time.Second): + log.Warnf("timeout reading request body for %s %s", r.Method, lib.GetUrl(r)) + } +} diff --git a/providers/openapi/handler_test.go b/providers/openapi/handler_test.go index bb891210e..bd56e0f22 100644 --- a/providers/openapi/handler_test.go +++ b/providers/openapi/handler_test.go @@ -767,12 +767,12 @@ func TestResolveEndpoint(t *testing.T) { func TestHandler_Event(t *testing.T) { testcases := []struct { name string - test func(t *testing.T, f http.HandlerFunc, c *openapi.Config) + test func(t *testing.T, f http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) event func(event string, args ...interface{}) []*common.Action }{ { name: "no response found", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()))) openapitest.AppendPath("/foo", c, openapitest.WithOperation("get", op)) @@ -791,7 +791,7 @@ func TestHandler_Event(t *testing.T) { }, { name: "event sets unknown status code", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()))) openapitest.AppendPath("/foo", c, openapitest.WithOperation("get", op)) @@ -810,7 +810,7 @@ func TestHandler_Event(t *testing.T) { }, { name: "event changes content type", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()), @@ -832,7 +832,7 @@ func TestHandler_Event(t *testing.T) { }, { name: "post request using body in event function", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithRequestBody("", true, openapitest.WithRequestContent("application/json", openapitest.NewContent())), @@ -856,7 +856,7 @@ func TestHandler_Event(t *testing.T) { }, { name: "post request without defining requestBody, body should not be available in event", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()), @@ -878,7 +878,7 @@ func 
TestHandler_Event(t *testing.T) { }, { name: "path parameter", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()), @@ -903,7 +903,7 @@ func TestHandler_Event(t *testing.T) { }, { name: "path parameter with trailing slash in route", - test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config) { + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { op := openapitest.NewOperation( openapitest.WithResponse(http.StatusOK, openapitest.WithContent("application/json", openapitest.NewContent()), @@ -926,6 +926,60 @@ func TestHandler_Event(t *testing.T) { return nil }, }, + { + name: "set response header", + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { + op := openapitest.NewOperation( + openapitest.WithResponse(http.StatusOK, + openapitest.WithContent("application/json", openapitest.NewContent()), + )) + openapitest.AppendPath("/foo/{id}/", c, + openapitest.WithOperation(http.MethodPost, op), + openapitest.WithPathParam("id", openapitest.WithParamSchema(schematest.New("string"))), + ) + r := httptest.NewRequest("post", "http://localhost/foo/123", strings.NewReader(`{ "foo": "bar" }`)) + r.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h(rr, r) + require.Equal(t, "12345", rr.Header().Get("foo")) + require.Equal(t, http.Header{"Content-Type": []string{"application/json"}, "Foo": []string{"12345"}}, rr.Header()) + }, + event: func(event string, args ...interface{}) []*common.Action { + res := args[1].(*common.EventResponse) + res.Headers["foo"] = "12345" + return nil + }, + }, + { + name: "set response header with cases", + test: func(t *testing.T, h http.HandlerFunc, c *openapi.Config, sm *events.StoreManager) { + op := openapitest.NewOperation( + openapitest.WithResponse(http.StatusOK, + openapitest.WithContent("application/json", openapitest.NewContent()), + )) + openapitest.AppendPath("/foo/{id}/", c, + openapitest.WithOperation(http.MethodPost, op), + openapitest.WithPathParam("id", openapitest.WithParamSchema(schematest.New("string"))), + ) + r := httptest.NewRequest("post", "http://localhost/foo/123", strings.NewReader(`{ "foo": "bar" }`)) + r.Header.Set("Content-Type", "application/json") + rr := httptest.NewRecorder() + h(rr, r) + require.Equal(t, "12345", rr.Header().Get("FooBarYuh")) + // Go canonicalizes header names automatically. 
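// For illustration: net/textproto canonicalization upper-cases only the first
// letter and letters following a hyphen, so
//
//	textproto.CanonicalMIMEHeaderKey("FooBarYuh") // "Foobaryuh"
//	textproto.CanonicalMIMEHeaderKey("foo-bar")   // "Foo-Bar"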
+ require.Equal(t, http.Header{"Content-Type": []string{"application/json"}, "Foobaryuh": []string{"12345"}}, rr.Header()) + + e := sm.GetEvents(events.NewTraits()) + require.Len(t, e, 1) + log := e[0].Data.(*openapi.HttpLog) + require.Equal(t, map[string]string{"Content-Type": "application/json", "Foobaryuh": "12345"}, log.Response.Headers) + }, + event: func(event string, args ...interface{}) []*common.Action { + res := args[1].(*common.EventResponse) + res.Headers["FooBarYuh"] = "12345" + return nil + }, + }, } t.Parallel() @@ -940,17 +994,23 @@ func TestHandler_Event(t *testing.T) { Servers: []*openapi.Server{{Url: "http://localhost"}}, Components: openapi.Components{}, } + sm := &events.StoreManager{} + sm.SetStore(1, events.NewTraits().WithNamespace("http")) + eh := &engine{emit: tc.event} tc.test(t, func(rw http.ResponseWriter, r *http.Request) { - h := openapi.NewHandler(config, &engine{emit: tc.event}, &events.StoreManager{}) - err := h.ServeHTTP(rw, r) + h := openapi.NewHandler(config, eh, sm) + ctx, err := openapi.NewLogEventContext(r, false, sm, events.NewTraits()) + require.NoError(t, err) + r = r.WithContext(ctx) + httpErr := h.ServeHTTP(rw, r) if err != nil { - for k, v := range err.Header { + for k, v := range httpErr.Header { rw.Header()[k] = v } - http.Error(rw, err.Message, err.StatusCode) + http.Error(rw, httpErr.Message, httpErr.StatusCode) } - }, config) + }, config, sm) }) } diff --git a/providers/openapi/log.go b/providers/openapi/log.go index ea73a063f..febfb5d04 100644 --- a/providers/openapi/log.go +++ b/providers/openapi/log.go @@ -7,6 +7,7 @@ import ( "mokapi/engine/common" "mokapi/lib" "mokapi/runtime/events" + "net" "net/http" "net/textproto" "strings" @@ -22,6 +23,7 @@ type HttpLog struct { Actions []*common.Action `json:"actions"` Api string `json:"api"` Path string `json:"path"` + ClientIP string `json:"clientIP"` } type HttpRequestLog struct { @@ -57,6 +59,7 @@ func NewLogEventContext(r *http.Request, deprecated bool, eh events.Handler, tra Deprecated: deprecated, Api: traits.GetName(), Path: traits.Get("path"), + ClientIP: clientIP(r), } params, _ := FromContext(r.Context()) @@ -131,3 +134,17 @@ func getParsedHeaders(headers map[string]RequestParameterValue) map[string]bool } return result } + +func clientIP(r *http.Request) string { + if xff := r.Header.Get("X-Forwarded-For"); xff != "" { + parts := strings.Split(xff, ",") + return strings.TrimSpace(parts[0]) + } + + if realIP := r.Header.Get("X-Real-IP"); realIP != "" { + return realIP + } + + host, _, _ := net.SplitHostPort(r.RemoteAddr) + return host +} diff --git a/providers/openapi/schema/apply.go b/providers/openapi/schema/apply.go index 34c407986..311ca87d4 100644 --- a/providers/openapi/schema/apply.go +++ b/providers/openapi/schema/apply.go @@ -1,6 +1,9 @@ package schema -import "strings" +import ( + "fmt" + "strings" +) func (s *Schema) apply(ref *Schema) { if ref == nil { @@ -108,6 +111,8 @@ func (s *Schema) apply(ref *Schema) { } if !s.isSet("additionalProperties") { s.AdditionalProperties = ref.AdditionalProperties + } else { + fmt.Print("") } if !s.isSet("unevaluatedProperties") { s.UnevaluatedProperties = ref.UnevaluatedProperties diff --git a/providers/openapi/schema/apply_test.go b/providers/openapi/schema/apply_test.go index 48d3c3c6f..ebe2ef328 100644 --- a/providers/openapi/schema/apply_test.go +++ b/providers/openapi/schema/apply_test.go @@ -2,14 +2,17 @@ package schema_test import ( "encoding/json" - "github.com/stretchr/testify/require" "mokapi/config/dynamic" 
"mokapi/config/dynamic/dynamictest" - "mokapi/schema/json/schema" + "mokapi/providers/openapi/schema" + jsonSchema "mokapi/schema/json/schema" "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" ) -func TestSchema_ApplyRef(t *testing.T) { +func TestSchema_ApplyRef_JSON(t *testing.T) { testcases := []struct { name string input string @@ -156,6 +159,25 @@ func TestSchema_ApplyRef(t *testing.T) { require.Equal(t, float64(2), *s.Const) }, }, + { + name: "allOf not overwritten by ref", + input: ` +{ + "$defs": { + "foo": { + + } + }, + "type": "integer", + "allOf": [{ "type": "string" }], + "$ref": "#/$defs/foo" +}`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, jsonSchema.Types{"string"}, s.AllOf[0].Type) + }, + }, } t.Parallel() @@ -175,3 +197,165 @@ func TestSchema_ApplyRef(t *testing.T) { }) } } + +func TestSchema_ApplyRef_YAML(t *testing.T) { + testcases := []struct { + name string + input string + test func(t *testing.T, s *schema.Schema, err error) + }{ + { + name: "boolean used from ref", + input: ` +$defs: + foo: false +$ref: '#/$defs/foo' +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, false, *s.Boolean) + }, + }, + { + name: "boolean from ref not used", + input: ` +$defs: + foo: false +type: integer +$ref: '#/$defs/foo' +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Nil(t, s.Boolean) + require.Equal(t, "integer", s.Type.String()) + }, + }, + { + name: "type used from ref", + input: ` +$defs: + foo: + type: string +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "string", s.Type.String()) + }, + }, + { + name: "type not overwritten by ref", + input: ` +$defs: + foo: + type: string +type: integer +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + }, + }, + { + name: "enum used from ref", + input: ` +$defs: + foo: + type: string + enum: [foo, bar] +type: integer +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, []interface{}{"foo", "bar"}, s.Enum) + }, + }, + { + name: "enum not overwritten by ref", + input: ` +$defs: + foo: + type: string + enum: [foo, bar] +type: integer +enum: [1,2] +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, []interface{}{1, 2}, s.Enum) + }, + }, + { + name: "const used from ref", + input: ` +$defs: + foo: + type: string + const: foo +type: integer +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, "foo", *s.Const) + }, + }, + { + name: "const not overwritten by ref", + input: ` +$defs: + foo: + type: string + const: foo + +type: integer +const: 2 +$ref: "#/$defs/foo" +`, + test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, 2, *s.Const) + }, + }, + { + name: "allOf not overwritten by ref", + input: ` +$defs: + foo: {} +type: integer +allOf: + - type: string +$ref: "#/$defs/foo" +`, + 
test: func(t *testing.T, s *schema.Schema, err error) { + require.NoError(t, err) + require.Equal(t, "integer", s.Type.String()) + require.Equal(t, jsonSchema.Types{"string"}, s.AllOf[0].Type) + }, + }, + } + + t.Parallel() + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var s *schema.Schema + err := yaml.Unmarshal([]byte(tc.input), &s) + if err != nil { + tc.test(t, s, err) + } else { + err = s.Parse(&dynamic.Config{Data: s}, &dynamictest.Reader{}) + tc.test(t, s, err) + } + }) + } +} diff --git a/providers/openapi/schema/convert.go b/providers/openapi/schema/convert.go index 2c56d1b81..e98fa0684 100644 --- a/providers/openapi/schema/convert.go +++ b/providers/openapi/schema/convert.go @@ -126,7 +126,11 @@ func (c *JsonSchemaConverter) Convert(s *Schema) *schema.Schema { } if s.Nullable { - js.Type = append(js.Type, "null") + if len(js.Type) > 0 { + js.Type = append(js.Type, "null") + } else { + js.Type = schema.Types{"null", "boolean", "object", "array", "number", "string", "integer"} + } } js.Examples = s.Examples diff --git a/providers/openapi/schema/generator_test.go b/providers/openapi/schema/generator_test.go index d9fe936e0..f305ec2e3 100644 --- a/providers/openapi/schema/generator_test.go +++ b/providers/openapi/schema/generator_test.go @@ -525,7 +525,7 @@ func TestGeneratorArray(t *testing.T) { { name: "unique items with error", schema: schematest.New("array", schematest.WithMinItems(5), schematest.WithMaxItems(10), schematest.WithUniqueItems(true), - schematest.WithItems("integer", schematest.WithFormat("int32"), schematest.WithMinimum(0), schematest.WithMaximum(3)), + schematest.WithItems("integer", schematest.WithMinimum(0), schematest.WithMaximum(3)), ), test: func(t *testing.T, i interface{}, err error) { require.EqualError(t, err, "failed to generate valid array: reached attempt limit (10) caused by: cannot fill array with unique items") @@ -616,11 +616,6 @@ func TestGeneratorObject(t *testing.T) { schema: schematest.New("object", schematest.WithAdditionalProperties(schematest.New("string"))), }, - { - name: "no fields defined", - exp: map[string]interface{}{"bunch": int64(995706), "child": int64(-489581), "gang": -383134.1033810867, "growth": int64(-83276), "hall": 181060.30342605617, "shower": true, "uncle": int64(-142346), "woman": int64(-117432)}, - schema: schematest.New("object"), - }, { name: "with property _metadata", exp: map[string]interface{}{"_metadata": int64(-837149)}, diff --git a/providers/openapi/schema/marshal.go b/providers/openapi/schema/marshal.go index 453f5af42..a13892804 100644 --- a/providers/openapi/schema/marshal.go +++ b/providers/openapi/schema/marshal.go @@ -48,6 +48,10 @@ type encoder struct { } func (e *encoder) encode(s *Schema) ([]byte, error) { + if s == nil { + return []byte("null"), nil + } + var b bytes.Buffer if s.Boolean != nil { b.Write([]byte(fmt.Sprintf("%v", *s.Boolean))) diff --git a/providers/openapi/schema/marshal_schema_test.go b/providers/openapi/schema/marshal_schema_test.go index e5733f246..1fcc11082 100644 --- a/providers/openapi/schema/marshal_schema_test.go +++ b/providers/openapi/schema/marshal_schema_test.go @@ -2,11 +2,12 @@ package schema_test import ( "encoding/json" - "github.com/stretchr/testify/require" "mokapi/providers/openapi/schema" "mokapi/providers/openapi/schema/schematest" jsonSchema "mokapi/schema/json/schema" "testing" + + "github.com/stretchr/testify/require" ) func TestSchema_Marshal(t *testing.T) { @@ -15,6 +16,16 @@ func TestSchema_Marshal(t 
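// Effect of the convert.go nullable change above, in sketch form: an OpenAPI
// 3.0 schema with nullable=true but no declared type now converts to a JSON
// schema allowing every type plus null, instead of appending "null" to an
// empty type list:
//
//	{"type":"string","nullable":true} -> type: ["string","null"]
//	{"nullable":true}                 -> type: ["null","boolean","object","array","number","string","integer"]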
*testing.T) {
 		schema *schema.Schema
 		exp    string
 	}{
+		{
+			name:   "null",
+			schema: nil,
+			exp:    `null`,
+		},
+		{
+			name:   "property schema is null",
+			schema: schematest.New("object", schematest.WithProperty("foo", nil)),
+			exp:    `{"type":"object","properties":{"foo":null}}`,
+		},
 		{
 			name:   "$ref",
 			schema: &schema.Schema{Ref: "#/components/schemas/Foo"},
diff --git a/providers/openapi/schema/patch.go b/providers/openapi/schema/patch.go
index 594481b36..0f1c1c8bf 100644
--- a/providers/openapi/schema/patch.go
+++ b/providers/openapi/schema/patch.go
@@ -6,6 +6,9 @@ func (s *Schema) Patch(patch *Schema) {
 	if patch == nil {
 		return
 	}
+	if s == patch {
+		return
+	}
 	if patch.Id != "" {
 		s.Id = patch.Id
diff --git a/providers/openapi/schema/patch_test.go b/providers/openapi/schema/patch_test.go
index 92e97031f..4c9e1ef54 100644
--- a/providers/openapi/schema/patch_test.go
+++ b/providers/openapi/schema/patch_test.go
@@ -1,13 +1,22 @@ package schema_test
 import (
-	"github.com/stretchr/testify/require"
 	"mokapi/providers/openapi/schema"
 	"mokapi/providers/openapi/schema/schematest"
 	jsonSchema "mokapi/schema/json/schema"
 	"testing"
+
+	"github.com/stretchr/testify/require"
 )
+func TestSchema_PatchSameSchema(t *testing.T) {
+	// The same schema is used for a reference ($ref) and should not create
+	// duplicate entries when patching arrays like allOf.
+	s := schematest.NewAllOf(schematest.New("string"))
+	s.Patch(s)
+	require.Len(t, s.AllOf, 1)
+}
+
 func TestSchema_Patch(t *testing.T) {
 	testcases := []struct {
 		name string
diff --git a/providers/openapi/schema/ref.go b/providers/openapi/schema/ref.go
deleted file mode 100644
index 1f9006843..000000000
--- a/providers/openapi/schema/ref.go
+++ /dev/null
@@ -1,101 +0,0 @@
-package schema
-
-/*type Ref struct {
-	dynamic.Reference
-	Boolean *bool
-	Value   *Schema
-}
-
-func (r *Ref) Parse(config *dynamic.Config, reader dynamic.Reader) error {
-	if r == nil {
-		return nil
-	}
-	if len(r.Ref) > 0 {
-		err := dynamic.Resolve(r.Ref, &r.Value, config, reader)
-		if err != nil {
-			return fmt.Errorf("parse schema failed: %w", err)
-		}
-		return nil
-	}
-
-	if r.Value == nil {
-		return nil
-	}
-
-	return r.Value.Parse(config, reader)
-}
-
-func (r *Ref) UnmarshalYAML(node *yaml.Node) error {
-	var boolVal bool
-	if err := node.Decode(&boolVal); err == nil {
-		r.Boolean = &boolVal
-		return nil
-	}
-
-	return r.UnmarshalYaml(node, &r.Value)
-}
-
-func (r *Ref) UnmarshalJSON(b []byte) error {
-	var boolVal bool
-	if err := json.Unmarshal(b, &boolVal); err == nil {
-		r.Boolean = &boolVal
-		return nil
-	}
-
-	return r.UnmarshalJson(b, &r.Value)
-}
-
-func (r *Ref) HasProperties() bool {
-	return r.Value != nil && r.Value.HasProperties()
-}
-
-func (r *Ref) String() string {
-	if r.Value == nil && len(r.Ref) == 0 {
-		return fmt.Sprintf("no schema defined")
-	}
-	if r.Value == nil {
-		return fmt.Sprintf("unresolved schema %v", r.Ref)
-	}
-	return r.Value.String()
-}
-
-func (r *Ref) getXml() *Xml {
-	if r != nil && r.Value != nil && r.Value.SubSchema != nil {
-		return r.Value.Xml
-	}
-	return nil
-}
-
-func (r *Ref) getProperty(name string) *Ref {
-	if r == nil && r.Value == nil {
-		return nil
-	}
-	return r.Value.Properties.Get(name)
-}
-
-func (r *Ref) getPropertyXml(name string) *Xml {
-	prop := r.getProperty(name)
-	if prop == nil {
-		return nil
-	}
-	return prop.getXml()
-}
-
-func (r *Ref) IsXmlWrapped() bool {
-	return r.Value != nil && r.Value.Xml != nil && r.Value.Xml.Wrapped
-}
-
-func (r *Ref) IsFreeForm() bool {
-	if r == nil {
-		return true
-	}
-	if r.Boolean != nil { -
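// Why the s == patch guard above matters: when a schema ends up patched with
// itself (for example via a self-referencing $ref), list-valued keywords such
// as allOf would otherwise be appended onto themselves:
//
//	s := schematest.NewAllOf(schematest.New("string"))
//	s.Patch(s) // without the guard, len(s.AllOf) grew to 2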
return *r.Boolean - } - return r.Value.IsFreeForm() -} - -func (r *Ref) MarshalJSON() ([]byte, error) { - e := encoder{refs: map[string]bool{}} - return e.encode(r) -}*/ diff --git a/providers/openapi/schema/ref_test.go b/providers/openapi/schema/ref_test.go deleted file mode 100644 index 2cef1a80f..000000000 --- a/providers/openapi/schema/ref_test.go +++ /dev/null @@ -1,197 +0,0 @@ -package schema_test - -/*func TestRef_HasProperties(t *testing.T) { - r := &schema.Ref{} - require.False(t, r.HasProperties()) - - r.Value = &schema.Schema{} - require.False(t, r.HasProperties()) - - r.Value.Properties = &schema.Schemas{} - require.False(t, r.HasProperties()) - - r.Value.Properties.Set("foo", nil) - require.True(t, r.HasProperties()) -} - -func TestRef_String(t *testing.T) { - r := &schema.Ref{} - require.Equal(t, "no schema defined", r.String()) - - r = &schema.Ref{Reference: dynamic.Reference{Ref: "foo"}} - require.Equal(t, "unresolved schema foo", r.String()) - - r.Value = &schema.Schema{} - require.Equal(t, "", r.String()) - - r.Value.Type = jsonSchema.Types{"number"} - require.Equal(t, "schema type=number", r.String()) -} - -func TestRef_Parse(t *testing.T) { - testcases := []struct { - name string - test func(t *testing.T) - }{ - { - name: "Ref is nil", - test: func(t *testing.T) { - reader := dynamictest.ReaderFunc(func(_ *url.URL, _ any) (*dynamic.Config, error) { - return nil, nil - }) - var r *schema.Ref - err := r.Parse(&dynamic.Config{Info: dynamic.ConfigInfo{Url: &url.URL{}}, Data: r}, reader) - require.NoError(t, err) - }, - }, - { - name: "with reference", - test: func(t *testing.T) { - reader := dynamictest.ReaderFunc(func(u *url.URL, _ any) (*dynamic.Config, error) { - cfg := &dynamic.Config{Info: dynamic.ConfigInfo{Url: u}, Data: schematest.New("number")} - return cfg, nil - }) - r := &schema.Ref{Reference: dynamic.Reference{Ref: "foo.yml"}} - err := r.Parse(&dynamic.Config{Info: dynamic.ConfigInfo{Url: &url.URL{}}, Data: r}, reader) - require.NoError(t, err) - require.NotNil(t, r.Value) - require.Equal(t, "number", r.Value.Type.String()) - }, - }, - { - name: "with reference but error", - test: func(t *testing.T) { - reader := dynamictest.ReaderFunc(func(_ *url.URL, _ any) (*dynamic.Config, error) { - return nil, fmt.Errorf("TEST ERROR") - }) - r := &schema.Ref{Reference: dynamic.Reference{Ref: "foo.yml"}} - err := r.Parse(&dynamic.Config{Info: dynamic.ConfigInfo{Url: &url.URL{}}, Data: r}, reader) - require.EqualError(t, err, "parse schema failed: resolve reference 'foo.yml' failed: TEST ERROR") - }, - }, - { - name: "value is nil", - test: func(t *testing.T) { - reader := dynamictest.ReaderFunc(func(u *url.URL, _ any) (*dynamic.Config, error) { - return &dynamic.Config{Info: dynamic.ConfigInfo{Url: u}}, nil - }) - r := &schema.Ref{} - err := r.Parse(&dynamic.Config{Info: dynamic.ConfigInfo{Url: &url.URL{}}, Data: r}, reader) - require.NoError(t, err) - require.Nil(t, r.Value) - }, - }, - { - name: "with value", - test: func(t *testing.T) { - reader := dynamictest.ReaderFunc(func(u *url.URL, _ any) (*dynamic.Config, error) { - return &dynamic.Config{Info: dynamic.ConfigInfo{Url: u}}, nil - }) - r := &schema.Ref{Value: schematest.New("integer")} - err := r.Parse(&dynamic.Config{Info: dynamic.ConfigInfo{Url: &url.URL{}}, Data: r}, reader) - require.NoError(t, err) - require.NotNil(t, r.Value) - }, - }, - } - - t.Parallel() - for _, tc := range testcases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - tc.test(t) - }) - } -} - -func TestRef_UnmarshalJSON(t 
*testing.T) { - for _, testcase := range []struct { - name string - s string - fn func(t *testing.T, r *schema.Ref) - }{ - { - name: "ref", - s: `{ "$ref": "#/components/schema/Foo" }`, - fn: func(t *testing.T, r *schema.Ref) { - require.Equal(t, "#/components/schema/Foo", r.Ref) - }, - }, - } { - test := testcase - t.Run(test.name, func(t *testing.T) { - r := &schema.Ref{} - err := json.Unmarshal([]byte(test.s), r) - require.NoError(t, err) - test.fn(t, r) - }) - } -} - -func TestRef_UnmarshalYAML(t *testing.T) { - for _, testcase := range []struct { - name string - s string - fn func(t *testing.T, r *schema.Ref) - }{ - { - name: "ref", - s: "$ref: '#/components/schema/Foo'", - fn: func(t *testing.T, r *schema.Ref) { - require.Equal(t, "#/components/schema/Foo", r.Ref) - }, - }, - } { - test := testcase - t.Run(test.name, func(t *testing.T) { - r := &schema.Ref{} - err := yaml.Unmarshal([]byte(test.s), r) - require.NoError(t, err) - test.fn(t, r) - }) - } -} - -func TestRef_MarshalJSON(t *testing.T) { - testcases := []struct { - name string - s *schema.Ref - test func(t *testing.T, s string, err error) - }{ - { - name: "empty type", - s: &schema.Ref{}, - test: func(t *testing.T, s string, err error) { - require.NoError(t, err) - require.Equal(t, "{}", s) - }, - }, - { - name: "with type", - s: &schema.Ref{Value: schematest.New("string")}, - test: func(t *testing.T, s string, err error) { - require.NoError(t, err) - require.Equal(t, `{"type":"string"}`, s) - }, - }, - { - name: "with properties", - s: schematest.NewRef("object", schematest.WithProperty("foo", schematest.New("string"))), - test: func(t *testing.T, s string, err error) { - require.NoError(t, err) - require.Equal(t, `{"type":"object","properties":{"foo":{"type":"string"}}}`, s) - }, - }, - } - - t.Parallel() - for _, tc := range testcases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - b, err := json.Marshal(tc.s) - tc.test(t, string(b), err) - }) - } -}*/ diff --git a/providers/openapi/schema/schema.go b/providers/openapi/schema/schema.go index 5bb44d889..b5e771c58 100644 --- a/providers/openapi/schema/schema.go +++ b/providers/openapi/schema/schema.go @@ -3,10 +3,11 @@ package schema import ( "encoding/json" "fmt" - "gopkg.in/yaml.v3" "mokapi/config/dynamic" "mokapi/schema/json/schema" "strings" + + "gopkg.in/yaml.v3" ) type Schema struct { @@ -338,7 +339,14 @@ func (s *Schema) ConvertTo(i interface{}) (interface{}, error) { } func (s *Schema) UnmarshalJSON(b []byte) error { - _ = json.Unmarshal(b, &s.m) + m := map[string]json.RawMessage{} + _ = json.Unmarshal(b, &m) + if s.m == nil { + s.m = map[string]bool{} + } + for k := range m { + s.m[k] = true + } var boolVal bool if err := json.Unmarshal(b, &boolVal); err == nil { @@ -358,7 +366,14 @@ func (s *Schema) UnmarshalJSON(b []byte) error { } func (s *Schema) UnmarshalYAML(node *yaml.Node) error { - _ = node.Decode(&s.m) + m := map[string]yaml.Node{} + _ = node.Decode(&m) + if s.m == nil { + s.m = map[string]bool{} + } + for k := range m { + s.m[k] = true + } var boolVal bool if err := node.Decode(&boolVal); err == nil { diff --git a/providers/openapi/schema/unmarshal_json_test.go b/providers/openapi/schema/unmarshal_json_test.go index 110a91548..47a85ab8a 100644 --- a/providers/openapi/schema/unmarshal_json_test.go +++ b/providers/openapi/schema/unmarshal_json_test.go @@ -2,7 +2,6 @@ package schema_test import ( "fmt" - "github.com/stretchr/testify/require" "math" "mokapi/media" "mokapi/providers/openapi" @@ -10,6 +9,8 @@ import ( 
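// Sketch of what the UnmarshalJSON/UnmarshalYAML change above achieves
// (assumed behavior): the keys present in the document are recorded in s.m so
// that isSet, as used by apply(), can tell an omitted keyword apart from its
// zero value, and repeated unmarshals now merge into the tracking map instead
// of replacing it:
//
//	var s schema.Schema
//	_ = json.Unmarshal([]byte(`{"type":"integer"}`), &s)
//	// s.isSet("type") == true, s.isSet("enum") == false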
"mokapi/providers/openapi/schema/schematest" jsonSchema "mokapi/schema/json/schema" "testing" + + "github.com/stretchr/testify/require" ) func TestRef_Unmarshal_Json(t *testing.T) { @@ -478,6 +479,23 @@ func TestRef_Unmarshal_Json_String(t *testing.T) { require.Nil(t, i) }, }, + { + name: "only nullable set", + s: `{"foo": "bar"}`, + schema: schematest.NewTypes(nil, schematest.IsNullable(true)), + test: func(t *testing.T, i interface{}, err error) { + require.NoError(t, err) + require.Equal(t, map[string]interface{}{"foo": "bar"}, i) + }, + }, + { + name: "nullable string but got integer", + s: "123", + schema: schematest.New("string", schematest.IsNullable(true)), + test: func(t *testing.T, i interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/type: invalid type, expected [string, null] but got number") + }, + }, } t.Parallel() diff --git a/schema/json/generator/array.go b/schema/json/generator/array.go index dbca17b69..b9908a4da 100644 --- a/schema/json/generator/array.go +++ b/schema/json/generator/array.go @@ -186,7 +186,12 @@ func fakeArray(r *Request, fakeItem *faker) (interface{}, error) { return nil }) if err != nil { - return nil, fmt.Errorf("failed to generate valid array: %w", err) + if minItems < length { + length-- + i-- + } else { + return nil, fmt.Errorf("failed to generate valid array: %w", err) + } } } diff --git a/schema/json/generator/object.go b/schema/json/generator/object.go index e5dbf460c..273113697 100644 --- a/schema/json/generator/object.go +++ b/schema/json/generator/object.go @@ -296,9 +296,9 @@ func (r *resolver) fakeObject(req *Request) (*sortedmap.LinkedHashMap[string, *f // if additionalProperties=false no additional properties is allowed // if additionalProperties=true we don't add random properties, it is not expected by users - length := numProperties(1, 10, req.Schema) + length := numProperties(1, 10, s) for i := 0; i < length; i++ { - f, err := r.resolve(req.WithSchema(req.Schema.AdditionalProperties), true) + f, err := r.resolve(req.WithSchema(s.AdditionalProperties), true) if err != nil { return nil, err } @@ -312,7 +312,7 @@ func (r *resolver) fakeObject(req *Request) (*sortedmap.LinkedHashMap[string, *f for _, name := range s.Required { if _, ok := fakes.Get(name); !ok { - f, err := r.resolve(req.With(append(req.Path, name), nil, req.examples), false) + f, err := r.resolve(req.With(append(req.Path, name), s.AdditionalProperties, req.examples), false) if err != nil { return nil, err } diff --git a/schema/json/generator/object_test.go b/schema/json/generator/object_test.go index 39f3c1d57..d9a177aec 100644 --- a/schema/json/generator/object_test.go +++ b/schema/json/generator/object_test.go @@ -188,23 +188,7 @@ func TestObject(t *testing.T) { }, test: func(t *testing.T, v interface{}, err error) { require.NoError(t, err) - require.Equal(t, - map[string]interface{}{ - "brace": 55776.1449323867, - "collection": int64(237568), - "comb": false, - "company": "Redfin", - "luck": int64(-927794), - "person": map[string]interface{}{ - "email": "oliver.nelson@globalfacilitate.com", - "firstname": "Oliver", - "gender": "male", - "lastname": "Nelson", - }, - "problem": true, - "sunshine": true, - }, - v) + require.Len(t, v.(map[string]any), 8) }, }, { diff --git a/schema/json/parser/parser.go b/schema/json/parser/parser.go index 0ada2980d..453d3fa61 100644 --- a/schema/json/parser/parser.go +++ b/schema/json/parser/parser.go @@ -20,6 +20,10 @@ type Parser struct { SkipValidationFormatKeyword bool } +type Exportable interface { + 
diff --git a/schema/json/generator/object.go index e5dbf460c..273113697 100644 --- a/schema/json/generator/object.go +++ b/schema/json/generator/object.go @@ -296,9 +296,9 @@ func (r *resolver) fakeObject(req *Request) (*sortedmap.LinkedHashMap[string, *f // if additionalProperties=false no additional properties is allowed // if additionalProperties=true we don't add random properties, it is not expected by users - length := numProperties(1, 10, req.Schema) + length := numProperties(1, 10, s) for i := 0; i < length; i++ { - f, err := r.resolve(req.WithSchema(req.Schema.AdditionalProperties), true) + f, err := r.resolve(req.WithSchema(s.AdditionalProperties), true) if err != nil { return nil, err } @@ -312,7 +312,7 @@ func (r *resolver) fakeObject(req *Request) (*sortedmap.LinkedHashMap[string, *f for _, name := range s.Required { if _, ok := fakes.Get(name); !ok { - f, err := r.resolve(req.With(append(req.Path, name), nil, req.examples), false) + f, err := r.resolve(req.With(append(req.Path, name), s.AdditionalProperties, req.examples), false) if err != nil { return nil, err }
diff --git a/schema/json/generator/object_test.go index 39f3c1d57..d9a177aec 100644 --- a/schema/json/generator/object_test.go +++ b/schema/json/generator/object_test.go @@ -188,23 +188,7 @@ func TestObject(t *testing.T) { }, test: func(t *testing.T, v interface{}, err error) { require.NoError(t, err) - require.Equal(t, - map[string]interface{}{ - "brace": 55776.1449323867, - "collection": int64(237568), - "comb": false, - "company": "Redfin", - "luck": int64(-927794), - "person": map[string]interface{}{ - "email": "oliver.nelson@globalfacilitate.com", - "firstname": "Oliver", - "gender": "male", - "lastname": "Nelson", - }, - "problem": true, - "sunshine": true, - }, - v) + require.Len(t, v.(map[string]any), 8) }, }, {
diff --git a/schema/json/parser/parser.go index 0ada2980d..453d3fa61 100644 --- a/schema/json/parser/parser.go +++ b/schema/json/parser/parser.go @@ -20,6 +20,10 @@ type Parser struct { SkipValidationFormatKeyword bool } +type Exportable interface { + Export() any } + func (p *Parser) ParseWith(data interface{}, schema *schema.Schema) (interface{}, error) { v, err := p.parse(data, schema) if err != nil { @@ -42,6 +46,11 @@ func (p *Parser) parse(data interface{}, s *schema.Schema) (interface{}, error) if s == nil { return data, nil } + + if e, ok := data.(Exportable); ok { + data = e.Export() + } + if s.Boolean != nil { if *s.Boolean { return data, nil @@ -74,10 +83,17 @@ func (p *Parser) parse(data interface{}, s *schema.Schema) (interface{}, error) } for _, typeName := range s.Type { - v, err = p.parseType(data, s, typeName, evaluatedProperties, evaluatedItems) - if err != nil { + vt, errT := p.parseType(data, s, typeName, evaluatedProperties, evaluatedItems) + if errT != nil && err != nil && typeName == "null" { + // null error has a lower priority. continue } + if errT != nil { + err = errT + continue + } + v = vt + err = nil break } @@ -129,21 +145,21 @@ func (p *Parser) parseType(data interface{}, s *schema.Schema, typeName string, case []interface{}: if typeName != "array" { return nil, &ErrorDetail{ - Message: fmt.Sprintf("invalid type, expected %v but got %v", typeName, toType(data)), + Message: fmt.Sprintf("invalid type, expected %v but got %v", s.Type, toType(data)), Field: "type", } } case map[string]interface{}: if typeName != "object" { return nil, &ErrorDetail{ - Message: fmt.Sprintf("invalid type, expected %v but got %v", typeName, toType(data)), + Message: fmt.Sprintf("invalid type, expected %v but got %v", s.Type, toType(data)), Field: "type", } } case struct{}: if typeName != "object" { return nil, &ErrorDetail{ - Message: fmt.Sprintf("invalid type, expected %v but got %v", typeName, toType(data)), + Message: fmt.Sprintf("invalid type, expected %v but got %v", s.Type, toType(data)), Field: "type", } } @@ -163,6 +179,13 @@ func (p *Parser) parseType(data interface{}, s *schema.Schema, typeName string, data, err = p.ParseArray(data, s, evaluatedItems) case "object": data, err = p.parseObject(data, s, evaluatedProperties) + case "null": + if data != nil { + return nil, &ErrorDetail{ + Message: fmt.Sprintf("invalid type, expected %v but got %v", s.Type, toType(data)), + Field: "type", + } + } } if s.Const != nil {
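[editor's note: with the new "null" case above, a type list such as ["null", "string"] accepts nil, and when every type fails, the null branch's error is reported with lower priority than the other types' errors. A minimal sketch of the resulting behaviour, based on the tests below; the parser import path is taken from this diff, the schematest path is an assumption:

	package main

	import (
		"fmt"

		"mokapi/schema/json/parser"
		"mokapi/schema/json/schema/schematest" // path assumed, not shown in this diff
	)

	func main() {
		// type: ["null", "string"]: nil and strings validate, anything else fails
		p := &parser.Parser{Schema: schematest.NewTypes([]string{"null", "string"})}

		v, err := p.Parse(nil)
		fmt.Println(v, err) // <nil> <nil>, nil now matches the null type

		v, err = p.Parse("foo")
		fmt.Println(v, err) // foo <nil>

		_, err = p.Parse(123)
		fmt.Println(err) // invalid type, expected [null, string] but got integer
	}
]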
diff --git a/schema/json/parser/parser_test.go index 85530037c..764bc1d9f 100644 --- a/schema/json/parser/parser_test.go +++ b/schema/json/parser/parser_test.go @@ -143,3 +143,127 @@ func TestParser_NoType(t *testing.T) { }) } } + +func TestParser_Null(t *testing.T) { + testcases := []struct { + name string + data interface{} + schema *schema.Schema + test func(t *testing.T, v interface{}, err error) + }{ + { + name: "only null valid", + data: nil, + schema: schematest.New("null"), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Nil(t, v) + }, + }, + { + name: "only null not valid", + data: 123, + schema: schematest.New("null"), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/type: invalid type, expected null but got integer") + }, + }, + { + name: "null and string with valid value", + data: "foo", + schema: schematest.NewTypes([]string{"null", "string"}), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Equal(t, "foo", v) + }, + }, + { + name: "null and string with valid null", + data: nil, + schema: schematest.NewTypes([]string{"null", "string"}), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Nil(t, v) + }, + }, + { + name: "null and string not valid", + data: 123, + schema: schematest.NewTypes([]string{"null", "string"}), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/type: invalid type, expected [null, string] but got integer") + }, + }, + { + name: "null and string not valid but different type order", + data: 123, + schema: schematest.NewTypes([]string{"string", "null"}), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/type: invalid type, expected [string, null] but got integer") + }, + }, + { + name: "null and object with valid value", + data: map[string]interface{}{"foo": 123}, + schema: schematest.NewTypes([]string{"null", "object"}), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Equal(t, map[string]interface{}{"foo": 123}, v) + }, + }, + { + name: "null and object with valid null", + data: nil, + schema: schematest.NewTypes([]string{"null", "object"}), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Nil(t, v) + }, + }, + { + name: "null and object with invalid property", + data: map[string]interface{}{"foo": 123}, + schema: schematest.NewTypes([]string{"null", "object"}, schematest.WithProperty("foo", schematest.New("string"))), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/foo/type: invalid type, expected string but got integer") + }, + }, + { + name: "null and object with invalid property but different type order", + data: map[string]interface{}{"foo": 123}, + schema: schematest.NewTypes([]string{"object", "null"}, schematest.WithProperty("foo", schematest.New("string"))), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/foo/type: invalid type, expected string but got integer") + }, + }, + { + name: "nullable field", + data: map[string]interface{}{"foo": nil}, + schema: schematest.New("object", schematest.WithProperty("foo", schematest.NewTypes([]string{"null", "object"}))), + test: func(t *testing.T, v interface{}, err error) { + require.NoError(t, err) + require.Equal(t, map[string]interface{}{"foo": nil}, v) + }, + }, + { + name: "null but object", + data: map[string]interface{}{"foo": nil}, + schema: schematest.New("null"), + test: func(t *testing.T, v interface{}, err error) { + require.EqualError(t, err, "error count 1:\n\t- #/type: invalid type, expected null but got object") + }, + }, + } + + t.Parallel() + for _, tc := range testcases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + p := &parser.Parser{Schema: tc.schema} + v, err := p.Parse(tc.data) + tc.test(t, v, err) + }) + } +}
diff --git a/webui/package-lock.json index c590500cb..c49e2fa48 100644 --- a/webui/package-lock.json +++ b/webui/package-lock.json @@ -12,16 +12,16 @@ "@ssthouse/vue3-tree-chart": "^0.2.6", "@types/bootstrap": "^5.2.10", "@types/whatwg-mimetype": "^3.0.2", - "ace-builds": "^1.43.3", + "ace-builds": "^1.43.4", "bootstrap": "^5.3.8", "bootstrap-icons": "^1.13.1", - "dayjs": "^1.11.18", + "dayjs": "^1.11.19", "del-cli": "^7.0.0", "fuse.js": "^7.1.0", "http-status-codes": "^2.3.0",
"js-yaml": "^4.1.0", "ncp": "^2.0.0", - "vue": "^3.5.22", + "vue": "^3.5.23", "vue-router": "^4.6.3", "vue3-ace-editor": "^2.2.4", "vue3-highlightjs": "^1.0.5", @@ -31,20 +31,20 @@ }, "devDependencies": { "@playwright/test": "^1.56.1", - "@rushstack/eslint-patch": "^1.13.0", + "@rushstack/eslint-patch": "^1.14.1", "@types/js-yaml": "^4.0.9", - "@types/node": "^24.8.1", + "@types/node": "^24.10.0", "@vitejs/plugin-vue": "^6.0.1", "@vue/eslint-config-prettier": "^10.2.0", "@vue/eslint-config-typescript": "^14.6.0", "@vue/tsconfig": "^0.8.1", - "eslint": "^9.37.0", + "eslint": "^9.39.1", "eslint-plugin-vue": "^10.5.1", "npm-run-all": "^4.1.5", "prettier": "^3.6.2", "typescript": "~5.9.3", - "vite": "^7.1.11", - "vue-tsc": "^3.1.1", + "vite": "^7.2.2", + "vue-tsc": "^3.1.3", "xml2js": "^0.6.2" } }, @@ -67,21 +67,21 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz", - "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", + "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", "license": "MIT", "dependencies": { - "@babel/types": "^7.28.4" + "@babel/types": "^7.28.5" }, "bin": { "parser": "bin/babel-parser.js" @@ -91,13 +91,13 @@ } }, "node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", + "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", "license": "MIT", "dependencies": { "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" + "@babel/helper-validator-identifier": "^7.28.5" }, "engines": { "node": ">=6.9.0" @@ -558,12 +558,13 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", - "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", + "version": "0.21.1", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz", + "integrity": "sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==", "dev": true, + "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.6", + "@eslint/object-schema": "^2.1.7", "debug": "^4.3.1", "minimatch": "^3.1.2" }, @@ -572,22 +573,22 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.4.0", - "resolved": 
"https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.0.tgz", - "integrity": "sha512-WUFvV4WoIwW8Bv0KeKCIIEgdSiFOsulyN0xrMu+7z43q/hkOLXjvb5u7UC9jDxvRzcrbEmuZBX5yJZz1741jog==", + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz", + "integrity": "sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.16.0" + "@eslint/core": "^0.17.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/core": { - "version": "0.16.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.16.0.tgz", - "integrity": "sha512-nmC8/totwobIiFcGkDza3GIKfAw1+hLiYVrh3I1nIomQ8PEr5cxg34jnkmGawul/ep52wGRAcyeDCNtWKSOj4Q==", + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz", + "integrity": "sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -622,9 +623,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.37.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.37.0.tgz", - "integrity": "sha512-jaS+NJ+hximswBG6pjNX0uEJZkrT0zwpVi3BA3vX22aFGjJjmgSTSmPpZCRKmoBL5VY/M6p0xsSJx7rk7sy5gg==", + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz", + "integrity": "sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw==", "dev": true, "license": "MIT", "engines": { @@ -635,22 +636,23 @@ } }, "node_modules/@eslint/object-schema": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", - "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz", + "integrity": "sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, "node_modules/@eslint/plugin-kit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.0.tgz", - "integrity": "sha512-sB5uyeq+dwCWyPi31B2gQlVlo+j5brPlWx4yZBrEaRo/nhdDE8Xke1gsGgtiBdaBTxuTkceLVuVt/pclrasb0A==", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz", + "integrity": "sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.16.0", + "@eslint/core": "^0.17.0", "levn": "^0.4.1" }, "engines": { @@ -1086,9 +1088,9 @@ ] }, "node_modules/@rushstack/eslint-patch": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.13.0.tgz", - "integrity": "sha512-2ih5qGw5SZJ+2fLZxP6Lr6Na2NTIgPRL/7Kmyuw0uIyBQnuhQ8fi8fzUTd38eIQmqp+GYLC00cI6WgtqHxBwmw==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.14.1.tgz", + "integrity": "sha512-jGTk8UD/RdjsNZW8qq10r0RBvxL8OWtoT+kImlzPDFilmozzM+9QmIJsmze9UiSBrFU45ZxhTYBypn9q9z/VfQ==", "dev": true, "license": "MIT" }, @@ -1172,13 +1174,13 @@ "peer": true }, "node_modules/@types/node": { - "version": "24.8.1", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-24.8.1.tgz", - "integrity": "sha512-alv65KGRadQVfVcG69MuB4IzdYVpRwMG/mq8KWOaoOdyY617P5ivaDiMCGOFDWD2sAn5Q0mR3mRtUOgm99hL9Q==", + "version": "24.10.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.0.tgz", + "integrity": "sha512-qzQZRBqkFsYyaSWXuEHc2WR9c0a0CXwiE5FWUvn7ZM+vdy1uZLfCunD38UzhuB7YN/J11ndbDBcTmOdxJo9Q7A==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~7.14.0" + "undici-types": "~7.16.0" } }, "node_modules/@types/whatwg-mimetype": { @@ -1296,13 +1298,13 @@ } }, "node_modules/@vue/compiler-core": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.22.tgz", - "integrity": "sha512-jQ0pFPmZwTEiRNSb+i9Ow/I/cHv2tXYqsnHKKyCQ08irI2kdF5qmYedmF8si8mA7zepUFmJ2hqzS8CQmNOWOkQ==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.5.23.tgz", + "integrity": "sha512-nW7THWj5HOp085ROk65LwaoxuzDsjIxr485F4iu63BoxsXoSqKqmsUUoP4A7Gl67DgIgi0zJ8JFgHfvny/74MA==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.4", - "@vue/shared": "3.5.22", + "@babel/parser": "^7.28.5", + "@vue/shared": "3.5.23", "entities": "^4.5.0", "estree-walker": "^2.0.2", "source-map-js": "^1.2.1" @@ -1321,40 +1323,40 @@ } }, "node_modules/@vue/compiler-dom": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.22.tgz", - "integrity": "sha512-W8RknzUM1BLkypvdz10OVsGxnMAuSIZs9Wdx1vzA3mL5fNMN15rhrSCLiTm6blWeACwUwizzPVqGJgOGBEN/hA==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.5.23.tgz", + "integrity": "sha512-AT8RMw0vEzzzO0JU5gY0F6iCzaWUIh/aaRVordzMBKXRpoTllTT4kocHDssByPsvodNCfump/Lkdow2mT/O5KQ==", "license": "MIT", "dependencies": { - "@vue/compiler-core": "3.5.22", - "@vue/shared": "3.5.22" + "@vue/compiler-core": "3.5.23", + "@vue/shared": "3.5.23" } }, "node_modules/@vue/compiler-sfc": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.22.tgz", - "integrity": "sha512-tbTR1zKGce4Lj+JLzFXDq36K4vcSZbJ1RBu8FxcDv1IGRz//Dh2EBqksyGVypz3kXpshIfWKGOCcqpSbyGWRJQ==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.5.23.tgz", + "integrity": "sha512-3QTEUo4qg7FtQwaDJa8ou1CUikx5WTtZlY61rRRDu3lK2ZKrGoAGG8mvDgOpDsQ4A1bez9s+WtBB6DS2KuFCPw==", "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.4", - "@vue/compiler-core": "3.5.22", - "@vue/compiler-dom": "3.5.22", - "@vue/compiler-ssr": "3.5.22", - "@vue/shared": "3.5.22", + "@babel/parser": "^7.28.5", + "@vue/compiler-core": "3.5.23", + "@vue/compiler-dom": "3.5.23", + "@vue/compiler-ssr": "3.5.23", + "@vue/shared": "3.5.23", "estree-walker": "^2.0.2", - "magic-string": "^0.30.19", + "magic-string": "^0.30.21", "postcss": "^8.5.6", "source-map-js": "^1.2.1" } }, "node_modules/@vue/compiler-ssr": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.22.tgz", - "integrity": "sha512-GdgyLvg4R+7T8Nk2Mlighx7XGxq/fJf9jaVofc3IL0EPesTE86cP/8DD1lT3h1JeZr2ySBvyqKQJgbS54IX1Ww==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.5.23.tgz", + "integrity": "sha512-Hld2xphbMjXs9Q9WKxPf2EqmE+Rq/FEDnK/wUBtmYq74HCV4XDdSCheAaB823OQXIIFGq9ig/RbAZkF9s4U0Ow==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.22", - "@vue/shared": "3.5.22" + "@vue/compiler-dom": "3.5.23", + 
"@vue/shared": "3.5.23" } }, "node_modules/@vue/devtools-api": { @@ -1703,9 +1705,9 @@ } }, "node_modules/@vue/language-core": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.1.1.tgz", - "integrity": "sha512-qjMY3Q+hUCjdH+jLrQapqgpsJ0rd/2mAY02lZoHG3VFJZZZKLjAlV+Oo9QmWIT4jh8+Rx8RUGUi++d7T9Wb6Mw==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@vue/language-core/-/language-core-3.1.3.tgz", + "integrity": "sha512-KpR1F/eGAG9D1RZ0/T6zWJs6dh/pRLfY5WupecyYKJ1fjVmDMgTPw9wXmKv2rBjo4zCJiOSiyB8BDP1OUwpMEA==", "dev": true, "license": "MIT", "dependencies": { @@ -1740,53 +1742,53 @@ } }, "node_modules/@vue/reactivity": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.22.tgz", - "integrity": "sha512-f2Wux4v/Z2pqc9+4SmgZC1p73Z53fyD90NFWXiX9AKVnVBEvLFOWCEgJD3GdGnlxPZt01PSlfmLqbLYzY/Fw4A==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.5.23.tgz", + "integrity": "sha512-ji5w0qvrPyBmBx5Ldv4QGNsw0phgRreEvjt0iUf1lei2Sm8//9ZAi78uM2ZjsT5gk0YZilLuoRCIMvtuZlHMJw==", "license": "MIT", "dependencies": { - "@vue/shared": "3.5.22" + "@vue/shared": "3.5.23" } }, "node_modules/@vue/runtime-core": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.22.tgz", - "integrity": "sha512-EHo4W/eiYeAzRTN5PCextDUZ0dMs9I8mQ2Fy+OkzvRPUYQEyK9yAjbasrMCXbLNhF7P0OUyivLjIy0yc6VrLJQ==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.5.23.tgz", + "integrity": "sha512-LMB0S6/G7mFJcpQeQaZrbsthFbWrIX8FVTzu5x9U3Ec8YW5MY1CGAnBBHNj+TPOBu3pIbtPpjrXtcaN04X+aBw==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.22", - "@vue/shared": "3.5.22" + "@vue/reactivity": "3.5.23", + "@vue/shared": "3.5.23" } }, "node_modules/@vue/runtime-dom": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.22.tgz", - "integrity": "sha512-Av60jsryAkI023PlN7LsqrfPvwfxOd2yAwtReCjeuugTJTkgrksYJJstg1e12qle0NarkfhfFu1ox2D+cQotww==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.5.23.tgz", + "integrity": "sha512-r/PYc8W9THzEL0UExpTkV+d31zO+Jid/RMZIDG6aS/NekOEUHuCJkJgftySWZw7JTJO/+q9Kxkg8p+i7Q7Q+ew==", "license": "MIT", "dependencies": { - "@vue/reactivity": "3.5.22", - "@vue/runtime-core": "3.5.22", - "@vue/shared": "3.5.22", + "@vue/reactivity": "3.5.23", + "@vue/runtime-core": "3.5.23", + "@vue/shared": "3.5.23", "csstype": "^3.1.3" } }, "node_modules/@vue/server-renderer": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.22.tgz", - "integrity": "sha512-gXjo+ao0oHYTSswF+a3KRHZ1WszxIqO7u6XwNHqcqb9JfyIL/pbWrrh/xLv7jeDqla9u+LK7yfZKHih1e1RKAQ==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.5.23.tgz", + "integrity": "sha512-NiWZsNCsXA20/VufcrW5u+Trt/PyFlpMmxaB2KERYM8eZgUoKUjXxJQb9ypq+LZ0Sp3XHJGNBR8DkhRnkKAMUw==", "license": "MIT", "dependencies": { - "@vue/compiler-ssr": "3.5.22", - "@vue/shared": "3.5.22" + "@vue/compiler-ssr": "3.5.23", + "@vue/shared": "3.5.23" }, "peerDependencies": { - "vue": "3.5.22" + "vue": "3.5.23" } }, "node_modules/@vue/shared": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.22.tgz", - "integrity": "sha512-F4yc6palwq3TT0u+FYf0Ns4Tfl9GRFURDN2gWG7L1ecIaS/4fCIuFOjMTnCyjsu/OK6vaDKLCrGAa+KvvH+h4w==", 
+ "version": "3.5.23", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.5.23.tgz", + "integrity": "sha512-0YZ1DYuC5o/YJPf6pFdt2KYxVGDxkDbH/1NYJnVJWUkzr8ituBEmFVQRNX2gCaAsFEjEDnLkWpgqlZA7htgS/g==", "license": "MIT" }, "node_modules/@vue/tsconfig": { @@ -1809,9 +1811,9 @@ } }, "node_modules/ace-builds": { - "version": "1.43.3", - "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.43.3.tgz", - "integrity": "sha512-MCl9rALmXwIty/4Qboijo/yNysx1r6hBTzG+6n/TiOm5LFhZpEvEIcIITPFiEOEFDfgBOEmxu+a4f54LEFM6Sg==", + "version": "1.43.4", + "resolved": "https://registry.npmjs.org/ace-builds/-/ace-builds-1.43.4.tgz", + "integrity": "sha512-8hAxVfo2ImICd69BWlZwZlxe9rxDGDjuUhh+WeWgGDvfBCE+r3lkynkQvIovDz4jcMi8O7bsEaFygaDT+h9sBA==", "license": "BSD-3-Clause" }, "node_modules/acorn": { @@ -1853,9 +1855,9 @@ } }, "node_modules/alien-signals": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.0.0.tgz", - "integrity": "sha512-JHoRJf18Y6HN4/KZALr3iU+0vW9LKG+8FMThQlbn4+gv8utsLIkwpomjElGPccGeNwh0FI2HN6BLnyFLo6OyLQ==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/alien-signals/-/alien-signals-3.0.6.tgz", + "integrity": "sha512-gCs0YqC1mkYGC6IRXsSrA62ShOSv1FlVN5tRp/Cs2vRWLK/BAeluWIdfsl253pFQPznKEvRmHhfep7crWfyfWQ==", "dev": true, "license": "MIT" }, @@ -2465,9 +2467,9 @@ } }, "node_modules/dayjs": { - "version": "1.11.18", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.18.tgz", - "integrity": "sha512-zFBQ7WFRvVRhKcWoUh+ZA1g2HVgUbsZm9sbddh8EC5iv93sui8DVVz1Npvz+r6meo9VKfa8NyLWBsQK1VvIKPA==", + "version": "1.11.19", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", + "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", "license": "MIT" }, "node_modules/debug": { @@ -2757,25 +2759,24 @@ } }, "node_modules/eslint": { - "version": "9.37.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.37.0.tgz", - "integrity": "sha512-XyLmROnACWqSxiGYArdef1fItQd47weqB7iwtfr9JHwRrqIXZdcFMvvEcL9xHCmL0SNsOvF0c42lWyM1U5dgig==", + "version": "9.39.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz", + "integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.8.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.21.0", - "@eslint/config-helpers": "^0.4.0", - "@eslint/core": "^0.16.0", + "@eslint/config-array": "^0.21.1", + "@eslint/config-helpers": "^0.4.2", + "@eslint/core": "^0.17.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.37.0", - "@eslint/plugin-kit": "^0.4.0", + "@eslint/js": "9.39.1", + "@eslint/plugin-kit": "^0.4.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", - "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", @@ -3902,9 +3903,9 @@ "dev": true }, "node_modules/magic-string": { - "version": "0.30.19", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.19.tgz", - "integrity": "sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==", + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", 
"license": "MIT", "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" @@ -5283,9 +5284,9 @@ } }, "node_modules/undici-types": { - "version": "7.14.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.14.0.tgz", - "integrity": "sha512-QQiYxHuyZ9gQUIrmPo3IA+hUl4KYk8uSA7cHrcKd/l3p1OTpZcM0Tbp9x7FAtXdAYhlasd60ncPpgu6ihG6TOA==", + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", "dev": true, "license": "MIT" }, @@ -5328,9 +5329,9 @@ } }, "node_modules/vite": { - "version": "7.1.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.11.tgz", - "integrity": "sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.2.2.tgz", + "integrity": "sha512-BxAKBWmIbrDgrokdGZH1IgkIk/5mMHDreLDmCJ0qpyJaAteP8NvMhkwr/ZCQNqNH97bw/dANTE9PDzqwJghfMQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5455,16 +5456,16 @@ "license": "MIT" }, "node_modules/vue": { - "version": "3.5.22", - "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.22.tgz", - "integrity": "sha512-toaZjQ3a/G/mYaLSbV+QsQhIdMo9x5rrqIpYRObsJ6T/J+RyCSFwN2LHNVH9v8uIcljDNa3QzPVdv3Y6b9hAJQ==", + "version": "3.5.23", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.5.23.tgz", + "integrity": "sha512-CfvZv/vI52xUhumUvHtD6iFIS78nGWfX4IJnHfBGhpqMI0CwDq2YEngXOeaBFMRmiArcqczuVrLxurvesTYT9w==", "license": "MIT", "dependencies": { - "@vue/compiler-dom": "3.5.22", - "@vue/compiler-sfc": "3.5.22", - "@vue/runtime-dom": "3.5.22", - "@vue/server-renderer": "3.5.22", - "@vue/shared": "3.5.22" + "@vue/compiler-dom": "3.5.23", + "@vue/compiler-sfc": "3.5.23", + "@vue/runtime-dom": "3.5.23", + "@vue/server-renderer": "3.5.23", + "@vue/shared": "3.5.23" }, "peerDependencies": { "typescript": "*" @@ -5527,14 +5528,14 @@ } }, "node_modules/vue-tsc": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.1.1.tgz", - "integrity": "sha512-fyixKxFniOVgn+L/4+g8zCG6dflLLt01Agz9jl3TO45Bgk87NZJRmJVPsiK+ouq3LB91jJCbOV+pDkzYTxbI7A==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/vue-tsc/-/vue-tsc-3.1.3.tgz", + "integrity": "sha512-StMNfZHwPIXQgY3KxPKM0Jsoc8b46mDV3Fn2UlHCBIwRJApjqrSwqeMYgWf0zpN+g857y74pv7GWuBm+UqQe1w==", "dev": true, "license": "MIT", "dependencies": { "@volar/typescript": "2.4.23", - "@vue/language-core": "3.1.1" + "@vue/language-core": "3.1.3" }, "bin": { "vue-tsc": "bin/vue-tsc.js" diff --git a/webui/package.json b/webui/package.json index 1ee4116d5..e1cb35963 100644 --- a/webui/package.json +++ b/webui/package.json @@ -20,16 +20,16 @@ "@ssthouse/vue3-tree-chart": "^0.2.6", "@types/bootstrap": "^5.2.10", "@types/whatwg-mimetype": "^3.0.2", - "ace-builds": "^1.43.3", + "ace-builds": "^1.43.4", "bootstrap": "^5.3.8", "bootstrap-icons": "^1.13.1", - "dayjs": "^1.11.18", + "dayjs": "^1.11.19", "del-cli": "^7.0.0", "fuse.js": "^7.1.0", "http-status-codes": "^2.3.0", "js-yaml": "^4.1.0", "ncp": "^2.0.0", - "vue": "^3.5.22", + "vue": "^3.5.23", "vue-router": "^4.6.3", "vue3-ace-editor": "^2.2.4", "vue3-highlightjs": "^1.0.5", @@ -39,20 +39,20 @@ }, "devDependencies": { "@playwright/test": "^1.56.1", - "@rushstack/eslint-patch": "^1.13.0", + "@rushstack/eslint-patch": "^1.14.1", "@types/js-yaml": "^4.0.9", - "@types/node": "^24.8.1", + "@types/node": "^24.10.0", "@vitejs/plugin-vue": 
"^6.0.1", "@vue/eslint-config-prettier": "^10.2.0", "@vue/eslint-config-typescript": "^14.6.0", "@vue/tsconfig": "^0.8.1", - "eslint": "^9.37.0", + "eslint": "^9.39.1", "eslint-plugin-vue": "^10.5.1", "npm-run-all": "^4.1.5", "prettier": "^3.6.2", "typescript": "~5.9.3", - "vite": "^7.1.11", - "vue-tsc": "^3.1.1", + "vite": "^7.2.2", + "vue-tsc": "^3.1.3", "xml2js": "^0.6.2" } } diff --git a/webui/src/assets/dashboard.css b/webui/src/assets/dashboard.css index f057990b7..b33567eca 100644 --- a/webui/src/assets/dashboard.css +++ b/webui/src/assets/dashboard.css @@ -14,6 +14,7 @@ margin-left: 0; font-weight: 400; font-size: 0.9rem; + box-shadow: 0 4px 8px rgba(255, 255, 255, 0.05); } [data-theme="light"] .dashboard .card-group .card { box-shadow: 0 4px 8px rgba(0, 0, 0, 0.08); @@ -32,6 +33,10 @@ margin-bottom: 0; color: var(--color-text); } +.dashboard .card .card-body h6.card-title { + font-size: 0.9rem; + font-weight: 400; +} .dashboard .card-text { font-size: 2.25rem; } @@ -126,4 +131,30 @@ border-bottom: 4px solid; margin-bottom: -3px; border-bottom-color: var(--dashboard-nav-border-active); +} + +.tooltip .tooltip-inner { + text-align: left; + max-width: 500px; + white-space: normal; +} + +[data-theme="light"] .tooltip .tooltip-inner { + background-color: #f8f9fa; + color: #212529; + border: 1px solid #ced4da; + box-shadow: 0 0.25rem 0.5rem rgba(0,0,0,0.1); +} + +.tooltip.bs-tooltip-top .tooltip-arrow::before { + border-top-color: #f8f9fa; +} +.tooltip.bs-tooltip-bottom .tooltip-arrow::before { + border-bottom-color: #f8f9fa; +} +.tooltip.bs-tooltip-start .tooltip-arrow::before { + border-left-color: #f8f9fa; +} +.tooltip.bs-tooltip-end .tooltip-arrow::before { + border-right-color: #f8f9fa; } \ No newline at end of file diff --git a/webui/src/assets/datatable.css b/webui/src/assets/datatable.css index f1b50ee39..79034e5f4 100644 --- a/webui/src/assets/datatable.css +++ b/webui/src/assets/datatable.css @@ -7,7 +7,7 @@ table.dataTable { table-layout: fixed; } table.dataTable th { - background-color: var(---datatable-background); + background-color: var(--datatable-background); color: var(--datatable-header-color); } table.dataTable td { @@ -17,8 +17,8 @@ table.dataTable td { table.dataTable th { padding: 3px 0 3px 12px; border-color: var(--datatable-border-color); - border-top-width: 0px; - border-bottom-width: 2px; + border-top-width: 0; + border-bottom-width: 3px; font-weight: 500; } table.dataTable td{ @@ -44,7 +44,13 @@ table.dataTable .description p { margin: 0 !important; line-height: 1.75 !important; } -table .badge { - vertical-align: middle; - margin-top: -0.5em; -} \ No newline at end of file + +.table-responsive { + table.dataTable { + table-layout: auto; + } + + table.dataTable th, .table.dataTable td { + white-space: nowrap; + } +} \ No newline at end of file diff --git a/webui/src/assets/modal.css b/webui/src/assets/modal.css index f47ff77e4..6f8066997 100644 --- a/webui/src/assets/modal.css +++ b/webui/src/assets/modal.css @@ -1,7 +1,7 @@ .modal-header { border-color: var(--modal-header-border); - padding-bottom: 15px; - padding-top: 15px; + padding-bottom: 10px; + padding-top: 10px; } .modal-content { color: var(--color-text); diff --git a/webui/src/assets/vars.css b/webui/src/assets/vars.css index 4020c0b0a..31f20360d 100644 --- a/webui/src/assets/vars.css +++ b/webui/src/assets/vars.css @@ -57,7 +57,7 @@ --form-color-border-active: #eabaabff; - --link-color: #eabaabff;; + --link-color: #eabaabff; --link-color-active: #eabaabff; --nav-color: #d3d4d5; @@ -91,6 +91,8 @@ 
--modal-header-border: rgba(255, 255, 255, 0.1); --modal-footer-border: rgba(255, 255, 255, 0.1); + --badge-background: #eabaabff; + --footer-background: #282b33; } @@ -162,5 +164,7 @@ --modal-header-border: rgba(0, 0, 0, 0.1); --modal-footer-border: rgba(0, 0, 0, 0.1); + --badge-background: rgb(8, 109, 215); + --footer-background: rgb(244 244 246); } \ No newline at end of file
diff --git a/webui/src/components/RegexInput.vue new file mode 100644 index 000000000..7c8e95f3b --- /dev/null +++ b/webui/src/components/RegexInput.vue @@ -0,0 +1,38 @@ [38 added lines of Vue single-file-component markup lost in extraction] \ No newline at end of file
diff --git a/webui/src/components/dashboard/Actions.vue index f21279c9e..d540cc660 100644 --- a/webui/src/components/dashboard/Actions.vue +++ b/webui/src/components/dashboard/Actions.vue @@ -90,9 +90,9 @@ function getStatus(action: Action) { - Error - Warning - Success + Error + Warning + Success {{ getName(action) }} {{ duration(action.duration) }}
diff --git a/webui/src/components/dashboard/Search.vue index ed6d46835..6c44095c6 100644 --- a/webui/src/components/dashboard/Search.vue +++ b/webui/src/components/dashboard/Search.vue @@ -288,19 +288,25 @@ function facetTitle(s: string) {
[Search.vue results-template hunk: the surrounding markup was lost in extraction; only the bindings {{ result.type }}, {{ result.domain }} and {{ format(result.time) }} survive. Per the hunk header, the flat result list is rebuilt as the card layout styled in the next hunk.]
@@ -357,42 +363,40 @@ function facetTitle(s: string) { .search-results { margin-top: 15px; } -.search-results > div { - border: none; - background-color: var(--color-background-soft); - padding-left: 0; - padding-right: 0; - padding-bottom: 30px; +.pagination .page-link { + color: var(--link-color) +} +.dashboard .search-results .card { + border: 1px solid var(--card-border); + border-radius: 0.75rem; + transition: box-shadow 0.2s, transform 0.1s; + /* background-color: var(--card-background); */ } -.search-results a:hover h3 { - background-color: transparent; + +.search-results .card:hover { cursor: pointer; - color: var(--color-text); - text-decoration: underline; -} -.search-results a:hover h3 { - color: var(--link-color); + transform: translateY(-2px); } -.search-results h3 { - padding-top: 5px; - margin-top: 3px; + +[data-theme="light"] .search-results .card:hover { + box-shadow: 0 4px 8px rgba(0,0,0,0.08); } -.search-results .config { - line-height: 1; + +.search-results .card-title { + font-size: 1.1rem; } -.page-item { - cursor: pointer; + +.search-results .badge { + font-size: 0.7rem; + background-color: var(--badge-background) !important; } \ No newline at end of file
diff --git a/webui/src/components/dashboard/SourceView.vue index 7b5148751..7e40ab006 100644 --- a/webui/src/components/dashboard/SourceView.vue +++ b/webui/src/components/dashboard/SourceView.vue @@ -121,7 +121,7 @@ function switchCode() { [one-line template change: markup lost in extraction] \ No newline at end of file
diff --git a/webui/src/components/dashboard/http/HttpOperation.vue index 74c677a11..054667284 100644 --- a/webui/src/components/dashboard/http/HttpOperation.vue +++ b/webui/src/components/dashboard/http/HttpOperation.vue @@ -71,7 +71,7 @@ const route = useRoute()
[one-line template change: markup lost in extraction]
diff --git a/webui/src/components/dashboard/http/HttpOperationsCard.vue index 821d24c16..8bc7b1aa5 100644 --- a/webui/src/components/dashboard/http/HttpOperationsCard.vue +++ b/webui/src/components/dashboard/http/HttpOperationsCard.vue @@ -8,6 +8,9 @@ const props = defineProps({ }) const operations = computed(() => { + if (!props.path.operations) { + return []; + } return props.path.operations.sort(comparePath) })
diff --git a/webui/src/components/dashboard/http/HttpPath.vue index 4f4156a0a..e3ef7067d 100644 --- a/webui/src/components/dashboard/http/HttpPath.vue +++ b/webui/src/components/dashboard/http/HttpPath.vue @@ -13,7 +13,7 @@ const props = defineProps({ }) const route = useRoute() function allOperationsDeprecated(): boolean{ - if (!props.path){ + if (!props.path || !props.path.operations){ return false } for (var op of props.path.operations){ @@ -77,7 +77,7 @@ onUnmounted(() => {
diff --git a/webui/src/components/dashboard/http/HttpOperationsCard.vue b/webui/src/components/dashboard/http/HttpOperationsCard.vue index 821d24c16..8bc7b1aa5 100644 --- a/webui/src/components/dashboard/http/HttpOperationsCard.vue +++ b/webui/src/components/dashboard/http/HttpOperationsCard.vue @@ -8,6 +8,9 @@ const props = defineProps({ }) const operations = computed(() => { + if (!props.path.operations) { + return []; + } return props.path.operations.sort(comparePath) }) diff --git a/webui/src/components/dashboard/http/HttpPath.vue b/webui/src/components/dashboard/http/HttpPath.vue index 4f4156a0a..e3ef7067d 100644 --- a/webui/src/components/dashboard/http/HttpPath.vue +++ b/webui/src/components/dashboard/http/HttpPath.vue @@ -13,7 +13,7 @@ const props = defineProps({ const route = useRoute() function allOperationsDeprecated(): boolean{ - if (!props.path){ + if (!props.path || !props.path.operations){ return false } for (var op of props.path.operations){ @@ -77,7 +77,7 @@ onUnmounted(() => {
- +
\ No newline at end of file diff --git a/webui/src/components/dashboard/http/HttpService.vue b/webui/src/components/dashboard/http/HttpService.vue index edc877eb4..67ad03c3c 100644 --- a/webui/src/components/dashboard/http/HttpService.vue +++ b/webui/src/components/dashboard/http/HttpService.vue @@ -94,7 +94,7 @@ function endpointNotFoundMessage(msg: string | undefined) {
- +
diff --git a/webui/src/components/dashboard/http/HttpServicesCard.vue b/webui/src/components/dashboard/http/HttpServicesCard.vue index cb8e15e9a..41f06fc67 100644 --- a/webui/src/components/dashboard/http/HttpServicesCard.vue +++ b/webui/src/components/dashboard/http/HttpServicesCard.vue @@ -44,7 +44,7 @@ onUnmounted(() => { + + \ No newline at end of file diff --git a/webui/src/components/dashboard/http/Requests.vue b/webui/src/components/dashboard/http/Requests.vue index 58b88bf4f..b2e3e3074 100644 --- a/webui/src/components/dashboard/http/Requests.vue +++ b/webui/src/components/dashboard/http/Requests.vue @@ -1,32 +1,100 @@ @@ -101,4 +799,7 @@ onUnmounted(() => { .warning:empty { padding: 0; } +.modal-dialog-scrollable .modal-body { + min-height: calc(100vh - 200px); /* header + footer spacing */ +} \ No newline at end of file diff --git a/webui/src/components/dashboard/kafka/KafkaTopic.vue b/webui/src/components/dashboard/kafka/KafkaTopic.vue index 3d3d1a733..70f3aa7f6 100644 --- a/webui/src/components/dashboard/kafka/KafkaTopic.vue +++ b/webui/src/components/dashboard/kafka/KafkaTopic.vue @@ -1,5 +1,5 @@