diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..39839e0 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @gofiber/maintainers \ No newline at end of file diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 0000000..8e266c3 --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,24 @@ +coverage: + status: + project: + default: + target: auto + threshold: 0.5% + base: auto + patch: + default: + target: auto + threshold: 0.5% + base: auto +ignore: + # Ignore generated root files + - "*_msgp.go" + - "*_msgp_test.go" + - "*_gen.go" + # Ignore generated files below root + - "**/*_msgp.go" + - "**/*_msgp_test.go" + - "**/*_gen.go" + # Ignore internal and docs + - "internal/**" + - "docs/**" diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..78a037b --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,16 @@ +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "gomod" + directory: "/" # Location of package manifests + labels: + - "๐Ÿค– Dependencies" + schedule: + interval: "daily" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: daily + labels: + - "๐Ÿค– Dependencies" \ No newline at end of file diff --git a/.github/release.yml b/.github/release.yml new file mode 100644 index 0000000..8f1601c --- /dev/null +++ b/.github/release.yml @@ -0,0 +1,26 @@ +# .github/release.yml + +changelog: + categories: + - title: 'โ— Breaking Changes' + labels: + - 'โ— BreakingChange' + - title: '๐Ÿš€ New Features' + labels: + - 'โœ๏ธ Feature' + - '๐Ÿ“ Proposal' + - title: '๐Ÿงน Updates' + labels: + - '๐Ÿงน Updates' + - title: '๐Ÿ› Bug Fixes' + labels: + - 'โ˜ข๏ธ Bug' + - title: '๐Ÿ› ๏ธ Maintenance' + labels: + - '๐Ÿค– Dependencies' + - title: '๐Ÿ“š Documentation' + labels: + - '๐Ÿ“’ Documentation' + - title: 'Other Changes' + labels: + - 
'*' diff --git a/.github/workflows/auto-labeler.yml b/.github/workflows/auto-labeler.yml new file mode 100644 index 0000000..346aed6 --- /dev/null +++ b/.github/workflows/auto-labeler.yml @@ -0,0 +1,17 @@ +name: auto-labeler + +on: + issues: + types: [opened, edited, milestoned] + pull_request_target: + types: [opened, edited, reopened, synchronize] + workflow_dispatch: + +jobs: + auto-labeler: + uses: gofiber/.github/.github/workflows/auto-labeler.yml@main + secrets: + github-token: ${{ secrets.ISSUE_PR_TOKEN }} + with: + config-path: .github/labeler.yml + config-repository: gofiber/.github diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 0000000..c40c4a5 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,111 @@ +on: + push: + branches: + - master + - main + paths-ignore: + - "**/*.md" + pull_request: + paths-ignore: + - "**/*.md" + +permissions: + # deployments permission to deploy GitHub pages website + deployments: write + # contents permission to update benchmark contents in gh-pages branch + contents: write + # allow posting comments to pull request + pull-requests: write + +name: Benchmark +jobs: + Compare: + runs-on: ubuntu-latest + steps: + - name: Fetch Repository + uses: actions/checkout@v6 + with: + fetch-depth: 0 # to be able to retrieve the last commit in main + + - name: Install Go + uses: actions/setup-go@v6 + with: + # NOTE: Keep this in sync with the version from go.mod + go-version: "1.25.x" + + - name: Run Benchmark + run: set -o pipefail; go test ./... -benchmem -run=^$ -bench . 
| tee output.txt + + # NOTE: Benchmarks could change with different CPU types + - name: Get GitHub Runner System Information + uses: kenchan0130/actions-system-info@v1.4.0 + id: system-info + + - name: Get Main branch SHA + id: get-main-branch-sha + run: | + SHA=$(git rev-parse origin/main) + echo "sha=$SHA" >> $GITHUB_OUTPUT + + - name: Get Benchmark Results from main branch + id: cache + uses: actions/cache/restore@v5 + with: + path: ./cache + key: ${{ steps.get-main-branch-sha.outputs.sha }}-${{ runner.os }}-${{ steps.system-info.outputs.cpu-model }}-benchmark + + # This will only run if we have Benchmark Results from main branch + - name: Compare PR Benchmark Results with main branch + uses: benchmark-action/github-action-benchmark@v1.20.7 + if: steps.cache.outputs.cache-hit == 'true' + with: + tool: 'go' + output-file-path: output.txt + external-data-json-path: ./cache/benchmark-data.json + # Do not save the data (This allows comparing benchmarks) + save-data-file: false + fail-on-alert: true + # Comment on the PR if the branch is not a fork + comment-on-alert: ${{ github.event.pull_request.head.repo.fork == false }} + github-token: ${{ secrets.GITHUB_TOKEN }} + summary-always: true + alert-threshold: "150%" + + - name: Store Benchmark Results for main branch + uses: benchmark-action/github-action-benchmark@v1.20.7 + if: ${{ github.ref_name == 'main' }} + with: + tool: 'go' + output-file-path: output.txt + external-data-json-path: ./cache/benchmark-data.json + # Save the data to external file (cache) + save-data-file: true + fail-on-alert: false + github-token: ${{ secrets.GITHUB_TOKEN }} + summary-always: true + alert-threshold: "150%" + + - name: Publish Benchmark Results to GitHub Pages + uses: benchmark-action/github-action-benchmark@v1.20.7 + if: ${{ github.ref_name == 'main' }} + with: + tool: 'go' + output-file-path: output.txt + benchmark-data-dir-path: "benchmarks" + fail-on-alert: false + github-token: ${{ secrets.GITHUB_TOKEN }} + comment-on-alert: 
true + summary-always: true + # Save the data to external file (GitHub Pages) + save-data-file: true + alert-threshold: "150%" + # TODO: reactivate it later -> when v3 is the stable one + #auto-push: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }} + auto-push: false + + - name: Update Benchmark Results cache + uses: actions/cache/save@v5 + if: ${{ github.ref_name == 'main' }} + with: + path: ./cache + key: ${{ steps.get-main-branch-sha.outputs.sha }}-${{ runner.os }}-${{ steps.system-info.outputs.cpu-model }}-benchmark \ No newline at end of file diff --git a/.github/workflows/issues.yml b/.github/workflows/issues.yml deleted file mode 100644 index 768b05b..0000000 --- a/.github/workflows/issues.yml +++ /dev/null @@ -1,21 +0,0 @@ -# Add all the issues created to the project. -name: Add issue or pull request to Project - -on: - issues: - types: - - opened - pull_request_target: - types: - - opened - - reopened - -jobs: - add-to-project: - runs-on: ubuntu-latest - steps: - - name: Add issue to project - uses: actions/add-to-project@v0.5.0 - with: - project-url: https://github.com/orgs/gorilla/projects/4 - github-token: ${{ secrets.ADD_TO_PROJECT_TOKEN }} diff --git a/.github/workflows/linter.yml b/.github/workflows/linter.yml new file mode 100644 index 0000000..8a2a6c6 --- /dev/null +++ b/.github/workflows/linter.yml @@ -0,0 +1,40 @@ +# Adapted from https://github.com/golangci/golangci-lint-action/blob/b56f6f529003f1c81d4d759be6bd5f10bf9a0fa0/README.md#how-to-use + +name: golangci-lint +on: + push: + branches: + - master + - main + paths-ignore: + - "**/*.md" + pull_request: + paths-ignore: + - "**/*.md" + +permissions: + # Required: allow read access to the content for analysis. + contents: read + # Optional: allow read access to pull request. Use with `only-new-issues` option. + pull-requests: read + # Optional: Allow write access to checks to allow the action to annotate code in the PR. 
+ checks: write + +jobs: + golangci: + name: lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v6 + + - uses: actions/setup-go@v6 + with: + # NOTE: Keep this in sync with the version from go.mod + go-version: "1.25.x" + cache: false + + - name: golangci-lint + uses: golangci/golangci-lint-action@v9 + with: + # NOTE: Keep this in sync with the version from .golangci.yml + version: v2.6.2 diff --git a/.github/workflows/security.yml b/.github/workflows/security.yml deleted file mode 100644 index ff4a613..0000000 --- a/.github/workflows/security.yml +++ /dev/null @@ -1,37 +0,0 @@ -name: Security -on: - push: - branches: - - main - pull_request: - branches: - - main -permissions: - contents: read -jobs: - scan: - strategy: - matrix: - go: ['1.20','1.21'] - fail-fast: true - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v3 - - - name: Setup Go ${{ matrix.go }} - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go }} - cache: false - - - name: Run GoSec - uses: securego/gosec@master - with: - args: -exclude-dir examples ./... - - - name: Run GoVulnCheck - uses: golang/govulncheck-action@v1 - with: - go-version-input: ${{ matrix.go }} - go-package: ./... 
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 50a3946..e1e61d4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,35 +1,57 @@ name: Test + on: push: branches: + - master - main + paths-ignore: + - "**/*.md" pull_request: - branches: - - main -permissions: - contents: read + paths-ignore: + - "**/*.md" + jobs: unit: strategy: matrix: - go: ['1.20','1.21'] - os: [ubuntu-latest, macos-latest, windows-latest] - fail-fast: true - runs-on: ${{ matrix.os }} + go-version: [1.25.x] + platform: [ubuntu-latest, windows-latest, macos-latest] + runs-on: ${{ matrix.platform }} steps: - - name: Checkout Code - uses: actions/checkout@v3 + - name: Fetch Repository + uses: actions/checkout@v6 + + - name: Install Go + uses: actions/setup-go@v6 + with: + go-version: ${{ matrix.go-version }} + + - name: Test + run: go run gotest.tools/gotestsum@latest -f testname -- ./... -race -count=1 -coverprofile=coverage.txt -covermode=atomic -shuffle=on - - name: Setup Go ${{ matrix.go }} - uses: actions/setup-go@v4 + - name: Upload coverage reports to Codecov + if: ${{ matrix.platform == 'ubuntu-latest' && matrix.go-version == '1.25.x' }} + uses: codecov/codecov-action@v5.5.2 with: - go-version: ${{ matrix.go }} - cache: false + token: ${{ secrets.CODECOV_TOKEN }} + flags: unittests + slug: gofiber/schema + verbose: true - - name: Run Tests - run: go test -race -cover -coverprofile=coverage -covermode=atomic -v ./... + repeated: + runs-on: ubuntu-latest + steps: + - name: Fetch Repository + uses: actions/checkout@v6 - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v3 + - name: Install Go + uses: actions/setup-go@v6 with: - files: ./coverage + go-version: stable + + - name: Install gotestsum + run: go install gotest.tools/gotestsum@v1.12.3 + + - name: Test + run: gotestsum -f testname -- ./... 
-race -count=15 -shuffle=on diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml deleted file mode 100644 index a3eb74b..0000000 --- a/.github/workflows/verify.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Verify -on: - push: - branches: - - main - pull_request: - branches: - - main -permissions: - contents: read -jobs: - lint: - strategy: - matrix: - go: ['1.20','1.21'] - fail-fast: true - runs-on: ubuntu-latest - steps: - - name: Checkout Code - uses: actions/checkout@v3 - - - name: Setup Go ${{ matrix.go }} - uses: actions/setup-go@v4 - with: - go-version: ${{ matrix.go }} - cache: false - - - name: Run GolangCI-Lint - uses: golangci/golangci-lint-action@v3 - with: - version: v1.53 - args: --timeout=5m diff --git a/.gitignore b/.gitignore index 84039fe..0cafd84 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,4 @@ coverage.coverprofile +vendor + +.idea diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000..374962b --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,9 @@ +# Created based on v2.6.2 +# NOTE: Keep this in sync with the version in .github/workflows/linter.yml +version: "2" + +run: + timeout: 5m + modules-download-mode: readonly + go: "1.24" + diff --git a/Makefile b/Makefile index 98f5ab7..3826015 100644 --- a/Makefile +++ b/Makefile @@ -1,34 +1,65 @@ -GO_LINT=$(shell which golangci-lint 2> /dev/null || echo '') -GO_LINT_URI=github.com/golangci/golangci-lint/cmd/golangci-lint@latest - -GO_SEC=$(shell which gosec 2> /dev/null || echo '') -GO_SEC_URI=github.com/securego/gosec/v2/cmd/gosec@latest - -GO_VULNCHECK=$(shell which govulncheck 2> /dev/null || echo '') -GO_VULNCHECK_URI=golang.org/x/vuln/cmd/govulncheck@latest - -.PHONY: golangci-lint -golangci-lint: - $(if $(GO_LINT), ,go install $(GO_LINT_URI)) - @echo "##### Running golangci-lint" - golangci-lint run -v - -.PHONY: gosec -gosec: - $(if $(GO_SEC), ,go install $(GO_SEC_URI)) - @echo "##### Running gosec" - gosec ./... 
- -.PHONY: govulncheck -govulncheck: - $(if $(GO_VULNCHECK), ,go install $(GO_VULNCHECK_URI)) - @echo "##### Running govulncheck" - govulncheck ./... - -.PHONY: verify -verify: golangci-lint gosec govulncheck +## help: ๐Ÿ’ก Display available commands +.PHONY: help +help: + @echo 'โšก๏ธ GoFiber/Fiber Development:' + @sed -n 's/^##//p' ${MAKEFILE_LIST} | column -t -s ':' | sed -e 's/^/ /' +## audit: ๐Ÿš€ Conduct quality checks +.PHONY: audit +audit: + go mod verify + go vet ./... + go run golang.org/x/vuln/cmd/govulncheck@latest ./... + +## benchmark: ๐Ÿ“ˆ Benchmark code performance +.PHONY: benchmark +benchmark: + go test ./... -benchmem -bench=. -count=4 -run=^Benchmark_$ + +## coverage: โ˜‚๏ธ Generate coverage report +.PHONY: coverage +coverage: + go run gotest.tools/gotestsum@latest -f testname -- ./... -race -count=1 -coverprofile=/tmp/coverage.out -covermode=atomic + go tool cover -html=/tmp/coverage.out + +## format: ๐ŸŽจ Fix code format issues +.PHONY: format +format: + go run mvdan.cc/gofumpt@latest -w -l . + +## markdown: ๐ŸŽจ Find markdown format issues (Requires markdownlint-cli2) +.PHONY: markdown +markdown: + markdownlint-cli2 "**/*.md" "#vendor" + +## lint: ๐Ÿšจ Run lint checks +.PHONY: lint +lint: + go run github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.6.2 run ./... + +## test: ๐Ÿšฆ Execute all tests .PHONY: test test: - @echo "##### Running tests" - go test -race -cover -coverprofile=coverage.coverprofile -covermode=atomic -v ./... \ No newline at end of file + go run gotest.tools/gotestsum@latest -f testname -- ./... -race -count=1 -shuffle=on + +## longtest: ๐Ÿšฆ Execute all tests 10x +.PHONY: longtest +longtest: + go run gotest.tools/gotestsum@latest -f testname -- ./... 
-race -count=15 -shuffle=on + +## tidy: ๐Ÿ“Œ Clean and tidy dependencies +.PHONY: tidy +tidy: + go mod tidy -v + +## betteralign: ๐Ÿ“ Optimize alignment of fields in structs +.PHONY: betteralign +betteralign: + go run github.com/dkorunic/betteralign/cmd/betteralign@latest -test_files -generated_files -apply ./... + +## generate: โšก๏ธ Generate msgp && interface implementations +.PHONY: generate +generate: + go install github.com/tinylib/msgp@latest + go install github.com/vburenin/ifacemaker@975a95966976eeb2d4365a7fb236e274c54da64c + go generate ./... diff --git a/README.md b/README.md index 58786ba..a40bc3e 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,6 @@ -# gorilla/schema +# gofiber/schema -![testing](https://github.com/gorilla/schema/actions/workflows/test.yml/badge.svg) -[![codecov](https://codecov.io/github/gorilla/schema/branch/main/graph/badge.svg)](https://codecov.io/github/gorilla/schema) -[![godoc](https://godoc.org/github.com/gorilla/schema?status.svg)](https://godoc.org/github.com/gorilla/schema) -[![sourcegraph](https://sourcegraph.com/github.com/gorilla/schema/-/badge.svg)](https://sourcegraph.com/github.com/gorilla/schema?badge) - - -![Gorilla Logo](https://github.com/gorilla/.github/assets/53367916/d92caabf-98e0-473e-bfbf-ab554ba435e5) - -Package gorilla/schema converts structs to and from form values. +Package gofiber/schema converts structs to and from form values. ## Example diff --git a/cache.go b/cache.go index 065b8d6..5b1e7d1 100644 --- a/cache.go +++ b/cache.go @@ -10,16 +10,23 @@ import ( "strconv" "strings" "sync" + + utils "github.com/gofiber/utils/v2" ) -var errInvalidPath = errors.New("schema: invalid path") +const maxParserIndex = 1000 + +var ( + errInvalidPath = errors.New("schema: invalid path") + errIndexTooLarge = errors.New("schema: index exceeds parser limit") +) // newCache returns a new cache. 
func newCache() *cache { c := cache{ m: make(map[reflect.Type]*structInfo), regconv: make(map[reflect.Type]Converter), - tag: "schema", + tag: utils.CopyString("schema"), } return &c } @@ -48,41 +55,58 @@ func (c *cache) parsePath(p string, t reflect.Type) ([]pathPart, error) { var field *fieldInfo var index64 int64 var err error - parts := make([]pathPart, 0) - path := make([]string, 0) - keys := strings.Split(p, ".") - for i := 0; i < len(keys); i++ { + var parts []pathPart + var path []string + for keyStart := 0; ; { if t.Kind() != reflect.Struct { return nil, errInvalidPath } if struc = c.get(t); struc == nil { return nil, errInvalidPath } - if field = struc.get(keys[i]); field == nil { + keyEnd := keyStart + for keyEnd < len(p) && p[keyEnd] != '.' { + keyEnd++ + } + if keyStart == keyEnd { + return nil, errInvalidPath + } + if field = struc.get(p[keyStart:keyEnd]); field == nil { return nil, errInvalidPath } // Valid field. Append index. path = append(path, field.name) - if field.isSliceOfStructs && (!field.unmarshalerInfo.IsValid || (field.unmarshalerInfo.IsValid && field.unmarshalerInfo.IsSliceElement)) { + if field.isSliceOfStructs && !isMultipartField(field.typ) && (!field.unmarshalerInfo.IsValid || (field.unmarshalerInfo.IsValid && field.unmarshalerInfo.IsSliceElement)) { // Parse a special case: slices of structs. // i+1 must be the slice index. // // Now that struct can implements TextUnmarshaler interface, // we don't need to force the struct's fields to appear in the path. // So checking i+2 is not necessary anymore. - i++ - if i+1 > len(keys) { + // We can skip this part if the type is multipart.FileHeader. It is another special case too. + keyStart = keyEnd + 1 + if keyStart >= len(p) { return nil, errInvalidPath } - if index64, err = strconv.ParseInt(keys[i], 10, 0); err != nil { + keyEnd = keyStart + for keyEnd < len(p) && p[keyEnd] != '.' 
{ + keyEnd++ + } + if keyStart == keyEnd { return nil, errInvalidPath } + if index64, err = strconv.ParseInt(p[keyStart:keyEnd], 10, 0); err != nil { + return nil, errInvalidPath + } + if index64 > maxParserIndex { + return nil, errIndexTooLarge + } parts = append(parts, pathPart{ path: path, field: field, index: int(index64), }) - path = make([]string, 0) + path = nil // Get the next struct type, dropping ptrs. if field.typ.Kind() == reflect.Ptr { @@ -101,6 +125,14 @@ func (c *cache) parsePath(p string, t reflect.Type) ([]pathPart, error) { } else { t = field.typ } + + if keyEnd == len(p) { + break + } + keyStart = keyEnd + 1 + if keyStart >= len(p) { + return nil, errInvalidPath + } } // Add the remaining. parts = append(parts, pathPart{ @@ -130,7 +162,11 @@ func (c *cache) create(t reflect.Type, parentAlias string) *structInfo { info := &structInfo{} var anonymousInfos []*structInfo for i := 0; i < t.NumField(); i++ { - if f := c.createField(t.Field(i), parentAlias); f != nil { + structField := t.Field(i) + if structField.Anonymous && structField.Type.Kind() == reflect.Ptr { + info.anonymousPtrFields = append(info.anonymousPtrFields, i) + } + if f := c.createField(structField, parentAlias); f != nil { info.fields = append(info.fields, f) if ft := indirectType(f.typ); ft.Kind() == reflect.Struct && f.isAnonymous { anonymousInfos = append(anonymousInfos, c.create(ft, f.canonicalAlias)) @@ -147,6 +183,12 @@ func (c *cache) create(t reflect.Type, parentAlias string) *structInfo { } } } + info.fieldsByName = make(map[string]*fieldInfo, len(info.fields)) + for _, field := range info.fields { + if _, exists := info.fieldsByName[field.alias]; !exists { + info.fieldsByName[field.alias] = field + } + } return info } @@ -209,10 +251,15 @@ func (c *cache) converter(t reflect.Type) Converter { // ---------------------------------------------------------------------------- type structInfo struct { - fields []*fieldInfo + fields []*fieldInfo + fieldsByName 
map[string]*fieldInfo + anonymousPtrFields []int } func (i *structInfo) get(alias string) *fieldInfo { + if field, ok := i.fieldsByName[alias]; ok { + return field + } for _, field := range i.fields { if strings.EqualFold(field.alias, alias) { return field @@ -308,10 +355,9 @@ func (o tagOptions) Contains(option string) bool { func (o tagOptions) getDefaultOptionValue() string { for _, s := range o { - if strings.HasPrefix(s, "default:") { - return strings.Split(s, ":")[1] + if value, ok := strings.CutPrefix(s, "default:"); ok { + return value } } - return "" } diff --git a/converter.go b/converter.go index 4bae6df..e9768eb 100644 --- a/converter.go +++ b/converter.go @@ -7,6 +7,8 @@ package schema import ( "reflect" "strconv" + + utils "github.com/gofiber/utils/v2" ) type Converter func(string) reflect.Value @@ -71,35 +73,35 @@ func convertFloat64(value string) reflect.Value { } func convertInt(value string) reflect.Value { - if v, err := strconv.ParseInt(value, 10, 0); err == nil { + if v, err := utils.ParseInt(value); err == nil { return reflect.ValueOf(int(v)) } return invalidValue } func convertInt8(value string) reflect.Value { - if v, err := strconv.ParseInt(value, 10, 8); err == nil { - return reflect.ValueOf(int8(v)) + if v, err := utils.ParseInt8(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertInt16(value string) reflect.Value { - if v, err := strconv.ParseInt(value, 10, 16); err == nil { - return reflect.ValueOf(int16(v)) + if v, err := utils.ParseInt16(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertInt32(value string) reflect.Value { - if v, err := strconv.ParseInt(value, 10, 32); err == nil { - return reflect.ValueOf(int32(v)) + if v, err := utils.ParseInt32(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertInt64(value string) reflect.Value { - if v, err := strconv.ParseInt(value, 10, 64); err == nil { + if v, err := utils.ParseInt(value); err 
== nil { return reflect.ValueOf(v) } return invalidValue @@ -110,35 +112,35 @@ func convertString(value string) reflect.Value { } func convertUint(value string) reflect.Value { - if v, err := strconv.ParseUint(value, 10, 0); err == nil { + if v, err := utils.ParseUint(value); err == nil { return reflect.ValueOf(uint(v)) } return invalidValue } func convertUint8(value string) reflect.Value { - if v, err := strconv.ParseUint(value, 10, 8); err == nil { - return reflect.ValueOf(uint8(v)) + if v, err := utils.ParseUint8(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertUint16(value string) reflect.Value { - if v, err := strconv.ParseUint(value, 10, 16); err == nil { - return reflect.ValueOf(uint16(v)) + if v, err := utils.ParseUint16(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertUint32(value string) reflect.Value { - if v, err := strconv.ParseUint(value, 10, 32); err == nil { - return reflect.ValueOf(uint32(v)) + if v, err := utils.ParseUint32(value); err == nil { + return reflect.ValueOf(v) } return invalidValue } func convertUint64(value string) reflect.Value { - if v, err := strconv.ParseUint(value, 10, 64); err == nil { + if v, err := utils.ParseUint(value); err == nil { return reflect.ValueOf(v) } return invalidValue diff --git a/converter_test.go b/converter_test.go new file mode 100644 index 0000000..8f54f6c --- /dev/null +++ b/converter_test.go @@ -0,0 +1,67 @@ +package schema + +import ( + "reflect" + "testing" +) + +func TestConverters(t *testing.T) { + tests := []struct { + name string + v reflect.Value + want interface{} + valid bool + }{ + {"boolTrue", convertBool("true"), true, true}, + {"boolOn", convertBool("on"), true, true}, + {"boolInvalid", convertBool("x"), nil, false}, + {"float32", convertFloat32("1.5"), float32(1.5), true}, + {"float32Invalid", convertFloat32("x"), nil, false}, + {"float64", convertFloat64("2.5"), 2.5, true}, + {"float64Invalid", convertFloat64("x"), nil, 
false}, + {"int", convertInt("10"), int(10), true}, + {"intInvalid", convertInt("x"), nil, false}, + {"uint", convertUint("5"), uint(5), true}, + {"uintInvalid", convertUint("-1"), nil, false}, + {"string", convertString("abc"), "abc", true}, + } + for _, tt := range tests { + if tt.valid { + if !tt.v.IsValid() { + t.Errorf("%s: expected valid value", tt.name) + continue + } + if got := tt.v.Interface(); got != tt.want { + t.Errorf("%s: expected %v, got %v", tt.name, tt.want, got) + } + } else if tt.v.IsValid() { + t.Errorf("%s: expected invalid value", tt.name) + } + } +} + +func TestBuiltinConverters(t *testing.T) { + kinds := []reflect.Kind{ + reflect.Bool, reflect.Float32, reflect.Float64, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.String, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + } + for _, k := range kinds { + if builtinConverters[k] == nil { + t.Errorf("missing converter for %v", k) + } + } +} + +func BenchmarkConvertBool(b *testing.B) { + for b.Loop() { + convertBool("true") + } +} + +func BenchmarkConvertInt(b *testing.B) { + for b.Loop() { + convertInt("42") + } +} diff --git a/decoder.go b/decoder.go index 54c88ec..177e85a 100644 --- a/decoder.go +++ b/decoder.go @@ -8,6 +8,7 @@ import ( "encoding" "errors" "fmt" + "mime/multipart" "reflect" "strings" ) @@ -79,27 +80,61 @@ func (d *Decoder) RegisterConverter(value interface{}, converterFunc Converter) // Keys are "paths" in dotted notation to the struct fields and nested structs. // // See the package documentation for a full explanation of the mechanics. 
-func (d *Decoder) Decode(dst interface{}, src map[string][]string) error { +func (d *Decoder) Decode(dst interface{}, src map[string][]string, files ...map[string][]*multipart.FileHeader) (err error) { + var multipartFiles map[string][]*multipart.FileHeader + + if len(files) > 0 { + multipartFiles = files[0] + } + + // Add files as empty string values to src in order to make path parsing work easily + for path := range multipartFiles { + src[path] = []string{""} + } + v := reflect.ValueOf(dst) if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct { return errors.New("schema: interface must be a pointer to struct") } + + // Catch panics from the decoder and return them as an error. + // This is needed because the decoder calls reflect and reflect panics + defer func() { + if r := recover(); r != nil { + if e, ok := r.(error); ok { + err = e + } else { + err = fmt.Errorf("schema: panic while decoding: %v", r) + } + } + }() + v = v.Elem() t := v.Type() - errors := MultiError{} + multiErrors := MultiError{} for path, values := range src { if parts, err := d.cache.parsePath(path, t); err == nil { - if err = d.decode(v, path, parts, values); err != nil { - errors[path] = err + if filesSlice, ok := multipartFiles[path]; ok { + if err = d.decode(v, path, parts, values, filesSlice); err != nil { + multiErrors[path] = err + } + } else { + if err = d.decode(v, path, parts, values, nil); err != nil { + multiErrors[path] = err + } + } + } else { + if errors.Is(err, errIndexTooLarge) { + multiErrors[path] = err + } else if !d.ignoreUnknownKeys { + multiErrors[path] = UnknownKeyError{Key: path} } - } else if !d.ignoreUnknownKeys { - errors[path] = UnknownKeyError{Key: path} } } - errors.merge(d.setDefaults(t, v)) - errors.merge(d.checkRequired(t, src)) - if len(errors) > 0 { - return errors + multiErrors.merge(d.setDefaults(t, v, src, "")) + multiErrors.merge(d.checkRequired(t, src)) + if len(multiErrors) > 0 { + return multiErrors } return nil } @@ -107,7 +142,7 @@ 
func (d *Decoder) Decode(dst interface{}, src map[string][]string) error { // setDefaults sets the default values when the `default` tag is specified, // default is supported on basic/primitive types and their pointers, // nested structs can also have default tags -func (d *Decoder) setDefaults(t reflect.Type, v reflect.Value) MultiError { +func (d *Decoder) setDefaults(t reflect.Type, v reflect.Value, src map[string][]string, prefix string) MultiError { struc := d.cache.get(t) if struc == nil { // unexpect, cache.get never return nil @@ -129,14 +164,14 @@ func (d *Decoder) setDefaults(t reflect.Type, v reflect.Value) MultiError { vCurrent := v.FieldByName(f.name) if vCurrent.Type().Kind() == reflect.Struct && f.defaultValue == "" { - errs.merge(d.setDefaults(vCurrent.Type(), vCurrent)) + errs.merge(d.setDefaults(vCurrent.Type(), vCurrent, src, prefix+f.canonicalAlias+".")) } else if isPointerToStruct(vCurrent) && f.defaultValue == "" { - errs.merge(d.setDefaults(vCurrent.Elem().Type(), vCurrent.Elem())) + errs.merge(d.setDefaults(vCurrent.Elem().Type(), vCurrent.Elem(), src, prefix+f.canonicalAlias+".")) } if f.defaultValue != "" && f.isRequired { errs.merge(MultiError{"default-" + f.name: errors.New("required fields cannot have a default value")}) - } else if f.defaultValue != "" && vCurrent.IsZero() && !f.isRequired { + } else if f.defaultValue != "" && vCurrent.IsZero() && !f.isRequired && !fieldProvided(src, prefix, f) { if f.typ.Kind() == reflect.Struct { errs.merge(MultiError{"default-" + f.name: errors.New("default option is supported only on: bool, float variants, string, unit variants types or their corresponding pointers or slices")}) } else if f.typ.Kind() == reflect.Slice { @@ -186,6 +221,15 @@ func isPointerToStruct(v reflect.Value) bool { return !v.IsZero() && v.Type().Kind() == reflect.Ptr && v.Elem().Type().Kind() == reflect.Struct } +func fieldProvided(src map[string][]string, prefix string, f *fieldInfo) bool { + for _, p := range f.paths(prefix) 
{ + if _, ok := src[p]; ok { + return true + } + } + return false +} + // checkRequired checks whether required fields are empty // // check type t recursively if t has struct fields. @@ -252,7 +296,17 @@ func isEmptyFields(fields []fieldWithPrefix, src map[string][]string) bool { return false } for key := range src { - if !isEmpty(f.typ, src[key]) && strings.HasPrefix(key, path) { + nested := strings.IndexByte(key, '.') != -1 + + // for non required nested structs + c1 := strings.HasSuffix(f.prefix, ".") && key == path + + // for required nested structs + c2 := f.prefix == "" && nested && strings.HasPrefix(key, path) + + // for non nested fields + c3 := f.prefix == "" && !nested && key == path + if !isEmpty(f.typ, src[key]) && (c1 || c2 || c3) { return false } } @@ -273,8 +327,80 @@ func isEmpty(t reflect.Type, value []string) bool { return false } +var ( + multipartFileHeaderPointerType = reflect.TypeOf(&multipart.FileHeader{}) + sliceMultipartFileHeaderPointerType = reflect.TypeOf([]*multipart.FileHeader{}) +) + +// Supported multiple types: +// *multipart.FileHeader, *[]multipart.FileHeader, []*multipart.FileHeader +func handleMultipartField(field reflect.Value, files []*multipart.FileHeader) bool { + fieldType := field.Type() + if !isMultipartField(fieldType) { + return false + } + + // Skip if files are empty and field is multipart + if len(files) == 0 { + return true + } + + // Check for *multipart.FileHeader + if fieldType == multipartFileHeaderPointerType { + field.Set(reflect.ValueOf(files[0])) + return true + } + + // Check for []*multipart.FileHeader + if fieldType == sliceMultipartFileHeaderPointerType { + field.Set(reflect.ValueOf(files)) + return true + } + + // Check for *[]*multipart.FileHeader + if fieldType.Kind() == reflect.Pointer { + fieldType = fieldType.Elem() + + if field.IsNil() { + field.Set(reflect.New(fieldType)) + } + + if fieldType == sliceMultipartFileHeaderPointerType { + field.Elem().Set(reflect.ValueOf(files)) + return true + } + 
} + + return false +} + +// Supported multiple types: +// *multipart.FileHeader, *[]multipart.FileHeader, []*multipart.FileHeader +func isMultipartField(typ reflect.Type) bool { + // Check for *multipart.FileHeader + if typ == multipartFileHeaderPointerType { + return true + } + + // Check for []*multipart.FileHeader + if typ == sliceMultipartFileHeaderPointerType { + return true + } + + // Check for *[]*multipart.FileHeader + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + + if typ == sliceMultipartFileHeaderPointerType { + return true + } + } + + return false +} + // decode fills a struct field using a parsed path. -func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values []string) error { +func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values []string, files []*multipart.FileHeader) error { // Get the field walking the struct fields by index. for _, name := range parts[0].path { if v.Type().Kind() == reflect.Ptr { @@ -284,23 +410,24 @@ func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values v = v.Elem() } - // alloc embedded structs + // Allocate embedded anonymous pointers required for promoted fields. if v.Type().Kind() == reflect.Struct { - for i := 0; i < v.NumField(); i++ { - field := v.Field(i) - if field.Type().Kind() == reflect.Ptr && field.IsNil() && v.Type().Field(i).Anonymous { - field.Set(reflect.New(field.Type().Elem())) - } - } + d.ensureAnonymousPtrs(v) } v = v.FieldByName(name) } + // Don't even bother for unexported fields. if !v.CanSet() { return nil } + // Check multipart files + if mp := handleMultipartField(v, files); mp { + return nil + } + // Dereference if needed. 
t := v.Type() if t.Kind() == reflect.Ptr { @@ -326,7 +453,7 @@ func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values } v.Set(value) } - return d.decode(v.Index(idx), path, parts[1:], values) + return d.decode(v.Index(idx), path, parts[1:], values, files) } // Get the converter early in case there is one for a slice type. @@ -359,9 +486,10 @@ func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values } else if m.IsValid { u := reflect.New(elemT) if m.IsSliceElementPtr { - u = reflect.New(reflect.PtrTo(elemT).Elem()) + u = reflect.New(reflect.PointerTo(elemT).Elem()) } - if err := u.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(value)); err != nil { + um, _ := reflect.TypeAssert[encoding.TextUnmarshaler](u) + if err := um.UnmarshalText([]byte(value)); err != nil { return ConversionError{ Key: path, Type: t, @@ -443,7 +571,8 @@ func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values } else if m.IsValid { if m.IsPtr { u := reflect.New(v.Type()) - if err := u.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(val)); err != nil { + um, _ := reflect.TypeAssert[encoding.TextUnmarshaler](u) + if err := um.UnmarshalText([]byte(val)); err != nil { return ConversionError{ Key: path, Type: t, @@ -485,16 +614,26 @@ func (d *Decoder) decode(v reflect.Value, path string, parts []pathPart, values return nil } +func (d *Decoder) ensureAnonymousPtrs(v reflect.Value) { + info := d.cache.get(v.Type()) + for _, idx := range info.anonymousPtrFields { + field := v.Field(idx) + if field.IsNil() { + field.Set(reflect.New(field.Type().Elem())) + } + } +} + func isTextUnmarshaler(v reflect.Value) unmarshaler { // Create a new unmarshaller instance m := unmarshaler{} - if m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler); m.IsValid { + if m.Unmarshaler, m.IsValid = reflect.TypeAssert[encoding.TextUnmarshaler](v); m.IsValid { return m } // As the UnmarshalText function should be 
applied to the pointer of the // type, we check that type to see if it implements the necessary // method. - if m.Unmarshaler, m.IsValid = reflect.New(v.Type()).Interface().(encoding.TextUnmarshaler); m.IsValid { + if m.Unmarshaler, m.IsValid = reflect.TypeAssert[encoding.TextUnmarshaler](reflect.New(v.Type())); m.IsValid { m.IsPtr = true return m } @@ -506,23 +645,23 @@ func isTextUnmarshaler(v reflect.Value) unmarshaler { } if t.Kind() == reflect.Slice { // Check if the slice implements encoding.TextUnmarshaller - if m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler); m.IsValid { + if m.Unmarshaler, m.IsValid = reflect.TypeAssert[encoding.TextUnmarshaler](v); m.IsValid { return m } // If t is a pointer slice, check if its elements implement // encoding.TextUnmarshaler m.IsSliceElement = true if t = t.Elem(); t.Kind() == reflect.Ptr { - t = reflect.PtrTo(t.Elem()) + t = reflect.PointerTo(t.Elem()) v = reflect.Zero(t) m.IsSliceElementPtr = true - m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler) + m.Unmarshaler, m.IsValid = reflect.TypeAssert[encoding.TextUnmarshaler](v) return m } } v = reflect.New(t) - m.Unmarshaler, m.IsValid = v.Interface().(encoding.TextUnmarshaler) + m.Unmarshaler, m.IsValid = reflect.TypeAssert[encoding.TextUnmarshaler](v) return m } diff --git a/decoder_test.go b/decoder_test.go index d01569e..57d3c62 100644 --- a/decoder_test.go +++ b/decoder_test.go @@ -8,6 +8,7 @@ import ( "encoding/hex" "errors" "fmt" + "mime/multipart" "reflect" "strings" "testing" @@ -56,6 +57,32 @@ type S1 struct { F21 []*rudeBool `schema:"f21"` } +type LargeStructForBenchmark struct { + F1 string `schema:"f1"` + F2 string `schema:"f2"` + F3 int `schema:"f3"` + F4 int `schema:"f4"` + F5 []string `schema:"f5"` + F6 []int `schema:"f6"` + F7 float64 `schema:"f7"` + F8 bool `schema:"f8"` + F9 struct { + N1 time.Time `schema:"n1"` + N2 string `schema:"n2"` + } `schema:"f9"` +} + +// A simple struct for demonstration benchmarks +type 
SimpleStructForBenchmark struct { + A string `schema:"a"` + B int `schema:"b"` + C bool `schema:"c"` + D float64 `schema:"d"` + E struct { + F float64 `schema:"f"` + } `schema:"e"` +} + type S2 struct { F01 *[]*int `schema:"f1"` } @@ -133,34 +160,34 @@ func TestAll(t *testing.T) { }, F09: 0, F10: []S1{ - S1{ + { F10: []S1{ - S1{F06: &[]*int{&f101, &f102}}, - S1{F06: &[]*int{&f103, &f104}}, + {F06: &[]*int{&f101, &f102}}, + {F06: &[]*int{&f103, &f104}}, }, }, }, F11: []*S1{ - &S1{ + { F11: []*S1{ - &S1{F06: &[]*int{&f111, &f112}}, - &S1{F06: &[]*int{&f113, &f114}}, + {F06: &[]*int{&f111, &f112}}, + {F06: &[]*int{&f113, &f114}}, }, }, }, F12: &[]S1{ - S1{ + { F12: &[]S1{ - S1{F06: &[]*int{&f121, &f122}}, - S1{F06: &[]*int{&f123, &f124}}, + {F06: &[]*int{&f121, &f122}}, + {F06: &[]*int{&f123, &f124}}, }, }, }, F13: &[]*S1{ - &S1{ + { F13: &[]*S1{ - &S1{F06: &[]*int{&f131, &f132}}, - &S1{F06: &[]*int{&f133, &f134}}, + {F06: &[]*int{&f131, &f132}}, + {F06: &[]*int{&f133, &f134}}, }, }, }, @@ -409,11 +436,9 @@ func BenchmarkAll(b *testing.B) { "f13.0.f13.1.f6": {"133", "134"}, } - b.ResetTimer() - - for i := 0; i < b.N; i++ { - s := &S1{} - _ = NewDecoder().Decode(s, v) + decoder := NewDecoder() + for b.Loop() { + _ = decoder.Decode(S1{}, v) } } @@ -598,8 +623,10 @@ func TestSimpleExample(t *testing.T) { S05: "S5", Str: "Str", }, - Bif: []Baz{{ - F99: []string{"A", "B", "C"}}, + Bif: []Baz{ + { + F99: []string{"A", "B", "C"}, + }, }, } @@ -939,34 +966,34 @@ func TestAllNT(t *testing.T) { }, F9: 0, F10: []S1{ - S1{ + { F10: []S1{ - S1{F06: &[]*int{&f101, &f102}}, - S1{F06: &[]*int{&f103, &f104}}, + {F06: &[]*int{&f101, &f102}}, + {F06: &[]*int{&f103, &f104}}, }, }, }, F11: []*S1{ - &S1{ + { F11: []*S1{ - &S1{F06: &[]*int{&f111, &f112}}, - &S1{F06: &[]*int{&f113, &f114}}, + {F06: &[]*int{&f111, &f112}}, + {F06: &[]*int{&f113, &f114}}, }, }, }, F12: &[]S1{ - S1{ + { F12: &[]S1{ - S1{F06: &[]*int{&f121, &f122}}, - S1{F06: &[]*int{&f123, &f124}}, + {F06: &[]*int{&f121, 
&f122}}, + {F06: &[]*int{&f123, &f124}}, }, }, }, F13: &[]*S1{ - &S1{ + { F13: &[]*S1{ - &S1{F06: &[]*int{&f131, &f132}}, - &S1{F06: &[]*int{&f133, &f134}}, + {F06: &[]*int{&f131, &f132}}, + {F06: &[]*int{&f133, &f134}}, }, }, }, @@ -1287,7 +1314,7 @@ func TestRegisterConverterSlice(t *testing.T) { expected := []string{"one", "two", "three"} err := decoder.Decode(&result, map[string][]string{ - "multiple": []string{"one,two,three"}, + "multiple": {"one,two,three"}, }) if err != nil { t.Fatalf("Failed to decode: %v", err) @@ -1319,7 +1346,7 @@ func TestRegisterConverterMap(t *testing.T) { }{} err := decoder.Decode(&result, map[string][]string{ - "multiple": []string{"a:one,b:two"}, + "multiple": {"a:one,b:two"}, }) if err != nil { t.Fatal(err) @@ -1366,9 +1393,9 @@ type S16 struct { func TestCustomTypeSlice(t *testing.T) { data := map[string][]string{ - "Value.0": []string{"Louisa May Alcott"}, - "Value.1": []string{"Florence Nightingale"}, - "Value.2": []string{"Clara Barton"}, + "Value.0": {"Louisa May Alcott"}, + "Value.1": {"Florence Nightingale"}, + "Value.2": {"Clara Barton"}, } s := S13{} @@ -1394,9 +1421,9 @@ func TestCustomTypeSlice(t *testing.T) { func TestCustomTypeSliceWithError(t *testing.T) { data := map[string][]string{ - "Value.0": []string{"Louisa May Alcott"}, - "Value.1": []string{"Florence Nightingale"}, - "Value.2": []string{"Clara"}, + "Value.0": {"Louisa May Alcott"}, + "Value.1": {"Florence Nightingale"}, + "Value.2": {"Clara"}, } s := S13{} @@ -1409,9 +1436,9 @@ func TestCustomTypeSliceWithError(t *testing.T) { func TestNoTextUnmarshalerTypeSlice(t *testing.T) { data := map[string][]string{ - "Value.0": []string{"Louisa May Alcott"}, - "Value.1": []string{"Florence Nightingale"}, - "Value.2": []string{"Clara Barton"}, + "Value.0": {"Louisa May Alcott"}, + "Value.1": {"Florence Nightingale"}, + "Value.2": {"Clara Barton"}, } s := S15{} @@ -1434,7 +1461,7 @@ type S18 struct { func TestCustomType(t *testing.T) { data := map[string][]string{ - 
"Value": []string{"Louisa May Alcott"}, + "Value": {"Louisa May Alcott"}, } s := S17{} @@ -1451,7 +1478,7 @@ func TestCustomType(t *testing.T) { func TestCustomTypeWithError(t *testing.T) { data := map[string][]string{ - "Value": []string{"Louisa"}, + "Value": {"Louisa"}, } s := S17{} @@ -1464,7 +1491,7 @@ func TestCustomTypeWithError(t *testing.T) { func TestNoTextUnmarshalerType(t *testing.T) { data := map[string][]string{ - "Value": []string{"Louisa May Alcott"}, + "Value": {"Louisa May Alcott"}, } s := S18{} @@ -1477,9 +1504,9 @@ func TestNoTextUnmarshalerType(t *testing.T) { func TestExpectedType(t *testing.T) { data := map[string][]string{ - "bools": []string{"1", "a"}, - "date": []string{"invalid"}, - "Foo.Bar": []string{"a", "b"}, + "bools": {"1", "a"}, + "date": {"invalid"}, + "Foo.Bar": {"a", "b"}, } type B struct { @@ -1524,11 +1551,11 @@ type R1 struct { func TestRequiredField(t *testing.T) { var a R1 v := map[string][]string{ - "a": []string{"bbb"}, - "b.c": []string{"88"}, - "b.d": []string{"9"}, - "f": []string{""}, - "h": []string{"true"}, + "a": {"bbb"}, + "b.c": {"88"}, + "b.d": {"9"}, + "f": {""}, + "h": {"true"}, } err := NewDecoder().Decode(&a, v) if err == nil { @@ -1595,7 +1622,7 @@ type R2 struct { func TestRequiredStructFiled(t *testing.T) { v := map[string][]string{ - "a.b": []string{"3"}, + "a.b": {"3"}, } var a R2 err := NewDecoder().Decode(&a, v) @@ -1604,6 +1631,23 @@ func TestRequiredStructFiled(t *testing.T) { } } +type Node struct { + Value int `schema:"val,required"` + Next *Node `schema:"next,required"` +} + +func TestRecursiveStruct(t *testing.T) { + v := map[string][]string{ + "val": {"1"}, + "next.val": {"2"}, + } + var a Node + err := NewDecoder().Decode(&a, v) + if err != nil { + t.Errorf("error: %v", err) + } +} + func TestRequiredFieldIsMissingCorrectError(t *testing.T) { type RM1S struct { A string `schema:"rm1aa,required"` @@ -1670,11 +1714,12 @@ func TestAnonymousStructField(t *testing.T) { if a.B != "abc" { t.Errorf("B: 
expected %v, got %v", "abc", a.B) } - if a.AS1.A != 1 { - t.Errorf("AS1.A: expected %v, got %v", 1, a.AS1.A) + embedded := a.AS1 + if embedded.A != 1 { + t.Errorf("AS1.A: expected %v, got %v", 1, embedded.A) } - if a.AS1.E != 2 { - t.Errorf("AS1.E: expected %v, got %v", 2, a.AS1.E) + if embedded.E != 2 { + t.Errorf("AS1.E: expected %v, got %v", 2, embedded.E) } } a := AS2{} @@ -1708,8 +1753,8 @@ func TestAnonymousStructField(t *testing.T) { if a.D != "abc" { t.Errorf("D: expected %v, got %v", "abc", a.D) } - if a.AS3.C != 1 { - t.Errorf("AS3.C: expected %v, got %v", 1, a.AS3.C) + if embedded := a.AS3; embedded.C != 1 { + t.Errorf("AS3.C: expected %v, got %v", 1, embedded.C) } } } @@ -1874,11 +1919,13 @@ func TestComprehensiveDecodingErrors(t *testing.T) { if key, expected := "Y.s.v", (UnknownKeyError{Key: "Y.s.v"}); e[key] != expected { t.Errorf("%s: expected %#v, got %#v", key, expected, e[key]) } - if expected := 123; dst.I2.J.P == nil || *dst.I2.J.P != expected { - t.Errorf("I2.J.P: expected %#v, got %#v", expected, dst.I2.J.P) + // J is promoted from the embedded I2 struct. + if expected := 123; dst.J.P == nil || *dst.J.P != expected { + t.Errorf("I2.J.P: expected %#v, got %#v", expected, dst.J.P) } - if expected := ""; dst.X.S1.P == nil || *dst.X.S1.P != expected { - t.Errorf("X.S1.P: expected %#v, got %#v", expected, dst.X.S1.P) + // P is promoted from the embedded S1 inside X. + if expected := ""; dst.X.P == nil || *dst.X.P != expected { + t.Errorf("X.S1.P: expected %#v, got %#v", expected, dst.X.P) } if expected := "abc"; dst.X.T.V != expected { t.Errorf("X.T.V: expected %#v, got %#v", expected, dst.X.T.V) @@ -1927,7 +1974,7 @@ func (s *S20) UnmarshalText(text []byte) error { // implementations by its elements. func TestTextUnmarshalerTypeSlice(t *testing.T) { data := map[string][]string{ - "Value": []string{"a,b,c"}, + "Value": {"a,b,c"}, } s := struct { Value S20 @@ -1963,7 +2010,7 @@ type S21B []S21E // requirements imposed on a slice of structs. 
func TestTextUnmarshalerTypeSliceOfStructs(t *testing.T) { data := map[string][]string{ - "Value": []string{"raw a"}, + "Value": {"raw a"}, } // Implements encoding.TextUnmarshaler, should not throw invalid path // error. @@ -2001,7 +2048,7 @@ func (s *S22) UnmarshalText(text []byte) error { // especially including simply setting the zero value. func TestTextUnmarshalerEmpty(t *testing.T) { data := map[string][]string{ - "Value": []string{""}, // empty value + "Value": {""}, // empty value } // Implements encoding.TextUnmarshaler, should use the type's // UnmarshalText method. @@ -2032,8 +2079,8 @@ type S23 []*S23e func TestUnmashalPointerToEmbedded(t *testing.T) { data := map[string][]string{ - "A.0.F2": []string{"raw a"}, - "A.0.F3": []string{"raw b"}, + "A.0.F2": {"raw a"}, + "A.0.F3": {"raw b"}, } // Implements encoding.TextUnmarshaler, should not throw invalid path @@ -2122,7 +2169,6 @@ func TestDoubleEmbedded(t *testing.T) { if !reflect.DeepEqual(expected, s) { t.Errorf("Expected %v errors, got %v", expected, s) } - } func TestDefaultValuesAreSet(t *testing.T) { @@ -2280,7 +2326,6 @@ func TestRequiredFieldsCannotHaveDefaults(t *testing.T) { if err == nil || !strings.Contains(err.Error(), expected) { t.Errorf("decoding should fail with error msg %s got %q", expected, err) } - } func TestInvalidDefaultElementInSliceRaiseError(t *testing.T) { @@ -2337,7 +2382,7 @@ func TestInvalidDefaultsValuesHaveNoEffect(t *testing.T) { type D struct { B bool `schema:"b,default:invalid"` C *float32 `schema:"c,default:notAFloat"` - //uint types + // uint types D uint `schema:"d,default:notUint"` E uint8 `schema:"e,default:notUint"` F uint16 `schema:"f,default:notUint"` @@ -2376,7 +2421,6 @@ func TestInvalidDefaultsValuesHaveNoEffect(t *testing.T) { decoder := NewDecoder() err := decoder.Decode(&d, data) - if err != nil { t.Errorf("decoding should succeed but got error: %q", err) } @@ -2413,6 +2457,23 @@ func TestDefaultsAreNotSupportedForStructsAndStructSlices(t *testing.T) { } 
} +func TestDefaultValueWithColon(t *testing.T) { + t.Parallel() + type D struct { + URL string `schema:"url,default:http://localhost:8080"` + } + + var d D + decoder := NewDecoder() + if err := decoder.Decode(&d, map[string][]string{}); err != nil { + t.Fatalf("unexpected error: %v", err) + } + + if d.URL != "http://localhost:8080" { + t.Errorf("expected default url to be http://localhost:8080, got %s", d.URL) + } +} + func TestDecoder_MaxSize(t *testing.T) { t.Parallel() @@ -2506,8 +2567,61 @@ func TestDecoder_MaxSize(t *testing.T) { } } -func TestDecoder_SetMaxSize(t *testing.T) { +func TestDefaultsAppliedOnlyWhenMissing(t *testing.T) { + t.Parallel() + type Data struct { + B bool `schema:"b,default:true"` + I int `schema:"i,default:5"` + F float64 `schema:"f,default:1.5"` + S []int `schema:"s,default:1|2"` + } + + dec := NewDecoder() + + // Values are explicitly set โ€“ no defaults should be applied + withVals := Data{} + if err := dec.Decode(&withVals, map[string][]string{ + "b": {"false"}, + "i": {"0"}, + "f": {"0"}, + "s": {}, + }); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if withVals.B { + t.Errorf("B should be false when the value is set") + } + if withVals.I != 0 { + t.Errorf("I should be 0 when the value is set, got %d", withVals.I) + } + if withVals.F != 0 { + t.Errorf("F should be 0 when the value is set, got %f", withVals.F) + } + if len(withVals.S) != 0 { + t.Errorf("S should be empty when the value is set, got %v", withVals.S) + } + + // No values provided โ€“ defaults should be applied + withoutVals := Data{} + if err := dec.Decode(&withoutVals, map[string][]string{}); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !withoutVals.B { + t.Errorf("B should default to true when missing") + } + if withoutVals.I != 5 { + t.Errorf("Default I should be 5, got %d", withoutVals.I) + } + if withoutVals.F != 1.5 { + t.Errorf("Default F should be 1.5, got %f", withoutVals.F) + } + if !reflect.DeepEqual(withoutVals.S, []int{1, 2}) 
{ + t.Errorf("Default S should be [1 2], got %v", withoutVals.S) + } +} + +func TestDecoder_SetMaxSize(t *testing.T) { t.Run("default maxsize should be equal to given constant", func(t *testing.T) { t.Parallel() dec := NewDecoder() @@ -2527,3 +2641,990 @@ func TestDecoder_SetMaxSize(t *testing.T) { } }) } + +func TestTimeDurationDecoding(t *testing.T) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout"` + } + + // Prepare the input data + input := map[string][]string{ + "timeout": {"2s"}, + } + + // Create a decoder with a converter for time.Duration + decoder := NewDecoder() + decoder.RegisterConverter(time.Duration(0), func(s string) reflect.Value { + d, err := time.ParseDuration(s) + if err != nil { + return reflect.Value{} + } + return reflect.ValueOf(d) + }) + + var result DurationStruct + err := decoder.Decode(&result, input) + if err != nil { + t.Fatalf("Failed to decode duration: %v", err) + } + + // Expect 2 seconds + if result.Timeout != 2*time.Second { + t.Errorf("Expected 2s, got %v", result.Timeout) + } +} + +func TestTimeDurationDecodingInvalid(t *testing.T) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout"` + } + + // Prepare the input data + input := map[string][]string{ + "timeout": {"invalid-duration"}, + } + + // Create a decoder with a converter for time.Duration + decoder := NewDecoder() + decoder.RegisterConverter(time.Duration(0), func(s string) reflect.Value { + // Attempt to parse the duration + d, err := time.ParseDuration(s) + if err != nil { + // Return an invalid reflect.Value to trigger a conversion error + return reflect.Value{} + } + return reflect.ValueOf(d) + }) + + var result DurationStruct + err := decoder.Decode(&result, input) + if err == nil { + t.Error("Expected an error decoding invalid duration, got nil") + } +} + +func TestMultipleConversionErrors(t *testing.T) { + type Fields struct { + IntField int `schema:"int_field"` + BoolField bool `schema:"bool_field"` + Duration 
time.Duration `schema:"duration_field"` + } + + input := map[string][]string{ + "int_field": {"invalid-int"}, + "bool_field": {"invalid-bool"}, + "duration_field": {"invalid-duration"}, + } + + decoder := NewDecoder() + decoder.RegisterConverter(time.Duration(0), func(s string) reflect.Value { + d, err := time.ParseDuration(s) + if err != nil { + return reflect.Value{} + } + return reflect.ValueOf(d) + }) + + var s Fields + err := decoder.Decode(&s, input) + if err == nil { + t.Fatal("Expected multiple conversion errors, got nil") + } + + // Check that all errors are reported (at least 3). + mErr, ok := err.(MultiError) + if !ok { + t.Fatalf("Expected MultiError, got %T", err) + } + if len(mErr) < 3 { + t.Errorf("Expected at least 3 errors, got %d: %v", len(mErr), mErr) + } +} + +func TestDecoderMultipartFiles(t *testing.T) { + type S struct { + A string `schema:"a,required"` + B int `schema:"b,required"` + C bool `schema:"c,required"` + D struct { + E float64 `schema:"e,required"` + F *multipart.FileHeader `schema:"f,required"` + F2 []*multipart.FileHeader `schema:"f2,required"` + F3 *[]*multipart.FileHeader `schema:"f3,required"` + F4 *multipart.FileHeader `schema:"f4,required"` + } `schema:"d,required"` + G *[]*multipart.FileHeader `schema:"g,required"` + J []struct { + K *[]*multipart.FileHeader `schema:"k,required"` + } `schema:"j,required"` + } + s := S{} + data := map[string][]string{ + "a": {"abc"}, + "b": {"123"}, + "c": {"true"}, + "d.e": {"3.14"}, + } + + // Create dummy file headers for testing + dummyFile := &multipart.FileHeader{ + Filename: "test.txt", + Size: 4, + } + + dummyFile2 := &multipart.FileHeader{ + Filename: "test2.txt", + Size: 4, + } + + dummyFile3 := &multipart.FileHeader{ + Filename: "test3.txt", + Size: 4, + } + + // Create slice for file headers + fileHeaders := map[string][]*multipart.FileHeader{ + "d.f": {dummyFile, dummyFile2}, + "d.f2": {dummyFile2, dummyFile3}, + "d.f3": {dummyFile, dummyFile2, dummyFile3}, + "d.f4": {}, + "g": 
{dummyFile, dummyFile2}, + "j.0.k": {dummyFile, dummyFile2}, + "j.1.k": {dummyFile2, dummyFile3}, + } + + decoder := NewDecoder() + err := decoder.Decode(&s, data, fileHeaders) + if err != nil { + t.Fatalf("Failed to decode: %v", err) + } + + if s.A != "abc" { + t.Errorf("Expected A to be 'abc', got %s", s.A) + } + + if s.B != 123 { + t.Errorf("Expected B to be 123, got %d", s.B) + } + + if s.C != true { + t.Errorf("Expected C to be true, got %t", s.C) + } + + if s.D.E != 3.14 { + t.Errorf("Expected D.E to be 3.14, got %f", s.D.E) + } + + if s.D.F == nil { + t.Error("Expected D.F to be a file header, got nil") + } + + if s.D.F2 == nil { + t.Error("Expected D.F2 to be a slice of file headers, got nil") + } + + if s.D.F3 == nil { + t.Error("Expected D.F3 to be a pointer to a slice of file headers, got nil") + } + + if s.D.F4 != nil { + fmt.Print(s.D.F4) + t.Error("Expected D.F4 to be nil, got a file header") + } + + if s.G == nil { + t.Error("Expected G to be a pointer to a slice of file headers, got nil") + } + + if len(s.D.F2) != 2 { + t.Errorf("Expected D.F2 to have 2 file headers, got %d", len(s.D.F2)) + } + + if len(*s.D.F3) != 3 { + t.Errorf("Expected D.F3 to have 3 file headers, got %d", len(*s.D.F3)) + } + + if len(*s.G) != 2 { + t.Errorf("Expected G to have 2 file headers, got %d", len(*s.G)) + } + + if s.D.F.Filename != "test.txt" { + t.Errorf("Expected D.F.Filename to be 'test.txt', got %s", s.D.F.Filename) + } + + if s.D.F2[0].Filename != "test2.txt" { + t.Errorf("Expected D.F2[0].Filename to be 'test2.txt', got %s", s.D.F2[0].Filename) + } + + if s.D.F2[1].Filename != "test3.txt" { + t.Errorf("Expected D.F2[1].Filename to be 'test3.txt', got %s", s.D.F2[1].Filename) + } + + if (*s.D.F3)[0].Filename != "test.txt" { + t.Errorf("Expected D.F3[0].Filename to be 'test.txt', got %s", (*s.D.F3)[0].Filename) + } + + if (*s.D.F3)[1].Filename != "test2.txt" { + t.Errorf("Expected D.F3[1].Filename to be 'test2.txt', got %s", (*s.D.F3)[1].Filename) + } + + if 
(*s.D.F3)[2].Filename != "test3.txt" { + t.Errorf("Expected D.F3[2].Filename to be 'test3.txt', got %s", (*s.D.F3)[2].Filename) + } + + if (*s.G)[0].Filename != "test.txt" { + t.Errorf("Expected G[0].Filename to be 'test.txt', got %s", (*s.G)[0].Filename) + } + + if (*s.G)[1].Filename != "test2.txt" { + t.Errorf("Expected G[1].Filename to be 'test2.txt', got %s", (*s.G)[1].Filename) + } + + if s.J[0].K == nil { + t.Error("Expected J[0].K to be a pointer to a slice of file headers, got nil") + } + + if s.J[1].K == nil { + t.Error("Expected J[1].K to be a pointer to a slice of file headers, got nil") + } + + if len(*s.J[0].K) != 2 { + t.Errorf("Expected J[0].K to have 2 file headers, got %d", len(*s.J[0].K)) + } + + if len(*s.J[1].K) != 2 { + t.Errorf("Expected J[1].K to have 2 file headers, got %d", len(*s.J[1].K)) + } + + if (*s.J[0].K)[0].Filename != "test.txt" { + t.Errorf("Expected J[0].K[0].Filename to be 'test.txt', got %s", (*s.J[0].K)[0].Filename) + } + + if (*s.J[0].K)[1].Filename != "test2.txt" { + t.Errorf("Expected J[0].K[1].Filename to be 'test2.txt', got %s", (*s.J[0].K)[1].Filename) + } + + if (*s.J[1].K)[0].Filename != "test2.txt" { + t.Errorf("Expected J[1].K[0].Filename to be 'test2.txt', got %s", (*s.J[1].K)[0].Filename) + } + + if (*s.J[1].K)[1].Filename != "test3.txt" { + t.Errorf("Expected J[1].K[1].Filename to be 'test3.txt', got %s", (*s.J[1].K)[1].Filename) + } +} + +func BenchmarkDecoderMultipartFiles(b *testing.B) { + type S struct { + A string `schema:"a,required"` + B int `schema:"b,required"` + C bool `schema:"c,required"` + D struct { + E float64 `schema:"e,required"` + F *multipart.FileHeader `schema:"f,required"` + F2 []*multipart.FileHeader `schema:"f2,required"` + } `schema:"d,required"` + G *[]*multipart.FileHeader `schema:"g,required"` + } + s := S{} + data := map[string][]string{ + "a": {"abc"}, + "b": {"123"}, + "c": {"true"}, + "d.e": {"3.14"}, + } + + // Create dummy file headers for testing + dummyFile := 
&multipart.FileHeader{ + Filename: "test.txt", + Size: 4, + } + + dummyFile2 := &multipart.FileHeader{ + Filename: "test2.txt", + Size: 4, + } + + dummyFile3 := &multipart.FileHeader{ + Filename: "test3.txt", + Size: 4, + } + + // Create slice for file headers + fileHeaders := map[string][]*multipart.FileHeader{ + "d.f": {dummyFile, dummyFile2}, + "d.f2": {dummyFile2, dummyFile3}, + "g": {dummyFile, dummyFile2}, + } + + decoder := NewDecoder() + var err error + for b.Loop() { + err = decoder.Decode(&s, data, fileHeaders) + } + + if err != nil { + b.Fatalf("Failed to decode: %v", err) + } +} + +func TestIsMultipartFile(t *testing.T) { + t.Parallel() + + tc := []struct { + typ reflect.Type + input map[string][]string + expected bool + }{ + { + typ: reflect.TypeOf(string("")), + expected: false, + }, + { + typ: reflect.TypeOf([]string{}), + expected: false, + }, + { + typ: reflect.TypeOf([]*multipart.FileHeader{}), + expected: true, + }, + { + typ: reflect.TypeOf(multipart.FileHeader{}), + expected: false, + }, + { + typ: reflect.TypeOf(&multipart.FileHeader{}), + expected: true, + }, + { + typ: reflect.TypeOf([]multipart.FileHeader{}), + expected: false, + }, + { + typ: reflect.TypeOf(&[]*multipart.FileHeader{}), + expected: true, + }, + } + + for _, tt := range tc { + if isMultipartField(tt.typ) != tt.expected { + t.Errorf("Expected %v, got %v", tt.expected, isMultipartField(tt.typ)) + } + } +} + +func BenchmarkIsMultipartFile(b *testing.B) { + cases := []struct { + typ reflect.Type + }{ + { + typ: reflect.TypeOf(string("")), + }, + { + typ: reflect.TypeOf([]string{}), + }, + { + typ: reflect.TypeOf([]*multipart.FileHeader{}), + }, + { + typ: reflect.TypeOf(multipart.FileHeader{}), + }, + { + typ: reflect.TypeOf(&multipart.FileHeader{}), + }, + { + typ: reflect.TypeOf([]multipart.FileHeader{}), + }, + { + typ: reflect.TypeOf(&[]*multipart.FileHeader{}), + }, + } + + for i, bc := range cases { + b.Run(fmt.Sprintf("IsMultipartFile-%d", i), func(b *testing.B) { + for 
b.Loop() { + isMultipartField(bc.typ) + } + }) + } +} + +func TestHandleMultipartField(t *testing.T) { + t.Parallel() + + // Create dummy file headers for testing + dummyFile := &multipart.FileHeader{ + Filename: "test.txt", + Size: 4, + } + + files := map[string][]*multipart.FileHeader{ + "f": {dummyFile}, + } + + type S struct { + F *multipart.FileHeader `schema:"f,required"` + F2 []*multipart.FileHeader `schema:"f2,required"` + F3 *[]*multipart.FileHeader `schema:"f3,required"` + F4 string `schema:"f4,required"` + } + + s := S{} + rv := reflect.ValueOf(&s).Elem() + + ok := handleMultipartField(rv.FieldByName("F"), files["f"]) + if !ok { + t.Error("Expected handleMultipartField to return true") + } + + ok = handleMultipartField(rv.FieldByName("F2"), files["f"]) + if !ok { + t.Error("Expected handleMultipartField to return true") + } + + ok = handleMultipartField(rv.FieldByName("F3"), files["f"]) + if !ok { + t.Error("Expected handleMultipartField to return true") + } + + ok = handleMultipartField(rv.FieldByName("F4"), files["f"]) + if ok { + t.Error("Expected handleMultipartField to return false") + } + + if s.F == nil { + t.Error("Expected F to be a file header, got nil") + } + + if s.F2 == nil { + t.Error("Expected F2 to be a slice of file headers, got nil") + } + + if s.F3 == nil { + t.Error("Expected F3 to be a pointer to a slice of file headers, got nil") + } + + if len(s.F2) != 1 { + t.Errorf("Expected F2 to have 1 file header, got %d", len(s.F2)) + } + + if len(*s.F3) != 1 { + t.Errorf("Expected F3 to have 1 file header, got %d", len(*s.F3)) + } + + if s.F.Filename != "test.txt" { + t.Errorf("Expected F.Filename to be 'test.txt', got %s", s.F.Filename) + } + + if s.F2[0].Filename != "test.txt" { + t.Errorf("Expected F2[0].Filename to be 'test.txt', got %s", s.F2[0].Filename) + } + + if (*s.F3)[0].Filename != "test.txt" { + t.Errorf("Expected F3[0].Filename to be 'test.txt', got %s", (*s.F3)[0].Filename) + } +} + +func 
TestDecodePanicIsCaughtAndReturnedAsError(t *testing.T) { + type R struct { + N1 []*struct { + Value string + } + } + // Simulate a path that uses an invalid (e.g. negative) slice index, + // which can trigger a panic (e.g. reflect: slice index out of range). + data := map[string][]string{ + "n1.-1.value": {"Foo"}, + } + + s := new(R) + decoder := NewDecoder() + err := decoder.Decode(s, data) + if err == nil { + t.Fatal("Expected an error when a panic occurs") + } + + expected := "schema: panic while decoding: reflect: slice index out of range" + if err.Error() != expected { + t.Fatalf("Expected panic error message %q, got: %v", expected, err) + } +} + +func TestDecodeIndexExceedsParserLimit(t *testing.T) { + type R struct { + N1 []*struct { + Value string + } + } + data := map[string][]string{ + "n1.1001.value": {"Foo"}, + } + + s := new(R) + decoder := NewDecoder() + err := decoder.Decode(s, data) + if err == nil { + t.Fatal("Expected an error when index exceeds parser limit") + } + + expected := MultiError{"n1.1001.value": errIndexTooLarge} + if !reflect.DeepEqual(err, expected) { + t.Fatalf("Expected %v, got: %v", expected, err) + } +} + +func BenchmarkHandleMultipartField(b *testing.B) { + // Create dummy file headers for testing + dummyFile := &multipart.FileHeader{ + Filename: "test.txt", + Size: 4, + } + + files := map[string][]*multipart.FileHeader{ + "f": {dummyFile}, + } + + type S struct { + F *multipart.FileHeader `schema:"f,required"` + F2 []*multipart.FileHeader `schema:"f2,required"` + F3 *[]*multipart.FileHeader `schema:"f3,required"` + F4 string `schema:"f4,required"` + } + + s := S{} + rv := reflect.ValueOf(&s).Elem() + + f := rv.FieldByName("F") + f2 := rv.FieldByName("F2") + f3 := rv.FieldByName("F3") + f4 := rv.FieldByName("F4") + + for b.Loop() { + handleMultipartField(f, files["f"]) + handleMultipartField(f2, files["f"]) + handleMultipartField(f3, files["f"]) + handleMultipartField(f4, files["f"]) + } +} + +func BenchmarkLargeStructDecode(b 
*testing.B) { + data := map[string][]string{ + "f1": {"Lorem"}, + "f2": {"Ipsum"}, + "f3": {"123"}, + "f4": {"456"}, + "f5": {"A", "B", "C", "D"}, + "f6": {"10", "20", "30", "40"}, + "f7": {"3.14159"}, + "f8": {"true"}, + "f9.n2": {"NestedStringValue"}, + } + + decoder := NewDecoder() + s := &LargeStructForBenchmark{} + for b.Loop() { + _ = decoder.Decode(s, data) + } +} + +func BenchmarkLargeStructDecodeParallel(b *testing.B) { + data := map[string][]string{ + "f1": {"Lorem"}, + "f2": {"Ipsum"}, + "f3": {"123"}, + "f4": {"456"}, + "f5": {"A", "B", "C", "D"}, + "f6": {"10", "20", "30", "40"}, + "f7": {"3.14159"}, + "f8": {"true"}, + "f9.n2": {"NestedStringValue"}, + } + + decoder := NewDecoder() + b.ResetTimer() + + b.RunParallel(func(pb *testing.PB) { + // Allocate the destination struct per goroutine: Decode writes its + // fields, so sharing one struct across goroutines is a data race. + s := &LargeStructForBenchmark{} + for pb.Next() { + _ = decoder.Decode(s, data) + } + }) +} + +func BenchmarkSimpleStructDecode(b *testing.B) { + type S struct { + A string `schema:"a"` + B int `schema:"b"` + C bool `schema:"c"` + D float64 `schema:"d"` + E struct { + F float64 `schema:"f"` + } `schema:"e"` + } + s := S{} + data := map[string][]string{ + "a": {"abc"}, + "b": {"123"}, + "c": {"true"}, + "d": {"3.14"}, + "e.f": {"3.14"}, + } + decoder := NewDecoder() + for b.Loop() { + _ = decoder.Decode(&s, data) + } +} + +func BenchmarkCheckRequiredFields(b *testing.B) { + type S struct { + A string `schema:"a,required"` + B int `schema:"b,required"` + C bool `schema:"c,required"` + D struct { + E float64 `schema:"e,required"` + } `schema:"d,required"` + } + s := S{} + data := map[string][]string{ + "a": {"abc"}, + "b": {"123"}, + "c": {"true"}, + "d.e": {"3.14"}, + } + decoder := NewDecoder() + v := reflect.ValueOf(s) + // v = v.Elem() + t := v.Type() + + for b.Loop() { + _ = decoder.checkRequired(t, data) + } +} + +func BenchmarkTimeDurationDecoding(b *testing.B) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout"` + } + + // Sample input for decoding + input := map[string][]string{ + 
"timeout": {"2s"}, + } + + decoder := NewDecoder() + decoder.RegisterConverter(time.Duration(0), func(s string) reflect.Value { + d, _ := time.ParseDuration(s) + return reflect.ValueOf(d) + }) + + var ds DurationStruct + for b.Loop() { + _ = decoder.Decode(&ds, input) + } +} + +func TestConversionErrorError(t *testing.T) { + t.Parallel() + e := ConversionError{Key: "f", Index: -1} + if got := e.Error(); got != "schema: error converting value for \"f\"" { + t.Errorf("unexpected message %q", got) + } + e = ConversionError{Key: "f", Index: 2, Err: errors.New("boom")} + msg := e.Error() + if !strings.Contains(msg, "index 2 of \"f\"") || !strings.Contains(msg, "boom") { + t.Errorf("unexpected message %q", msg) + } +} + +type sliceValue []byte + +func (sliceValue) UnmarshalText([]byte) error { return nil } + +type valueUM string + +func (valueUM) UnmarshalText([]byte) error { return nil } + +type ptrUM string + +func (*ptrUM) UnmarshalText([]byte) error { return nil } + +type elemUM struct{} + +func (*elemUM) UnmarshalText([]byte) error { return nil } + +func TestIsTextUnmarshaler(t *testing.T) { + t.Parallel() + cases := []struct { + name string + val interface{} + check func(t *testing.T, u unmarshaler) + }{ + {"value", valueUM(""), func(t *testing.T, u unmarshaler) { + if !u.IsValid || u.IsPtr { + t.Fatalf("wrong flags: %+v", u) + } + }}, + {"ptr", ptrUM(""), func(t *testing.T, u unmarshaler) { + if !u.IsValid || !u.IsPtr { + t.Fatalf("wrong flags: %+v", u) + } + }}, + {"sliceValue", sliceValue{}, func(t *testing.T, u unmarshaler) { + if !u.IsValid { + t.Fatalf("not valid") + } + }}, + {"sliceElemPtr", []*elemUM{}, func(t *testing.T, u unmarshaler) { + if !u.IsValid || !u.IsSliceElement || !u.IsSliceElementPtr { + t.Fatalf("wrong flags: %+v", u) + } + }}, + {"invalid", 42, func(t *testing.T, u unmarshaler) { + if u.IsValid { + t.Fatalf("expected invalid") + } + }}, + } + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + t.Parallel() + c.check(t, 
isTextUnmarshaler(reflect.ValueOf(c.val))) + }) + } +} + +func TestHandleMultipartFieldAdditional(t *testing.T) { + t.Parallel() + fh1 := &multipart.FileHeader{Filename: "f1"} + fh2 := &multipart.FileHeader{Filename: "f2"} + + var a *multipart.FileHeader + if !handleMultipartField(reflect.ValueOf(&a).Elem(), []*multipart.FileHeader{fh1}) || a != fh1 { + t.Errorf("single header not set") + } + + var b []*multipart.FileHeader + if !handleMultipartField(reflect.ValueOf(&b).Elem(), []*multipart.FileHeader{fh1, fh2}) || len(b) != 2 || b[1] != fh2 { + t.Errorf("slice headers not set") + } + + var c *[]*multipart.FileHeader + if !handleMultipartField(reflect.ValueOf(&c).Elem(), []*multipart.FileHeader{fh1}) || c == nil || len(*c) != 1 || (*c)[0] != fh1 { + t.Errorf("pointer slice not set") + } + + var d *multipart.FileHeader + if !handleMultipartField(reflect.ValueOf(&d).Elem(), nil) || d != nil { + t.Errorf("empty files not handled") + } + + x := 0 + if handleMultipartField(reflect.ValueOf(&x).Elem(), []*multipart.FileHeader{fh1}) { + t.Errorf("non multipart field handled") + } +} + +type unsupported struct { + C complex64 `schema:"c"` +} + +type textErr struct{} + +func (*textErr) UnmarshalText([]byte) error { return errors.New("bad") } + +type withSlice struct { + A []struct { + B int `schema:"b"` + } `schema:"a"` +} + +type withText struct { + T textErr `schema:"t"` +} + +type valueErrUM string + +func (valueErrUM) UnmarshalText([]byte) error { return errors.New("bad") } + +type sliceUM struct{} + +func (*sliceUM) UnmarshalText([]byte) error { return errors.New("bad") } + +type panicType int + +func TestDecodeErrors(t *testing.T) { + t.Parallel() + t.Run("invalid pointer", func(t *testing.T) { + t.Parallel() + var s unsupported + if err := NewDecoder().Decode(s, nil); err == nil { + t.Fatalf("expected error") + } + }) + + t.Run("panic converter", func(t *testing.T) { + t.Parallel() + dec := NewDecoder() + dec.RegisterConverter(panicType(0), func(string) reflect.Value 
{ panic("boom") }) + var target struct { + P panicType `schema:"p"` + } + if err := dec.Decode(&target, map[string][]string{"p": {"x"}}); err == nil { + t.Fatalf("expected panic error") + } + }) + + t.Run("panic error converter", func(t *testing.T) { + t.Parallel() + dec := NewDecoder() + dec.RegisterConverter(panicType(0), func(string) reflect.Value { panic(errors.New("x")) }) + var target struct { + P panicType `schema:"p"` + } + if err := dec.Decode(&target, map[string][]string{"p": {"x"}}); err == nil { + t.Fatalf("expected panic error") + } + }) + + t.Run("unsupported type", func(t *testing.T) { + t.Parallel() + var u unsupported + if err := NewDecoder().Decode(&u, map[string][]string{"c": {"1"}}); err == nil { + t.Fatalf("expected error") + } + }) + + t.Run("text unmarshaler error", func(t *testing.T) { + t.Parallel() + var w withText + err := NewDecoder().Decode(&w, map[string][]string{"t": {"x"}}) + if err == nil { + t.Fatalf("expected error") + } + if _, ok := err.(MultiError)["t"].(ConversionError); !ok { + t.Fatalf("wrong error type: %v", err) + } + }) + + t.Run("index larger", func(t *testing.T) { + t.Parallel() + dec := NewDecoder() + dec.MaxSize(0) + var s withSlice + err := dec.Decode(&s, map[string][]string{"a.1.b": {"5"}}) + if err == nil || !strings.Contains(err.(MultiError)["a.1.b"].Error(), "maxSize") { + t.Fatalf("unexpected error: %v", err) + } + }) + + t.Run("slice converter missing", func(t *testing.T) { + t.Parallel() + var s struct { + C []complex64 `schema:"c"` + } + if err := NewDecoder().Decode(&s, map[string][]string{"c": {"1"}}); err == nil { + t.Fatalf("expected error") + } + }) + + t.Run("slice textunmarshal error", func(t *testing.T) { + t.Parallel() + var s struct { + S []sliceUM `schema:"s"` + } + if err := NewDecoder().Decode(&s, map[string][]string{"s": {"a"}}); err == nil { + t.Fatalf("expected error") + } + }) + + t.Run("value unmarshal error", func(t *testing.T) { + t.Parallel() + var s struct { + V valueErrUM `schema:"v"` + 
} + if err := NewDecoder().Decode(&s, map[string][]string{"v": {"a"}}); err == nil { + t.Fatalf("expected error") + } + }) +} + +func TestDecodeMultipartFiles(t *testing.T) { + type payload struct { + Single *multipart.FileHeader `schema:"single"` + Multiple []*multipart.FileHeader `schema:"multi"` + PtrSlice *[]*multipart.FileHeader `schema:"ptr"` + } + + fh1 := &multipart.FileHeader{Filename: "a"} + fh2 := &multipart.FileHeader{Filename: "b"} + + src := map[string][]string{} + files := map[string][]*multipart.FileHeader{ + "single": {fh1}, + "multi": {fh1, fh2}, + "ptr": {fh2}, + } + + var p payload + if err := NewDecoder().Decode(&p, src, files); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if p.Single != fh1 { + t.Fatalf("single not set") + } + if len(p.Multiple) != 2 || p.Multiple[0] != fh1 || p.Multiple[1] != fh2 { + t.Fatalf("multi not set") + } + if p.PtrSlice == nil || len(*p.PtrSlice) != 1 || (*p.PtrSlice)[0] != fh2 { + t.Fatalf("ptr slice not set") + } +} + +func TestDecodeSliceTextUnmarshalerError(t *testing.T) { + type target struct { + B []rudeBool `schema:"b"` + } + + var s target + if err := NewDecoder().Decode(&s, map[string][]string{"b": {"maybe"}}); err == nil { + t.Fatalf("expected error") + } +} + +func TestDecodeCommaSeparatedZeroEmpty(t *testing.T) { + type target struct { + N []int `schema:"n"` + } + dec := NewDecoder() + dec.ZeroEmpty(true) + var s target + if err := dec.Decode(&s, map[string][]string{"n": {"1,,2"}}); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !reflect.DeepEqual(s.N, []int{1, 0, 2}) { + t.Fatalf("unexpected slice: %v", s.N) + } +} + +func TestDecodeCommaSeparatedPointerSlice(t *testing.T) { + type target struct { + N []*int `schema:"n"` + } + var s target + if err := NewDecoder().Decode(&s, map[string][]string{"n": {"1,2"}}); err != nil { + t.Fatalf("unexpected error: %v", err) + } + if len(s.N) != 2 || *s.N[0] != 1 || *s.N[1] != 2 { + t.Fatalf("unexpected values: %v %v", s.N[0], s.N[1]) + } 
+} + +func TestDecodeCommaSeparatedAliasSliceError(t *testing.T) { + type target struct { + A []IntAlias `schema:"a"` + } + + var s target + if err := NewDecoder().Decode(&s, map[string][]string{"a": {"1,a"}}); err == nil { + t.Fatalf("expected error") + } +} diff --git a/doc.go b/doc.go index aae9f33..50faaac 100644 --- a/doc.go +++ b/doc.go @@ -3,7 +3,7 @@ // license that can be found in the LICENSE file. /* -Package gorilla/schema fills a struct with form values. +Package gofiber/schema fills a struct with form values. The basic usage is really simple. Given this struct: @@ -60,14 +60,14 @@ certain fields, use a dash for the name and it will be ignored: The supported field types in the destination struct are: - * bool - * float variants (float32, float64) - * int variants (int, int8, int16, int32, int64) - * string - * uint variants (uint, uint8, uint16, uint32, uint64) - * struct - * a pointer to one of the above types - * a slice or a pointer to a slice of one of the above types + - bool + - float variants (float32, float64) + - int variants (int, int8, int16, int32, int64) + - string + - uint variants (uint, uint8, uint16, uint32, uint64) + - struct + - a pointer to one of the above types + - a slice or a pointer to a slice of one of the above types Non-supported types are simply ignored, however custom types can be registered to be converted. 
diff --git a/encoder.go b/encoder.go index 52f2c10..fc00972 100644 --- a/encoder.go +++ b/encoder.go @@ -61,8 +61,8 @@ func isZero(v reflect.Value) bool { IsZero() bool } if v.Type().Implements(reflect.TypeOf((*zero)(nil)).Elem()) { - iz := v.MethodByName("IsZero").Call([]reflect.Value{})[0] - return iz.Interface().(bool) + iz, _ := reflect.TypeAssert[bool](v.MethodByName("IsZero").Call([]reflect.Value{})[0]) + return iz } z := true for i := 0; i < v.NumField(); i++ { @@ -87,26 +87,28 @@ func (e *Encoder) encode(v reflect.Value, dst map[string][]string) error { errors := MultiError{} for i := 0; i < v.NumField(); i++ { + fieldValue := v.Field(i) + fieldType := fieldValue.Type() name, opts := fieldAlias(t.Field(i), e.cache.tag) if name == "-" { continue } // Encode struct pointer types if the field is a valid pointer and a struct. - if isValidStructPointer(v.Field(i)) && !e.hasCustomEncoder(v.Field(i).Type()) { - err := e.encode(v.Field(i).Elem(), dst) + if isValidStructPointer(fieldValue) && !e.hasCustomEncoder(fieldType) { + err := e.encode(fieldValue.Elem(), dst) if err != nil { - errors[v.Field(i).Elem().Type().String()] = err + errors[fieldValue.Elem().Type().String()] = err } continue } - encFunc := typeEncoder(v.Field(i).Type(), e.regenc) + encFunc := typeEncoder(fieldType, e.regenc) // Encode non-slice types and custom implementations immediately. 
if encFunc != nil { - value := encFunc(v.Field(i)) - if opts.Contains("omitempty") && isZero(v.Field(i)) { + value := encFunc(fieldValue) + if opts.Contains("omitempty") && isZero(fieldValue) { continue } @@ -114,31 +116,31 @@ func (e *Encoder) encode(v reflect.Value, dst map[string][]string) error { continue } - if v.Field(i).Type().Kind() == reflect.Struct { - err := e.encode(v.Field(i), dst) + if fieldType.Kind() == reflect.Struct { + err := e.encode(fieldValue, dst) if err != nil { - errors[v.Field(i).Type().String()] = err + errors[fieldType.String()] = err } continue } - if v.Field(i).Type().Kind() == reflect.Slice { - encFunc = typeEncoder(v.Field(i).Type().Elem(), e.regenc) + if fieldType.Kind() == reflect.Slice { + encFunc = typeEncoder(fieldType.Elem(), e.regenc) } if encFunc == nil { - errors[v.Field(i).Type().String()] = fmt.Errorf("schema: encoder not found for %v", v.Field(i)) + errors[fieldType.String()] = fmt.Errorf("schema: encoder not found for %v", fieldValue) continue } // Encode a slice. 
- if v.Field(i).Len() == 0 && opts.Contains("omitempty") { + if fieldValue.Len() == 0 && opts.Contains("omitempty") { continue } dst[name] = []string{} - for j := 0; j < v.Field(i).Len(); j++ { - dst[name] = append(dst[name], encFunc(v.Field(i).Index(j))) + for j := 0; j < fieldValue.Len(); j++ { + dst[name] = append(dst[name], encFunc(fieldValue.Index(j))) } } diff --git a/encoder_test.go b/encoder_test.go index 092f0de..d66da98 100644 --- a/encoder_test.go +++ b/encoder_test.go @@ -24,6 +24,16 @@ type inner struct { F12 int } +type SimpleStructForBenchmarkEncode struct { + A string `schema:"a"` + B int `schema:"b"` + C bool `schema:"c"` + D float64 `schema:"d"` + E struct { + F float64 `schema:"f"` + } `schema:"e"` +} + func TestFilled(t *testing.T) { f07 := "seven" var f08 int8 = 8 @@ -461,7 +471,8 @@ func TestRegisterEncoderStructIsZero(t *testing.T) { encoder := NewEncoder() encoder.RegisterEncoder(time.Time{}, func(value reflect.Value) string { - return value.Interface().(time.Time).Format(time.RFC3339Nano) + tv, _ := reflect.TypeAssert[time.Time](value) + return tv.Format(time.RFC3339Nano) }) err := encoder.Encode(ss[s], vals) @@ -511,7 +522,7 @@ func TestRegisterEncoderWithPtrType(t *testing.T) { return "" } - custom := value.Interface().(*CustomTime) + custom, _ := reflect.TypeAssert[*CustomTime](value) return custom.time.String() }) @@ -523,3 +534,261 @@ func TestRegisterEncoderWithPtrType(t *testing.T) { valExists(t, "DateStart", ss.DateStart.time.String(), vals) valExists(t, "DateEnd", "", vals) } + +func TestTimeDurationEncoding(t *testing.T) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout"` + } + + vals := map[string][]string{} + testData := DurationStruct{ + Timeout: 3 * time.Minute, + } + + enc := NewEncoder() + enc.RegisterEncoder(time.Duration(0), func(v reflect.Value) string { + d, _ := reflect.TypeAssert[time.Duration](v) + return d.String() // "3m0s" + }) + + err := enc.Encode(&testData, vals) + if err != nil { + 
t.Fatalf("Failed to encode time.Duration: %v", err) + } + + got, ok := vals["timeout"] + if !ok || len(got) < 1 { + t.Fatalf("Encoded map missing key 'timeout'") + } + if got[0] != (3 * time.Minute).String() { + t.Errorf("Expected %q, got %q", (3 * time.Minute).String(), got[0]) + } +} + +// Test for omitempty with zero time.Duration. +func TestTimeDurationOmitEmpty(t *testing.T) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout,omitempty"` + } + + vals := map[string][]string{} + testData := DurationStruct{ + Timeout: 0, + } + + enc := NewEncoder() + enc.RegisterEncoder(time.Duration(0), func(v reflect.Value) string { + d, _ := reflect.TypeAssert[time.Duration](v) + return d.String() + }) + + err := enc.Encode(&testData, vals) + if err != nil { + t.Fatalf("Failed to encode time.Duration: %v", err) + } + // Should be omitted since 0 for time.Duration is "zero" and tagged as omitempty + if _, found := vals["timeout"]; found { + t.Errorf("Expected 'timeout' to be omitted, but it was present: %v", vals["timeout"]) + } +} + +func TestEncoderZeroAndNonZeroFields(t *testing.T) { + type ZeroTestStruct struct { + A string `schema:"a,omitempty"` + B int `schema:"b,omitempty"` + C float64 `schema:"c,omitempty"` + D bool `schema:"d,omitempty"` + E *int `schema:"e,omitempty"` + F *string `schema:"f,omitempty"` + G string `schema:"g"` // no omitempty + } + + vals := map[string][]string{} + intVal := 42 + strVal := "Hello" + s := ZeroTestStruct{ + A: "", + B: 0, + C: 0.0, + D: false, + E: &intVal, + F: &strVal, + G: "MustEncode", + } + + enc := NewEncoder() + err := enc.Encode(&s, vals) + if err != nil { + t.Fatalf("Encoding error: %v", err) + } + + // Fields A, B, C, D are zero and should be omitted + if _, found := vals["a"]; found { + t.Errorf("Expected 'a' to be omitted for zero string") + } + if _, found := vals["b"]; found { + t.Errorf("Expected 'b' to be omitted for zero int") + } + if _, found := vals["c"]; found { + t.Errorf("Expected 'c' to be 
omitted for zero float") + } + if _, found := vals["d"]; found { + t.Errorf("Expected 'd' to be omitted for false bool") + } + + // E is a pointer to an int, so it should appear + gotE, found := vals["e"] + if !found { + t.Error("Expected 'e' to be present") + } else if len(gotE) != 1 || gotE[0] != "42" { + t.Errorf("Expected '42', got %v", gotE) + } + + // F is a pointer to string, so it should appear + gotF, found := vals["f"] + if !found { + t.Error("Expected 'f' to be present") + } else if len(gotF) != 1 || gotF[0] != "Hello" { + t.Errorf("Expected 'Hello', got %v", gotF) + } + + // G has no omitempty tag and must be encoded + gotG, found := vals["g"] + if !found { + t.Error("Expected 'g' to be present") + } else if len(gotG) != 1 || gotG[0] != "MustEncode" { + t.Errorf("Expected 'MustEncode', got %v", gotG) + } +} + +func BenchmarkSimpleStructEncode(b *testing.B) { + s := SimpleStructForBenchmarkEncode{ + A: "abc", + B: 123, + C: true, + D: 3.14, + E: struct { + F float64 `schema:"f"` + }{F: 6.28}, + } + enc := NewEncoder() + + vals := map[string][]string{} + for b.Loop() { + _ = enc.Encode(&s, vals) + } +} + +func BenchmarkSimpleStructEncodeParallel(b *testing.B) { + s := SimpleStructForBenchmarkEncode{ + A: "abc", + B: 123, + C: true, + D: 3.14, + E: struct { + F float64 `schema:"f"` + }{F: 6.28}, + } + enc := NewEncoder() + + b.ResetTimer() + b.RunParallel(func(pb *testing.PB) { + vals := map[string][]string{} + for pb.Next() { + _ = enc.Encode(&s, vals) + } + }) +} + +type LargeStructForBenchmarkEncode struct { + F1 string `schema:"f1"` + F2 string `schema:"f2"` + F3 int `schema:"f3"` + F4 int `schema:"f4"` + F5 []string `schema:"f5"` + F6 []int `schema:"f6"` + F7 float64 `schema:"f7"` + F8 bool `schema:"f8"` + F9 struct { + N1 time.Time `schema:"n1"` + N2 string `schema:"n2"` + } `schema:"f9"` +} + +func BenchmarkLargeStructEncode(b *testing.B) { + s := LargeStructForBenchmarkEncode{ + F1: "Lorem", F2: "Ipsum", F3: 123, F4: 456, + F5: []string{"A", "B", 
"C", "D"}, + F6: []int{10, 20, 30, 40}, + F7: 3.14159, F8: true, + F9: struct { + N1 time.Time `schema:"n1"` + N2 string `schema:"n2"` + }{ + N1: time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC), + N2: "NestedStringValue", + }, + } + enc := NewEncoder() + + // Optionally register a custom encoder for time.Time + enc.RegisterEncoder(time.Time{}, func(v reflect.Value) string { + tVal, _ := reflect.TypeAssert[time.Time](v) + return tVal.Format(time.RFC3339) + }) + + vals := map[string][]string{} + for b.Loop() { + _ = enc.Encode(&s, vals) + } +} + +func BenchmarkLargeStructEncodeParallel(b *testing.B) { + s := LargeStructForBenchmarkEncode{ + F1: "Lorem", F2: "Ipsum", F3: 123, F4: 456, + F5: []string{"A", "B", "C", "D"}, + F6: []int{10, 20, 30, 40}, + F7: 3.14159, F8: true, + F9: struct { + N1 time.Time `schema:"n1"` + N2 string `schema:"n2"` + }{ + N1: time.Date(2025, 1, 1, 12, 0, 0, 0, time.UTC), + N2: "NestedStringValue", + }, + } + enc := NewEncoder() + enc.RegisterEncoder(time.Time{}, func(v reflect.Value) string { + tVal, _ := reflect.TypeAssert[time.Time](v) + return tVal.Format(time.RFC3339) + }) + + b.ResetTimer() + b.RunParallel(func(pb *testing.PB) { + vals := map[string][]string{} + for pb.Next() { + _ = enc.Encode(&s, vals) + } + }) +} + +func BenchmarkTimeDurationEncoding(b *testing.B) { + type DurationStruct struct { + Timeout time.Duration `schema:"timeout"` + } + + testData := DurationStruct{ + Timeout: 5 * time.Second, + } + + enc := NewEncoder() + enc.RegisterEncoder(time.Duration(0), func(v reflect.Value) string { + d, _ := reflect.TypeAssert[time.Duration](v) + return d.String() + }) + + vals := map[string][]string{} + for b.Loop() { + _ = enc.Encode(&testData, vals) + } +} diff --git a/go.mod b/go.mod index c18d1bd..fd0c31a 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,8 @@ -module github.com/gorilla/schema +module github.com/gofiber/schema -go 1.20 +go 1.25 + +require ( + github.com/gofiber/utils/v2 v2.0.0 // indirect + github.com/google/uuid v1.6.0 
// indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..9fca9dd --- /dev/null +++ b/go.sum @@ -0,0 +1,4 @@ +github.com/gofiber/utils/v2 v2.0.0 h1:SCC3rpsEDWupFSHtc0RKxg/BKgV0s1qKfZg9Jv6D0sM= +github.com/gofiber/utils/v2 v2.0.0/go.mod h1:xF9v89FfmbrYqI/bQUGN7gR8ZtXot2jxnZvmAUtiavE= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= diff --git a/multierror_test.go b/multierror_test.go new file mode 100644 index 0000000..a1b97b2 --- /dev/null +++ b/multierror_test.go @@ -0,0 +1,61 @@ +package schema + +import ( + "errors" + "strings" + "testing" +) + +func TestMultiErrorError(t *testing.T) { + var m MultiError + if got := m.Error(); got != "(0 errors)" { + t.Fatalf("expected (0 errors), got %q", got) + } + + errA := errors.New("a") + m = MultiError{"a": errA} + if got := m.Error(); got != errA.Error() { + t.Fatalf("expected %q, got %q", errA.Error(), got) + } + + errB := errors.New("b") + m = MultiError{"a": errA, "b": errB} + out := m.Error() + if !strings.HasSuffix(out, "(and 1 other error)") { + t.Fatalf("unexpected output %q", out) + } + if !strings.HasPrefix(out, errA.Error()) && !strings.HasPrefix(out, errB.Error()) { + t.Fatalf("unexpected prefix %q", out) + } + + errC := errors.New("c") + m = MultiError{"a": errA, "b": errB, "c": errC} + out = m.Error() + if !strings.HasSuffix(out, "(and 2 other errors)") { + t.Fatalf("unexpected output %q", out) + } +} + +func TestMultiErrorMerge(t *testing.T) { + errA := errors.New("a") + m1 := MultiError{"a": errA} + errB := errors.New("b") + m2 := MultiError{"a": errors.New("ignore"), "b": errB} + m1.merge(m2) + if len(m1) != 2 { + t.Fatalf("expected len 2, got %d", len(m1)) + } + if m1["a"] != errA { + t.Errorf("existing key overwritten") + } + if m1["b"].Error() != errB.Error() { + t.Errorf("missing merged error") + } +} + +func BenchmarkMultiErrorError(b *testing.B) { + m := 
MultiError{"a": errors.New("a"), "b": errors.New("b"), "c": errors.New("c")} + for b.Loop() { + _ = m.Error() + } +} diff --git a/util_test.go b/util_test.go new file mode 100644 index 0000000..b0b3ca9 --- /dev/null +++ b/util_test.go @@ -0,0 +1,182 @@ +package schema + +import ( + "reflect" + "testing" +) + +func TestParseTagAndOptions(t *testing.T) { + alias, opts := parseTag("name,omitempty,default:foo") + if alias != "name" { + t.Fatalf("expected alias name, got %s", alias) + } + if !opts.Contains("omitempty") { + t.Fatalf("expected omitempty option") + } + if val := opts.getDefaultOptionValue(); val != "foo" { + t.Fatalf("expected default foo, got %s", val) + } +} + +func TestFieldAlias(t *testing.T) { + type S struct { + Field string `json:"custom,omitempty"` + } + f, ok := reflect.TypeOf(S{}).FieldByName("Field") + if !ok { + t.Fatal("field not found") + } + alias, opts := fieldAlias(f, "json") + if alias != "custom" { + t.Fatalf("expected alias custom, got %s", alias) + } + if !opts.Contains("omitempty") { + t.Fatalf("expected omitempty option") + } +} + +func TestTagOptionsContains(t *testing.T) { + opts := tagOptions{"a", "b", "default:val"} + if !opts.Contains("a") || opts.Contains("c") { + t.Fatalf("contains failed") + } + if val := opts.getDefaultOptionValue(); val != "val" { + t.Fatalf("expected default val, got %s", val) + } +} + +func TestIsValidStructPointer(t *testing.T) { + type S struct{} + if !isValidStructPointer(reflect.ValueOf(&S{})) { + t.Errorf("expected true for struct pointer") + } + if isValidStructPointer(reflect.ValueOf(S{})) { + t.Errorf("expected false for struct value") + } + var sp *S + if isValidStructPointer(reflect.ValueOf(sp)) { + t.Errorf("expected false for nil pointer") + } + var i int + if isValidStructPointer(reflect.ValueOf(&i)) { + t.Errorf("expected false for pointer to non-struct") + } +} + +func TestConvertPointer(t *testing.T) { + v := convertPointer(reflect.Bool, "true") + if !v.IsValid() || !v.Elem().Bool() { + 
t.Fatalf("expected true, got %v", v) + } + + v = convertPointer(reflect.Int, "10") + if !v.IsValid() || v.Elem().Int() != 10 { + t.Fatalf("expected 10, got %v", v) + } + + v = convertPointer(reflect.String, "abc") + if !v.IsValid() || v.Elem().String() != "abc" { + t.Fatalf("expected abc, got %v", v) + } + + v = convertPointer(reflect.Complex64, "1") + if v.IsValid() { + t.Fatalf("expected invalid value for unsupported kind") + } +} + +func BenchmarkParseTag(b *testing.B) { + for b.Loop() { + parseTag("field,omitempty,default:value") + } +} + +func BenchmarkIsZero(b *testing.B) { + type S struct{ A int } + v := reflect.ValueOf(S{}) + for b.Loop() { + isZero(v) + } +} + +func BenchmarkConvertPointer(b *testing.B) { + for b.Loop() { + convertPointer(reflect.Int, "42") + } +} + +type customZero struct{ A int } + +func (c customZero) IsZero() bool { return c.A == 0 } + +func TestIsZeroCases(t *testing.T) { + var sl []int + if !isZero(reflect.ValueOf(sl)) { + t.Errorf("nil slice should be zero") + } + sl = []int{} + if !isZero(reflect.ValueOf(sl)) { + t.Errorf("empty slice should be zero") + } + sl = []int{1} + if isZero(reflect.ValueOf(sl)) { + t.Errorf("non-empty slice considered zero") + } + + arr := [2]int{} + if !isZero(reflect.ValueOf(arr)) { + t.Errorf("zero array should be zero") + } + arr = [2]int{0, 1} + if isZero(reflect.ValueOf(arr)) { + t.Errorf("non-zero array considered zero") + } + + type S struct { + A int + B string + } + if !isZero(reflect.ValueOf(S{})) { + t.Errorf("zero struct should be zero") + } + if isZero(reflect.ValueOf(S{A: 1})) { + t.Errorf("non-zero struct considered zero") + } + + if !isZero(reflect.ValueOf(customZero{})) { + t.Errorf("IsZero method not used for zero value") + } + if isZero(reflect.ValueOf(customZero{A: 1})) { + t.Errorf("IsZero method not used for non-zero value") + } +} + +func TestIsZeroFuncAndMap(t *testing.T) { + tests := map[string]func(){ + "nil": nil, + "non-nil": func() {}, + } + for name, fn := range tests { + 
t.Run(name, func(t *testing.T) { + defer func() { + if r := recover(); r == nil { + t.Errorf("expected panic for %s func", name) + } + }() + isZero(reflect.ValueOf(fn)) + }) + } + + var m map[string]int + if !isZero(reflect.ValueOf(m)) { + t.Errorf("nil map should be zero") + } + m = map[string]int{} + if !isZero(reflect.ValueOf(m)) { + t.Errorf("empty map should be zero") + } + m["a"] = 1 + if isZero(reflect.ValueOf(m)) { + t.Errorf("non-empty map considered zero") + } +}