diff --git a/.cliff.toml b/.cliff.toml new file mode 100644 index 0000000..ae70028 --- /dev/null +++ b/.cliff.toml @@ -0,0 +1,181 @@ +# git-cliff ~ configuration file +# https://git-cliff.org/docs/configuration + +[changelog] +header = """ +""" + +footer = """ + +----- + +**[{{ remote.github.repo }}]({{ self::remote_url() }}) license terms** + +[![License][license-badge]][license-url] + +[license-badge]: http://img.shields.io/badge/license-Apache%20v2-orange.svg +[license-url]: {{ self::remote_url() }}/?tab=Apache-2.0-1-ov-file#readme + +{%- macro remote_url() -%} + https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }} +{%- endmacro -%} +""" + +body = """ +{%- if version %} +## [{{ version | trim_start_matches(pat="v") }}]({{ self::remote_url() }}/tree/{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }} +{%- else %} +## [unreleased] +{%- endif %} +{%- if message %} + {%- raw %}\n{% endraw %} +{{ message }} + {%- raw %}\n{% endraw %} +{%- endif %} +{%- if version %} + {%- if previous.version %} + +**Full Changelog**: <{{ self::remote_url() }}/compare/{{ previous.version }}...{{ version }}> + {%- endif %} +{%- else %} + {%- raw %}\n{% endraw %} +{%- endif %} + +{%- if statistics %}{% if statistics.commit_count %} + {%- raw %}\n{% endraw %} +{{ statistics.commit_count }} commits in this release. + {%- raw %}\n{% endraw %} +{%- endif %}{% endif %} +----- + +{%- for group, commits in commits | group_by(attribute="group") %} + {%- raw %}\n{% endraw %} +### {{ group | upper_first }} + {%- raw %}\n{% endraw %} + {%- for commit in commits %} + {%- if commit.remote.pr_title %} + {%- set commit_message = commit.remote.pr_title %} + {%- else %} + {%- set commit_message = commit.message %} + {%- endif %} +* {{ commit_message | split(pat="\n") | first | trim }} + {%- if commit.remote.username %} +{%- raw %} {% endraw %}by [@{{ commit.remote.username }}](https://github.com/{{ commit.remote.username }}) + {%- endif %} + {%- if commit.remote.pr_number %} +{%- raw %} {% endraw %}in [#{{ commit.remote.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.remote.pr_number }}) + {%- endif %} +{%- raw %} {% endraw %}[...]({{ self::remote_url() }}/commit/{{ commit.id }}) + {%- endfor %} +{%- endfor %} + +{%- if github %} +{%- raw %}\n{% endraw -%} + {%- set all_contributors = github.contributors | length %} + {%- if github.contributors | filter(attribute="username", value="dependabot[bot]") | length < all_contributors %} +----- + +### People who contributed to this release + {% endif %} + {%- for contributor in github.contributors | filter(attribute="username") | sort(attribute="username") %} + {%- if contributor.username != "dependabot[bot]" %} +* [@{{ contributor.username }}](https://github.com/{{ contributor.username }}) + {%- endif %} + {%- endfor %} + + {% if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %} +----- + {%- raw %}\n{% endraw %} + +### New Contributors + {%- endif %} + + {%- for contributor in github.contributors | filter(attribute="is_first_time", value=true) %} + {%- if contributor.username != "dependabot[bot]" %} +* @{{ contributor.username }} made their first contribution + {%- if contributor.pr_number %} + in [#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \ + {%- endif %} + {%- endif %} + {%- endfor %} +{%- endif %} + +{%- raw %}\n{% endraw %} + +{%- macro remote_url() -%} + https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }} +{%- endmacro -%} +""" +# Remove leading and 
trailing whitespaces from the changelog's body. +trim = true +# Render body even when there are no releases to process. +render_always = true +# An array of regex based postprocessors to modify the changelog. +postprocessors = [ + # Replace the placeholder with a URL. + #{ pattern = '', replace = "https://github.com/orhun/git-cliff" }, +] +# output file path +# output = "test.md" + +[git] +# Parse commits according to the conventional commits specification. +# See https://www.conventionalcommits.org +conventional_commits = false +# Exclude commits that do not match the conventional commits specification. +filter_unconventional = false +# Require all commits to be conventional. +# Takes precedence over filter_unconventional. +require_conventional = false +# Split commits on newlines, treating each line as an individual commit. +split_commits = false +# An array of regex based parsers to modify commit messages prior to further processing. +commit_preprocessors = [ + # Replace issue numbers with link templates to be updated in `changelog.postprocessors`. + #{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](/issues/${2}))"}, + # Check spelling of the commit message using https://github.com/crate-ci/typos. + # If the spelling is incorrect, it will be fixed automatically. + #{ pattern = '.*', replace_command = 'typos --write-changes -' } +] +# Prevent commits that are breaking from being excluded by commit parsers. +protect_breaking_commits = false +# An array of regex based parsers for extracting data from the commit message. +# Assigns commits to groups. +# Optionally sets the commit's scope and can decide to exclude commits from further processing. +commit_parsers = [ + { message = "^[Cc]hore\\([Rr]elease\\): prepare for", skip = true }, + { message = "(^[Mm]erge)|([Mm]erge conflict)", skip = true }, + { field = "author.name", pattern = "dependabot*", group = "Updates" }, + { message = "([Ss]ecurity)|([Vv]uln)", group = "Security" }, + { body = "(.*[Ss]ecurity)|([Vv]uln)", group = "Security" }, + { message = "([Cc]hore\\(lint\\))|(style)|(lint)|(codeql)|(golangci)", group = "Code quality" }, + { message = "(^[Dd]oc)|((?i)readme)|(badge)|(typo)|(documentation)", group = "Documentation" }, + { message = "(^[Ff]eat)|(^[Ee]nhancement)", group = "Implemented enhancements" }, + { message = "(^ci)|(\\(ci\\))|(fixup\\s+ci)|(fix\\s+ci)|(license)|(example)", group = "Miscellaneous tasks" }, + { message = "^test", group = "Testing" }, + { message = "(^fix)|(panic)", group = "Fixed bugs" }, + { message = "(^refact)|(rework)", group = "Refactor" }, + { message = "(^[Pp]erf)|(performance)", group = "Performance" }, + { message = "(^[Cc]hore)", group = "Miscellaneous tasks" }, + { message = "^[Rr]evert", group = "Reverted changes" }, + { message = "(upgrade.*?go)|(go\\s+version)", group = "Updates" }, + { message = ".*", group = "Other" }, +] +# Exclude commits that are not matched by any commit parser. +filter_commits = false +# An array of link parsers for extracting external references, and turning them into URLs, using regex. +link_parsers = [] +# Include only the tags that belong to the current branch. +use_branch_tags = false +# Order releases topologically instead of chronologically. +topo_order = false +# Order releases topologically instead of chronologically. +topo_order_commits = true +# Order of commits in each group/release within the changelog. 
+# Allowed values: newest, oldest
+sort_commits = "newest"
+# Process submodules commits
+recurse_submodules = false
+
+#[remote.github]
+#owner = "go-openapi"
diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index 7dea424..b02b363 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -1,114 +1,211 @@
 ## Contribution Guidelines
+You'll find below general guidelines, which mostly correspond to standard practices for open-source repositories.
+
+>**TL;DR**
+>
+> If you're already an experienced go developer on github, then you should just feel at home with us
+> and you may well skip the rest of this document.
+>
+> You'll essentially find the usual guidelines for a go library project on github.
+
+These guidelines are general to all libraries published on github by the `go-openapi` organization.
+
+You'll find more detailed (or repo-specific) instructions in the [maintainer's docs](../docs).
+
+## How can I contribute?
+
+There are many ways in which you can contribute. Here are a few ideas:
+
+ * Reporting Issues / Bugs
+ * Suggesting Improvements
+ * Code
+   * bug fixes and new features that are within the main project scope
+   * improving test coverage
+   * addressing code quality issues
+ * Documentation
+ * Artwork that makes the project look great
+
+## Questions & issues
+
+### Asking questions
+
+You may ask anything about this library by reporting a "Question" issue on github.
+
+### Reporting issues
+
+Reporting a problem with our libraries _is_ a valuable contribution.
+
+You can do this on the github issues page of this repository.
+
+Please be as specific as possible when describing your issue.
+
+Whenever relevant, please provide information about your environment (go version, OS).
+
+Adding a code snippet to reproduce the issue is great, and a big time saver for maintainers.
+
+### Triaging issues
+
+You can help triage issues, which may include:
+
+* reproducing bug reports
+* asking for important information, such as version numbers or reproduction instructions
+* answering questions and sharing your insight in issue comments
+
+## Code contributions
+
 ### Pull requests are always welcome
-We are always thrilled to receive pull requests, and do our best to
-process them as fast as possible. Not sure if that typo is worth a pull
-request? Do it! We will appreciate it.
+We are always thrilled to receive pull requests, and we do our best to
+process them as fast as possible.
+
+Not sure if that typo is worth a pull request? Do it! We will appreciate it.
+
+If your pull request is not accepted on the first try, don't be discouraged!
+If there's a problem with the implementation, hopefully you received feedback on what to improve.
+
+If you have a lot of ideas or a lot of issues to solve, try to hold back a bit and post focused
+pull requests.
+Remember that they must be reviewed by a maintainer, and it is easy to lose track of things on big PRs.
+
+We're trying very hard to keep the go-openapi packages lean and focused.
+These packages constitute a toolkit: it won't do everything for everybody out of the box,
+but everybody can use it to do just about everything related to OpenAPI.
+
+This means that we might decide against incorporating a new feature.
+
+However, there might be a way to implement that feature *on top of* our libraries.
+
+### Environment
+
+You just need a `go` compiler to be installed. No special tools are needed to work with our libraries.
+
+The go compiler version required is always the old stable (latest minor go version minus 1).
-If your pull request is not accepted on the first try, don't be
-discouraged! If there's a problem with the implementation, hopefully you
-received feedback on what to improve.
+If you're already used to working with `go`, you should already have everything in place.
-We're trying very hard to keep go-swagger lean and focused. We don't want it
-to do everything for everybody. This means that we might decide against
-incorporating a new feature. However, there might be a way to implement
-that feature *on top of* go-swagger.
+Although not required, you'll certainly be more productive with a local installation of `golangci-lint`,
+the meta-linter our CI uses.
+If you don't have it, you may install it like so:
+
+```sh
+go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@latest
+```
 ### Conventions
-Fork the repo and make changes on your fork in a feature branch:
+#### Git flow
+
+Fork the repo and make changes to your fork in a feature branch.
+
+To submit a pull request, push your branch to your fork (e.g. `upstream` remote):
+github will propose to open a pull request on the original repository.
+
+Typically you'd follow some common naming conventions:
+
+- if it's a bugfix branch, name it `fix/XXX-something` where XXX is the number of the
+  issue on github
+- if it's a feature branch, create an enhancement issue to announce your
+  intentions, and name it `feature/XXX-something` where XXX is the number of the issue.
+
+> NOTE: we don't enforce naming conventions on branches: it's your fork after all.
+
+#### Tests
+
+Submit unit tests for your changes.
+
+Go has a great built-in test framework; use it!
+
+Take a look at existing tests for inspiration, and run the full test suite on your branch
+before submitting a pull request.
+
+Our CI measures overall test coverage as well as the coverage of every patch.
+Although not a blocking step (because there are so many special cases),
+this is an indicator that maintainers consider when approving a PR.
+
+Please try your best to cover about 80% of your patch.
+
+#### Code style
-- If it's a bugfix branch, name it XXX-something where XXX is the number of the
-  issue
-- If it's a feature branch, create an enhancement issue to announce your
-  intentions, and name it XXX-something where XXX is the number of the issue.
+You may read our stance on code style [here](../docs/STYLE.md).
-Submit unit tests for your changes. Go has a great test framework built in; use
-it! Take a look at existing tests for inspiration. Run the full test suite on
-your branch before submitting a pull request.
+#### Documentation
-Update the documentation when creating or modifying features. Test
-your documentation changes for clarity, concision, and correctness, as
-well as a clean documentation build. See ``docs/README.md`` for more
-information on building the docs and how docs get released.
+Don't forget to update the documentation when creating or modifying features.
-Write clean code. Universally formatted code promotes ease of writing, reading,
-and maintenance. Always run `gofmt -s -w file.go` on each changed file before
-committing your changes. Most editors have plugins that do this automatically.
+Most documentation for this library is directly found in code as comments for godoc.
+
+The documentation for the go-openapi packages is published on the public go docs site:
+
+
+
+Check your documentation changes for clarity, concision, and correctness.
+
+If you want to assess the rendering of your changes when published to `pkg.go.dev`, you may
+want to install the `pkgsite` tool provided by `golang.org`.
+
+```sh
+go install golang.org/x/pkgsite/cmd/pkgsite@latest
+```
+
+Then run, from the repository folder:
+```sh
+pkgsite .
+```
+
+This will run a godoc server locally where you may see the documentation generated from your local repository.
+
+#### Commit messages
 Pull requests descriptions should be as clear as possible and include a
 reference to all the issues that they address.
 Pull requests must not contain commits from other users or branches.
-Commit messages must start with a capitalized and short summary (max. 50
-chars) written in the imperative, followed by an optional, more detailed
-explanatory text which is separated from the summary by an empty line.
+Commit messages are not required to follow the "conventional commit" rule, but it's certainly a good
+thing to follow this guideline (e.g. "fix: blah blah", "ci: did this", "feat: did that" ...).
-Code review comments may be added to your pull request. Discuss, then make the
-suggested modifications and push additional commits to your feature branch. Be
-sure to post a comment after pushing. The new commits will show up in the pull
-request automatically, but the reviewers will not be notified unless you
-comment.
+The title in your commit message is used directly to produce our release notes: try to keep it neat.
-Before the pull request is merged, make sure that you squash your commits into
-logical units of work using `git rebase -i` and `git push -f`. After every
-commit the test suite should be passing. Include documentation changes in the
-same commit so that a revert would remove all traces of the feature or fix.
+The commit message body should detail your changes.
-Commits that fix or close an issue should include a reference like `Closes #XXX`
-or `Fixes #XXX`, which will automatically close the issue when merged.
+If an issue should be closed by a commit, please add this reference in the commit body:
-### Sign your work
+```
+* fixes #{issue number}
+```
-The sign-off is a simple line at the end of the explanation for the
-patch, which certifies that you wrote it or otherwise have the right to
-pass it on as an open-source patch. The rules are pretty simple: if you
-can certify the below (from
-[developercertificate.org](http://developercertificate.org/)):
+#### Code review
-```
-Developer Certificate of Origin
-Version 1.1
+Code review comments may be added to your pull request.
-Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
-660 York Street, Suite 102,
-San Francisco, CA 94110 USA
+Discuss, then make the suggested modifications and push additional commits to your feature branch.
-Everyone is permitted to copy and distribute verbatim copies of this
-license document, but changing it is not allowed.
+Be sure to post a comment after pushing. The new commits will show up in the pull
+request automatically, but the reviewers will not be notified unless you comment.
+Before the pull request is merged,
+**make sure that you squash your commits into logical units of work**
+using `git rebase -i` and `git push -f` (see the example below).
-Developer's Certificate of Origin 1.1
+After every commit the test suite should be passing.
-By making a contribution to this project, I certify that:
+Include documentation changes in the same commit so that a revert would remove all traces of the feature or fix.
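+
+For instance, a typical sequence to squash your last few commits and update your pull request
+could look like this (the number of commits and the branch name below are just examples):
+
+```sh
+# interactively rework the last 3 commits into logical units of work
+git rebase -i HEAD~3
+# update the branch backing your pull request on your fork
+git push -f origin feature/123-something
+```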
-(a) The contribution was created in whole or in part by me and I - have the right to submit it under the open source license - indicated in the file; or +#### Sign your work -(b) The contribution is based upon previous work that, to the best - of my knowledge, is covered under an appropriate open source - license and I have the right under that license to submit that - work with modifications, whether created in whole or in part - by me, under the same open source license (unless I am - permitted to submit under a different license), as indicated - in the file; or +The sign-off is a simple line at the end of your commit message, +which certifies that you wrote it or otherwise have the right to +pass it on as an open-source patch. -(c) The contribution was provided directly to me by some other - person who certified (a), (b) or (c) and I have not modified - it. +We require the simple DCO below with an email signing your commit. +PGP-signed commit are greatly appreciated but not required. -(d) I understand and agree that this project and the contribution - are public and that a record of the contribution (including all - personal information I submit with it, including my sign-off) is - maintained indefinitely and may be redistributed consistent with - this project or the open source license(s) involved. -``` +The rules are pretty simple: -then you just add a line to every git commit message: +* read our [DCO](./DCO.md) (from [developercertificate.org](http://developercertificate.org/)) +* if you agree with these terms, then you just add a line to every git commit message Signed-off-by: Joe Smith diff --git a/.github/DCO.md b/.github/DCO.md new file mode 100644 index 0000000..e168dc4 --- /dev/null +++ b/.github/DCO.md @@ -0,0 +1,40 @@ + # Developer's Certificate of Origin + +``` +Developer Certificate of Origin +Version 1.1 + +Copyright (C) 2004, 2006 The Linux Foundation and its contributors. +660 York Street, Suite 102, +San Francisco, CA 94110 USA + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. + + +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. 
+``` diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 1f18207..4916494 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -2,42 +2,73 @@ name: Dependabot auto-merge on: pull_request permissions: - contents: write - pull-requests: write + contents: read jobs: dependabot: + permissions: + contents: write + pull-requests: write runs-on: ubuntu-latest - if: github.event.pull_request.user.login == 'dependabot[bot]' + if: ${{ github.event.pull_request.user.login == 'dependabot[bot]' }} steps: - - name: Dependabot metadata + - + name: Dependabot metadata id: metadata - uses: dependabot/fetch-metadata@v2 - - - name: Auto-approve all dependabot PRs - run: gh pr review --approve "$PR_URL" + uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b # v2.4.0 + - + name: Auto-approve all dependabot PRs env: PR_URL: ${{github.event.pull_request.html_url}} GH_TOKEN: ${{secrets.GITHUB_TOKEN}} - - - name: Auto-merge dependabot PRs for development dependencies - if: contains(steps.metadata.outputs.dependency-group, 'development-dependencies') - run: gh pr merge --auto --rebase "$PR_URL" + run: gh pr review --approve "$PR_URL" + - + name: Auto-merge dependabot PRs for development dependencies + if: ${{ contains(steps.metadata.outputs.dependency-group, 'development-dependencies') }} env: PR_URL: ${{github.event.pull_request.html_url}} GH_TOKEN: ${{secrets.GITHUB_TOKEN}} - - - name: Auto-merge dependabot PRs for go-openapi patches - if: contains(steps.metadata.outputs.dependency-group, 'go-openapi-dependencies') && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch') run: gh pr merge --auto --rebase "$PR_URL" + - + name: Auto-merge dependabot PRs for go-openapi patches + if: >- + ${{ + contains(steps.metadata.outputs.dependency-group, 'go-openapi-dependencies') && + ( + steps.metadata.outputs.update-type == 'version-update:semver-minor' || + steps.metadata.outputs.update-type == 'version-update:semver-patch' + ) + }} env: PR_URL: ${{github.event.pull_request.html_url}} GH_TOKEN: ${{secrets.GITHUB_TOKEN}} - - - name: Auto-merge dependabot PRs for golang.org updates - if: contains(steps.metadata.outputs.dependency-group, 'golang-org-dependencies') run: gh pr merge --auto --rebase "$PR_URL" + - + name: Auto-merge dependabot PRs for golang.org updates + if: ${{ contains(steps.metadata.outputs.dependency-group, 'golang-org-dependencies') }} env: PR_URL: ${{github.event.pull_request.html_url}} GH_TOKEN: ${{secrets.GITHUB_TOKEN}} + run: gh pr merge --auto --rebase "$PR_URL" + # Auto merge is current disabled: we need automatic PRs to swap identity (e.g. using a Github App), + # so the pull_request event is properly captured and the PR can validate. 
+ #actions-bot: + # permissions: + # contents: write + # pull-requests: write + # runs-on: ubuntu-latest + # if: ${{ github.event.pull_request.user.login == 'github-actions[bot]' }} + # steps: + # - + # name: Auto-approve all github-actions bot PRs + # env: + # PR_URL: ${{github.event.pull_request.html_url}} + # GH_TOKEN: ${{secrets.GITHUB_TOKEN}} + # run: gh pr review --approve "$PR_URL" + # - + # name: Auto-merge github-actions bot PRs + # env: + # PR_URL: ${{github.event.pull_request.html_url}} + # GH_TOKEN: ${{secrets.GITHUB_TOKEN}} + # run: gh pr merge --auto --rebase "$PR_URL" diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..b11a32c --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +permissions: + contents: read + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + path-ignore: + - '**/*.md' + schedule: + - cron: '39 19 * * 5' + +jobs: + analyze: + name: Analyze. + runs-on: ubuntu-latest + timeout-minutes: 360 + permissions: + contents: read + security-events: write + # actions: read # <- is needed only for private repositories + strategy: + fail-fast: false + matrix: + language: ['go','actions'] + steps: + - + name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - + # Initializes the CodeQL tools for scanning. + name: Initialize CodeQL + uses: github/codeql-action/init@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3 + with: + languages: ${{ matrix.language }} + - + name: Analyze ${{ matrix.language }} + uses: github/codeql-action/analyze@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3 diff --git a/.github/workflows/contributors.yml b/.github/workflows/contributors.yml new file mode 100644 index 0000000..58a74f1 --- /dev/null +++ b/.github/workflows/contributors.yml @@ -0,0 +1,48 @@ +name: Contributors + +permissions: + contents: read + +on: + schedule: + - cron: '18 4 * * 6' + + workflow_dispatch: + +jobs: + update-contributors: + name: all-time contributors + permissions: + pull-requests: write + contents: write + runs-on: ubuntu-latest + steps: + - + name: Checkout repository + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - + name: Identify all-time contributors to this repository + uses: github/contributors@8b7586939baa0af4e801dbd22c88adf6e0db8915 # v1.7.5 + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPOSITORY: ${{ github.repository }} + LINK_TO_PROFILE: 'True' + - + name: Rename contributor file + run: | + rm -rf contributors.json + mv contributors.md CONTRIBUTORS.md + - + name: Create a PR + id: create-pull-request + uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8 + with: + commit-message: "doc: updated contributors file" + branch: doc/contributors-bot + delete-branch: true + title: "doc: updated contributors file" + token: ${{ secrets.GITHUB_TOKEN }} + labels: "bot" + assignees: fredbi + reviewers: fredbi + sign-commits: true diff --git a/.github/workflows/go-test.yml b/.github/workflows/go-test.yml index 6c2e0a1..5dddca1 100644 --- a/.github/workflows/go-test.yml +++ b/.github/workflows/go-test.yml @@ -1,27 +1,32 @@ -name: go test +name: go-test on: push: - tags: - - v* branches: - master pull_request: +permissions: + pull-requests: read + contents: read + jobs: lint: name: Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 - - uses: actions/setup-go@v6 + - + uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 with: go-version: stable check-latest: true cache: true - - name: golangci-lint - uses: golangci/golangci-lint-action@v8 + - + name: golangci-lint + uses: golangci/golangci-lint-action@0a35821d5c230e903fcfe077583637dea1b27b47 # v9.0.0 with: version: latest only-new-issues: true @@ -30,28 +35,274 @@ jobs: test: name: Unit tests runs-on: ${{ matrix.os }} + needs: [lint] strategy: matrix: os: [ ubuntu-latest, macos-latest, windows-latest ] - go_version: ['oldstable', 'stable' ] + go: ['oldstable', 'stable' ] + + steps: + - + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 + with: + go-version: '${{ matrix.go }}' + check-latest: true + cache: true + - + name: Install Tools + # TODO: pin version -> fork + update dedicated github action + run: | + go install gotest.tools/gotestsum@latest + - + name: Run unit tests + shell: bash + run: > + gotestsum + --jsonfile 'unit.report.${{ matrix.os }}-${{ matrix.go }}.json' + -- + -race + -p 2 + -count 1 + -timeout=20m + -coverprofile='unit.coverage.${{ matrix.os }}-${{ matrix.go }}.out' + -covermode=atomic + -coverpkg=$(go list)/... + ./... + - + name: Upload coverage artifacts + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + with: + # *.coverage.* pattern is automatically detected by codecov + path: '**/*.coverage.*.out' + name: 'unit.coverage.${{ matrix.os }}-${{ matrix.go }}' + retention-days: 1 + - + name: Upload test report artifacts + # upload report even if test fail. BTW, this is when they are valuable. + if: ${{ !cancelled() }} + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + with: + path: '**/unit.report.*.json' + name: 'unit.report.${{ matrix.os }}-${{ matrix.go }}' + retention-days: 1 + + fuzz-test: + name: fuzz test + runs-on: ubuntu-latest + env: + CORPUS_MAX_SIZE_MB: 100 + steps: + - + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 + with: + go-version: stable + check-latest: true + cache: true + - + name: Locate go fuzz cache + run: | + GOCACHE=$(go env GOCACHE) + echo "CORPUS_DIR=${GOCACHE}/fuzz" >> "${GITHUB_ENV}" + - + name: Retrieve fuzz corpus from cache + uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: ${{ runner.os }}-go-fuzz + path: + ${{ env.CORPUS_DIR }} + - + name: Manage fuzz corpus cache size + run: | + mkdir -p "${CORPUS_DIR}" + CURRENT_SIZE=$(du -sm "${CORPUS_DIR}"|cut -f1) + echo "corpus size: ${CURRENT_SIZE}MB" + if [[ "${CURRENT_SIZE}" -gt "${CORPUS_MAX_SIZE}" ]] ; then + # remove the 50 oldest corpus files + echo "::warning:Large fuzz corpus pruned" + find "${CORPUS_DIR}" -type f|ls -t|tail -n +50|xargs rm -f + fi + - + name: Run go fuzz tests + run: > + go test + -fuzz=Fuzz + -run=Fuzz + -fuzztime=1m30s + -fuzzminimizetime=5m + ./... 
+ - + name: Upload failed corpus + if: ${{ failure() }} + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0 + with: + path: ${{ env.CORPUS_DIR }} + name: '${{ runner.os }}-fuzz-corpus-failure' + retention-days: 60 + - + name: Report fuzz corpus cache size + run: | + FINAL_SIZE=$(du -m "${CORPUS_DIR}"|cut -f1) + echo "::notice title=fuzz corpus size:${FINAL_SIZE}MB" + + + test-complete: + # description: | + # Be explicit about all tests being passed. This allows for setting up only a few status checks on PRs. + name: tests completed + needs: [test,fuzz-test] + runs-on: ubuntu-latest + steps: + - + name: Tests completed + run: | + echo "::notice title=Success:All tests passed" + + collect-coverage: + # description: | + # Gather, merge then uploads test coverage files from all test jobs (this includes integration tests, + # like codegen-test). This reduces the number of failures due to codecov hitting github API rate limit. + name: collect test coverage + needs: [test-complete] + if: ${{ !cancelled() && needs.test-complete.result == 'success' }} + runs-on: ubuntu-latest + steps: + - + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + ref: ${{ github.event.pull_request.head.ref }} + repository: ${{ github.event.pull_request.head.repo.full_name }} + - + name: Download coverage artifacts + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + with: + run-id: "${{ github.run_id }}" + pattern: "*.coverage.*" + # artifacts resolve as folders + path: coverage/ + - + name: Upload coverage to codecov + uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 + with: + name: Aggregated coverage + # All *.coverage.*.out files uploaded should be detected by the codecov action. + # NOTE: we lose the flags on individual test reports (e.g. by os, by go version, unit vs integration tests) + fail_ci_if_error: false + verbose: false + collect-reports: + # description: | + # Gather, merge then uploads test report files from unit test jobs. + # + # At this moment test reports are published on both codecov + # (see ) and the github actions UI + # (see ). + name: collect test reports + needs: [test] + if: ${{ !cancelled() }} + runs-on: ubuntu-latest steps: - - uses: actions/setup-go@v6 - with: - go-version: '${{ matrix.go_version }}' - check-latest: true - cache: true - - - uses: actions/checkout@v5 - - name: Run unit tests - shell: bash - run: go test -v -race -coverprofile="coverage-${{ matrix.os }}.${{ matrix.go_version }}.out" -covermode=atomic -coverpkg=$(go list)/... ./... - - - name: Upload coverage to codecov - uses: codecov/codecov-action@v5 - with: - files: './coverage-${{ matrix.os }}.${{ matrix.go_version }}.out' - flags: '${{ matrix.go_version }}-${{ matrix.os }}' - fail_ci_if_error: false - verbose: true + - + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0 + with: + go-version: stable + check-latest: true + cache: true + - + name: Download test report artifacts + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0 + with: + run-id: "${{ github.run_id }}" + pattern: "*.report.*" + # artifacts resolve as folders + path: reports/ + - + name: Convert test reports to a merged JUnit XML + # NOTE: codecov test reports only support JUnit format at this moment. See https://docs.codecov.com/docs/test-analytics. + # Ideally, codecov improve a bit their platform, so we may only need a single pass to CTRF format. 
+ # + # As a contemplated alternative, we could use gotestsum above to produce the JUnit XML directly. + # At this moment, we keep a json format to dispatch test reports to codecov as well as to CTRF reports. + # + # TODO(fredbi): sec compliance - pin go-junit-report + # TODO(fredbi): investigate - use mikepenz/action-junit-report@v5, that packages most of the following scripts + # in a single action. Alternative: for that action. + run: | + go install github.com/jstemmer/go-junit-report/v2@latest + go-junit-report -version + + find reports/ -name \*.json | xargs cat | go-junit-report -parser gojson -out=reports/junit_report.xml + - + name: Upload test results to Codecov + # This allows for using the test results UI on codecov + uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 + with: + files: '**/junit_report.xml' + report_type: 'test_results' + fail_ci_if_error: false + handle_no_reports_found: true + verbose: true + - + name: Convert test reports to CTRF JSON + # description: | + # This step publishes CTRF test reports on github UI (actions) + # TODO: pin this dependency + run: | + go install github.com/ctrf-io/go-ctrf-json-reporter/cmd/go-ctrf-json-reporter@v0.0.10 + + appName="${{ github.repository }}" + buildNumber="${{ github.run_id }}" + appVersion="${{ github.event.pull_request.head.sha }}" + if [[ -z "${appVersion}" ]] ; then + # for push events + appVersion="${{ github.sha }}" + fi + + # reconstruct platform information from the file name + # set -x + while read report ; do + # 'unit.report.${{ matrix.os }}-${{ matrix.go }}.json' + reformated=$(echo "${report##*/}"|sed -E 's/(go)([[:digit:]]+)\.([[:digit:]]+)/\1\2\3/') # e.g. go1.24 becomes go124 + mapfile -d'.' -t -s 2 -n 2 split < <(echo $reformated) # skip the first 2 parts, stop on 2 more parts + envstring="${split[0]}" + osPlatform="${envstring%-*}" + osRelease="${envstring##*-}" + + # this is a best effort only: tests may be cancelled upstream and produce incorrect reports + go-ctrf-json-reporter \ + -quiet \ + -appName "${appName}" \ + -appVersion "${appVersion}" \ + -buildNumber "${buildNumber}" \ + -osPlatform "${osPlatform}" \ + -osRelease "${osRelease}" \ + -output "./reports/ctrf_report_${osPlatform}_${osRelease}.json" < "${report}" || true + done < <(find reports -name \*.json) + + # NOTE: at this moment, we don't upload CTRF reports as artifacts. + # Some of the CTRF reports are therefore not available (flaky tests, history, ...). + # + # See https://github.com/ctrf-io/github-test-reporter?tab=readme-ov-file#report-showcase + # for more reporting possibilities. At the moment, we keep it simple, as most advanced features + # require a github token (thus adding the complexity of a separate workflow starting on pull_request_target). + # + # For the moment, we are contented with these simple reports. This is an opportunity to compare the insight they + # provide as compared to what is uploaded to codecov. + # + # Codecov analytics are pretty poor at this moment. On the other hand, they manage the bot that pushes back + # PR comments. + # + # They also handle the storage of past test reports, so as to assess flaky tests. 
+ - + name: Publish Test Summary Results + uses: ctrf-io/github-test-reporter@024bc4b64d997ca9da86833c6b9548c55c620e40 # v1.0.26 + with: + report-path: 'reports/ctrf_report_*.json' + use-suite-name: true + summary-report: true # post a report to the github actions summary + github-report: true + failed-folded-report: true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..78e3b1a --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,43 @@ +name: Release + +permissions: + contents: read + +# description: | +# Build a github release on pushed tag. +# +# The only available asset is a release note. + +on: + push: + tags: + - v[0-9]+* + +jobs: + gh-release: + name: Create release + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - + name: Checkout code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + - + name: Generate release notes + id: notes + env: + GITHUB_TOKEN: ${{ github.token }} + GITHUB_REPO: ${{ github.repository }} + uses: orhun/git-cliff-action@d77b37db2e3f7398432d34b72a12aa3e2ba87e51 # v4.6.0 + with: + config: '.cliff.toml' + args: >- + --current + - + name: Create github release + uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2 + with: + body: ${{ steps.notes.outputs.content }} diff --git a/.github/workflows/scanner.yml b/.github/workflows/scanner.yml new file mode 100644 index 0000000..56852a0 --- /dev/null +++ b/.github/workflows/scanner.yml @@ -0,0 +1,69 @@ +name: Vulnerability scans + +permissions: read-all + +# description: | +# A fast vulnerability scan on the repo that effectively supplements ossf scorecard and codesql +# and may run every day. +# +# * trivy reports are often more easily actionable than codeql reports. +# * govumnscan supplements this analysis with a more go-specific approach +# +# NOTE: at this moment, we don't want to adopt snyk, which requires a token, provides an extra dashboard etc. +# Most likely, snyk would be redundant with trivy. 
+ +on: + branch_protection_rule: + push: + branches: [ "master" ] + schedule: + - cron: '18 4 * * 3' + +jobs: + analysis: + name: Vulnerability scan + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + steps: + - + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + - + name: Vulnerability scan by trivy + uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # v0.33.1 + with: + scan-type: repo + format: sarif + hide-progress: false + output: trivy-code-report.sarif + scanners: vuln,secret + exit-code: 0 + - + name: Upload trivy findings to code scanning dashboard + uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3 + with: + category: trivy + sarif_file: trivy-code-report.sarif + + govulnscan: + name: go vulnerability scan + runs-on: ubuntu-latest + permissions: + contents: read + security-events: write + steps: + - + id: govulncheck + uses: golang/govulncheck-action@b625fbe08f3bccbe446d94fbf87fcc875a4f50ee # v1.0.4 + with: + output-format: sarif + output-file: govulnscan-report.sarif + - + name: Upload govulnscan findings to code scanning dashboard + uses: github/codeql-action/upload-sarif@014f16e7ab1402f30e7c3329d33797e7948572db # v4.31.3 + with: + category: govulnscan + sarif_file: govulnscan-report.sarif diff --git a/.gitignore b/.gitignore index 769c244..59cd294 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,4 @@ -secrets.yml +*.out +*.cov +.idea +.env diff --git a/.golangci.yml b/.golangci.yml index 7cea1af..fdae591 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -2,34 +2,17 @@ version: "2" linters: default: all disable: - - cyclop - depguard - - errchkjson - - errorlint - - exhaustruct - - forcetypeassert - funlen - - gochecknoglobals - - gochecknoinits - - gocognit - - godot - godox - - gosmopolitan - - inamedparam - #- intrange # disabled while < go1.22 - - ireturn - - lll - - musttag - - nestif + - exhaustruct - nlreturn - nonamedreturns - noinlineerr - paralleltest - recvcheck - testpackage - - thelper - tparallel - - unparam - varnamelen - whitespace - wrapcheck @@ -41,8 +24,15 @@ linters: goconst: min-len: 2 min-occurrences: 3 + cyclop: + max-complexity: 20 gocyclo: - min-complexity: 45 + min-complexity: 20 + exhaustive: + default-signifies-exhaustive: true + default-case-required: true + lll: + line-length: 180 exclusions: generated: lax presets: @@ -58,6 +48,7 @@ formatters: enable: - gofmt - goimports + - gofumpt exclusions: generated: lax paths: diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md new file mode 100644 index 0000000..aace4fc --- /dev/null +++ b/CONTRIBUTORS.md @@ -0,0 +1,24 @@ +# Contributors + +- Repository: ['go-openapi/jsonpointer'] + +| Total Contributors | Total Contributions | +| --- | --- | +| 12 | 90 | + +| Username | All Time Contribution Count | All Commits | +| --- | --- | --- | +| @fredbi | 43 | https://github.com/go-openapi/jsonpointer/commits?author=fredbi | +| @casualjim | 33 | https://github.com/go-openapi/jsonpointer/commits?author=casualjim | +| @magodo | 3 | https://github.com/go-openapi/jsonpointer/commits?author=magodo | +| @youyuanwu | 3 | https://github.com/go-openapi/jsonpointer/commits?author=youyuanwu | +| @gaiaz-iusipov | 1 | https://github.com/go-openapi/jsonpointer/commits?author=gaiaz-iusipov | +| @gbjk | 1 | https://github.com/go-openapi/jsonpointer/commits?author=gbjk | +| @gordallott | 1 | https://github.com/go-openapi/jsonpointer/commits?author=gordallott | +| @ianlancetaylor | 
1 | https://github.com/go-openapi/jsonpointer/commits?author=ianlancetaylor | +| @mfleader | 1 | https://github.com/go-openapi/jsonpointer/commits?author=mfleader | +| @Neo2308 | 1 | https://github.com/go-openapi/jsonpointer/commits?author=Neo2308 | +| @olivierlemasle | 1 | https://github.com/go-openapi/jsonpointer/commits?author=olivierlemasle | +| @testwill | 1 | https://github.com/go-openapi/jsonpointer/commits?author=testwill | + + _this file was generated by the [Contributors GitHub Action](https://github.com/github/contributors)_ diff --git a/LICENSE b/LICENSE index d645695..261eeb9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,3 @@ - Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000..f3b5193 --- /dev/null +++ b/NOTICE @@ -0,0 +1,39 @@ +Copyright 2015-2025 go-swagger maintainers + +// SPDX-FileCopyrightText: Copyright 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +This software library, github.com/go-openapi/jsonpointer, includes software developed +by the go-swagger and go-openapi maintainers ("go-swagger maintainers"). + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this software except in compliance with the License. + +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0. + +This software is copied from, derived from, and inspired by other original software products. +It ships with copies of other software which license terms are recalled below. + +The original software was authored on 25-02-2013 by sigu-399 (https://github.com/sigu-399, sigu.399@gmail.com). + +github.com/sigh-399/jsonpointer +=========================== + +// SPDX-FileCopyrightText: Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) +// SPDX-License-Identifier: Apache-2.0 + +Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/README.md b/README.md
index 45bd31b..00cbfd7 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,79 @@
-# gojsonpointer [![Build Status](https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/jsonpointer/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer)
+# jsonpointer
-[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
-[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE)
-[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer.svg)](https://pkg.go.dev/github.com/go-openapi/jsonpointer)
-[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/jsonpointer)](https://goreportcard.com/report/github.com/go-openapi/jsonpointer)
+
+[![Tests][test-badge]][test-url] [![Coverage][cov-badge]][cov-url] [![CI vuln scan][vuln-scan-badge]][vuln-scan-url] [![CodeQL][codeql-badge]][codeql-url]
+
+
+
+[![Release][release-badge]][release-url] [![Go Report Card][gocard-badge]][gocard-url] [![CodeFactor Grade][codefactor-badge]][codefactor-url] [![License][license-badge]][license-url]
+
+
+
+[![GoDoc][godoc-badge]][godoc-url] [![go version][goversion-badge]][goversion-url] ![Top language][top-badge] ![Commits since latest release][commits-badge]
-An implementation of JSON Pointer - Go language
+---
+
+An implementation of JSON Pointer for golang, which supports Go `struct`s.
 ## Status
-Completed YES
-Tested YES
+API is stable.
+
+## Import this library in your project
+
+```cmd
+go get github.com/go-openapi/jsonpointer
+```
+
+## Basic usage
+
+See also some [examples](./examples_test.go).
+
+### Retrieving a value
+
+```go
+    import (
+        "github.com/go-openapi/jsonpointer"
+    )
+
+
+    var doc any
+
+    ...
+
+    pointer, err := jsonpointer.New("/foo/1")
+    if err != nil {
+        ... // error: e.g. invalid JSON pointer specification
+    }
+
+    value, kind, err := pointer.Get(doc)
+    if err != nil {
+        ... // error: e.g. key not found, index out of bounds, etc.
+    }
+
+    ...
+```
+
+### Setting a value
+
+```go
+    ...
+    var doc any
+    ...
+    pointer, err := jsonpointer.New("/foo/1")
+    if err != nil {
+        ... // error: e.g. invalid JSON pointer specification
+    }
+
+    doc, err = pointer.Set(doc, "value")
+    if err != nil {
+        ... // error: e.g. key not found, index out of bounds, etc.
+    }
+```
+
+## Change log
+
+See 
 ## References
@@ -18,9 +81,58 @@ Tested YES
 also known as [RFC6901](https://www.rfc-editor.org/rfc/rfc6901)
-### Note
+## Licensing
+
+This library ships under the [SPDX-License-Identifier: Apache-2.0](./LICENSE).
+
+See the license [NOTICE](./NOTICE), which recalls the licensing terms of all the pieces of software
+on top of which this library has been built.
+
+## Limitations
+
+The "4. Evaluation" part of the reference above, starting with 'If the currently referenced value is a JSON array,
+the reference token MUST contain either...' is not implemented.
+
+That is because our implementation of the JSON pointer only supports explicit references to array elements:
+the provision in the spec to resolve non-existent members as "the last element in the array",
+using the special trailing character "-" is not implemented.
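+
+For illustration, here is a minimal sketch of what this limitation means in practice
+(the document and the names below are made up): a pointer using the trailing "-" token parses fine,
+but resolving it against an array is expected to return an error rather than an element.
+
+```go
+    doc := map[string]any{"foo": []any{"bar", "baz"}}
+
+    pointer, err := jsonpointer.New("/foo/-") // "-" is a syntactically valid reference token
+    if err != nil {
+        // not expected here
+    }
+
+    if _, _, err := pointer.Get(doc); err != nil {
+        // expected: this implementation does not resolve the trailing "-"
+    }
+```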
+ +## Other documentation -The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented. +* [All-time contributors](./CONTRIBUTORS.md) +* [Contributing guidelines](.github/CONTRIBUTING.md) +* [Maintainers documentation](docs/MAINTAINERS.md) +* [Code style](docs/STYLE.md) -That is because our implementation of the JSON pointer only supports explicit references to array elements: the provision in the spec -to resolve non-existent members as "the last element in the array", using the special trailing character "-". + +[test-badge]: https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml/badge.svg +[test-url]: https://github.com/go-openapi/jsonpointer/actions/workflows/go-test.yml +[cov-badge]: https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg +[cov-url]: https://codecov.io/gh/go-openapi/jsonpointer +[vuln-scan-badge]: https://github.com/go-openapi/jsonpointer/actions/workflows/scanner.yml/badge.svg +[vuln-scan-url]: https://github.com/go-openapi/jsonpointer/actions/workflows/scanner.yml +[codeql-badge]: https://github.com/go-openapi/jsonpointer/actions/workflows/codeql.yml/badge.svg +[codeql-url]: https://github.com/go-openapi/jsonpointer/actions/workflows/codeql.yml + +[release-badge]: https://badge.fury.io/go/github.com%2Fgo-openapi%2Fjsonpointer.svg +[release-url]: https://badge.fury.io/go/github.com%2Fgo-openapi%2Fjsonpointer + +[gocard-badge]: https://goreportcard.com/badge/github.com/go-openapi/jsonpointer +[gocard-url]: https://goreportcard.com/report/github.com/go-openapi/jsonpointer +[codefactor-badge]: https://img.shields.io/codefactor/grade/github/go-openapi/jsonpointer +[codefactor-url]: https://www.codefactor.io/repository/github/go-openapi/jsonpointer + +[doc-badge]: https://img.shields.io/badge/doc-site-blue?link=https%3A%2F%2Fgoswagger.io%2Fgo-openapi%2F +[doc-url]: https://goswagger.io/go-openapi +[godoc-badge]: https://pkg.go.dev/badge/github.com/go-openapi/jsonpointer +[godoc-url]: http://pkg.go.dev/github.com/go-openapi/jsonpointer +[slack-badge]: https://slackin.goswagger.io/badge.svg +[slack-url]: https://slackin.goswagger.io + +[license-badge]: http://img.shields.io/badge/license-Apache%20v2-orange.svg +[license-url]: https://github.com/go-openapi/jsonpointer/?tab=Apache-2.0-1-ov-file#readme + +[goversion-badge]: https://img.shields.io/github/go-mod/go-version/go-openapi/jsonpointer +[goversion-url]: https://github.com/go-openapi/jsonpointer/blob/master/go.mod +[top-badge]: https://img.shields.io/github/languages/top/go-openapi/jsonpointer +[commits-badge]: https://img.shields.io/github/commits-since/go-openapi/jsonpointer/latest diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..2a7b6f0 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +This policy outlines the commitment and practices of the go-openapi maintainers regarding security. + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| 0.22.x | :white_check_mark: | + +## Reporting a vulnerability + +If you become aware of a security vulnerability that affects the current repository, +please report it privately to the maintainers. 
+
+Please follow the instructions provided by github to
+[Privately report a security vulnerability](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing-information-about-vulnerabilities/privately-reporting-a-security-vulnerability#privately-reporting-a-security-vulnerability).
+
+TL;DR: on Github, navigate to the project's "Security" tab then click on "Report a vulnerability".
diff --git a/docs/MAINTAINERS.md b/docs/MAINTAINERS.md
new file mode 100644
index 0000000..0c73976
--- /dev/null
+++ b/docs/MAINTAINERS.md
@@ -0,0 +1,157 @@
+# Maintainer's guide
+
+## Repo structure
+
+Single go module.
+
+> **NOTE**
+>
+> Some `go-openapi` repos are mono-repos with multiple modules,
+> with adapted CI workflows.
+
+## Repo configuration
+
+* default branch: master
+* protected branches: master
+* branch protection rules:
+  * require pull requests and approval
+  * required status checks:
+    - DCO (simple email sign-off)
+    - Lint
+    - tests completed
+* auto-merge enabled (used for dependabot updates)
+
+## Continuous Integration
+
+### Code Quality checks
+
+* meta-linter: golangci-lint
+* linter config: [`.golangci.yml`](../.golangci.yml) (see our [posture](./STYLE.md) on linters)
+
+* Code quality assessment: [CodeFactor](https://www.codefactor.io/dashboard)
+* Code quality badges
+  * go report card: 
+  * CodeFactor: 
+
+> **NOTES**
+>
+> codefactor inherits roles from github. There is no need to create a dedicated account.
+>
+> The codefactor app is installed at the organization level (`github.com/go-openapi`).
+>
+> There is no special token to set up in github for CI usage.
+
+### Testing
+
+* Test reports
+  * Uploaded to codecov: 
+* Test coverage reports
+  * Uploaded to codecov: 
+
+* Fuzz testing
+  * Fuzz tests are handled separately by CI and may reuse a cached version of the fuzzing corpus.
+    At this moment, cache may not be shared between feature branches or feature branch and master.
+    The minimized corpus produced on failure is uploaded as an artifact and should be added manually
+    to `testdata/fuzz/...`.
+
+Coverage threshold status is informative and not blocking.
+This is because the thresholds are difficult to tune and codecov oftentimes reports false negatives
+or may fail to upload coverage.
+
+All tests use our fork of `stretchr/testify`: `github.com/go-openapi/testify`.
+This allows for minimal test dependencies.
+
+> **NOTES**
+>
+> codecov inherits roles from github. There is no need to create a dedicated account.
+> However, there is only 1 maintainer allowed to be the admin of the organization on codecov
+> with their free plan.
+>
+> The codecov app is installed at the organization level (`github.com/go-openapi`).
+>
+> There is no special token to set up in github for CI usage.
+> An organization-level token used to upload coverage and test reports is managed at codecov:
+> no setup is required on github.
+
+### Automated updates
+
+* dependabot
+  * configuration: [`dependabot.yaml`](../.github/dependabot.yaml)
+
+  Principle:
+
+  * dependabot applies updates and security patches to the github-actions and golang ecosystems.
+  * all updates from "trusted" dependencies (github actions, golang.org packages, go-openapi packages)
+    are auto-merged if they successfully pass CI.
+
+* go version updates
+
+  Principle:
+
+  * we support the 2 latest minor versions of the go compiler (`stable`, `oldstable`)
+  * `go.mod` should be updated (manually) whenever there is a new go minor release
+    (e.g. every 6 months).
+
+* contributors
+  * a [`CONTRIBUTORS.md`](../CONTRIBUTORS.md) file is updated weekly, with all-time contributors to the repository
+  * the `github-actions[bot]` posts a pull request to do that automatically
+  * at this moment, this pull request is not auto-approved/auto-merged (bot cannot approve its own PRs)
+
+### Vulnerability scanners
+
+There are 3 complementary scanners. Obviously, there is some overlap, but each has a different focus.
+
+* github `CodeQL`
+* `trivy`
+* `govulnscan`
+
+None of these tools require an additional account or token.
+
+Github CodeQL configuration is set to "Advanced", so we may collect a CI status for this check (e.g. for badges).
+
+Scanners run on every commit to master and at least once a week.
+
+Reports are centralized in github security reports for code scanning tools.
+
+## Releases
+
+The release process is minimalist:
+
+* push a semver tag (i.e. v{major}.{minor}.{patch}) to the master branch.
+* the CI handles this and generates a github release with release notes
+
+* release notes generator: git-cliff
+* configuration: [`.cliff.toml`](../.cliff.toml)
+
+Tags are preferably PGP-signed.
+
+The tag message introduces the release notes (e.g. a summary of this release).
+
+The release notes generator does not assume that commits are necessarily "conventional commits".
+
+## Other files
+
+Standard documentation:
+
+* [`CONTRIBUTING.md`](../.github/CONTRIBUTING.md) guidelines
+* [`DCO.md`](../.github/DCO.md) terms for first-time contributors to read
+* [`CODE_OF_CONDUCT.md`](../CODE_OF_CONDUCT.md)
+* [`SECURITY.md`](../SECURITY.md) policy: how to report vulnerabilities privately
+* [`LICENSE`](../LICENSE) terms
+* [`NOTICE`](../NOTICE) on supplementary license terms (original authors, copied code, etc.)
+
+Reference documentation (released):
+
+* [godoc](https://pkg.go.dev/github.com/go-openapi/jsonpointer)
+
+## TODOs & other ideas
+
+A few things remain ahead to ease a maintainer's job a bit:
+
+* reuse CI workflows (e.g. in `github.com/go-openapi/workflows`)
+* reusable actions with custom tools pinned (e.g. in `github.com/go-openapi/gh-actions`)
+* open-source license checks
+* auto-merge for CONTRIBUTORS.md (requires a github app to produce tokens)
+* more automated code renovation / relinting work (possibly built with CLAUDE)
+* organization-level documentation web site
+* ...
diff --git a/docs/STYLE.md b/docs/STYLE.md
new file mode 100644
index 0000000..056fdb5
--- /dev/null
+++ b/docs/STYLE.md
@@ -0,0 +1,83 @@
+# Coding style at `go-openapi`
+
+> **TL;DR**
+>
+> Let's be honest: at `go-openapi` and `go-swagger` we've never been super-strict on code style etc.
+>
+> But perhaps now (2025) is the time to adopt a different stance.
+
+Even though our repos were early adopters of `golangci-lint` years ago
+(we used some other metalinter before), our decade-old codebase is only realigned to new rules from time to time.
+
+Now go-openapi and go-swagger make up a really large codebase, which is taxing to maintain and keep afloat.
+
+Code quality and the harmonization of rules have thus become things that we need now.
+
+## Meta-linter
+
+Universally formatted go code promotes ease of writing, reading, and maintenance.
+
+You should run `golangci-lint run` before committing your changes.
+
+Many editors have plugins that do that automatically.
+
+> We use the `golangci-lint` meta-linter. The configuration lies in [`.golangci.yml`](../.golangci.yml).
+> You may read for additional reference.
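+
+As a reference, a minimal local check before committing could look like this (assuming
+`golangci-lint` v2 is installed, as described in the contributing guidelines):
+
+```sh
+# run the meta-linter with the repository configuration (.golangci.yml)
+golangci-lint run ./...
+```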
+
+## Linting rules posture
+
+Thanks to go's original design, we developers don't have to waste much time arguing about matters of code style.
+
+However, the number of available linters has been growing to the point that we need to make choices.
+
+We enable all linters published by `golangci-lint` by default, then disable a few.
+
+Here are the reasons why they are disabled (update: Nov. 2025, `golangci-lint v2.6.1`):
+
+```yaml
+  disable:
+    - depguard        # we don't want to configure rules to constrain imports. That's the reviewer's job
+    - exhaustruct     # we don't want to configure regexps to check type names. That's the reviewer's job
+    - funlen          # we accept cognitive complexity as a meaningful metric, but function length by itself is not
+    - godox           # we don't see any value in forbidding TODOs etc. in code
+    - nlreturn        # we usually apply this "blank line" rule to make code less compact. We just don't want to enforce it
+    - nonamedreturns  # we don't see any valid reason why we couldn't use named returns
+    - noinlineerr     # there is no value added in forbidding inlined err
+    - paralleltest    # we like parallel tests. We just don't want them to be enforced everywhere
+    - recvcheck       # we like the idea of having pointer and non-pointer receivers
+    - testpackage     # we like test packages. We just don't want them to be enforced everywhere
+    - tparallel       # see paralleltest
+    - varnamelen      # sometimes, we like short variables. The linter doesn't catch cases when a short name is good
+    - whitespace      # no added value
+    - wrapcheck       # although there is some sense with this linter's general idea, it produces too much noise
+    - wsl             # no added value. Noise
+    - wsl_v5          # no added value. Noise
+```
+
+As you can see, we agree with the objective of most linters, or at least with the principle they are supposed to enforce.
+But not all linters support fine-grained tuning to tolerate some cases and not others.
+
+When this is possible, we enable linters with relaxed constraints:
+
+```yaml
+  settings:
+    dupl:
+      threshold: 200  # in an older code base such as ours, we have to be tolerant with a little redundancy
+                      # Hopefully, we'll be able to gradually get rid of those.
+    goconst:
+      min-len: 2
+      min-occurrences: 3
+    cyclop:
+      max-complexity: 20  # the default is too low for most of our functions. 20 is a nicer trade-off
+    gocyclo:
+      min-complexity: 20
+    exhaustive:  # when using default in switch, this should be good enough
+      default-signifies-exhaustive: true
+      default-case-required: true
+    lll:
+      line-length: 180  # we just want to avoid extremely long lines.
+                        # It is no big deal if a line or two don't fit on your terminal.
+```
+
+Final note: since we have switched to a forked version of `stretchr/testify`,
+we no longer benefit from the great `testifylint` linter for tests.
diff --git a/errors.go b/errors.go
index b84343d..8c50dde 100644
--- a/errors.go
+++ b/errors.go
@@ -1,5 +1,10 @@
+// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers
+// SPDX-License-Identifier: Apache-2.0
+
 package jsonpointer
 
+import "fmt"
+
 type pointerError string
 
 func (e pointerError) Error() string {
@@ -7,12 +12,24 @@ func (e pointerError) Error() string {
 }
 
 const (
-	// ErrPointer is an error raised by the jsonpointer package
+	// ErrPointer is a sentinel error wrapped by all errors raised by this package.
 	ErrPointer pointerError = "JSON pointer error"
 
-	// ErrInvalidStart states that a JSON pointer must start with a separator ("/")
+	// ErrInvalidStart states that a JSON pointer must start with a separator ("/").
ErrInvalidStart pointerError = `JSON pointer must be empty or start with a "` + pointerSeparator - // ErrUnsupportedValueType indicates that a value of the wrong type is being set + // ErrUnsupportedValueType indicates that a value of the wrong type is being set. ErrUnsupportedValueType pointerError = "only structs, pointers, maps and slices are supported for setting values" ) + +func errNoKey(key string) error { + return fmt.Errorf("object has no key %q: %w", key, ErrPointer) +} + +func errOutOfBounds(length, idx int) error { + return fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", length-1, idx, ErrPointer) +} + +func errInvalidReference(token string) error { + return fmt.Errorf("invalid token reference %q: %w", token, ErrPointer) +} diff --git a/examples_test.go b/examples_test.go new file mode 100644 index 0000000..e4c4222 --- /dev/null +++ b/examples_test.go @@ -0,0 +1,131 @@ +// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package jsonpointer + +import ( + "encoding/json" + "errors" + "fmt" +) + +var ErrExampleStruct = errors.New("example error") + +type exampleDocument struct { + Foo []string `json:"foo"` +} + +func ExampleNew() { + empty, err := New("") + if err != nil { + fmt.Println(err) + + return + } + fmt.Printf("empty pointer: %q\n", empty.String()) + + key, err := New("/foo") + if err != nil { + fmt.Println(err) + + return + } + fmt.Printf("pointer to object key: %q\n", key.String()) + + elem, err := New("/foo/1") + if err != nil { + fmt.Println(err) + + return + } + fmt.Printf("pointer to array element: %q\n", elem.String()) + + escaped0, err := New("/foo~0") + if err != nil { + fmt.Println(err) + + return + } + // key contains "~" + fmt.Printf("pointer to key %q: %q\n", Unescape("foo~0"), escaped0.String()) + + escaped1, err := New("/foo~1") + if err != nil { + fmt.Println(err) + + return + } + // key contains "/" + fmt.Printf("pointer to key %q: %q\n", Unescape("foo~1"), escaped1.String()) + + // output: + // empty pointer: "" + // pointer to object key: "/foo" + // pointer to array element: "/foo/1" + // pointer to key "foo~": "/foo~0" + // pointer to key "foo/": "/foo~1" +} + +func ExamplePointer_Get() { + var doc exampleDocument + + if err := json.Unmarshal(testDocumentJSONBytes, &doc); err != nil { // populates doc + fmt.Println(err) + + return + } + + pointer, err := New("/foo/1") + if err != nil { + fmt.Println(err) + + return + } + + value, kind, err := pointer.Get(doc) + if err != nil { + fmt.Println(err) + + return + } + + fmt.Printf( + "value: %q\nkind: %v\n", + value, kind, + ) + + // Output: + // value: "baz" + // kind: string +} + +func ExamplePointer_Set() { + var doc exampleDocument + + if err := json.Unmarshal(testDocumentJSONBytes, &doc); err != nil { // populates doc + fmt.Println(err) + + return + } + + pointer, err := New("/foo/1") + if err != nil { + fmt.Println(err) + + return + } + + result, err := pointer.Set(&doc, "hey my") + if err != nil { + fmt.Println(err) + + return + } + + fmt.Printf("result: %#v\n", result) + fmt.Printf("doc: %#v\n", doc) + + // Output: + // result: &jsonpointer.exampleDocument{Foo:[]string{"bar", "hey my"}} + // doc: jsonpointer.exampleDocument{Foo:[]string{"bar", "hey my"}} +} diff --git a/fuzz_test.go b/fuzz_test.go new file mode 100644 index 0000000..9a2f4b8 --- /dev/null +++ b/fuzz_test.go @@ -0,0 +1,38 @@ +// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 + +package 
jsonpointer + +import ( + "iter" + "slices" + "strings" + "testing" + + "github.com/go-openapi/testify/v2/require" +) + +func FuzzParse(f *testing.F) { + // initial seed + cumulated := make([]string, 0, 100) + for generator := range generators() { + f.Add(generator) + + cumulated = append(cumulated, generator) + f.Add(strings.Join(cumulated, "")) + } + + f.Fuzz(func(t *testing.T, input string) { + require.NotPanics(t, func() { + _, _ = New(input) + }) + }) +} + +func generators() iter.Seq[string] { + return slices.Values([]string{ + `a`, + ``, `/`, `/`, `/a~1b`, `/a~1b`, `/c%d`, `/e^f`, `/g|h`, `/i\j`, `/k"l`, `/ `, `/m~0n`, + `/foo`, `/0`, + }) +} diff --git a/go.mod b/go.mod index 7620dfd..def3073 100644 --- a/go.mod +++ b/go.mod @@ -2,14 +2,7 @@ module github.com/go-openapi/jsonpointer require ( github.com/go-openapi/swag/jsonname v0.25.1 - github.com/stretchr/testify v1.11.1 -) - -require ( - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/kr/pretty v0.3.1 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect + github.com/go-openapi/testify/v2 v2.0.2 ) go 1.24.0 diff --git a/go.sum b/go.sum index 84a82b5..63f34ee 100644 --- a/go.sum +++ b/go.sum @@ -1,21 +1,12 @@ -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/go-openapi/swag/jsonname v0.25.1 h1:Sgx+qbwa4ej6AomWC6pEfXrA6uP2RkaNjA9BR8a1RJU= github.com/go-openapi/swag/jsonname v0.25.1/go.mod h1:71Tekow6UOLBD3wS7XhdT98g5J5GR13NOTQ9/6Q11Zo= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/go-openapi/testify/v2 v2.0.2 h1:X999g3jeLcoY8qctY/c/Z8iBHTbwLz7R2WXd6Ub6wls= +github.com/go-openapi/testify/v2 v2.0.2/go.mod h1:HCPmvFFnheKK2BuwSA0TbbdxJ3I16pjwMkYkP4Ywn54= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/iface_example_test.go b/iface_example_test.go new file mode 100644 index 0000000..1865c48 --- /dev/null +++ b/iface_example_test.go @@ -0,0 +1,147 @@ +package jsonpointer_test + +import ( + "fmt" + + 
"github.com/go-openapi/jsonpointer" +) + +var ( + _ jsonpointer.JSONPointable = CustomDoc{} + _ jsonpointer.JSONSetable = &CustomDoc{} +) + +// CustomDoc accepts 2 preset properties "propA" and "propB", plus any number of extra properties. +// +// All values are strings. +type CustomDoc struct { + a string + b string + c map[string]string +} + +// JSONLookup implements [jsonpointer.JSONPointable]. +func (d CustomDoc) JSONLookup(key string) (any, error) { + switch key { + case "propA": + return d.a, nil + case "propB": + return d.b, nil + default: + if len(d.c) == 0 { + return nil, fmt.Errorf("key %q not found: %w", key, ErrExampleIface) + } + extra, ok := d.c[key] + if !ok { + return nil, fmt.Errorf("key %q not found: %w", key, ErrExampleIface) + } + + return extra, nil + } +} + +// JSONSet implements [jsonpointer.JSONSetable]. +func (d *CustomDoc) JSONSet(key string, value any) error { + asString, ok := value.(string) + if !ok { + return fmt.Errorf("a CustomDoc only access strings as values, but got %T: %w", value, ErrExampleIface) + } + + switch key { + case "propA": + d.a = asString + + return nil + case "propB": + d.b = asString + + return nil + default: + if len(d.c) == 0 { + d.c = make(map[string]string) + } + d.c[key] = asString + + return nil + } +} + +func Example_iface() { + doc := CustomDoc{ + a: "initial value for a", + b: "initial value for b", + // no extra values + } + + pointerA, err := jsonpointer.New("/propA") + if err != nil { + fmt.Println(err) + + return + } + + // get the initial value for a + propA, kind, err := pointerA.Get(doc) + if err != nil { + fmt.Println(err) + + return + } + fmt.Printf("propA (%v): %v\n", kind, propA) + + pointerB, err := jsonpointer.New("/propB") + if err != nil { + fmt.Println(err) + + return + } + + // get the initial value for b + propB, kind, err := pointerB.Get(doc) + if err != nil { + fmt.Println(err) + + return + } + fmt.Printf("propB (%v): %v\n", kind, propB) + + pointerC, err := jsonpointer.New("/extra") + if err != nil { + fmt.Println(err) + + return + } + + // not found yet + _, _, err = pointerC.Get(doc) + fmt.Printf("propC: %v\n", err) + + _, err = pointerA.Set(&doc, "new value for a") // doc is updated in place + if err != nil { + fmt.Println(err) + + return + } + + _, err = pointerB.Set(&doc, "new value for b") + if err != nil { + fmt.Println(err) + + return + } + + _, err = pointerC.Set(&doc, "new extra value") + if err != nil { + fmt.Println(err) + + return + } + + fmt.Printf("updated doc: %v", doc) + + // output: + // propA (string): initial value for a + // propB (string): initial value for b + // propC: key "extra" not found: example error + // updated doc: {new value for a new value for b map[extra:new extra value]} +} diff --git a/pointer.go b/pointer.go index 7513c47..7df49af 100644 --- a/pointer.go +++ b/pointer.go @@ -1,28 +1,7 @@ -// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// author sigu-399 -// author-github https://github.com/sigu-399 -// author-mail sigu.399@gmail.com -// -// repository-name jsonpointer -// repository-desc An implementation of JSON Pointer - Go language -// -// description Main and unique file. -// -// created 25-02-2013 +// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 +// Package jsonpointer provides a golang implementation for json pointers. package jsonpointer import ( @@ -41,29 +20,47 @@ const ( pointerSeparator = `/` ) -var ( - jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem() - jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem() -) - -// JSONPointable is an interface for structs to implement when they need to customize the -// json pointer process +// JSONPointable is an interface for structs to implement, +// when they need to customize the json pointer process or want to avoid the use of reflection. type JSONPointable interface { - JSONLookup(string) (any, error) + // JSONLookup returns a value pointed at this (unescaped) key. + JSONLookup(key string) (any, error) } -// JSONSetable is an interface for structs to implement when they need to customize the -// json pointer process +// JSONSetable is an interface for structs to implement, +// when they need to customize the json pointer process or want to avoid the use of reflection. type JSONSetable interface { - JSONSet(string, any) error + // JSONSet sets the value pointed at the (unescaped) key. + JSONSet(key string, value any) error } -// Pointer is a representation of a json pointer +// Pointer is a representation of a json pointer. +// +// Use [Pointer.Get] to retrieve a value or [Pointer.Set] to set a value. +// +// It works with any go type interpreted as a JSON document, which means: +// +// - if a type implements [JSONPointable], its [JSONPointable.JSONLookup] method is used to resolve [Pointer.Get] +// - if a type implements [JSONSetable], its [JSONPointable.JSONSet] method is used to resolve [Pointer.Set] +// - a go map[K]V is interpreted as an object, with type K assignable to a string +// - a go slice []T is interpreted as an array +// - a go struct is interpreted as an object, with exported fields interpreted as keys +// - promoted fields from an embedded struct are traversed +// - scalars (e.g. int, float64 ...), channels, functions and go arrays cannot be traversed +// +// For struct s resolved by reflection, key mappings honor the conventional struct tag `json`. +// +// Fields that do not specify a `json` tag, or specify an empty one, or are tagged as `json:"-"` are ignored. +// +// # Limitations +// +// - Unlike go standard marshaling, untagged fields do not default to the go field name and are ignored. +// - anonymous fields are not traversed if untagged type Pointer struct { referenceTokens []string } -// New creates a new json pointer for the given string +// New creates a new json pointer from its string representation. func New(jsonPointerString string) (Pointer, error) { var p Pointer err := p.parse(jsonPointerString) @@ -71,34 +68,40 @@ func New(jsonPointerString string) (Pointer, error) { return p, err } -// Get uses the pointer to retrieve a value from a JSON document +// Get uses the pointer to retrieve a value from a JSON document. +// +// It returns the value with its type as a [reflect.Kind] or an error. 
func (p *Pointer) Get(document any) (any, reflect.Kind, error) { return p.get(document, jsonname.DefaultJSONNameProvider) } -// Set uses the pointer to set a value from a JSON document +// Set uses the pointer to set a value from a data type +// that represent a JSON document. +// +// It returns the updated document. func (p *Pointer) Set(document any, value any) (any, error) { return document, p.set(document, value, jsonname.DefaultJSONNameProvider) } -// DecodedTokens returns the decoded tokens of this JSON pointer +// DecodedTokens returns the decoded (unescaped) tokens of this JSON pointer. func (p *Pointer) DecodedTokens() []string { result := make([]string, 0, len(p.referenceTokens)) - for _, t := range p.referenceTokens { - result = append(result, Unescape(t)) + for _, token := range p.referenceTokens { + result = append(result, Unescape(token)) } + return result } -// IsEmpty returns true if this is an empty json pointer -// this indicates that it points to the root document +// IsEmpty returns true if this is an empty json pointer. +// +// This indicates that it points to the root document. func (p *Pointer) IsEmpty() bool { return len(p.referenceTokens) == 0 } -// Pointer to string representation function +// String representation of a pointer. func (p *Pointer) String() string { - if len(p.referenceTokens) == 0 { return emptyPointer } @@ -137,20 +140,21 @@ func (p *Pointer) Offset(document string) (int64, error) { return offset, nil } -// "Constructor", parses the given string JSON pointer +// "Constructor", parses the given string JSON pointer. func (p *Pointer) parse(jsonPointerString string) error { - var err error - - if jsonPointerString != emptyPointer { - if !strings.HasPrefix(jsonPointerString, pointerSeparator) { - err = errors.Join(ErrInvalidStart, ErrPointer) - } else { - referenceTokens := strings.Split(jsonPointerString, pointerSeparator) - p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...) - } + if jsonPointerString == emptyPointer { + return nil + } + + if !strings.HasPrefix(jsonPointerString, pointerSeparator) { + // non empty pointer must start with "/" + return errors.Join(ErrInvalidStart, ErrPointer) } - return err + referenceTokens := strings.Split(jsonPointerString, pointerSeparator) + p.referenceTokens = append(p.referenceTokens, referenceTokens[1:]...) + + return nil } func (p *Pointer) get(node any, nameProvider *jsonname.NameProvider) (any, reflect.Kind, error) { @@ -160,7 +164,7 @@ func (p *Pointer) get(node any, nameProvider *jsonname.NameProvider) (any, refle kind := reflect.Invalid - // Full document when empty + // full document when empty if len(p.referenceTokens) == 0 { return node, kind, nil } @@ -186,101 +190,103 @@ func (p *Pointer) set(node, data any, nameProvider *jsonname.NameProvider) error if knd != reflect.Pointer && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array { return errors.Join( + fmt.Errorf("unexpected type: %T", node), //nolint:err113 // err wrapping is carried out by errors.Join, not fmt.Errorf. 
ErrUnsupportedValueType, ErrPointer, ) } + l := len(p.referenceTokens) + + // full document when empty + if l == 0 { + return nil + } + if nameProvider == nil { nameProvider = jsonname.DefaultJSONNameProvider } - // Full document when empty - if len(p.referenceTokens) == 0 { - return nil + var decodedToken string + lastIndex := l - 1 + + if lastIndex > 0 { // skip if we only have one token in pointer + for _, token := range p.referenceTokens[:lastIndex] { + decodedToken = Unescape(token) + next, err := p.resolveNodeForToken(node, decodedToken, nameProvider) + if err != nil { + return err + } + + node = next + } } - lastI := len(p.referenceTokens) - 1 - for i, token := range p.referenceTokens { - isLastToken := i == lastI - decodedToken := Unescape(token) + // last token + decodedToken = Unescape(p.referenceTokens[lastIndex]) - if isLastToken { + return setSingleImpl(node, data, decodedToken, nameProvider) +} - return setSingleImpl(node, data, decodedToken, nameProvider) +func (p *Pointer) resolveNodeForToken(node any, decodedToken string, nameProvider *jsonname.NameProvider) (next any, err error) { + // check for nil during traversal + if isNil(node) { + return nil, fmt.Errorf("cannot traverse through nil value at %q: %w", decodedToken, ErrPointer) + } + + pointable, ok := node.(JSONPointable) + if ok { + r, err := pointable.JSONLookup(decodedToken) + if err != nil { + return nil, err } - // Check for nil during traversal - if isNil(node) { - return fmt.Errorf("cannot traverse through nil value at %q: %w", decodedToken, ErrPointer) + fld := reflect.ValueOf(r) + if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Pointer { + return fld.Addr().Interface(), nil } - rValue := reflect.Indirect(reflect.ValueOf(node)) - kind := rValue.Kind() + return r, nil + } - if rValue.Type().Implements(jsonPointableType) { - r, err := node.(JSONPointable).JSONLookup(decodedToken) - if err != nil { - return err - } - fld := reflect.ValueOf(r) - if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Pointer { - node = fld.Addr().Interface() - continue - } - node = r - continue + rValue := reflect.Indirect(reflect.ValueOf(node)) + kind := rValue.Kind() + + switch kind { + case reflect.Struct: + nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) + if !ok { + return nil, fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer) } - switch kind { //nolint:exhaustive - case reflect.Struct: - nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) - if !ok { - return fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer) - } - fld := rValue.FieldByName(nm) - if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Pointer { - node = fld.Addr().Interface() - continue - } - node = fld.Interface() + return typeFromValue(rValue.FieldByName(nm)), nil - case reflect.Map: - kv := reflect.ValueOf(decodedToken) - mv := rValue.MapIndex(kv) + case reflect.Map: + kv := reflect.ValueOf(decodedToken) + mv := rValue.MapIndex(kv) - if !mv.IsValid() { - return fmt.Errorf("object has no key %q: %w", decodedToken, ErrPointer) - } - if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Pointer { - node = mv.Addr().Interface() - continue - } - node = mv.Interface() + 
if !mv.IsValid() { + return nil, errNoKey(decodedToken) + } - case reflect.Slice: - tokenIndex, err := strconv.Atoi(decodedToken) - if err != nil { - return err - } - sLength := rValue.Len() - if tokenIndex < 0 || tokenIndex >= sLength { - return fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength, tokenIndex, ErrPointer) - } + return typeFromValue(mv), nil - elem := rValue.Index(tokenIndex) - if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Pointer { - node = elem.Addr().Interface() - continue - } - node = elem.Interface() + case reflect.Slice: + tokenIndex, err := strconv.Atoi(decodedToken) + if err != nil { + return nil, errors.Join(err, ErrPointer) + } - default: - return fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer) + sLength := rValue.Len() + if tokenIndex < 0 || tokenIndex >= sLength { + return nil, errOutOfBounds(sLength, tokenIndex) } - } - return nil + return typeFromValue(rValue.Index(tokenIndex)), nil + + default: + return nil, errInvalidReference(decodedToken) + } } func isNil(input any) bool { @@ -289,7 +295,7 @@ func isNil(input any) bool { } kind := reflect.TypeOf(input).Kind() - switch kind { //nolint:exhaustive + switch kind { case reflect.Pointer, reflect.Map, reflect.Slice, reflect.Chan: return reflect.ValueOf(input).IsNil() default: @@ -297,12 +303,20 @@ func isNil(input any) bool { } } -// GetForToken gets a value for a json pointer token 1 level deep +func typeFromValue(v reflect.Value) any { + if v.CanAddr() && v.Kind() != reflect.Interface && v.Kind() != reflect.Map && v.Kind() != reflect.Slice && v.Kind() != reflect.Pointer { + return v.Addr().Interface() + } + + return v.Interface() +} + +// GetForToken gets a value for a json pointer token 1 level deep. func GetForToken(document any, decodedToken string) (any, reflect.Kind, error) { return getSingleImpl(document, decodedToken, jsonname.DefaultJSONNameProvider) } -// SetForToken gets a value for a json pointer token 1 level deep +// SetForToken sets a value for a json pointer token 1 level deep. 
func SetForToken(document any, decodedToken string, value any) (any, error) { return document, setSingleImpl(document, value, decodedToken, jsonname.DefaultJSONNameProvider) } @@ -325,13 +339,15 @@ func getSingleImpl(node any, decodedToken string, nameProvider *jsonname.NamePro return getSingleImpl(*typed, decodedToken, nameProvider) } - switch kind { //nolint:exhaustive + switch kind { case reflect.Struct: nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) if !ok { return nil, kind, fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer) } + fld := rValue.FieldByName(nm) + return fld.Interface(), kind, nil case reflect.Map: @@ -341,78 +357,99 @@ func getSingleImpl(node any, decodedToken string, nameProvider *jsonname.NamePro if mv.IsValid() { return mv.Interface(), kind, nil } - return nil, kind, fmt.Errorf("object has no key %q: %w", decodedToken, ErrPointer) + + return nil, kind, errNoKey(decodedToken) case reflect.Slice: tokenIndex, err := strconv.Atoi(decodedToken) if err != nil { - return nil, kind, err + return nil, kind, errors.Join(err, ErrPointer) } sLength := rValue.Len() if tokenIndex < 0 || tokenIndex >= sLength { - return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength-1, tokenIndex, ErrPointer) + return nil, kind, errOutOfBounds(sLength, tokenIndex) } elem := rValue.Index(tokenIndex) return elem.Interface(), kind, nil default: - return nil, kind, fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer) + return nil, kind, errInvalidReference(decodedToken) } } func setSingleImpl(node, data any, decodedToken string, nameProvider *jsonname.NameProvider) error { - rValue := reflect.Indirect(reflect.ValueOf(node)) - - // Check for nil to prevent panic when calling rValue.Type() + // check for nil to prevent panic when calling rValue.Type() if isNil(node) { return fmt.Errorf("cannot set field %q on nil value: %w", decodedToken, ErrPointer) } - if ns, ok := node.(JSONSetable); ok { // pointer impl + if ns, ok := node.(JSONSetable); ok { return ns.JSONSet(decodedToken, data) } - if rValue.Type().Implements(jsonSetableType) { - return node.(JSONSetable).JSONSet(decodedToken, data) - } + rValue := reflect.Indirect(reflect.ValueOf(node)) - switch rValue.Kind() { //nolint:exhaustive + switch rValue.Kind() { case reflect.Struct: nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken) if !ok { return fmt.Errorf("object has no field %q: %w", decodedToken, ErrPointer) } + fld := rValue.FieldByName(nm) - if fld.IsValid() { - fld.Set(reflect.ValueOf(data)) + if !fld.CanSet() { + return fmt.Errorf("can't set struct field %s to %v: %w", nm, data, ErrPointer) + } + + value := reflect.ValueOf(data) + valueType := value.Type() + assignedType := fld.Type() + + if !valueType.AssignableTo(assignedType) { + return fmt.Errorf("can't set value with type %T to field %s with type %v: %w", data, nm, assignedType, ErrPointer) } + + fld.Set(value) + return nil case reflect.Map: kv := reflect.ValueOf(decodedToken) rValue.SetMapIndex(kv, reflect.ValueOf(data)) + return nil case reflect.Slice: tokenIndex, err := strconv.Atoi(decodedToken) if err != nil { - return err + return errors.Join(err, ErrPointer) } + sLength := rValue.Len() if tokenIndex < 0 || tokenIndex >= sLength { - return fmt.Errorf("index out of bounds array[0,%d] index '%d': %w", sLength, tokenIndex, ErrPointer) + return errOutOfBounds(sLength, tokenIndex) } elem := rValue.Index(tokenIndex) if !elem.CanSet() { return fmt.Errorf("can't set slice index %s to %v: 
%w", decodedToken, data, ErrPointer) } - elem.Set(reflect.ValueOf(data)) + + value := reflect.ValueOf(data) + valueType := value.Type() + assignedType := elem.Type() + + if !valueType.AssignableTo(assignedType) { + return fmt.Errorf("can't set value with type %T to slice element %d with type %v: %w", data, tokenIndex, assignedType, ErrPointer) + } + + elem.Set(value) + return nil default: - return fmt.Errorf("invalid token reference %q: %w", decodedToken, ErrPointer) + return errInvalidReference(decodedToken) } } @@ -443,13 +480,14 @@ func offsetSingleObject(dec *json.Decoder, decodedToken string) (int64, error) { return 0, fmt.Errorf("invalid token %#v: %w", tk, ErrPointer) } } + return 0, fmt.Errorf("token reference %q not found: %w", decodedToken, ErrPointer) } func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) { idx, err := strconv.Atoi(decodedToken) if err != nil { - return 0, fmt.Errorf("token reference %q is not a number: %v: %w", decodedToken, err, ErrPointer) + return 0, fmt.Errorf("token reference %q is not a number: %w: %w", decodedToken, err, ErrPointer) } var i int for i = 0; i < idx && dec.More(); i++ { @@ -475,10 +513,12 @@ func offsetSingleArray(dec *json.Decoder, decodedToken string) (int64, error) { if !dec.More() { return 0, fmt.Errorf("token reference %q not found: %w", decodedToken, ErrPointer) } + return dec.InputOffset(), nil } // drainSingle drains a single level of object or array. +// // The decoder has to guarantee the beginning delim (i.e. '{' or '[') has been consumed. func drainSingle(dec *json.Decoder) error { for dec.More() { @@ -500,14 +540,15 @@ func drainSingle(dec *json.Decoder) error { } } - // Consumes the ending delim + // consumes the ending delim if _, err := dec.Token(); err != nil { return err } + return nil } -// Specific JSON pointer encoding here +// JSON pointer encoding: // ~0 => ~ // ~1 => / // ... and vice versa @@ -520,16 +561,23 @@ const ( ) var ( - encRefTokReplacer = strings.NewReplacer(encRefTok1, decRefTok1, encRefTok0, decRefTok0) - decRefTokReplacer = strings.NewReplacer(decRefTok1, encRefTok1, decRefTok0, encRefTok0) + encRefTokReplacer = strings.NewReplacer(encRefTok1, decRefTok1, encRefTok0, decRefTok0) //nolint:gochecknoglobals // it's okay to declare a replacer as a private global + decRefTokReplacer = strings.NewReplacer(decRefTok1, encRefTok1, decRefTok0, encRefTok0) //nolint:gochecknoglobals // it's okay to declare a replacer as a private global ) -// Unescape unescapes a json pointer reference token string to the original representation +// Unescape unescapes a json pointer reference token string to the original representation. func Unescape(token string) string { return encRefTokReplacer.Replace(token) } -// Escape escapes a pointer reference token string +// Escape escapes a pointer reference token string. +// +// The JSONPointer specification defines "/" as a separator and "~" as an escape prefix. +// +// Keys containing such characters are escaped with the following rules: +// +// - "~" is escaped as "~0" +// - "/" is escaped as "~1" func Escape(token string) string { return decRefTokReplacer.Replace(token) } diff --git a/pointer_test.go b/pointer_test.go index 6f7262d..4c68758 100644 --- a/pointer_test.go +++ b/pointer_test.go @@ -1,192 +1,217 @@ -// Copyright 2013 sigu-399 ( https://github.com/sigu-399 ) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// author sigu-399 -// author-github https://github.com/sigu-399 -// author-mail sigu.399@gmail.com -// -// repository-name jsonpointer -// repository-desc An implementation of JSON Pointer - Go language -// -// description Automated tests on package. -// -// created 03-03-2013 +// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers +// SPDX-License-Identifier: Apache-2.0 package jsonpointer import ( "encoding/json" "fmt" + "reflect" "strconv" "testing" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" + "github.com/go-openapi/swag/jsonname" + "github.com/go-openapi/testify/v2/assert" + "github.com/go-openapi/testify/v2/require" ) -const ( - TestDocumentNBItems = 11 - TestNodeObjNBItems = 4 - TestDocumentString = `{ -"foo": ["bar", "baz"], -"obj": { "a":1, "b":2, "c":[3,4], "d":[ {"e":9}, {"f":[50,51]} ] }, -"": 0, -"a/b": 1, -"c%d": 2, -"e^f": 3, -"g|h": 4, -"i\\j": 5, -"k\"l": 6, -" ": 7, -"m~n": 8 -}` -) +func TestEscaping(t *testing.T) { + t.Parallel() + + t.Run("escaped pointer strings against test document", func(t *testing.T) { + ins := []string{`/`, `/`, `/a~1b`, `/a~1b`, `/c%d`, `/e^f`, `/g|h`, `/i\j`, `/k"l`, `/ `, `/m~0n`} + outs := []float64{0, 0, 1, 1, 2, 3, 4, 5, 6, 7, 8} + + for i := range ins { + t.Run("should create a JSONPointer", func(t *testing.T) { + p, err := New(ins[i]) + require.NoError(t, err, "input: %v", ins[i]) + + t.Run("should get JSONPointer from document", func(t *testing.T) { + result, _, err := p.Get(testDocumentJSON(t)) + require.NoError(t, err, "input: %v", ins[i]) + assert.InDeltaf(t, outs[i], result, 1e-6, "input: %v", ins[i]) + }) + }) + } + }) -var testDocumentJSON any + t.Run("special escapes", func(t *testing.T) { + t.Parallel() -type testStructJSON struct { - Foo []string `json:"foo"` - Obj struct { - A int `json:"a"` - B int `json:"b"` - C []int `json:"c"` - D []struct { - E int `json:"e"` - F []int `json:"f"` - } `json:"d"` - } `json:"obj"` -} + t.Run("with escape then unescape", func(t *testing.T) { + const original = "a/" -type aliasedMap map[string]any + t.Run("unescaping an escaped string should yield the original", func(t *testing.T) { + esc := Escape(original) + assert.Equal(t, "a~1", esc) -var testStructJSONDoc testStructJSON -var testStructJSONPtr *testStructJSON + unesc := Unescape(esc) + assert.Equal(t, original, unesc) + }) + }) -func init() { - if err := json.Unmarshal([]byte(TestDocumentString), &testDocumentJSON); err != nil { - panic(err) - } - if err := json.Unmarshal([]byte(TestDocumentString), &testStructJSONDoc); err != nil { - panic(err) - } + t.Run("with multiple escapes", func(t *testing.T) { + unesc := Unescape("~01") + assert.Equal(t, "~1", unesc) + assert.Equal(t, "~01", Escape(unesc)) - testStructJSONPtr = &testStructJSONDoc + const ( + original = "~/" + escaped = "~0~1" + ) + + assert.Equal(t, escaped, Escape(original)) + assert.Equal(t, original, Unescape(escaped)) + }) + + t.Run("with escaped characters in pointer", func(t *testing.T) { + t.Run("escaped ~", func(t *testing.T) { + s := Escape("m~n") + assert.Equal(t, "m~0n", s) + }) + t.Run("escaped 
/", func(t *testing.T) { + s := Escape("m/n") + assert.Equal(t, "m~1n", s) + }) + }) + }) } -func TestEscaping(t *testing.T) { - ins := []string{`/`, `/`, `/a~1b`, `/a~1b`, `/c%d`, `/e^f`, `/g|h`, `/i\j`, `/k"l`, `/ `, `/m~0n`} - outs := []float64{0, 0, 1, 1, 2, 3, 4, 5, 6, 7, 8} +func TestFullDocument(t *testing.T) { + t.Parallel() - for i := range ins { - p, err := New(ins[i]) - require.NoError(t, err, "input: %v", ins[i]) - result, _, err := p.Get(testDocumentJSON) + t.Run("with empty pointer", func(t *testing.T) { + const in = `` - require.NoError(t, err, "input: %v", ins[i]) - assert.InDeltaf(t, outs[i], result, 1e-6, "input: %v", ins[i]) - } + p, err := New(in) + require.NoErrorf(t, err, "New(%v) error %v", in, err) -} + t.Run("should resolve full doc", func(t *testing.T) { + result, _, err := p.Get(testDocumentJSON(t)) + require.NoErrorf(t, err, "Get(%v) error %v", in, err) -func TestFullDocument(t *testing.T) { - const in = `` + asMap, ok := result.(map[string]any) + require.True(t, ok) - p, err := New(in) - require.NoErrorf(t, err, "New(%v) error %v", in, err) + require.Lenf(t, asMap, testDocumentNBItems(), "Get(%v) = %v, expect full document", in, result) + }) - result, _, err := p.Get(testDocumentJSON) - require.NoErrorf(t, err, "Get(%v) error %v", in, err) + t.Run("should resolve full doc, with nil name provider", func(t *testing.T) { + result, _, err := p.get(testDocumentJSON(t), nil) + require.NoErrorf(t, err, "Get(%v) error %v", in, err) - asMap, ok := result.(map[string]any) - require.True(t, ok) - require.Lenf(t, asMap, TestDocumentNBItems, "Get(%v) = %v, expect full document", in, result) + asMap, ok := result.(map[string]any) + require.True(t, ok) + require.Lenf(t, asMap, testDocumentNBItems(), "Get(%v) = %v, expect full document", in, result) - result, _, err = p.get(testDocumentJSON, nil) - require.NoErrorf(t, err, "Get(%v) error %v", in, err) + t.Run("should set value in doc, with nil name provider", func(t *testing.T) { + setter, err := New("/foo/0") + require.NoErrorf(t, err, "New(%v) error %v", in, err) - asMap, ok = result.(map[string]any) - require.True(t, ok) - require.Lenf(t, asMap, TestDocumentNBItems, "Get(%v) = %v, expect full document", in, result) + const value = "hey" + require.NoError(t, setter.set(asMap, value, nil)) + + foos, ok := asMap["foo"] + require.True(t, ok) + + asArray, ok := foos.([]any) + require.True(t, ok) + require.Len(t, asArray, 2) + + foo := asArray[0] + bar, ok := foo.(string) + require.True(t, ok) + + require.Equal(t, value, bar) + }) + }) + }) } func TestDecodedTokens(t *testing.T) { + t.Parallel() + p, err := New("/obj/a~1b") require.NoError(t, err) assert.Equal(t, []string{"obj", "a/b"}, p.DecodedTokens()) } func TestIsEmpty(t *testing.T) { - p, err := New("") - require.NoError(t, err) + t.Parallel() - assert.True(t, p.IsEmpty()) - p, err = New("/obj") - require.NoError(t, err) + t.Run("with empty pointer", func(t *testing.T) { + p, err := New("") + require.NoError(t, err) + + assert.True(t, p.IsEmpty()) + }) + + t.Run("with non-empty pointer", func(t *testing.T) { + p, err := New("/obj") + require.NoError(t, err) - assert.False(t, p.IsEmpty()) + assert.False(t, p.IsEmpty()) + }) } func TestGetSingle(t *testing.T) { - const in = `/obj` + t.Parallel() + + const key = "obj" t.Run("should create a new JSON pointer", func(t *testing.T) { + const in = "/" + key + _, err := New(in) require.NoError(t, err) }) - t.Run(`should find token "obj" in JSON`, func(t *testing.T) { - result, _, err := GetForToken(testDocumentJSON, "obj") + 
t.Run(fmt.Sprintf("should find token %q in JSON", key), func(t *testing.T) { + result, _, err := GetForToken(testDocumentJSON(t), key) require.NoError(t, err) - assert.Len(t, result, TestNodeObjNBItems) + assert.Len(t, result, testNodeObjNBItems()) }) - t.Run(`should find token "obj" in type alias interface`, func(t *testing.T) { - type alias interface{} - var in alias = testDocumentJSON - result, _, err := GetForToken(in, "obj") + t.Run(fmt.Sprintf("should find token %q in type alias interface", key), func(t *testing.T) { + type alias any + var in alias = testDocumentJSON(t) + + result, _, err := GetForToken(in, key) require.NoError(t, err) - assert.Len(t, result, TestNodeObjNBItems) + assert.Len(t, result, testNodeObjNBItems()) }) - t.Run(`should find token "obj" in pointer to interface`, func(t *testing.T) { - in := &testDocumentJSON - result, _, err := GetForToken(in, "obj") + t.Run(fmt.Sprintf("should find token %q in pointer to interface", key), func(t *testing.T) { + in := testDocumentJSON(t) + + result, _, err := GetForToken(&in, key) require.NoError(t, err) - assert.Len(t, result, TestNodeObjNBItems) + assert.Len(t, result, testNodeObjNBItems()) }) - t.Run(`should not find token "Obj" in struct`, func(t *testing.T) { - result, _, err := GetForToken(testStructJSONDoc, "Obj") + t.Run(`should NOT find token "Obj" in struct`, func(t *testing.T) { + result, _, err := GetForToken(testStructJSONDoc(t), "Obj") require.Error(t, err) assert.Nil(t, result) }) t.Run(`should not find token "Obj2" in struct`, func(t *testing.T) { - result, _, err := GetForToken(testStructJSONDoc, "Obj2") + result, _, err := GetForToken(testStructJSONDoc(t), "Obj2") require.Error(t, err) assert.Nil(t, result) }) - t.Run(`should not find token in nil`, func(t *testing.T) { - result, _, err := GetForToken(nil, "obj") + t.Run("should not find token in nil", func(t *testing.T) { + result, _, err := GetForToken(nil, key) require.Error(t, err) assert.Nil(t, result) }) - t.Run(`should not find token in nil interface`, func(t *testing.T) { - var in interface{} - result, _, err := GetForToken(in, "obj") + t.Run("should not find token in nil interface", func(t *testing.T) { + var in any + + result, _, err := GetForToken(in, key) require.Error(t, err) assert.Nil(t, result) }) @@ -219,130 +244,292 @@ func (p pointableMap) JSONLookup(token string) (any, error) { } func TestPointableInterface(t *testing.T) { - p := &pointableImpl{"hello"} + t.Parallel() - result, _, err := GetForToken(p, "some") - require.NoError(t, err) - assert.Equal(t, p.a, result) + t.Run("with pointable type", func(t *testing.T) { + p := &pointableImpl{"hello"} - result, _, err = GetForToken(p, "something") - require.Error(t, err) - assert.Nil(t, result) + result, _, err := GetForToken(p, "some") + require.NoError(t, err) + assert.Equal(t, p.a, result) - pm := pointableMap{"swapped": "hello", "a": "world"} - result, _, err = GetForToken(pm, "swap") - require.NoError(t, err) - assert.Equal(t, pm["swapped"], result) + result, _, err = GetForToken(p, "something") + require.Error(t, err) + assert.Nil(t, result) + }) - result, _, err = GetForToken(pm, "a") - require.NoError(t, err) - assert.Equal(t, pm["a"], result) + t.Run("with pointable map", func(t *testing.T) { + p := pointableMap{"swapped": "hello", "a": "world"} + result, _, err := GetForToken(p, "swap") + require.NoError(t, err) + assert.Equal(t, p["swapped"], result) + + result, _, err = GetForToken(p, "a") + require.NoError(t, err) + assert.Equal(t, p["a"], result) + }) } func TestGetNode(t 
*testing.T) { + t.Parallel() + const in = `/obj` - p, err := New(in) - require.NoError(t, err) + t.Run("should build pointer", func(t *testing.T) { + p, err := New(in) + require.NoError(t, err) - result, _, err := p.Get(testDocumentJSON) - require.NoError(t, err) - assert.Len(t, result, TestNodeObjNBItems) + t.Run("should resolve pointer against document", func(t *testing.T) { + result, _, err := p.Get(testDocumentJSON(t)) + require.NoError(t, err) + assert.Len(t, result, testNodeObjNBItems()) + }) - result, _, err = p.Get(aliasedMap(testDocumentJSON.(map[string]any))) - require.NoError(t, err) - assert.Len(t, result, TestNodeObjNBItems) + t.Run("with aliased map", func(t *testing.T) { + asMap, ok := testDocumentJSON(t).(map[string]any) + require.True(t, ok) + alias := aliasedMap(asMap) - result, _, err = p.Get(testStructJSONDoc) - require.NoError(t, err) - assert.Equal(t, testStructJSONDoc.Obj, result) + result, _, err := p.Get(alias) + require.NoError(t, err) + assert.Len(t, result, testNodeObjNBItems()) + }) - result, _, err = p.Get(testStructJSONPtr) - require.NoError(t, err) - assert.Equal(t, testStructJSONDoc.Obj, result) + t.Run("with struct", func(t *testing.T) { + doc := testStructJSONDoc(t) + expected := testStructJSONDoc(t).Obj + + result, _, err := p.Get(doc) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) + + t.Run("with pointer to struct", func(t *testing.T) { + doc := testStructJSONPtr(t) + expected := testStructJSONDoc(t).Obj + + result, _, err := p.Get(doc) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) + }) } func TestArray(t *testing.T) { + t.Parallel() + ins := []string{`/foo/0`, `/foo/0`, `/foo/1`} outs := []string{"bar", "bar", "baz"} - for i := range ins { - p, err := New(ins[i]) - require.NoError(t, err) - - result, _, err := p.Get(testStructJSONDoc) - require.NoError(t, err) - assert.Equal(t, outs[i], result) + for i, pointer := range ins { + expected := outs[i] - result, _, err = p.Get(testStructJSONPtr) - require.NoError(t, err) - assert.Equal(t, outs[i], result) + t.Run(fmt.Sprintf("with pointer %q", pointer), func(t *testing.T) { + p, err := New(pointer) + require.NoError(t, err) - result, _, err = p.Get(testDocumentJSON) - require.NoError(t, err) - assert.Equal(t, outs[i], result) + t.Run("should resolve against struct", func(t *testing.T) { + result, _, err := p.Get(testStructJSONDoc(t)) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) + + t.Run("should resolve against pointer to struct", func(t *testing.T) { + result, _, err := p.Get(testStructJSONPtr(t)) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) + + t.Run("should resolve against dynamic JSON map", func(t *testing.T) { + result, _, err := p.Get(testDocumentJSON(t)) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) + }) } } +func TestStruct(t *testing.T) { + t.Parallel() + + t.Run("with untagged struct field", func(t *testing.T) { + type Embedded struct { + D int `json:"d"` + } + + s := struct { + Embedded + + A int `json:"a"` + B int + Anonymous struct { + C int `json:"c"` + } + }{} + + { + s.A = 1 + s.B = 2 + s.Anonymous.C = 3 + s.D = 4 + } + + t.Run(`should resolve field A tagged "a"`, func(t *testing.T) { + pointerA, err := New("/a") + require.NoError(t, err) + + value, kind, err := pointerA.Get(s) + require.NoError(t, err) + require.Equal(t, reflect.Int, kind) + require.Equal(t, 1, value) + + _, err = pointerA.Set(&s, 9) + require.NoError(t, err) + + value, _, err = pointerA.Get(s) + require.NoError(t, err) 
+ require.Equal(t, 9, value) + }) + + t.Run(`should resolve embedded field D with tag`, func(t *testing.T) { + pointerD, err := New("/d") + require.NoError(t, err) + + value, kind, err := pointerD.Get(s) + require.NoError(t, err) + require.Equal(t, reflect.Int, kind) + require.Equal(t, 4, value) + + _, err = pointerD.Set(&s, 6) + require.NoError(t, err) + + value, _, err = pointerD.Get(s) + require.NoError(t, err) + require.Equal(t, 6, value) + }) + + t.Run("with known limitations", func(t *testing.T) { + t.Run(`should not resolve field B without tag`, func(t *testing.T) { + pointerB, err := New("/B") + require.NoError(t, err) + + _, _, err = pointerB.Get(s) + require.Error(t, err) + require.ErrorContains(t, err, `has no field "B"`) + + _, err = pointerB.Set(&s, 8) + require.Error(t, err) + require.ErrorContains(t, err, `has no field "B"`) + }) + + t.Run(`should not resolve field C with tag, but anonymous`, func(t *testing.T) { + pointerC, err := New("/c") + require.NoError(t, err) + + _, _, err = pointerC.Get(s) + require.Error(t, err) + require.ErrorContains(t, err, `has no field "c"`) + + _, err = pointerC.Set(&s, 7) + require.Error(t, err) + require.ErrorContains(t, err, `has no field "c"`) + }) + }) + }) +} + func TestOtherThings(t *testing.T) { - _, err := New("abc") - require.Error(t, err) + t.Parallel() - p, err := New("") - require.NoError(t, err) - assert.Empty(t, p.String()) + t.Run("single string pointer should be valid", func(t *testing.T) { + _, err := New("abc") + require.Error(t, err) + }) - p, err = New("/obj/a") - require.NoError(t, err) - assert.Equal(t, "/obj/a", p.String()) + t.Run("empty string pointer should be valid", func(t *testing.T) { + p, err := New("") + require.NoError(t, err) + assert.Empty(t, p.String()) + }) - s := Escape("m~n") - assert.Equal(t, "m~0n", s) - s = Escape("m/n") - assert.Equal(t, "m~1n", s) + t.Run("string representation of a pointer", func(t *testing.T) { + p, err := New("/obj/a") + require.NoError(t, err) + assert.Equal(t, "/obj/a", p.String()) + }) - p, err = New("/foo/3") - require.NoError(t, err) - _, _, err = p.Get(testDocumentJSON) - require.Error(t, err) + t.Run("out of bound array index should error", func(t *testing.T) { + t.Run("with index overflow", func(t *testing.T) { + p, err := New("/foo/3") + require.NoError(t, err) - p, err = New("/foo/a") - require.NoError(t, err) - _, _, err = p.Get(testDocumentJSON) - require.Error(t, err) + _, _, err = p.Get(testDocumentJSON(t)) + require.Error(t, err) + }) - p, err = New("/notthere") - require.NoError(t, err) - _, _, err = p.Get(testDocumentJSON) - require.Error(t, err) + t.Run("with index unerflow", func(t *testing.T) { + p, err := New("/foo/-3") + require.NoError(t, err) - p, err = New("/invalid") - require.NoError(t, err) - _, _, err = p.Get(1234) - require.Error(t, err) + _, _, err = p.Get(testDocumentJSON(t)) + require.Error(t, err) + }) + }) - p, err = New("/foo/1") - require.NoError(t, err) - expected := "hello" - bbb := testDocumentJSON.(map[string]any)["foo"] - bbb.([]any)[1] = "hello" + t.Run("referring to a key in an array should error", func(t *testing.T) { + p, err := New("/foo/a") + require.NoError(t, err) + _, _, err = p.Get(testDocumentJSON(t)) + require.Error(t, err) + }) - v, _, err := p.Get(testDocumentJSON) - require.NoError(t, err) - assert.Equal(t, expected, v) + t.Run("referring to a non-existing key in an array should error", func(t *testing.T) { + p, err := New("/notthere") + require.NoError(t, err) + _, _, err = p.Get(testDocumentJSON(t)) + require.Error(t, 
err) + }) - esc := Escape("a/") - assert.Equal(t, "a~1", esc) - unesc := Unescape(esc) - assert.Equal(t, "a/", unesc) + t.Run("resolving pointer against an unsupported type (int) should error", func(t *testing.T) { + p, err := New("/invalid") + require.NoError(t, err) + _, _, err = p.Get(1234) + require.Error(t, err) + }) - unesc = Unescape("~01") - assert.Equal(t, "~1", unesc) - assert.Equal(t, "~0~1", Escape("~/")) - assert.Equal(t, "~/", Unescape("~0~1")) + t.Run("with pointer to an array index", func(t *testing.T) { + for index := range 2 { + p, err := New(fmt.Sprintf("/foo/%d", index)) + require.NoError(t, err) + + v, _, err := p.Get(testDocumentJSON(t)) + require.NoError(t, err) + + expected := extractFooKeyIndex(t, index) + assert.Equal(t, expected, v) + } + }) +} + +func extractFooKeyIndex(t *testing.T, index int) any { + t.Helper() + + asMap, ok := testDocumentJSON(t).(map[string]any) + require.True(t, ok) + + // {"foo": [ ... ] } + bbb, ok := asMap["foo"] + require.True(t, ok) + + asArray, ok := bbb.([]any) + require.True(t, ok) + + return asArray[index] } func TestObject(t *testing.T) { + t.Parallel() + ins := []string{`/obj/a`, `/obj/b`, `/obj/c/0`, `/obj/c/1`, `/obj/c/1`, `/obj/d/1/f/0`} outs := []float64{1, 2, 3, 4, 4, 50} @@ -350,26 +537,20 @@ func TestObject(t *testing.T) { p, err := New(ins[i]) require.NoError(t, err) - result, _, err := p.Get(testDocumentJSON) + result, _, err := p.Get(testDocumentJSON(t)) require.NoError(t, err) assert.InDelta(t, outs[i], result, 1e-6) - result, _, err = p.Get(testStructJSONDoc) + result, _, err = p.Get(testStructJSONDoc(t)) require.NoError(t, err) assert.InDelta(t, outs[i], result, 1e-6) - result, _, err = p.Get(testStructJSONPtr) + result, _, err = p.Get(testStructJSONPtr(t)) require.NoError(t, err) assert.InDelta(t, outs[i], result, 1e-6) } } -/* - type setJSONDocEle struct { - B int `json:"b"` - C int `json:"c"` - } -*/ type setJSONDoc struct { A []struct { B int `json:"b"` @@ -392,6 +573,7 @@ func (s settableDoc) MarshalJSON() ([]byte, error) { res.D = s.Int return json.Marshal(res) } + func (s *settableDoc) UnmarshalJSON(data []byte) error { var res struct { A settableColl `json:"a"` @@ -406,7 +588,7 @@ func (s *settableDoc) UnmarshalJSON(data []byte) error { return nil } -// JSONLookup implements an interface to customize json pointer lookup +// JSONLookup implements an interface to customize json pointer lookup. func (s settableDoc) JSONLookup(token string) (any, error) { switch token { case "a": @@ -418,7 +600,7 @@ func (s settableDoc) JSONLookup(token string) (any, error) { } } -// JSONLookup implements an interface to customize json pointer lookup +// JSONLookup implements an interface to customize json pointer lookup. func (s *settableDoc) JSONSet(token string, data any) error { switch token { case "a": @@ -471,11 +653,12 @@ type settableColl struct { func (s settableColl) MarshalJSON() ([]byte, error) { return json.Marshal(s.Items) } + func (s *settableColl) UnmarshalJSON(data []byte) error { return json.Unmarshal(data, &s.Items) } -// JSONLookup implements an interface to customize json pointer lookup +// JSONLookup implements an interface to customize json pointer lookup. 
 func (s settableColl) JSONLookup(token string) (any, error) {
 	if tok, err := strconv.Atoi(token); err == nil {
 		return &s.Items[tok], nil
@@ -483,7 +666,7 @@ func (s settableColl) JSONLookup(token string) (any, error) {
 	return nil, fmt.Errorf("%s is not a valid index: %w", token, ErrPointer)
 }
 
-// JSONLookup implements an interface to customize json pointer lookup
+// JSONSet implements an interface to customize json pointer setting.
 func (s *settableColl) JSONSet(token string, data any) error {
 	if _, err := strconv.Atoi(token); err == nil {
 		_, err := SetForToken(s.Items, token, data)
@@ -504,11 +687,14 @@ type settableInt struct {
 func (s settableInt) MarshalJSON() ([]byte, error) {
 	return json.Marshal(s.Value)
 }
+
 func (s *settableInt) UnmarshalJSON(data []byte) error {
 	return json.Unmarshal(data, &s.Value)
 }
 
 func TestSetNode(t *testing.T) {
+	t.Parallel()
+
 	const jsonText = `{"a":[{"b": 1, "c": 2}], "d": 3}`
 
 	var jsonDocument any
@@ -535,7 +721,9 @@ func TestSetNode(t *testing.T) {
 		chNodeVI := changedNode["c"]
 		require.IsType(t, 0, chNodeVI)
 
-		changedNodeValue := chNodeVI.(int)
+		changedNodeValue, ok := chNodeVI.(int)
+		require.True(t, ok)
+
 		require.Equal(t, 999, changedNodeValue)
 		assert.Len(t, sliceNode, 1)
 	})
@@ -735,6 +923,8 @@ func TestSetNode(t *testing.T) {
 }
 
 func TestOffset(t *testing.T) {
+	t.Parallel()
+
 	cases := []struct {
 		name string
 		ptr  string
@@ -797,3 +987,123 @@ func TestOffset(t *testing.T) {
 		})
 	}
 }
+
+func TestEdgeCases(t *testing.T) {
+	t.Parallel()
+
+	t.Run("set at pointer against an unsupported type (int) should error", func(t *testing.T) {
+		p, err := New("/invalid")
+		require.NoError(t, err)
+		_, err = p.Set(1, 1234)
+		require.Error(t, err)
+		require.ErrorIs(t, err, ErrUnsupportedValueType)
+	})
+
+	t.Run("set with empty pointer", func(t *testing.T) {
+		p, err := New("")
+		require.NoError(t, err)
+
+		doc := testDocumentJSON(t)
+		newDoc, err := p.Set(doc, 1)
+		require.NoError(t, err)
+
+		require.Equal(t, doc, newDoc)
+	})
+
+	t.Run("with out of bounds index", func(t *testing.T) {
+		p, err := New("/foo/10")
+		require.NoError(t, err)
+
+		t.Run("should error on Get", func(t *testing.T) {
+			_, _, err := p.Get(testStructJSONDoc(t))
+			require.Error(t, err)
+			require.ErrorContains(t, err, "index out of bounds")
+		})
+
+		t.Run("should error on Set", func(t *testing.T) {
+			_, err := p.Set(testStructJSONPtr(t), "peek-a-boo")
+			require.Error(t, err)
+			require.ErrorContains(t, err, "index out of bounds")
+		})
+	})
+
+	t.Run("Set with invalid pointer token", func(t *testing.T) {
+		doc := testStructJSONDoc(t)
+		pointer, err := New("/foo/x")
+		require.NoError(t, err)
+
+		_, err = pointer.Set(&doc, "yay")
+		require.Error(t, err)
+		require.ErrorContains(t, err, `Atoi: parsing "x"`)
+	})
+
+	t.Run("Set with invalid reference in struct", func(t *testing.T) {
+		doc := struct {
+			A func() `json:"a"`
+			B []int  `json:"b"`
+		}{
+			A: func() {},
+			B: []int{0, 1},
+		}
+
+		t.Run("should error when attempting to set a struct field value that is not assignable", func(t *testing.T) {
+			pointerA, err := New("/a")
+			require.NoError(t, err)
+
+			_, err = pointerA.Set(&doc, "waou")
+			require.Error(t, err)
+			require.ErrorContains(t, err, `can't set value with type string to field A`)
+		})
+
+		t.Run("should error when attempting to set a slice element value that is not assignable", func(t *testing.T) {
+			pointerB, err := New("/b/0")
+			require.NoError(t, err)
+
+			_, err = pointerB.Set(&doc, "waou")
+			require.Error(t, err)
+			require.ErrorContains(t, err, `can't set value with type string to slice element 0 with type int`)
+		})
+
+		t.Run("should error when attempting to set a value that does not exist", func(t *testing.T) {
+			pointerB, err := New("/x")
+			require.NoError(t, err)
+
+			_, _, err = pointerB.Get(&doc)
+			require.Error(t, err)
+			require.ErrorContains(t, err, `no field`)
+
+			_, err = pointerB.Set(&doc, "oops")
+			require.Error(t, err)
+			require.ErrorContains(t, err, `no field`)
+		})
+	})
+}
+
+func TestInternalEdgeCases(t *testing.T) {
+	t.Parallel()
+
+	t.Run("setSingleImpl should error on any node not a struct, map or slice", func(t *testing.T) {
+		var node int
+
+		err := setSingleImpl(&node, 3, "a", jsonname.DefaultJSONNameProvider)
+		require.Error(t, err)
+		require.ErrorContains(t, err, `invalid token reference "a"`)
+	})
+
+	t.Run("with simulated unsettable", func(t *testing.T) {
+		type unsettable struct {
+			A string `json:"a"`
+		}
+		doc := unsettable{
+			A: "a",
+		}
+
+		t.Run("setSingleImpl should error on struct field that is not settable", func(t *testing.T) {
+			node := doc // doesn't pass a pointer: unsettable
+
+			err := setSingleImpl(node, "new value", "a", jsonname.DefaultJSONNameProvider)
+			require.Error(t, err)
+			require.ErrorContains(t, err, `can't set struct field`)
+		})
+	})
+}
diff --git a/struct_example_test.go b/struct_example_test.go
new file mode 100644
index 0000000..2986902
--- /dev/null
+++ b/struct_example_test.go
@@ -0,0 +1,114 @@
+package jsonpointer_test
+
+import (
+	"errors"
+	"fmt"
+
+	"github.com/go-openapi/jsonpointer"
+)
+
+var ErrExampleIface = errors.New("example error")
+
+type ExampleDoc struct {
+	PromotedDoc
+
+	Promoted     EmbeddedDoc `json:"promoted"`
+	AnonPromoted EmbeddedDoc `json:"-"`
+	A            string      `json:"propA"`
+	Ignored      string      `json:"-"`
+	Untagged     string
+
+	unexported string
+}
+
+type EmbeddedDoc struct {
+	B string `json:"propB"`
+}
+
+type PromotedDoc struct {
+	C string `json:"propC"`
+}
+
+func Example_struct() {
+	doc := ExampleDoc{
+		PromotedDoc: PromotedDoc{
+			C: "c",
+		},
+		Promoted: EmbeddedDoc{
+			B: "promoted",
+		},
+		A:          "a",
+		Ignored:    "ignored",
+		unexported: "unexported",
+	}
+
+	{
+		// tagged simple field
+		pointerA, _ := jsonpointer.New("/propA")
+		a, _, err := pointerA.Get(doc)
+		if err != nil {
+			fmt.Println(err)
+			return
+		}
+		fmt.Printf("a: %v\n", a)
+	}
+
+	{
+		// tagged struct field is resolved
+		pointerB, _ := jsonpointer.New("/promoted/propB")
+		b, _, err := pointerB.Get(doc)
+		if err != nil {
+			fmt.Println(err)
+			return
+		}
+		fmt.Printf("b: %v\n", b)
+	}
+
+	{
+		// tagged embedded field is resolved
+		pointerC, _ := jsonpointer.New("/propC")
+		c, _, err := pointerC.Get(doc)
+		if err != nil {
+			fmt.Println(err)
+			return
+		}
+		fmt.Printf("c: %v\n", c)
+	}
+
+	{
+		// explicitly ignored by JSON tag.
+		pointerI, _ := jsonpointer.New("/ignored")
+		_, _, err := pointerI.Get(doc)
+		fmt.Printf("ignored: %v\n", err)
+	}
+
+	{
+		// unexported field is ignored: use [JSONPointable] to alter this behavior.
+		pointerX, _ := jsonpointer.New("/unexported")
+		_, _, err := pointerX.Get(doc)
+		fmt.Printf("unexported: %v\n", err)
+	}
+
+	{
+		// Limitation: anonymous field is not resolved.
+		pointerC, _ := jsonpointer.New("/propB")
+		_, _, err := pointerC.Get(doc)
+		fmt.Printf("anonymous: %v\n", err)
+	}
+
+	{
+		// Limitation: untagged exported field is ignored, unlike with the standard library's JSON marshaling.
+		pointerU, _ := jsonpointer.New("/untagged")
+		_, _, err := pointerU.Get(doc)
+		fmt.Printf("untagged: %v\n", err)
+	}
+
+	// output:
+	// a: a
+	// b: promoted
+	// c: c
+	// ignored: object has no field "ignored": JSON pointer error
+	// unexported: object has no field "unexported": JSON pointer error
+	// anonymous: object has no field "propB": JSON pointer error
+	// untagged: object has no field "untagged": JSON pointer error
+}
diff --git a/testdata/test_document.json b/testdata/test_document.json
new file mode 100644
index 0000000..40ea4a2
--- /dev/null
+++ b/testdata/test_document.json
@@ -0,0 +1,34 @@
+{
+  "foo": [
+    "bar",
+    "baz"
+  ],
+  "obj": {
+    "a":1,
+    "b":2,
+    "c":[
+      3,
+      4
+    ],
+    "d":[
+      {
+        "e":9
+      },
+      {
+        "f":[
+          50,
+          51
+        ]
+      }
+    ]
+  },
+  "": 0,
+  "a/b": 1,
+  "c%d": 2,
+  "e^f": 3,
+  "g|h": 4,
+  "i\\j": 5,
+  "k\"l": 6,
+  " ": 7,
+  "m~n": 8
+}
diff --git a/testdata_test.go b/testdata_test.go
new file mode 100644
index 0000000..9559b14
--- /dev/null
+++ b/testdata_test.go
@@ -0,0 +1,66 @@
+// SPDX-FileCopyrightText: Copyright (c) 2015-2025 go-swagger maintainers
+// SPDX-License-Identifier: Apache-2.0
+
+package jsonpointer
+
+import (
+	_ "embed" // initialize embed
+	"encoding/json"
+	"testing"
+
+	"github.com/go-openapi/testify/v2/require"
+)
+
+//go:embed testdata/*.json
+var testDocumentJSONBytes []byte
+
+func testDocumentJSON(t *testing.T) any {
+	t.Helper()
+
+	var document any
+	require.NoError(t, json.Unmarshal(testDocumentJSONBytes, &document))
+
+	return document
+}
+
+func testStructJSONDoc(t *testing.T) testStructJSON {
+	t.Helper()
+
+	var document testStructJSON
+	require.NoError(t, json.Unmarshal(testDocumentJSONBytes, &document))
+
+	return document
+}
+
+func testStructJSONPtr(t *testing.T) *testStructJSON {
+	t.Helper()
+
+	document := testStructJSONDoc(t)
+
+	return &document
+}
+
+// number of items in the test document.
+func testDocumentNBItems() int {
+	return 11
+}
+
+// number of object nodes in the test document.
+func testNodeObjNBItems() int {
+	return 4
+}
+
+type testStructJSON struct {
+	Foo []string `json:"foo"`
+	Obj struct {
+		A int   `json:"a"`
+		B int   `json:"b"`
+		C []int `json:"c"`
+		D []struct {
+			E int   `json:"e"`
+			F []int `json:"f"`
+		} `json:"d"`
+	} `json:"obj"`
+}
+
+type aliasedMap map[string]any