diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..b6d76d3 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,39 @@ +# Git +.git +.gitignore +.github + +# Docker +.dockerignore + +# IDE +.idea +.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +**/__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +*.py[cod] +*$py.class +.pytest_cache/ +..mypy_cache/ + +# poetry +.venv +requirements*.txt + +# C extensions +*.so + +# Virtual environment +.venv +venv + +.DS_Store +.AppleDouble +.LSOverride +._* diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..7f578f1 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,24 @@ +# Check http://editorconfig.org for more information +# This is the main config file for this project: +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +indent_style = space +indent_size = 2 +trim_trailing_whitespace = true + +[*.{py, pyi}] +indent_style = space +indent_size = 4 + +[Makefile] +indent_style = tab + +[*.md] +trim_trailing_whitespace = false + +[*.{diff,patch}] +trim_trailing_whitespace = false diff --git a/.github/.stale.yml b/.github/.stale.yml new file mode 100644 index 0000000..7b81464 --- /dev/null +++ b/.github/.stale.yml @@ -0,0 +1,17 @@ +# Number of days of inactivity before an issue becomes stale +daysUntilStale: 120 +# Number of days of inactivity before a stale issue is closed +daysUntilClose: 30 +# Issues with these labels will never be considered stale +exemptLabels: + - pinned + - security +# Label to use when marking an issue as stale +staleLabel: stale +# Comment to post when marking an issue as stale. Set to `false` to disable +markComment: > + This issue has been automatically marked as stale because it has not had + recent activity. It will be closed if no further activity occurs in 30 days. Thank you + for your contributions. +# Comment to post when closing a stale issue. 
Set to `false` to disable +closeComment: false diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..236ee7b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: ":bug: Bug report" +about: If something isn't working 🔧 +title: '' +labels: bug, needs-triage +assignees: +--- + +## :bug: Bug Report + + + +## :microscope: How To Reproduce + +Steps to reproduce the behavior: + +1. ... + +### Code sample + + + +### Environment + +* OS: [e.g. Linux / Windows / macOS] +* Python version +* stac-model version + + +## :chart_with_upwards_trend: Expected behavior + + + +## :paperclip: Additional context + + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..8f2da54 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,3 @@ +# Configuration: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository + +blank_issues_enabled: false diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..41bf0cd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,23 @@ +--- +name: ":rocket: Feature request" +about: Suggest an idea for this project 🏖 +title: '' +labels: enhancement, needs-triage +assignees: +--- + +## :rocket: Feature Request + + + +## :sound: Motivation + + + +## :satellite: Alternatives + + + +## :paperclip: Additional context + + diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000..bf287fd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,28 @@ +--- +name: "red_question_mark Question" +about: Ask a question about this project 🎓 +title: '' +labels: question, needs-triage +assignees: +--- + +## Checklist + + + +- [ ] I've searched the project's [`issues`][1], looking for 
the following terms: + - [...] + +## :question: Question + + + +How can I [...]? + +Is it possible to [...]? + +## :paperclip: Additional context + + + +[1]: https://github.com/crim-ca/stac-model/issues diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..c088256 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,30 @@ +## Description + + + +## Related Issue + + + +## Type of Change + + + +- [ ] :books: Examples, docs, tutorials or dependencies update; +- [ ] :wrench: Bug fix (non-breaking change which fixes an issue); +- [ ] :clinking_glasses: Improvement (non-breaking change which improves an existing feature); +- [ ] :rocket: New feature (non-breaking change which adds functionality); +- [ ] :boom: Breaking change (fix or feature that would cause existing functionality to change); +- [ ] :closed_lock_with_key: Security fix. + +## Checklist + + + +- [ ] I've read the [`CONTRIBUTING.md`][2] guide; +- [ ] I've updated the code style using `make codestyle`; +- [ ] I've written tests for all new methods and classes that I created; +- [ ] I've written the docstring in `Google` format for all the methods and classes that I used. 
+ +[1]: https://github.com/rbavery/stac-model/blob/master/CODE_OF_CONDUCT.md +[2]: https://github.com/rbavery/stac-model/blob/master/CONTRIBUTING.md diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..8f872f1 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,35 @@ +# Configuration: https://dependabot.com/docs/config-file/ +# Docs: https://docs.github.com/en/github/administering-a-repository/keeping-your-dependencies-updated-automatically + +version: 2 + +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "monthly" + allow: + - dependency-type: "all" + commit-message: + prefix: ":arrow_up:" + open-pull-requests-limit: 5 + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + allow: + - dependency-type: "all" + commit-message: + prefix: ":arrow_up:" + open-pull-requests-limit: 5 + + - package-ecosystem: "docker" + directory: "/docker" + schedule: + interval: "monthly" + allow: + - dependency-type: "all" + commit-message: + prefix: ":arrow_up:" + open-pull-requests-limit: 5 diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 0000000..8ad8b33 --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,28 @@ +# Release drafter configuration https://github.com/release-drafter/release-drafter#configuration +# Emojis were chosen to match the https://gitmoji.dev/ + +name-template: "v$NEXT_PATCH_VERSION" +tag-template: "v$NEXT_PATCH_VERSION" + +categories: + - title: ":rocket: Features" + labels: [enhancement, feature] + - title: ":wrench: Fixes & Refactoring" + labels: [bug, refactoring, bugfix, fix] + - title: ":package: Build System & CI/CD" + labels: [build, ci, testing] + - title: ":boom: Breaking Changes" + labels: [breaking] + - title: ":memo: Documentation" + labels: [documentation] + - title: ":arrow_up: Dependencies updates" + labels: [dependencies] + +template: | + ## What's Changed + + $CHANGES + + ## 
:busts_in_silhouette: List of contributors + + $CONTRIBUTORS diff --git a/.github/remark.yaml b/.github/remark.yaml index 9d1b95c..a58537b 100644 --- a/.github/remark.yaml +++ b/.github/remark.yaml @@ -4,7 +4,8 @@ plugins: # Apply some recommended defaults for consistency - remark-preset-lint-consistent - remark-preset-lint-recommended - - lint-no-html + - - lint-no-html + - false # General formatting - - remark-lint-emphasis-marker - '*' @@ -12,7 +13,9 @@ plugins: - remark-lint-blockquote-indentation - remark-lint-no-consecutive-blank-lines - - remark-lint-maximum-line-length - - 150 + - 120 +# GFM - autolink literals, footnotes, strikethrough, tables, tasklist + - remark-gfm # Code - remark-lint-fenced-code-flag - remark-lint-fenced-code-marker @@ -37,7 +40,7 @@ plugins: - - remark-lint-unordered-list-marker-style - '-' - - remark-lint-list-item-indent - - space + - space # Tables - remark-lint-table-pipes - remark-lint-no-literal-urls diff --git a/.github/workflows/greetings.yml b/.github/workflows/greetings.yml new file mode 100644 index 0000000..19f1ce0 --- /dev/null +++ b/.github/workflows/greetings.yml @@ -0,0 +1,16 @@ +name: Greetings + +on: [pull_request, issues] + +jobs: + greeting: + runs-on: ubuntu-latest + steps: + - uses: actions/first-interaction@v1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + pr-message: 'Hello @${{ github.actor }}, thank you for submitting a PR!' + issue-message: | + Hello @${{ github.actor }}, thank you for submitting an issue! + + If this is a bug report, please provide **minimum viable code to reproduce your issue**. 
diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 0000000..0c06b2b --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,16 @@ +name: Release Drafter + +on: + push: + # branches to consider in the event; optional, defaults to all + branches: + - main + +jobs: + update_release_draft: + runs-on: ubuntu-latest + steps: + # Drafts your next Release notes as Pull Requests are merged into "master" + - uses: release-drafter/release-drafter@v5.15.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/stac-model.yml b/.github/workflows/stac-model.yml new file mode 100644 index 0000000..b400632 --- /dev/null +++ b/.github/workflows/stac-model.yml @@ -0,0 +1,36 @@ +name: Check Python Linting and Tests + +on: [push, pull_request] + +jobs: + stac-model: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12"] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2.2.2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install poetry + run: make poetry-install + + - name: Set up cache + uses: actions/cache@v2.1.6 + with: + path: .venv + key: venv-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }}-${{ hashFiles('poetry.lock') }} + - name: Install dependencies + run: make install-dev + + - name: Run checks + run: | + make lint-all + + - name: Run tests + run: | + make test diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 87f33a5..bddc2c4 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -5,10 +5,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + node-version: [16] steps: - uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + cache: npm - uses: actions/checkout@v2 - run: | npm install - npm test \ No newline at end of file + npm test 
diff --git a/.gitignore b/.gitignore index b68addb..a9b169b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,1040 @@ +Untitled.ipynb /package-lock.json -/node_modules \ No newline at end of file +/node_modules +.vscode +.idea + +### ArchLinuxPackages ### +*.tar +*.tar.* +*.jar +*.exe +*.msi +*.zip +*.tgz +*.log +*.log.* +*.sig + +pkg/ +src/ + +### C ### +# Prerequisites +*.d + +# Object files +*.o +*.ko +*.obj +*.elf + +# Linker output +*.ilk +*.map +*.exp + +# Precompiled Headers +*.gch +*.pch + +# Libraries +*.lib +*.a +*.la +*.lo + +# Shared objects (inc. Windows DLLs) +*.dll +*.so +*.so.* +*.dylib + +# Executables +*.out +*.app +*.i*86 +*.x86_64 +*.hex + +# Debug files +*.dSYM/ +*.su +*.idb +*.pdb + +# Kernel Module Compile Results +*.mod* +*.cmd +.tmp_versions/ +modules.order +Module.symvers +Mkfile.old +dkms.conf + +### certificates ### +*.pem +*.key +*.crt +*.cer +*.der +*.priv + +### Database ### +*.accdb +*.db +*.dbf +*.mdb +*.sqlite3 +*.db-shm +*.db-wal + +### Diff ### +*.patch +*.diff + +### Django ### +*.pot +*.pyc +__pycache__/ +local_settings.py +db.sqlite3 +db.sqlite3-journal +media + +# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ +# in your Git repository. Update and uncomment the following line accordingly. +# /staticfiles/ + +### Django.Python Stack ### +# Byte-compiled / optimized / DLL files +*.py[cod] +*$py.class + +# C extensions + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo + +# Django stuff: + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock +# requirements extracted from poetry lock +requirements-lock.txt + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +### Git ### +# Created by git for backups. To disable backups in Git: +# $ git config --global mergetool.keepBackup false +*.orig + +# Created by git when using merge tools for conflicts +*.BACKUP.* +*.BASE.* +*.LOCAL.* +*.REMOTE.* +*_BACKUP_*.txt +*_BASE_*.txt +*_LOCAL_*.txt +*_REMOTE_*.txt + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### MicrosoftOffice ### +*.tmp + +# Word temporary +~$*.doc* + +# Word Auto Backup File +Backup of *.doc* + +# Excel temporary +~$*.xls* + +# Excel Backup File +*.xlk + +# PowerPoint temporary +~$*.ppt* + +# Visio autosave temporary files +*.~vsd* + +### OSX ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume 
+.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + +### Python ### +# Byte-compiled / optimized / DLL files + +# C extensions + +# Distribution / packaging + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+ +# Installer logs + +# Unit test / coverage reports + +# Translations + +# Django stuff: + +# Flask stuff: + +# Scrapy stuff: + +# Sphinx documentation + +# PyBuilder + +# Jupyter Notebook + +# IPython + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm + +# Celery stuff + +# SageMath parsed files + +# Environments + +# Spyder project settings + +# Rope project settings + +# mkdocs documentation + +# mypy + +# Pyre type checker + +# pytype static type analyzer + +# Cython debug symbols + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. 
For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. + +### Python Patch ### +# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration +poetry.toml + +# ruff +.ruff_cache/ + +# LSP config files +pyrightconfig.json + +### Spreadsheet ### +*.xlr +*.xls +*.xlsx + +### SSH ### +**/.ssh/id_* +**/.ssh/*_id_* +**/.ssh/known_hosts + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +### Zsh ### +# Zsh compiled script + zrecompile backup +*.zwc +*.zwc.old + +# Zsh completion-optimization dumpfile +*zcompdump* + +# Zsh history +.zsh_history + +# Zsh sessions +.zsh_sessions + +# Zsh zcalc history +.zcalc_history + +# A popular plugin manager's files +._zinit +.zinit_lstupd + +# zdharma/zshelldoc tool's files +zsdoc/data + +# robbyrussell/oh-my-zsh/plugins/per-directory-history plugin's files +# (when set-up to store the history in the local directory) +.directory_history + +# MichaelAquilina/zsh-autoswitch-virtualenv plugin's files +# (for Zsh plugins using Python) + +# 
Zunit tests' output +/tests/_output/* +!/tests/_output/.gitkeep + +### VisualStudio ### +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.meta +*.iobj +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp_proj +*_wpftmp.csproj +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code 
Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
+*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) +*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +*.code-workspace + +# Local History for Visual Studio Code + +# Windows Installer files from build outputs + +# JetBrains Rider +*.sln.iml + +### VisualStudio Patch ### +# Additional files built by Visual Studio + +# End of 
https://www.toptal.com/developers/gitignore/api/linux,archlinuxpackages,osx,windows,python,c,django,database,pycharm,visualstudio,visualstudiocode,vim,zsh,git,diff,microsoftoffice,spreadsheet,ssh,certificates diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..3ff038d --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +default_language_version: + python: python3.10 + +default_stages: [commit, push] + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: 'v0.1.12' # Use the latest version of ruff-pre-commit + hooks: + - id: ruff + pass_filenames: false + args: + - --config=pyproject.toml diff --git a/.remarkignore b/.remarkignore new file mode 100644 index 0000000..fb1251d --- /dev/null +++ b/.remarkignore @@ -0,0 +1,18 @@ +# To save time scanning +.idea/ +.vscode/ +.tox/ +.git/ +.github/**/*.yaml +.github/**/*.yml +*.egg-info/ +build/ +dist/ +downloads/ +env/ + +# actual items to ignore +.pytest_cache/ +node_modules/ +docs/_build/ +docs/build/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 880efdc..0b30f5a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,73 @@ # Changelog + All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## [Unreleased] +## [Unreleased](https://github.com/crim-ca/dlm-extension/tree/main) + +### Added +- n/a + +### Changed +- n/a + +### Deprecated +- n/a + +### Removed +- n/a + +### Fixed +- n/a + +## [0.1.1.alpha4](https://github.com/crim-ca/dlm-extension/tree/0.1.1.alpha4) + +### Added +- more [Task Enum](README.md#task-enum) tasks +- [Model Output Object](README.md#model-output-object) +- batch_size and hardware summary +- [`mlm:accelerator`, `mlm:accelerator_constrained`, `mlm:accelerator_summary`](./README.md#accelerator-type-enum) + to specify hardware requirements for the model +- Use common metadata + [Asset Object](https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md#asset-object) + to refer to model asset and source code. +- use `classification:classes` in Model Output +- add `scene-classification` to the Enum Tasks to allow disambiguation between pixel-wise and patch-based classification + +### Changed +- `disk_size` replaced by `file:size` (see [Best Practices - File Extension](best-practices.md#file-extension)) +- `memory_size` under `dlm:architecture` moved directly under Item properties as `mlm:memory_size` +- replaced all hardware/accelerator/runtime definitions into distinct `mlm` fields directly under the + STAC Item properties (top-level, not nested) to allow better search support by STAC API. +- reorganized `dlm:architecture` nested fields to exist at the top level of properties as `mlm:name`, `mlm:summary` + and so on to provide STAC API search capabilities. +- replaced `normalization:mean`, etc. 
with [statistics](./README.md#bands-and-statistics) from STAC 1.1 common metadata +- added `pydantic` models for internal schema objects in `stac_model` package and published to PyPI +- specified [rel_type](README.md#relation-types) to be `derived_from` and + specify how model item or collection json should be named +- replaced all Enum Tasks names to use hyphens instead of spaces +- replaced `dlm:task` by `mlm:tasks` using an array of values instead of a single one, allowing models to represent + multiple tasks they support simultaneously or interchangeably depending on context +- replaced `pre_processing_function` and `post_processing_function` to use similar definitions + to the [Processing Extension - Expression Object](https://github.com/stac-extensions/processing#expression-object) + such that more extended definitions of custom processors can be defined. +- updated JSON schema to reflect changes of MLM fields + +### Deprecated +- any `dlm`-prefixed field or property + +### Removed +- Data Object, replaced with [Model Input Object](./README.md#model-input-object) that uses the `name` field from + the [common metadata band object][stac-bands] which also records `data_type` and `nodata` type + +### Fixed +- n/a + +[stac-bands]: https://github.com/radiantearth/stac-spec/blob/f9b3c59ba810541c9da70c5f8d39635f8cba7bcd/item-spec/common-metadata.md#bands + +## [v1.0.0-beta3](https://github.com/crim-ca/dlm-extension/tree/v1.0.0-beta3) ### Added - Added example model architecture summary text. @@ -39,7 +102,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Fixed examples to refer to local files. - Fixed formatting of tables and descriptions in README. -[v1.0.0-beta2]: +## [v1.0.0-beta2](https://github.com/crim-ca/dlm-extension/tree/v1.0.0-beta2) ### Added - Initial release of the extension description and schema. 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..0334f37 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,91 @@ +# How to contribute to stac-model + +## Project setup + +1. If you don't have `Poetry` installed run: + +```bash +make poetry-install +``` + +> This installs Poetry as a [standalone application][poetry-install]. +> If you prefer, you can simply install it inside your virtual environment. + +2. Initialize project dependencies with poetry and install `pre-commit` hooks: + +```bash +make install-dev +make pre-commit-install +``` + +You're then ready to run and test your contributions. + +To activate your `virtualenv` run `poetry shell`. + +Want to know more about Poetry? Check [its documentation][poetry-docs]. + +Poetry's [commands][poetry-cli] let you easily make descriptive python environments +and run commands in those environments, like: + +- `poetry add numpy@latest` +- `poetry run pytest` +- `poetry publish --build` + +etc. + +3. Run linting checks: + +```bash +make lint-all +``` + +4. Run `pytest` with + +```bash +make test +``` + +5. Upload your changes to your fork, then make a PR from there to the main repo: + +```bash +git checkout -b your-branch +git add . +git commit -m ":tada: Initial commit" +git remote add origin https://github.com/your-fork/stac-model.git +git push -u origin your-branch +``` + +## Building and releasing stac-model + +Building a new version of `stac-model` involves the following steps: + +- Bump the version with `poetry version <version>`. + You can pass the new version explicitly, or a rule such as `major`, `minor`, or `patch`. + For more details, refer to the [Semantic Versions][semver] standard; +- Make a commit to `GitHub`; +- Create a `GitHub release`; +- And... publish :slight_smile: `poetry publish --build` + +### Before submitting + +Before submitting your code please do the following steps: + +1. Add any changes you want +2. Add tests for the new changes +3. 
Edit documentation if you have changed something significant +4. Run `make codestyle` to format your changes. +5. Run `make lint-all` to ensure that types, security and docstrings are okay. + +## Other help + +You can contribute by spreading a word about this library. +It would also be a huge contribution to write +a short article on how you are using this project. +You can also share how the ML Model extension does or does +not serve your needs with us in the Github Discussions or raise +Issues for bugs. + +[poetry-install]: https://github.com/python-poetry/install.python-poetry.org +[poetry-docs]: https://python-poetry.org/docs/ +[poetry-cli]: https://python-poetry.org/docs/cli/#commands +[semver]: https://semver.org/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..3dd26e5 --- /dev/null +++ b/Makefile @@ -0,0 +1,126 @@ +#* Variables +SHELL := /usr/bin/env bash +PYTHON := python +PYTHONPATH := `pwd` + +#* Poetry +.PHONY: poetry-install +poetry-install: + curl -sSL https://install.python-poetry.org | $(PYTHON) - + +.PHONY: poetry-remove +poetry-remove: + curl -sSL https://install.python-poetry.org | $(PYTHON) - --uninstall + +.PHONY: poetry-plugins +poetry-plugins: + poetry self add poetry-plugin-up + +.PHONY: poetry-env +poetry-env: + poetry config virtualenvs.in-project true + +#* Installation +.PHONY: install +install: poetry-env + poetry lock -n && poetry export --without-hashes > requirements-lock.txt + poetry install -n + -poetry run mypy --install-types --non-interactive ./ + +.PHONY: install-dev +install-dev: poetry-env install + poetry install -n --with dev + +.PHONY: pre-commit-install +pre-commit-install: + poetry run pre-commit install + + +#* Formatters +.PHONY: codestyle +codestyle: + poetry run ruff format --config=pyproject.toml stac_model tests + +.PHONY: format +format: codestyle + +#* Linting +.PHONY: test +test: + PYTHONPATH=$(PYTHONPATH) poetry run pytest -c pyproject.toml --cov-report=html --cov=stac_model tests/ + 
+.PHONY: mypy +mypy: + poetry run mypy --config-file pyproject.toml ./ + +.PHONY: check-safety +check-safety: + poetry check + poetry run safety check --full-report + poetry run bandit -ll --recursive stac_model tests + +.PHONY: lint +lint: + poetry run ruff --config=pyproject.toml ./ + poetry run pydocstyle --count --config=pyproject.toml ./ + poetry run pydoclint --config=pyproject.toml ./ + +.PHONY: check-lint +check-lint: lint + +.PHONY: install-npm +install-npm: + npm install + +.PHONY: check-markdown +check-markdown: install-npm + npm run check-markdown + +.PHONY: format-markdown +format-markdown: install-npm + npm run format-markdown + +.PHONY: check-examples +check-examples: install-npm + npm run check-examples + +.PHONY: format-examples +format-examples: install-npm + npm run format-examples + +fix-%: format-%s + +.PHONY: lint-all +lint-all: lint mypy check-safety check-markdown + +.PHONY: update-dev-deps +update-dev-deps: + poetry up --only=dev-dependencies --latest + +#* Cleaning +.PHONY: pycache-remove +pycache-remove: + find . | grep -E "(__pycache__|\.pyc|\.pyo$$)" | xargs rm -rf + +.PHONY: dsstore-remove +dsstore-remove: + find . | grep -E ".DS_Store" | xargs rm -rf + +.PHONY: mypycache-remove +mypycache-remove: + find . | grep -E ".mypy_cache" | xargs rm -rf + +.PHONY: ipynbcheckpoints-remove +ipynbcheckpoints-remove: + find . | grep -E ".ipynb_checkpoints" | xargs rm -rf + +.PHONY: pytestcache-remove +pytestcache-remove: + find . 
| grep -E ".pytest_cache" | xargs rm -rf + +.PHONY: build-remove +build-remove: + rm -rf build/ + +.PHONY: cleanup +cleanup: pycache-remove dsstore-remove mypycache-remove ipynbcheckpoints-remove pytestcache-remove diff --git a/README.md b/README.md index bad0999..07261ea 100644 --- a/README.md +++ b/README.md @@ -1,207 +1,645 @@ -# Deep Learning Model Extension Specification +# Machine Learning Model Extension Specification [![hackmd-github-sync-badge](https://hackmd.io/lekSD_RVRiquNHRloXRzeg/badge)](https://hackmd.io/lekSD_RVRiquNHRloXRzeg?both) -- **Title:** Deep Learning Model Extension -- **Identifier:** -- **Field Name Prefix:** dlm -- **Scope:** Item, Collection -- **Extension [Maturity Classification][stac-ext-maturity]:** Proposal -- **Owner**: - [@sfoucher](https://github.com/sfoucher) - [@fmigneault](https://github.com/fmigneault) - [@ymoisan](https://github.com/ymoisan) - -[stac-ext-maturity]: https://github.com/radiantearth/stac-spec/tree/master/extensions/README.md#extension-maturity - -This document explains the Template Extension to the [SpatioTemporal Asset Catalog][stac-spec] (STAC) specification. -This document explains the fields of the STAC Deep Learning Model (dlm) Extension to a STAC Item. -The main objective is to be able to build model collections that can be searched -and that contain enough information to be able to deploy an inference service. -When Deep Learning models are trained using satellite imagery, it is important -to track essential information if you want to make them searchable and reusable: -1. Input data origin and specifications -2. Model base transforms -3. Model output and its semantic interpretation -4. 
Runtime environment to be able to run the model +- **Title:** Machine Learning Model Extension +- **Identifier:** [https://stac-extensions.github.io/mlm/v1.0.0/schema.json](https://stac-extensions.github.io/mlm/v1.0.0/schema.json) +- **Field Name Prefix:** mlm +- **Scope:** Collection, Item, Asset, Links +- **Extension Maturity Classification:** Proposal +- **Owner:** + - [@fmigneault](https://github.com/fmigneault) + - [@rbavery](https://github.com/rbavery) + - [@ymoisan](https://github.com/ymoisan) + - [@sfoucher](https://github.com/sfoucher) + +The STAC Machine Learning Model (MLM) Extension provides a standard set of fields to describe machine learning models +trained on overhead imagery and enable running model inference. + +The main objectives of the extension are: + +1) to enable building model collections that can be searched alongside associated STAC datasets +2) record all necessary bands, parameters, modeling artifact locations, and high-level processing steps to deploy + an inference service. + +Specifically, this extension records the following information to make ML models searchable and reusable: +1. Sensor band specifications +2. Model input transforms including resize and normalization +3. Model output shape, data type, and its semantic interpretation +4. An optional, flexible description of the runtime environment to be able to run the model 5. Scientific references -[stac-spec]: https://github.com/radiantearth/stac-spec +The MLM specification is biased towards providing metadata fields for supervised machine learning models. +However, fields that relate to supervised ML are optional and users can use the fields they need for different tasks. -Check the original technical report -[here](https://github.com/crim-ca/CCCOT03/raw/main/CCCOT03_Rapport%20Final_FINAL_EN.pdf) for more details. +See [Best Practices](./best-practices.md) for guidance on what other STAC extensions you should use in conjunction +with this extension. 
+The Machine Learning Model Extension purposely omits and delegates some definitions to other STAC extensions to favor +reusability and avoid metadata duplication whenever possible. A properly defined MLM STAC Item/Collection should almost +never have the Machine Learning Model Extension exclusively in `stac_extensions`. -![](https://i.imgur.com/cVAg5sA.png) +For details about the earlier (legacy) version of the MLM Extension, formerly known as +the *Deep Learning Model Extension* (DLM), please refer to the [DLM LEGACY](README_DLM_LEGACY.md) document. +DLM was renamed to the current MLM Extension and refactored to form a cohesive definition across all machine +learning approaches, regardless of whether the approach constitutes a deep neural network or other statistical approach. +It also combines multiple definitions from the predecessor [ML-Model](https://github.com/stac-extensions/ml-model) +extension to synthesize common use cases into a single reference for Machine Learning Models. + +For more details about the [`stac-model`](stac_model) Python package, which provides definitions of the MLM extension +using both [`Pydantic`](https://docs.pydantic.dev/latest/) and [`PySTAC`](https://pystac.readthedocs.io/en/stable/) +connectors, please refer to the [STAC Model](README_STAC_MODEL.md) document. + +> :warning:
+> FIXME: update examples - Examples: - - [Example with a UNet trained with thelper](examples/item.json) + - **Example with a ??? trained with torchgeo** :warning: TODO update example - [Collection example](examples/collection.json): Shows the basic usage of the extension in a STAC Collection -- [JSON Schema](json-schema/schema.json) +- [JSON Schema](json-schema/schema.json) TODO update - [Changelog](./CHANGELOG.md) ## Item Properties and Collection Fields -| Field Name | Type | Description | -|------------------|---------------------------------------------|------------------------------------------------------------------------| -| dlm:data | [Data Object](#data-object) | Describes the EO data compatible with the model. | -| dlm:inputs | [Inputs Object](#inputs-object) | Describes the transformation between the EO data and the model inputs. | -| dlm:architecture | [Architecture Object](#architecture-object) | Describes the model architecture. | -| dlm:runtime | [Runtime Object](#runtime-object) | Describes the runtime environments to run the model (inference). | -| dlm:outputs | [Outputs Object](#outputs-object) | Describes each model output and how to interpret it. | - -In addition, fields from the following extensions must be imported in the item: -- [Scientific Extension Specification][stac-ext-sci] to describe relevant publications. -- [EO Extension Specification][stac-ext-eo] to describe eo data. -- [Version Extension Specification][stac-ext-ver] to define version tags. +The fields in the table below can be used in these parts of STAC documents: + +- [ ] Catalogs +- [x] Collections +- [x] Item Properties (incl. Summaries in Collections) +- [x] Assets (for both Collections and Items, incl. 
Item Asset Definitions in Collections, except `mlm:name`) +- [ ] Links + +| Field Name                  | Type                                                          | Description | +|-----------------------------|---------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| mlm:name                    | string                                                        | **REQUIRED** A unique name for the model. This can include, but must be distinct from, simply naming the model architecture. If there is a publication or other published work related to the model, use the official name of the model. | +| mlm:architecture            | [Model Architecture](#model-architecture) string              | **REQUIRED** A generic and well established architecture name of the model. | +| mlm:tasks                   | \[[Task Enum](#task-enum)]                                    | **REQUIRED** Specifies the Machine Learning tasks for which the model can be used. If multi-task outputs are provided by distinct model heads, specify all available tasks under the main properties and specify respective tasks in each [Model Output Object](#model-output-object). | +| mlm:framework               | string                                                        | Framework used to train the model (ex: PyTorch, TensorFlow). | +| mlm:framework_version       | string                                                        | The `framework` library version. Some models require a specific version of the machine learning `framework` to run. | +| mlm:memory_size             | integer                                                       | The in-memory size of the model on the accelerator during inference (bytes). | +| mlm:total_parameters        | integer                                                       | Total number of model parameters, including trainable and non-trainable parameters. | +| mlm:pretrained              | boolean                                                       | Indicates if the model was pretrained. If the model was pretrained, consider providing `pretrained_source` if it is known. | +| mlm:pretrained_source       | string \| null                                                | The source of the pretraining. Can refer to popular pretraining datasets by name (i.e. 
Imagenet) or less known datasets by URL and description. If trained from scratch (i.e.: `pretrained = false`), the `null` value should be set explicitly. | +| mlm:batch_size_suggestion | integer | A suggested batch size for the accelerator and summarized hardware. | +| mlm:accelerator | [Accelerator Type Enum](#accelerator-type-enum) \| null | The intended computational hardware that runs inference. If undefined or set to `null` explicitly, the model does not require any specific accelerator. | +| mlm:accelerator_constrained | boolean | Indicates if the intended `accelerator` is the only `accelerator` that can run inference. If undefined, it should be assumed `false`. | +| mlm:accelerator_summary | string | A high level description of the `accelerator`, such as its specific generation, or other relevant inference details. | +| mlm:accelerator_count | integer | A minimum amount of `accelerator` instances required to run the model. | +| mlm:input | \[[Model Input Object](#model-input-object)] | **REQUIRED** Describes the transformation between the EO data and the model input. | +| mlm:output | \[[Model Output Object](#model-output-object)] | **REQUIRED** Describes each model output and how to interpret it. | +| mlm:hyperparameters | [Model Hyperparameters Object](#model-hyperparameters-object) | Additional hyperparameters relevant for the model. | + +To decide whether above fields should be applied under Item `properties` or under respective Assets, the context of +each field must be considered. For example, the `mlm:name` should always be provided in the Item `properties`, since +it relates to the model as a whole. In contrast, some models could support multiple `mlm:accelerator`, which could be +handled by distinct source code represented by different Assets. In such case, `mlm:accelerator` definitions should be +nested under their relevant Asset. 
If a field is defined both at the Item and Asset level, the value at the Asset level +would be considered for that specific Asset, and the value at the Item level would be used for other Assets that did +not override it for their respective reference. For some of the fields, further details are provided in the following +sections to provide more precision regarding some potentially ambiguous use cases. + +In addition, fields from multiple relevant extensions should be defined as applicable. See +[Best Practices - Recommended Extensions to Compose with the ML Model Extension](best-practices.md#recommended-extensions-to-compose-with-the-ml-model-extension) +for more details. + +For the [Extent Object][stac-extent] +in STAC Collections and the corresponding spatial and temporal fields in Items, please refer to section +[Best Practices - Using STAC Common Metadata Fields for the ML Model Extension][stac-mlm-meta]. + +[stac-mlm-meta]: best-practices.md#using-stac-common-metadata-fields-for-the-ml-model-extension +[stac-extent]: https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md#extent-object + +### Model Architecture + +In most cases, this should correspond to common architecture names defined in the literature, +such as `ResNet`, `VGG`, `GAN` or `Vision Transformer`. For more examples of proper names (including casing), +the [Papers With Code - Computer Vision Methods](https://paperswithcode.com/methods/area/computer-vision) can be used. +Note that this field is not an explicit "Enum", and is used only as an indicator of common architecture occurrences. +If no specific or predefined architecture can be associated with the described model, simply employ `unknown` or +another custom name as deemed appropriate. + +### Task Enum + +It is recommended to define `mlm:tasks` of the entire model at the STAC Item level, +and `tasks` of respective [Model Output Object](#model-output-object) with the following values. 
+Although other values are permitted to support more use cases, they should be used sparingly to allow better +interoperability of models and their representation. + +As a general rule of thumb, if a task is not represented below, an appropriate name can be formulated by taking +definitions listed in [Papers With Code](https://paperswithcode.com/sota). The names +should be normalized to lowercase and use hyphens instead of spaces. + +| Task Name               | Corresponding `label:tasks` | Description | +|-------------------------|-----------------------------|--------------------------------------------------------------------------------------------------------------------------| +| `regression`            | `regression`                | Generic regression that estimates a numeric and continuous value. | +| `classification`        | `classification`            | Generic classification task that assigns class labels to an output. | +| `scene-classification`  | *n/a*                       | Specific classification task where the model assigns a single class label to an entire scene/area. | +| `detection`             | `detection`                 | Generic detection of the "presence" of objects or entities, with or without positions. | +| `object-detection`      | *n/a*                       | Task corresponding to the identification of positions as bounding boxes of objects detected in the scene. | +| `segmentation`          | `segmentation`              | Generic task that regroups all types of segmentation tasks consisting of applying labels to pixels. | +| `semantic-segmentation` | *n/a*                       | Specific segmentation task where all pixels are attributed labels, without consideration for segments as unique objects. | +| `instance-segmentation` | *n/a*                       | Specific segmentation task that assigns distinct labels for groups of pixels corresponding to object instances. | +| `panoptic-segmentation` | *n/a*                       | Specific segmentation task that combines instance segmentation of objects and semantic labels for non-objects. | +| `similarity-search`     | *n/a*                       | Generic task to identify whether a query input corresponds to another reference within a corpus. | +| `generative`            | *n/a*                       | Generic task that encompasses all synthetic data generation techniques. | +| `image-captioning`      | *n/a*                       | Specific task of describing the content of an image in words. | +| `super-resolution`      | *n/a*                       | Specific task that increases the quality and resolution of an image by increasing its high-frequency details. | + +If the task falls within the category of supervised machine learning and uses labels during training, +this should align with the `label:tasks` values defined in [STAC Label Extension][stac-ext-label-props] for relevant +STAC Collections and Items published with the model described by this extension. + +It is to be noted that multiple "*generic*" task names (`classification`, `detection`, etc.) are defined to allow +correspondence with `label:tasks`, but these can lead to some ambiguity depending on context. For example, a model +that supports `classification` could mean that the model can predict patch-based classes over an entire scene +(i.e.: `scene-classification` for a single prediction over an entire area of interest as a whole), +or that it can predict pixel-wise "classifications", such as land-cover labels for +every single pixel coordinate over the area of interest. Perhaps counter-intuitively to some users, +such a model that produces pixel-wise "classifications" should be attributed the `segmentation` task +(and more specifically `semantic-segmentation`) rather than `classification`. To avoid this kind of ambiguity, +it is strongly recommended that `tasks` always aim to provide the most specific definitions possible to explicitly +describe what the model accomplishes. 
-[stac-ext-sci]: https://github.com/radiantearth/stac-spec/tree/v1.0.0-beta.2/extensions/scientific/README.md -[stac-ext-eo]: https://github.com/radiantearth/stac-spec/tree/v1.0.0-beta.2/extensions/eo/README.md -[stac-ext-ver]: https://github.com/radiantearth/stac-spec/tree/v1.0.0-beta.2/extensions/version/README.md - -### Data Object +[stac-ext-label-props]: https://github.com/stac-extensions/label#item-properties -| Field Name | Type | Description | -|-----------------|--------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------| -| process_level | [Process Level Enum](#process-level-enum) | Data processing level that represents the apparent variability of the data. | -| data_type | [Data Type Enum](#data-type-enum) | Data type (`uint8`, `uint16`, etc.) enum based on numpy base types for data normalization and pre-processing. | -| nodata | integer \| string | Value indicating *nodata*, which could require special data preparation by the network (see [No Data Value](#no-data-value)). | -| number_of_bands | integer | Number of bands used by the model | -| useful_bands | \[[Model Band Object](#model-band-object)] | Describes bands by index in the relevant order for the model input. | +### Framework + +This should correspond to the common library name of a well-established ML framework. +No "Enum" are *enforced* to allow easy addition of newer frameworks, but it is **STRONGLY** recommended +to use common names when applicable. Below are a few notable entries. 
+ +- `PyTorch` +- `TensorFlow` +- `Scikit-learn` +- `Huggingface` +- `Keras` +- `ONNX` +- `rgee` +- `spatialRF` +- `JAX` +- `MXNet` +- `Caffe` +- `PyMC` +- `Weka` + +### Accelerator Type Enum + +It is recommended to define `accelerator` with one of the following values: + +- `amd64` models compatible with AMD or Intel CPUs (no hardware specific optimizations) +- `cuda` models compatible with NVIDIA GPUs +- `xla` models compiled with XLA. Models trained on TPUs are typically compiled with XLA. +- `amd-rocm` models trained on AMD GPUs +- `intel-ipex-cpu` for models optimized with IPEX for Intel CPUs +- `intel-ipex-gpu` for models optimized with IPEX for Intel GPUs +- `macos-arm` for models trained on Apple Silicon + +> :warning:
+> If `mlm:accelerator = amd64`, this explicitly indicates that the model does not (and will not try to) use any +> accelerator, even if some are available from the runtime environment. This is to be distinguished from +> the value `mlm:accelerator = null`, which means that the model *could* make use of some accelerators if provided, +> but is not constrained by any specific one. To improve comprehension by users, it is recommended that any model +> using `mlm:accelerator = amd64` also set explicitly `mlm:accelerator_constrained = true` to illustrate that the +> model **WILL NOT** use accelerators, although the hardware resolution should be identical nonetheless. + +When `mlm:accelerator = null` is employed, the value of `mlm:accelerator_constrained` can be ignored, since even if +set to `true`, there would be no `accelerator` to constrain against. To avoid confusion, it is suggested to set +`mlm:accelerator_constrained = false` or omit the field entirely in this case. + +### Model Input Object + +| Field Name              | Type                                                    | Description | +|-------------------------|---------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| name                    | string                                                  | **REQUIRED** Name of the input variable defined by the model. If no explicit name is defined by the model, an informative name (e.g.: `"RGB Time Series"`) can be used instead. | +| bands                   | \[string]                                               | **REQUIRED** The names of the raster bands used to train or fine-tune the model, which may be all or a subset of bands available in a STAC Item's [Band Object](#bands-and-statistics). If no band applies for one input, use an empty array. 
| +| input | [Input Structure Object](#input-structure-object) | **REQUIRED** The N-dimensional array definition that describes the shape, dimension ordering, and data type. | +| norm_by_channel | boolean | Whether to normalize each channel by channel-wise statistics or to normalize by dataset statistics. If True, use an array of `statistics` of same dimensionality and order as the `bands` field in this object. | +| norm_type | [Normalize Enum](#normalize-enum) \| null | Normalization method. Select an appropriate option or `null` when none applies. Consider using `pre_processing_function` for custom implementations or more complex combinations. | +| norm_clip | \[number] | When `norm_type = "clip"`, this array supplies the value for each `bands` item, which is used to divide each band before clipping values between 0 and 1. | +| resize_type | [Resize Enum](#resize-enum) \| null | High-level descriptor of the rescaling method to change image shape. Select an appropriate option or `null` when none applies. Consider using `pre_processing_function` for custom implementations or more complex combinations. | +| statistics | \[[Statistics Object](#bands-and-statistics)] | Dataset statistics for the training dataset used to normalize the inputs. | +| pre_processing_function | [Processing Expression](#processing-expression) \| null | Custom preprocessing function where normalization and rescaling, and any other significant operations takes place. | + +Fields that accept the `null` value can be considered `null` when omitted entirely for parsing purposes. +However, setting `null` explicitly when this information is known by the model provider can help users understand +what is the expected behavior of the model. It is therefore recommended to provide `null` explicitly when applicable. 
+ +#### Bands and Statistics + +Depending on the supported `stac_version` and other `stac_extensions` employed by the STAC Item using MLM, +the [STAC 1.1 - Band Object][stac-1.1-band], +the [STAC Raster - Band Object][stac-raster-band] or +the [STAC EO - Band Object][stac-eo-band] can be used for +representing bands information, including notably the `nodata` value, +the `data_type` (see also [Data Type Enum](#data-type-enum)), +and [Common Band Names][stac-band-names]. + +> :information_source:
+> Due to how the schema for [`eo:bands`][stac-eo-band] is defined, it is not sufficient to *only* provide +> the `eo:bands` property at the STAC Item level. The schema validation of the EO extension explicitly looks +> for a corresponding set of bands under an Asset, and if none is found, it disallows `eo:bands` in the Item properties. +> Therefore, `eo:bands` should either be specified *only* under the Asset containing the `mlm:model` role +> (see [Model Asset](#model-asset)), or define them *both* under the Asset and Item properties. If the second +> approach is selected, it is recommended that the `eo:bands` under the Asset contains only the `name` or the +> `common_name` property, such that all other details about the bands are defined at the Item level. +>

+> For more details, refer to [stac-extensions/eo#12](https://github.com/stac-extensions/eo/issues/12). +>
+> For an example, please refer to [examples/item_eo_bands.json](examples/item_eo_bands.json). +> Notably in this example, the `assets.weights.eo:bands` property provides the `name` to fulfill the Asset requirement, +> while all additional band details are provided in `properties.eo:bands`. + +Only bands used as input to the model should be included in the MLM `bands` field. +To avoid duplicating the information, MLM only uses the `name` of whichever "Band Object" is defined in the STAC Item. + +One distinction from the [STAC 1.1 - Band Object][stac-1.1-band] in MLM is that [Statistics][stac-1.1-stats] object +(or the corresponding [STAC Raster - Statistics][stac-raster-stats] for STAC 1.0) are not +defined at the "Band Object" level, but at the [Model Input](#model-input-object) level. +This is because, in machine learning, it is common to need overall statistics for the dataset used to train the model +to normalize all bands, rather than normalizing the values over a single product. Furthermore, statistics could be +applied differently for distinct [Model Input](#model-input-object) definitions, in order to adjust for intrinsic +properties of the model. + +[stac-1.1-band]: https://github.com/radiantearth/stac-spec/pull/1254 +[stac-1.1-stats]: https://github.com/radiantearth/stac-spec/blob/bands/item-spec/common-metadata.md#statistics-object +[stac-eo-band]: https://github.com/stac-extensions/eo?tab=readme-ov-file#band-object +[stac-raster-band]: https://github.com/stac-extensions/raster?tab=readme-ov-file#raster-band-object +[stac-raster-stats]: https://github.com/stac-extensions/raster?tab=readme-ov-file#statistics-object +[stac-band-names]: https://github.com/stac-extensions/eo?tab=readme-ov-file#common-band-names -#### Process Level Enum +#### Data Type Enum -It is recommended to use the [STAC Processing Extension][stac-ext-proc] -to represent the `processing:level` of the relevant level `L0` for raw data up to `L4` for Analysis-Ready Data (ARD). 
+When describing the `data_type` provided by a [Band](#bands-and-statistics), whether for defining +the [Input Structure](#input-structure-object) or the [Result Structure](#result-structure-object), +the [Data Types from the STAC Raster extension][raster-data-types] should be used if using STAC 1.0 or earlier, +and can use [Data Types from STAC 1.1 Core][stac-1.1-data-types] for later versions. +Both definitions should define equivalent values. + +[raster-data-types]: https://github.com/stac-extensions/raster?tab=readme-ov-file#data-types +[stac-1.1-data-types]: https://github.com/radiantearth/stac-spec/blob/bands/item-spec/common-metadata.md#data-types + +#### Input Structure Object + +| Field Name | Type | Description | +|------------|----------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| shape | \[integer] | **REQUIRED** Shape of the input n-dimensional array (e.g.: $B \times C \times H \times W$), including the batch size dimension. Each dimension must either be greater than 0 or -1 to indicate a variable dimension size. | +| dim_order | \[[Dimension Order](#dimension-order)] | **REQUIRED** Order of the `shape` dimensions by name. | +| data_type | [Data Type Enum](#data-type-enum) | **REQUIRED** The data type of values in the n-dimensional array. For model inputs, this should be the data type of the processed input supplied to the model inference function, not the data type of the source bands. | + +A common use of `-1` for one dimension of `shape` is to indicate a variable batch-size. +However, this value is not strictly reserved for the `b` dimension. +For example, if the model is capable of automatically adjusting its input layer to adapt to the provided input data, +then the corresponding dimensions that can be adapted can employ `-1` as well. 
+
+#### Dimension Order
+
+Recommended values should use common names as much as possible to allow better interpretation by users and scripts
+that could need to resolve the dimension ordering for reshaping requirements according to the ML framework employed.
+
+Below are some notable common names recommended for use, but others can be employed as needed.
+
+- `batch`
+- `channel`
+- `time`
+- `height`
+- `width`
+- `depth`
+- `token`
+- `class`
+- `score`
+- `confidence`
+
+For example, a tensor of multiple RGB images represented as $B \times C \times H \times W$ should
+indicate `dim_order = ["batch", "channel", "height", "width"]`.
+
+#### Normalize Enum
+
+Select one option from:
+- `min-max`
+- `z-score`
+- `l1`
+- `l2`
+- `l2sqr`
+- `hamming`
+- `hamming2`
+- `type-mask`
+- `relative`
+- `inf`
+- `clip`
+
+See [OpenCV - Normalization Flags][opencv-normalization-flags]
+for details about the relevant methods. Equivalent methods from other packages are applicable as well.
+
+When a normalization technique is specified, it is expected that the corresponding [Statistics](#bands-and-statistics)
+parameters necessary to perform it would be provided for the corresponding input.
+For example, the `min-max` normalization would require that at least the `minimum` and `maximum` statistic properties
+are provided, while the `z-score` would require `mean` and `stddev`.
+
+If none of the above values applies, `null` (literal, not string) can be used instead.
+If a custom normalization operation, or a combination of operations (with or without [Resize](#resize-enum)),
+must be defined instead, consider using a [Processing Expression](#processing-expression) reference. 
+ +[opencv-normalization-flags]: https://docs.opencv.org/4.x/d2/de8/group__core__array.html#gad12cefbcb5291cf958a85b4b67b6149f + +#### Resize Enum + +Select one option from: +- `crop` +- `pad` +- `interpolation-nearest` +- `interpolation-linear` +- `interpolation-cubic` +- `interpolation-area` +- `interpolation-lanczos4` +- `interpolation-max` +- `wrap-fill-outliers` +- `wrap-inverse-map` + +See [OpenCV - Interpolation Flags][opencv-interpolation-flags] +for details about the relevant methods. Equivalent methods from other packages are applicable as well. + +If none of the above values applies, `null` (literal, not string) can be used instead. +If a custom rescaling operation, or a combination of operations (with or without [Normalization](#normalize-enum)), +must be defined instead, consider using a [Processing Expression](#processing-expression) reference. + +[opencv-interpolation-flags]: https://docs.opencv.org/4.x/da/d54/group__imgproc__transform.html#ga5bb5a1fea74ea38e1a5445ca803ff121 + +#### Processing Expression + +Taking inspiration from [Processing Extension - Expression Object][stac-proc-expr], the processing expression defines +at the very least a `format` and the applicable `expression` for it to perform pre/post-processing operations on MLM +inputs/outputs. + +| Field Name | Type | Description | +| ---------- | ------ | ----------- | +| format | string | **REQUIRED** The type of the expression that is specified in the `expression` property. | +| expression | \* | **REQUIRED** An expression compliant with the `format` specified. The expression can be any data type and depends on the `format` given, e.g. string or object. | + +On top of the examples already provided by [Processing Extension - Expression Object][stac-proc-expr], +the following formats are recommended as alternative scripts and function references. 
+
+| Format | Type | Description | Expression Example |
+|----------| ------ |----------------------------------------|------------------------------------------------------------------------------------------------------|
+| `python` | string | A Python entry point reference. | `my_package.my_module:my_processing_function` or `my_package.my_module:MyClass.my_method` |
+| `docker` | string | A URI with image and tag to a Docker image. | `ghcr.io/NAMESPACE/IMAGE_NAME:latest` |
+| `uri` | string | A URI to some binary or script. | `{"href": "https://raw.githubusercontent.com/ORG/REPO/TAG/package/cli.py", "type": "text/x-python"}` |
+
+> :information_source:
+> Above definitions are only indicative, and more can be added as desired with even more custom definitions. +> It is left as an implementation detail for users to resolve how these expressions should be handled at runtime. + +> :warning:
+> See also discussion regarding additional processing expressions:
+> [stac-extensions/processing#31](https://github.com/stac-extensions/processing/issues/31)
+
+[stac-proc-expr]: https://github.com/stac-extensions/processing#expression-object
+
+### Model Output Object
+
+| Field Name | Type | Description |
+|--------------------------|---------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| name | string | **REQUIRED** Name of the output variable defined by the model. If no explicit name is defined by the model, an informative name (e.g.: `"CLASSIFICATION"`) can be used instead. |
+| tasks | \[[Task Enum](#task-enum)] | **REQUIRED** Specifies the Machine Learning tasks for which the output can be used. This can be a subset of `mlm:tasks` defined under the Item `properties` as applicable. |
+| result | [Result Structure Object](#result-structure-object) | **REQUIRED** The structure that describes the resulting output arrays/tensors from one model head. |
+| classification:classes | \[[Class Object](#class-object)] | A list of class objects adhering to the [Classification Extension](https://github.com/stac-extensions/classification). |
+| post_processing_function | [Processing Expression](#processing-expression) \| null | Custom postprocessing function where normalization and rescaling, and any other significant operations take place. |
+
+While only `name`, `tasks` and `result` are required fields, all fields are recommended for tasks that produce a fixed
+shape tensor and have output classes. Outputs that have variable dimensions can define the `result` with the
+appropriate dimension value `-1` in the `shape` field. 
When the model does not produce specific classes, such +as for `regression`, `image-captioning`, `super-resolution` and some `generative` tasks, to name a few, the +`classification:classes` can be omitted. + +#### Result Structure Object + +| Field Name | Type | Description | +|------------|----------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| shape | \[integer] | **REQUIRED** Shape of the n-dimensional result array (e.g.: $B \times H \times W$ or $B \times C$), possibly including a batch size dimension. The dimensions must either be greater than 0 or -1 to indicate a variable size. | +| dim_order | \[[Dimension Order](#dimension-order)] | **REQUIRED** Order of the `shape` dimensions by name for the result array. | +| data_type | [Data Type Enum](#data-type-enum) | **REQUIRED** The data type of values in the n-dimensional array. For model outputs, this should be the data type of the result of the model inference without extra post processing. | + +#### Class Object + +See the documentation for the +[Class Object](https://github.com/stac-extensions/classification?tab=readme-ov-file#class-object). + +### Model Hyperparameters Object + +The hyperparameters are an open JSON object definition that can be used to provide relevant configurations for the +model. Those can combine training details, inference runtime parameters, or both. For example, training hyperparameters +could indicate the number of epochs that were used, the optimizer employed, the number of estimators contained in an +ensemble of models, or the random state value. For inference, parameters such as the model temperature, a confidence +cut-off threshold, or a non-maximum suppression threshold to limit proposal could be specified. 
The specific parameter +names, and how they should be employed by the model, are specific to each implementation. + +Following is an example of what the hyperparameters definition could look like: + +```json +{ + "mlm:hyperparameters": { + "nms_max_detections": 500, + "nms_threshold": 0.25, + "iou_threshold": 0.5, + "random_state": 12345 + } +} +``` -[stac-ext-proc]: https://github.com/stac-extensions/processing#suggested-processing-levels +## Assets Objects + +| Field Name | Type | Description | +|-----------------|----------------------------|-------------------------------------------------------------------------------------------| +| mlm:model | [Asset Object][stac-asset] | **REQUIRED** Asset object containing the model definition. | +| mlm:source_code | [Asset Object][stac-asset] | **RECOMMENDED** Source code description. Can describe a Git repository, ZIP archive, etc. | +| mlm:container | [Asset Object][stac-asset] | **RECOMMENDED** Information to run the model in a container with URI to the container. | +| mlm:training | [Asset Object][stac-asset] | **RECOMMENDED** Information to run the training pipeline of the model being described. | +| mlm:inference | [Asset Object][stac-asset] | **RECOMMENDED** Information to run the inference pipeline of the model being described. | + +It is recommended that the [Assets][stac-asset] defined in a STAC Item using MLM extension use the above field property +names for nesting the Assets in order to improve their quick identification, although the specific names employed are +left up to user preference. However, the MLM Asset definitions **MUST** include the +appropriate [MLM Asset Roles](#mlm-asset-roles) to ensure their discovery. + +[stac-asset]: https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md#asset-object + +### MLM Asset Roles + +Asset `roles` should include relevant names that describe them. 
This does not only include
+the [Recommended Asset Roles](https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#asset-roles)
+from the core specification, such as `data` or `metadata`, but also descriptors such as `mlm:model`, `mlm:weights` and
+so on, as applicable for the relevant MLM Assets being described. Please refer to the following sections
+for `roles` requirements by specific MLM Assets.
+
+Note that `mlm:` prefixed roles are used for identification purposes of the Assets, but non-prefixed roles can be
+provided as well to offer generic descriptors. For example, `["mlm:model", "model", "data"]` could be considered for
+the [Model Asset](#model-asset).
+
+In order to provide more context, the following roles are also recommended where applicable:
+
+| Asset Role | Additional Roles | Description |
+|---------------------------|-------------------------|------------------------------------------------------------------------------------------|
+| mlm:inference-runtime (*) | `runtime` | Describes an Asset that provides runtime reference to perform model inference. |
+| mlm:training-runtime (*) | `runtime` | Describes an Asset that provides runtime reference to perform model training. |
+| mlm:checkpoint (*) | `weights`, `checkpoint` | Describes an Asset that provides a model checkpoint with embedded model configurations. |
+| mlm:weights | `weights`, `checkpoint` | Describes an Asset that provides model weights (typically some Tensor representation). |
+| mlm:model | `model` | Required role for [Model Asset](#model-asset). |
+| mlm:source_code | `code` | Required role for [Source Code Asset](#source-code-asset). |
+
+> :information_source:
+> (*) These roles are offered as direct conversions from the previous extension
+> that provided [ML-Model Asset Roles][ml-model-asset-roles] to provide easier upgrade to the MLM extension.
+
+[ml-model-asset-roles]: https://github.com/stac-extensions/ml-model?tab=readme-ov-file#asset-objects
+
+### Model Asset
+
+| Field Name | Type | Description |
+|-------------------|-------------------------------------------|--------------------------------------------------------------------------------------------------|
+| title | string | Description of the model asset. |
+| href | string | URI to the model artifact. |
+| type | string | The media type of the artifact (see [Model Artifact Media-Type](#model-artifact-media-type)). |
+| roles | \[string] | **REQUIRED** Specify `mlm:model`. Can include `["mlm:weights", "mlm:checkpoint"]` as applicable. |
+| mlm:artifact_type | [Artifact Type Enum](#artifact-type-enum) | Specifies the kind of model artifact. Typically related to a particular ML framework. |
+
+Recommended Asset `roles` include `mlm:weights` or `mlm:checkpoint` for model weights that need to be loaded by a
+model definition and `mlm:compiled` for models that can be loaded directly without an intermediate model definition.
+In each case, the `mlm:model` should be applied as well to indicate that this asset represents the model.
+
+It is also recommended to make use of the
+[file](https://github.com/stac-extensions/file?tab=readme-ov-file#asset--link-object-fields)
+extension for this Asset, as it can provide useful information to validate the contents of the model definition,
+by comparison with fields `file:checksum` and `file:size` for example.
+
+#### Model Artifact Media-Type
+
+Very few ML frameworks, libraries or model artifacts provide explicit [IANA registered][iana-media-type] media-type
+to represent the contents they handle. When those are not provided, custom media-types can be considered. 
+However, "*unofficial but well-established*" parameters should be reused over custom media-types when possible. + +For example, the unofficial `application/octet-stream; framework=pytorch` definition is appropriate to represent a +PyTorch `.pt` file, since its underlying format is a serialized pickle structure, and its `framework` parameter +provides a clearer indication about the targeted ML framework and its contents. Since artifacts will typically be +downloaded using a request stream into a runtime environment in order to employ the model, +the `application/octet-stream` media-type is relevant for representing this type of arbitrary binary data. +Being an official media-type, it also has the benefit to increase chances that +HTTP clients will handle download of the contents appropriately when performing requests. In contrast, custom +media-types such as `application/x-pytorch` have higher chances to be considered unacceptable (HTTP 406 Not Acceptable) +by servers, which is why they should preferably be avoided. + +Users can consider adding more parameters to provide additional context, such as `profile=compiled` to provide an +additional hint that the specific [PyTorch Ahead-of-Time Compilation][pytorch-aot-inductor] profile +is used for the artifact described by the media-type. However, users need to remember that those parameters are not +official. In order to validate the specific framework and artifact type employed by the model, the MLM properties +`mlm:framework` (see [MLM Fields](#item-properties-and-collection-fields)) and +`mlm:artifact_type` (see [Model Asset](#model-asset)) should be employed instead to perform this validation if needed. + +[iana-media-type]: https://www.iana.org/assignments/media-types/media-types.xhtml + +#### Artifact Type Enum + +This value can be used to provide additional details about the specific model artifact being described. 
+For example, PyTorch offers [various strategies][pytorch-frameworks] for providing model definitions, +such as Pickle (`.pt`), [TorchScript][pytorch-jit-script], +or [PyTorch Ahead-of-Time Compilation][pytorch-aot-inductor] (`.pt2`) approach. +Since they all refer to the same ML framework, the [Model Artifact Media-Type](#model-artifact-media-type) +can be insufficient in this case to detect which strategy should be used with. + +Following are some proposed *Artifact Type* values for corresponding approaches, but other names are +permitted as well. Note that the names are selected using the framework-specific definitions to help +the users understand the source explicitly, although this is not strictly required either. + +| Artifact Type | Description | +|--------------------|--------------------------------------------------------------------------------------| +| `torch.save` | A model artifact obtained by [Serialized Pickle Object][pytorch-save] (i.e.: `.pt`). | +| `torch.jit.script` | A model artifact obtained by [`TorchScript`][pytorch-jit-script]. | +| `torch.export` | A model artifact obtained by [`torch.export`][pytorch-export] (i.e.: `.pt2`). | +| `torch.compile` | A model artifact obtained by [`torch.compile`][pytorch-compile]. | + +[pytorch-compile]: https://pytorch.org/tutorials/intermediate/torch_compile_tutorial.html +[pytorch-export]: https://pytorch.org/docs/main/export.html +[pytorch-frameworks]: https://pytorch.org/docs/main/export.html#existing-frameworks +[pytorch-aot-inductor]: https://pytorch.org/docs/main/torch.compiler_aot_inductor.html +[pytorch-jit-script]: https://pytorch.org/docs/stable/jit.html +[pytorch-save]: https://pytorch.org/tutorials/beginner/saving_loading_models.html + +### Source Code Asset + +| Field Name | Type | Description | +|----------------|-----------|-------------------------------------------------------------------------------| +| title | string | Title of the source code. 
| +| href | string | URI to the code repository, a ZIP archive, or an individual code/script file. | +| type | string | Media-type of the URI. | +| roles | \[string] | **RECOMMENDED** Specify one or more of `["model", "code", "metadata"]` | +| description | string | Description of the source code. | +| mlm:entrypoint | string | Specific entrypoint reference in the code to use for running model inference. | + +If the referenced code does not directly offer a callable script to run the model, the `mlm:entrypoint` field should be +added to the [Asset Object][stac-asset] in order to provide a pointer to the inference function to execute the model. +For example, `my_package.my_module:predict` would refer to the `predict` function located in the `my_module` inside the +`my_package` library provided by the repository. + +It is strongly recommended to use a specific media-type such as `text/x-python` if the source code refers directly +to a script of a known programming language. Using the HTML rendering of that source file, such as though GitHub +for example, should be avoided. Using the "Raw Contents" endpoint for such cases is preferable. +The `text/html` media-type should be reserved for cases where the URI generally points at a Git repository. +Note that the URI including the specific commit hash, release number or target branch should be preferred over +other means of referring to checkout procedures, although this specification does not prohibit the use of additional +properties to better describe the Asset. + +Since the source code of a model provides useful example on how to use it, it is also recommended to define relevant +references to documentation using the `example` extension. +See the [Best Practices - Example Extension](best-practices.md#example-extension) section for more details. + +Recommended asset `roles` include `code` and `metadata`, +since the source code asset might also refer to more detailed metadata than this specification captures. 
+ +### Container Asset + +| Field Name | Type | Description | +|-------------|-----------|-----------------------------------------------------------------------------------| +| title | string | Description of the container. | +| href | string | URI of the published container, including the container registry, image and tag. | +| type | string | Media-type of the container, typically `application/vnd.oci.image.index.v1+json`. | +| roles | \[string] | Specify `["runtime"]` and any other custom roles. | + +If you're unsure how to containerize your model, we suggest starting from the latest official container image for +your framework that works with your model and pinning the container version. + +Examples: +- [Pytorch Dockerhub](https://hub.docker.com/r/pytorch/pytorch/tags) +- [Pytorch Docker Run Example](https://github.com/pytorch/pytorch?tab=readme-ov-file#docker-image) +- [Tensorflow Dockerhub](https://hub.docker.com/r/tensorflow/tensorflow/tags?page=8&ordering=last_updated) +- [Tensorflow Docker Run Example](https://www.tensorflow.org/install/docker#gpu_support) + +Using a base image for a framework looks like: + +```dockerfile +# In your Dockerfile, pull the latest base image with all framework dependencies including accelerator drivers +FROM pytorch/pytorch:2.1.2-cuda11.8-cudnn8-runtime + +### Your specific environment setup to run your model +RUN pip install my_package +``` -#### Data Type Enum +You can also use other base images. Pytorch and Tensorflow offer docker images for serving models for inference. +- [Torchserve](https://pytorch.org/serve/) +- [TFServing](https://github.com/tensorflow/serving) -It is recommended to use the [STAC Raster Extension - Raster Band Object][stac-ext-raster-band-obj] -in STAC Collections and Items that refer to a STAC Item using `dlm`'s `data_type`. The values should be one of the known -data types defined by `raster:bands`'s `data_type` as presented in [Data Types][stac-ext-raster-dtype]. 
- -If source imagery has different `data_type` values than the one specified by `dlm`'s `data_type` property, -this should provide -an indication that the source imagery might require a preprocessing step (scaling, normalization, conversion, etc.) -to adapt the samples to the relevant format expected by the described model. - -[stac-ext-raster-dtype]: https://github.com/stac-extensions/raster/#data-types -[stac-ext-raster-band-obj]: https://github.com/stac-extensions/raster/#raster-band-object - -#### No Data Value - -It is recommended to use the [STAC Raster Extension - Raster Band Object](https://github.com/stac-extensions/raster/#raster-band-object) -in STAC Collections and Items that refer to a STAC Item using `dlm`'s `nodata`. This value should either map -to the `raster:bands`'s `nodata` property of relevant bands, or a classification label value representing -a "*background*" pixel mask (see [STAC Label Extension - Raster Label Notes][stac-ext-raster-label]) -from a `label:type` defined as `raster` with the relevant `raster` asset provided. - -If source imagery has different `nodata` values than the one specified by `dlm`'s `nodata` property, this should provide -an indication that the source imagery might require a preprocessing step to adapt the samples to the values expected by -the described model. - -[stac-ext-raster-label]: https://github.com/stac-extensions/label#raster-label-notes - -#### Model Band Object - -Can be combined with `eo:bands`'s [`Band Object`][stac-ext-eo-band-obj]. - -[stac-ext-eo-band-obj]: https://github.com/stac-extensions/eo#band-object - -| Field Name | Type | Description | -|-----------------|---------|------------------------------------------| -| index | integer | **REQUIRED** Index of the spectral band. | -| name | string | Short name of the band for convenience. 
| - -### Inputs Object - -| Field Name | Type | Description | -|-------------------------|---------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------| -| name | string | Python name of the input variable. | -| input_tensors | [Tensor Object](#tensor-object) | Shape of the input tensor ($N \times C \times H \times W$). | -| scaling_factor | number | Scaling factor to apply to get data within `[0,1]`. For instance `scaling_factor=0.004` for 8-bit data. | -| normalization:mean | list of numbers | Mean vector value to be removed from the data. The vector size must be consistent with `input_tensors:dim` and `selected_bands`. | -| normalization:std | list of numbers | Standard-deviation values used to normalize the data. The vector size must be consistent with `input_tensors:dim` and `selected_bands`. | -| selected_band | list of integers | Specifies the bands selected from the data described in dlm:data. | -| pre_processing_function | string | Defines a python pre-processing function (path and inputs should be specified). | - -#### Tensor Object - -| Field Name | Type | Description | -|------------|--------|-------------------------------------| -| batch | number | Batch size dimension (must be > 0). | -| dim | number | Number of channels (must be > 0). | -| height | number | Height of the tensor (must be > 0). | -| width | number | Width of the tensor (must be > 0). | - -### Architecture Object - -| Field Name | Type | Description | -|-------------------------|---------|-------------------------------------------------------------| -| total_nb_parameters | integer | Total number of parameters. | -| estimated_total_size_mb | number | The equivalent memory size in MB. | -| type | string | Type of network (ex: ResNet-18). | -| summary | string | Summary of the layers, can be the output of `print(model)`. 
| -| pretrained | string | Indicates the source of the pretraining (ex: ImageNet). | - -### Runtime Object - -| Field Name | Type | Description | -|-------------------|------------------------------------|------------------------------------------------------------------------------| -| framework | string | Used framework (ex: PyTorch, TensorFlow). | -| version | string | Framework version (some models require a specific version of the framework). | -| model_handler | string | Inference execution function. | -| model_src_url | string | Url of the source code (ex: GitHub repo). | -| model_commit_hash | string | Hash value pointing to a specific version of the code. | -| docker | \[[Docker Object](#docker-object)] | Information for the deployment of the model in a docker instance. | - -#### Docker Object - -| Field Name | Type | Description | -|-------------|---------|-------------------------------------------------------| -| docker_file | string | Url of the Dockerfile. | -| image_name | string | Name of the docker image. | -| tag | string | Tag of the image. | -| working_dir | string | Working directory in the instance that can be mapped. | -| run | string | Running command. | -| gpu | boolean | True if the docker image requires a GPU. | - -### Outputs Object - -| Field Name | Type | Description | -|--------------------------|-------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| task | [Task Enum](#task-enum) | Specifies the Machine Learning task for which the output can be used for. | -| number_of_classes | integer | Number of classes. | -| final_layer_size | \[integer] | Sizes of the output tensor as ($N \times C \times H \times W$). 
| -| class_name_mapping | list | Mapping of the output index to a short class name, for each record we specify the index and the class name. | -| dont_care_index | integer | Some models are using a *do not care* value which is ignored in the input data. This is an optional parameter. | -| post_processing_function | string | Some models are using a complex post-processing that can be specified using a post processing function. The python package should be specified as well as the input and outputs type. For example:`my_python_module_name:my_processing_function(Tensor) -> Tensor` | - -#### Task Enum - -It is recommended to define `task` with one of the following values: -- `regression` -- `classification` -- `object detection` -- `segmentation` (generic) -- `semantic segmentation` -- `instance segmentation` -- `panoptic segmentation` - -This should align with the `label:tasks` values defined in [STAC Label Extension][stac-ext-label-props] for relevant -STAC Collections and Items employed with the model described by this extension. +## Relation Types -[stac-ext-label-props]: https://github.com/stac-extensions/label#item-properties +The following types should be used as applicable `rel` types in the +[Link Object](https://github.com/radiantearth/stac-spec/tree/master/item-spec/item-spec.md#link-object) +of STAC Items describing Band Assets that result from the inference of a model described by the MLM extension. -## Relation types +| Type | Description | +|--------------|----------------------------------------------------------| +| derived_from | This link points to a STAC Collection or Item using MLM. | -The following types should be used as applicable `rel` types in the -[Link Object](https://github.com/radiantearth/stac-spec/tree/master/item-spec/item-spec.md#link-object). 
+It is recommended that the link using `derived_from` referring to another STAC definition using the MLM extension +specifies the [`mlm:name`](#item-properties-and-collection-fields) value to make the derived reference more explicit. -| Type | Description | -|----------------|---------------------------------------| -| fancy-rel-type | This link points to a fancy resource. | +Note that a derived product from model inference described by STAC should also consider using +additional indications that it came from a model, such as described by +the [Best Practices - Processing Extension](best-practices.md#processing-extension). ## Contributing @@ -216,14 +654,12 @@ for running tests are copied here for convenience. ### Running tests -The same checks that run as checks on PRs are part of the repository and can be run locally to verify -that changes are valid. -To run tests locally, you'll need `npm`, which is a standard part of any [node.js][nodejs] installation. +The same checks that run as checks on PRs are part of the repository and can be run locally to verify that changes +are valid. To run tests locally, you'll need `npm`, which is a standard part of +any [node.js](https://nodejs.org/en/download/) installation. -[nodejs]: https://nodejs.org/en/download/ +First, install everything with npm once. Navigate to the root of this repository and on your command line run: -First you'll need to install everything with npm once. Just navigate to the root of this repository and on -your command line run: ```bash npm install ``` diff --git a/README_DLM_LEGACY.md b/README_DLM_LEGACY.md new file mode 100644 index 0000000..fbc815a --- /dev/null +++ b/README_DLM_LEGACY.md @@ -0,0 +1,9 @@ +# Deep Learning Model (DLM) Extension + +> :information_source:
+> This is legacy documentation references of Deep Learning Model extension +> preceding the current Machine Learning Model (MLM) extension. + +Check the original [Technical Report](https://github.com/crim-ca/CCCOT03/raw/main/CCCOT03_Rapport%20Final_FINAL_EN.pdf). + +![Image Description](https://i.imgur.com/cVAg5sA.png) diff --git a/README_STAC_MODEL.md b/README_STAC_MODEL.md new file mode 100644 index 0000000..b524f8a --- /dev/null +++ b/README_STAC_MODEL.md @@ -0,0 +1,101 @@ +# stac-model + + + +
+ +[![Python support][bp1]][bp2] +[![PyPI Release][bp3]][bp2] +[![Repository][bscm1]][bp4] +[![Releases][bscm2]][bp5] +[![Docs][bdoc1]][bdoc2] + +[![Contributions Welcome][bp8]][bp9] + +[![Poetry][bp11]][bp12] +[![Pre-commit][bp15]][bp16] +[![Semantic versions][blic3]][bp5] +[![Pipelines][bscm6]][bscm7] + +_A PydanticV2 and PySTAC validation and serialization library for the STAC ML Model Extension_ + +
+ +> :warning:
+> FIXME: update description with ML framework connectors (pytorch, scikit-learn, etc.) + +## Installation + +```shell +pip install -U stac-model +``` + +or install with `Poetry`: + +```shell +poetry add stac-model +``` +Then you can run + +```shell +stac-model --help +``` + +## Creating example metadata JSON for a STAC Item + +```shell +stac-model +``` + +This will make [this example item](./examples/item_basic.json) for an example model. + +## :chart_with_upwards_trend: Releases + +You can see the list of available releases on the [GitHub Releases][github-releases] page. + +## :page_facing_up: License +[![License][blic1]][blic2] + +This project is licenced under the terms of the `Apache Software License 2.0` licence. +See [LICENSE][blic2] for more details. + +## :heartpulse: Credits +[![Python project templated from galactipy.][bp6]][bp7] + + + +[bp1]: https://img.shields.io/pypi/pyversions/stac-model?style=for-the-badge +[bp2]: https://pypi.org/project/stac-model/ +[bp3]: https://img.shields.io/pypi/v/stac-model?style=for-the-badge&logo=pypi&color=3775a9 +[bp4]: https://github.com/stac-extensions/stac-model +[bp5]: https://github.com/stac-extensions/stac-model/releases +[bp6]: https://img.shields.io/badge/made%20with-galactipy%20%F0%9F%8C%8C-179287?style=for-the-badge&labelColor=193A3E +[bp7]: https://kutt.it/7fYqQl +[bp8]: https://img.shields.io/static/v1.svg?label=Contributions&message=Welcome&color=0059b3&style=for-the-badge +[bp9]: https://github.com/stac-extensions/stac-model/blob/main/CONTRIBUTING.md +[bp11]: https://img.shields.io/endpoint?url=https://python-poetry.org/badge/v0.json&style=for-the-badge +[bp12]: https://python-poetry.org/ + +[bp15]: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white&style=for-the-badge +[bp16]: https://github.com/stac-extensions/stac-model/blob/main/.pre-commit-config.yaml + +[blic1]: https://img.shields.io/github/license/stac-extensions/stac-model?style=for-the-badge +[blic2]: 
https://github.com/stac-extensions/stac-model/blob/main/LICENCE +[blic3]: https://img.shields.io/badge/%F0%9F%93%A6-semantic%20versions-4053D6?style=for-the-badge + +[github-releases]: https://github.com/stac-extensions/stac-model/releases + +[bscm1]: https://img.shields.io/badge/GitHub-100000?style=for-the-badge&logo=github&logoColor=white +[bscm2]: https://img.shields.io/github/v/release/stac-extensions/stac-model?style=for-the-badge&logo=semantic-release&color=347d39 +[bscm6]: https://img.shields.io/github/actions/workflow/status/stac-extensions/stac-model/build.yml?style=for-the-badge&logo=github +[bscm7]: https://github.com/stac-extensions/stac-model/actions/workflows/build.yml + +[hub1]: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuring-dependabot-version-updates#enabling-dependabot-version-updates +[hub2]: https://github.com/marketplace/actions/close-stale-issues +[hub5]: https://github.com/stac-extensions/stac-model/blob/main/.github/workflows/build.yml +[hub6]: https://docs.github.com/en/code-security/dependabot +[hub8]: https://github.com/stac-extensions/stac-model/blob/main/.github/release-drafter.yml +[hub9]: https://github.com/stac-extensions/stac-model/blob/main/.github/.stale.yml + +[bdoc1]: https://img.shields.io/badge/docs-github%20pages-0a507a?style=for-the-badge +[bdoc2]: https://stac-extensions.github.io/stac-model diff --git a/best-practices.md b/best-practices.md new file mode 100644 index 0000000..f574b1e --- /dev/null +++ b/best-practices.md @@ -0,0 +1,271 @@ +# ML Model Extension Best Practices + +This document makes a number of recommendations for creating real world ML Model Extensions. +None of them are required to meet the core specification, but following these practices will improve the documentation +of your model and make life easier for client tooling and users. 
They come about from practical experience of +implementors and introduce a bit more 'constraint' for those who are creating STAC objects representing their +models or creating tools to work with STAC. + +- [Using STAC Common Metadata Fields for the ML Model Extension](#using-stac-common-metadata-fields-for-the-ml-model-extension) +- [Recommended Extensions to Compose with the ML Model Extension](#recommended-extensions-to-compose-with-the-ml-model-extension) + - [Processing Extension](#processing-extension) + - [ML-AOI and Label Extensions](#ml-aoi-and-label-extensions) + - [Classification Extension](#classification-extension) + - [Scientific Extension](#scientific-extension) + - [File Extension](#file-extension) + - [Example Extension](#example-extension) + - [Version Extension](#version-extension) + +## Using STAC Common Metadata Fields for the ML Model Extension + +It is recommended to use the `start_datetime` and `end_datetime`, `geometry`, and `bbox` in a STAC Item, +and the corresponding +[Extent Object](https://github.com/radiantearth/stac-spec/blob/master/collection-spec/collection-spec.md#extent-object) +in a Collection, to represent the *recommended context* of the data the model was trained with and for which the model +should have appropriate domain knowledge for inference. + +For example, if a model was trained using the [EuroSAT][EuroSAT-github] dataset, and represented using MLM, it would +be reasonable to describe it with a time range of 2015-2018 and an area corresponding to the European Urban Atlas, as +described by the [EuroSAT paper][EuroSAT-paper]. However, it could also be considered adequate to define a wider extent, +considering that it would not be unexpected to have reasonably similar classes and domain distribution in following +years and in other locations. Provided that the exact extent applicable for a model is difficult to define reliably, +it is left to the good judgement of users to provide adequate values. 
Note that users employing the model can also +choose to apply it for contexts outside the *recommended* extent for the same reason. + +[EuroSAT-github]: https://github.com/phelber/EuroSAT +[EuroSAT-paper]: https://www.researchgate.net/publication/319463676 + +As another example, let us consider a model which is trained on imagery from all over the world +and is robust enough to be applied to any time period. In this case, the common metadata to use with the model +could include the bbox of "the world" `[-90, -180, 90, 180]` and the `start_datetime` and `end_datetime` range +would ideally be generic values like `["1900-01-01T00:00:00Z", null]` (see warning below). +However, due to limitations with the STAC 1.0 specification, this time extent is not applicable. + +> :warning:
+> The `null` value is not allowed for datetime specification. +> As a workaround, the `end_datetime` can be set with a "very large value" +> (similarly to `start_datetime` set with a small value), such as `"9999-12-31T23:59:59Z"`. +> Alternatively, the model can instead be described with only `datetime` corresponding to its publication date. +>

+> For more details, see the following [discussion](https://github.com/radiantearth/stac-spec/issues/1268). + +It is to be noted that generic and very broad spatiotemporal +extents like above rarely reflect the reality regarding the capabilities and precision of the model to predict reliable +results. If a more restrained area and time of interest can be identified, such as the ranges for which the training +dataset applies, or a test split dataset that validates the applicability of the model on other domains, those should +be provided instead. Nevertheless, users of the model are still free to apply it outside the specified extents. + +If specific datasets with training/validation/test splits are known to support the claims of the suggested extent for +the model, it is recommended that they are included as reference to the STAC Item/Collection using MLM. For more +information regarding these references, see the [ML-AOI and Label Extensions](#ml-aoi-and-label-extensions) details. + +## Recommended Extensions to Compose with the ML Model Extension + +### Processing Extension + +It is recommended to use at least the `processing:lineage` and `processing:level` fields from +the [Processing Extension](https://github.com/stac-extensions/processing) to make it clear +how [Model Input Objects](./README.md#model-input-object) are processed by the data provider prior to an +inference preprocessing pipeline. This can help users locate the correct version of the dataset used during model +inference or help them reproduce the data processing pipeline. 
+ +For example: + +```json +{ + "processing:lineage": "GRD Post Processing", + "processing:level": "L1C", + "processing:facility": "Copernicus S1 Core Ground Segment - DPA", + "processing:software": { + "Sentinel-1 IPF": "002.71" + } +} +``` + +STAC Items or STAC Assets resulting from the model inference should be +annotated with [`processing:level = L4`](https://github.com/stac-extensions/processing?tab=readme-ov-file#suggested-processing-levels) +(as described below) to indicate that they correspond from the output of an ML model. + +> processing:level = L4
+> Model output or results from analyses of lower level data +> (i.e.: variables that are not directly measured by the instruments, but are derived from these measurements). + +Furthermore, the [`processing:expression`](https://github.com/stac-extensions/processing?tab=readme-ov-file#expression-object) +should be specified with a reference to the STAC Item employing the MLM extension to provide full context of the source +of the derived product. + +A potential representation of a STAC Asset could be as follows: +```json +{ + "model-output": { + "mlm:name": "" + } + } +} +``` + +Furthermore, the STAC Item representing the derived product could also include +a [Link Object](https://github.com/radiantearth/stac-spec/tree/master/item-spec/item-spec.md#link-object) +referring back to the MLM definition using `rel: derived_from`, as described in +[MLM Relation Types](README.md#relation-types). Such a link would look something like the following: + +```json +{ + "links": [ + { + "rel": "derived_from", + "type": "application/geo+json", + "href": "", + "mlm:name": "", + "ml-aoi:split": "train" + }, + { + "rel": "derived_from", + "type": "application/json", + "href": "", + "ml-aoi:split": "validate" + }, + { + "rel": "derived_from", + "type": "application/json", + "href": "", + "ml-aoi:split": "test" + } + ] +} +``` + +### Classification Extension + +Since it is expected that a model will provide some kind of classification values as output, the +[Classification Extension](https://github.com/stac-extensions/classification) can be leveraged inside +MLM definition to indicate which class values can be contained in the resulting output from the model prediction. + +For more details, see the [Model Output Object](README.md#model-output-object) definition. + +> :information_source:
+> Update according to [stac-extensions/classification#48](https://github.com/stac-extensions/classification/issues/48). + +### Scientific Extension + +Provided that most models derive from previous scientific work, it is strongly recommended to employ the +[Scientific Extension][stac-ext-sci] to provide references corresponding to the +original source of the model (`sci:doi`, `sci:citation`). This can help users find more information about the model, +its underlying architecture, or ways to improve it by piecing together the related work (`sci:publications`) that +led to its creation. + +This extension can also be used for the purpose of publishing new models, by providing to users the necessary details +regarding how they should cite its use (i.e.: `sci:citation` field and `cite-as` relation type). + +[stac-ext-sci]: https://github.com/stac-extensions/scientific + +### File Extension + +In order to provide a reliable and reproducible machine learning pipeline, external references to data required by the +model should employ the [file](https://github.com/stac-extensions/file?tab=readme-ov-file#asset--link-object-fields) to +validate that they are properly retrieved for inference. + +One of the most typical cases is the definition of an external file reference to model weights, often stored on a +Git LFS or S3 bucket due to their size. Providing the `file:checksum` and `file:size` for this file can help ensure +that the model is properly instantiated from the expected weights, or that sufficient storage is allocated to run it. 
+ +```json +{ + "stac_extensions": [ + "https://stac-extensions.github.io/mlm/v1.0.0/schema.json", + "https://stac-extensions.github.io/file/v2.1.0/schema.json" + ], + "assets": { + "model": { + "type": "application/x-pytorch", + "href": "", + "roles": [ + "mlm:model", + "mlm:weights", + "data" + ], + "file:size": 123456789, + "file:checksum": "12209f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08", + "mlm:artifact_type": "torch.save" + } + } +} +``` + +### Example Extension + +In order to help users understand how to apply and run the described machine learning model, +the [Example Extension](https://github.com/stac-extensions/example-links#fields) can be used to provide code examples +demonstrating how it can be applied. + +For example, a [Model Card on Hugging Face](https://huggingface.co/docs/hub/en/model-cards) +is often provided (see [Hugging Face Model examples](https://huggingface.co/models)) to describe the model, which +can embed sample code and references to more details about the model. This kind of reference should be added under +the `links` of the STAC Item using MLM. + +Typically, a STAC Item using the MLM extension to describe the training or +inference strategies to apply a model should define the [Source Code Asset](README.md#source-code-asset). +This code is in itself ideal to guide users how to run it, and should therefore be replicated as an `example` link +reference to offer more code samples to execute the model. + +> :information_source:
+> Update according to [stac-extensions/example-links#4](https://github.com/stac-extensions/example-links/issues/4). + +### Version Extension + +In the event that a model is retrained with gradually added annotations or improved training strategies leading to +better performances, the existing model and newer models represented by STAC Items with MLM should also make use of +the [Version Extension][stac-ext-version]. Using the fields and link relation types defined +by this extension, the retraining cycle of the model can better be described, with a full history of the newer versions +developed. + +Additionally, the `version:experimental` field should be considered for models being trained and still under evaluation +before widespread deployment. This can be particularly useful for annotating model experiments during the cross-validation +training process to find the "best model". This field could also be used to indicate if a model is provided for +educational purposes only. + +[stac-ext-version]: https://github.com/stac-extensions/version diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 0000000..8c81a07 --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,25 @@ +FROM python:3.10-slim-buster + +ENV LANG=C.UTF-8 \ + LC_ALL=C.UTF-8 \ + PATH="${PATH}:/root/.poetry/bin" + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + curl \ + && rm -rf /var/lib/apt/lists/* + +COPY pyproject.toml ./ + +# Install Poetry +RUN curl -sSL https://install.python-poetry.org | POETRY_HOME=/opt/poetry python && \ + cd /usr/local/bin && \ + ln -s /opt/poetry/bin/poetry && \ + poetry config virtualenvs.create false + +# Allow installing dev dependencies to run tests +ARG INSTALL_DEV=false +RUN bash -c "if [ $INSTALL_DEV == 'true' ] ; then poetry install --no-root ; else poetry install --no-root --no-dev ; fi" + +CMD mkdir -p /workspace +WORKDIR /workspace diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 0000000..614d5b4 
--- /dev/null +++ b/docker/README.md @@ -0,0 +1,47 @@ +# Docker for stac-model + +## Installation + +To create Docker you need to run: + +```bash +make docker-build +``` + +which is equivalent to: + +```bash +make docker-build VERSION=latest +``` + +You may provide name and version for the image. +Default name is `IMAGE := stac_model`. +Default version is `VERSION := latest`. + +```bash +make docker-build IMAGE=some_name VERSION=0.1.0 +``` + +## Usage + +```bash +docker run -it --rm \ + -v $(pwd):/workspace \ + stac_model bash +``` + +## How to clean up + +To uninstall docker image run `make docker-remove` with `VERSION`: + +```bash +make docker-remove VERSION=0.1.0 +``` + +you may also choose the image name + +```bash +make docker-remove IMAGE=some_name VERSION=latest +``` + +If you want to clean all, including `build` and `pycache` run `make cleanup` diff --git a/examples/collection.json b/examples/collection.json index c710628..7a71b3f 100644 --- a/examples/collection.json +++ b/examples/collection.json @@ -4,46 +4,43 @@ "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json" ], "type": "Collection", - "id": "EO-DL-model-catalog", - "title": "A title", - "description": "Collection that refers to a STAC Item with DLM Extension", + "id": "ml-model-examples", + "title": "Machine Learning Model examples", + "description": "Collection of items contained in the Machine Learning Model examples.", "license": "Apache-2.0", "extent": { "spatial": { "bbox": [ [ - 172.9, - 1.3, - 173, - 1.4 + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 ] ] }, "temporal": { "interval": [ [ - "2015-06-23T00:00:00Z", - null + "1900-01-01T00:00:00Z", + "9999-12-31T23:59:59Z" ] ] } }, - "assets": { - "example": { - "href": "item.json" - } - }, "item_assets": { - "data": { + "weights": { + "title": "model weights", "roles": [ - "data" - ], + "mlm:model", + "mlm:weights" + ] } }, "summaries": { "datetime": { - "minimum": "2015-06-23T00:00:00Z", - 
"maximum": "2019-07-10T13:44:56Z" + "minimum": "1900-01-01T00:00:00Z", + "maximum": "9999-12-31T23:59:59Z" } }, "links": [ @@ -52,7 +49,19 @@ "rel": "self" }, { - "href": "item.json", + "href": "item_basic.json", + "rel": "item" + }, + { + "href": "item_eo_bands.json", + "rel": "item" + }, + { + "href": "item_raster_bands.json", + "rel": "item" + }, + { + "href": "item_multi_io.json", "rel": "item" } ] diff --git a/examples/item.json b/examples/item.json deleted file mode 100644 index 850a604..0000000 --- a/examples/item.json +++ /dev/null @@ -1,256 +0,0 @@ -{ - "stac_version": "1.0.0", - "stac_extensions": [ - "https://schemas.stacspec.org/v1.0.0-beta.3/extensions/dl-model/json-schema/schema.json", - "https://stac-extensions.github.io/eo/v1.1.0/schema.json", - "https://stac-extensions.github.io/processing/v1.1.0/schema.json", - "https://stac-extensions.github.io/scientific/v1.0.0/schema.json" - ], - "id": "dlm-resnet18-unet-scse", - "type": "Feature", - "geometry": { - "type": "Polygon", - "coordinates": [ - [ - [ - 180, - 90 - ], - [ - -180, - -90 - ], - [ - -180, - 90 - ], - [ - 180, - 90 - ], - [ - 180, - -90 - ] - ] - ] - }, - "bbox": [ - -180, - -90, - 180, - 90 - ], - "collection": "EO-DL-model-catalog", - "links": [ - { - "rel": "self", - "href": "https://landsat-stac.s3.amazonaws.com/some-eo-models/example-thelper-item.json" - }, - { - "rel": "collection", - "href": "https://landsat-stac.s3.amazonaws.com/some-eo-models/catalog.json" - } - ], - "assets": [ - { - "model_archive": { - "href": "https://drive.google.com/file/d/1PYyZVgH95454sb9LYHSfchbg8GuT__fR/view?usp=sharing", - "type": "application/zip", - "title": "model archive", - "description": "model archive as a google drive link", - "role": [ - "model artefact" - ] - } - } - ], - "properties": { - "datetime": "2016-05-03T13:22:30Z", - "title": "resnet18+unet_scse", - "description": "UNet architecture with a resnet18 backbone and a SCSE layer fine-tuned on Pleiade imagery", - "license": "MIT", - 
"created": "2020-12-12T00:00:01.000Z", - "updated": "2021-01-04T00:30:55.000Z", - "providers": [ - { - "name": "Effigis Inc.", - "roles": [ - "image licensor" - ], - "url": "https://effigis.com/en/" - }, - { - "name": "Airbus Inc.", - "roles": [ - "image provider" - ], - "url": "https://www.intelligence-airbusds.com/" - } - ], - "platform": "Pleiade", - "gsd": 0.5, - "eo:bands": [ - { - "name": "50-cm panchromatic", - "common_name": "pancro", - "center_wavelength": 400 - }, - { - "name": "blue", - "common_name": "blue", - "center_wavelength": 490 - }, - { - "name": "green", - "common_name": "green", - "center_wavelength": 500 - }, - { - "name": "red", - "common_name": "red", - "center_wavelength": 660 - }, - { - "name": "Near Infrared", - "common_name": "nir", - "center_wavelength": 850 - } - ], - "sci:publications": [ - { - "citation": "Abhijit Guha Roy and Nassir Navab and Christian Wachinger (2018). Concurrent Spatial and Channel Squeeze & Excitation in Fully Convolutional Networks, arXiv 1803.02579", - "doi": "10.1007/978-3-030-00928-1_48" - } - ], - "processing:level": "L4", - "dlm:runtime": { - "framework": "PyTorch", - "version": 1.5, - "model_handler": "thelper.cli.inference_session", - "model_src_url": "https://github.com/crim-ca/gin-model-repo", - "model_commit_hash": null, - "requirement_file": null, - "docker": { - "docker_file": "https://github.com/crim-ca/CCCOT03/blob/main/docker/thelper-geo.dockerfile", - "image_name": "thelper-geo:latest", - "tag": null, - "gpu": true, - "working_dir": "/workspace", - "run": "thelper infer --config /workspace/config.yml --save-dir /workspace --ckpt-path /workspace/ckpt.best.pth" - } - }, - "dlm:archive": [ - { - "name": "config.yml", - "role": [ - "config file" - ] - }, - { - "name": "./test_pleiade/256/input_test.tif", - "role": [ - "test set" - ] - }, - { - "name": "ckpt.best.pth", - "role": [ - "model weight" - ] - } - ], - "dlm:data": { - "process_level": "ortho", - "dtype": "uint16", - "number_of_bands": 4, - 
"useful_bands": [ - { - "index": 2, - "name": "red" - }, - { - "index": 1, - "name": "green" - }, - { - "index": 3, - "name": "nir" - } - ], - "nodata_value": 0, - "test_file": "input_test.tif", - "item_examples": [ - { - "title": "a pleiade stac item", - "url": "https://example.com/stac/pleiade/item.json" - } - ] - }, - "dlm:inputs": { - "name": "data", - "input_tensors": { - "batch": 1, - "channels": 3, - "height": 224, - "width": 224 - }, - "scaling_factor": 0.003921569, - "normalization:mean": [ - 0.245, - 0.34, - 0.67 - ], - "normalization:std": [ - 0.1, - 0.1, - 0.2 - ], - "selected_bands": [ - 0, - 1, - 3 - ], - "pre_processing_function": null - }, - "dlm:outputs": { - "task": "semantic segmentation", - "number_of_classes": 5, - "dont_care_index": 0, - "final_layer_size": [ - 1, - 5, - 64, - 64 - ], - "class_name_mapping": [ - { - "0": "dontcare" - }, - { - "1": "Bare Exposed Rock" - }, - { - "2": "High density Residential" - }, - { - "3": "Cropland" - }, - { - "4": "Mixed Forest Land" - }, - { - "5": "Lake" - } - ], - "post_processing_function": null - }, - "dlm:architecture": { - "total_nb_parameters": 42813873, - "estimated_total_size_mb": 183.72, - "type": "unet-resnet-18-scse", - "pretrained": "imagenet", - "summary": "----------------------------------------------------------------\n Layer (type) Output Shape Param\n================================================================\n Conv2d-1 [-1, 64, 32, 32] 9,408\n BatchNorm2d-2 [-1, 64, 32, 32] 128\n ReLU-3 [-1, 64, 32, 32] 0\n MaxPool2d-4 [-1, 64, 16, 16] 0\n Conv2d-5 [-1, 64, 16, 16] 36,864\n BatchNorm2d-6 [-1, 64, 16, 16] 128\n ReLU-7 [-1, 64, 16, 16] 0\n Conv2d-8 [-1, 64, 16, 16] 36,864\n BatchNorm2d-9 [-1, 64, 16, 16] 128\n ReLU-10 [-1, 64, 16, 16] 0\n BasicBlock-11 [-1, 64, 16, 16] 0\n Conv2d-12 [-1, 64, 16, 16] 36,864\n BatchNorm2d-13 [-1, 64, 16, 16] 128\n ReLU-14 [-1, 64, 16, 16] 0\n Conv2d-15 [-1, 64, 16, 16] 36,864\n BatchNorm2d-16 [-1, 64, 16, 16] 128\n ReLU-17 [-1, 64, 16, 16] 0\n 
BasicBlock-18 [-1, 64, 16, 16] 0\n Conv2d-19 [-1, 128, 8, 8] 73,728\n BatchNorm2d-20 [-1, 128, 8, 8] 256\n ReLU-21 [-1, 128, 8, 8] 0\n Conv2d-22 [-1, 128, 8, 8] 147,456\n BatchNorm2d-23 [-1, 128, 8, 8] 256\n Conv2d-24 [-1, 128, 8, 8] 8,192\n BatchNorm2d-25 [-1, 128, 8, 8] 256\n ReLU-26 [-1, 128, 8, 8] 0\n BasicBlock-27 [-1, 128, 8, 8] 0\n Conv2d-28 [-1, 128, 8, 8] 147,456\n BatchNorm2d-29 [-1, 128, 8, 8] 256\n ReLU-30 [-1, 128, 8, 8] 0\n Conv2d-31 [-1, 128, 8, 8] 147,456\n BatchNorm2d-32 [-1, 128, 8, 8] 256\n ReLU-33 [-1, 128, 8, 8] 0\n BasicBlock-34 [-1, 128, 8, 8] 0\n Conv2d-35 [-1, 256, 4, 4] 294,912\n BatchNorm2d-36 [-1, 256, 4, 4] 512\n ReLU-37 [-1, 256, 4, 4] 0\n Conv2d-38 [-1, 256, 4, 4] 589,824\n BatchNorm2d-39 [-1, 256, 4, 4] 512\n Conv2d-40 [-1, 256, 4, 4] 32,768\n BatchNorm2d-41 [-1, 256, 4, 4] 512\n ReLU-42 [-1, 256, 4, 4] 0\n BasicBlock-43 [-1, 256, 4, 4] 0\n Conv2d-44 [-1, 256, 4, 4] 589,824\n BatchNorm2d-45 [-1, 256, 4, 4] 512\n ReLU-46 [-1, 256, 4, 4] 0\n Conv2d-47 [-1, 256, 4, 4] 589,824\n BatchNorm2d-48 [-1, 256, 4, 4] 512\n ReLU-49 [-1, 256, 4, 4] 0\n BasicBlock-50 [-1, 256, 4, 4] 0\n Conv2d-51 [-1, 512, 2, 2] 1,179,648\n BatchNorm2d-52 [-1, 512, 2, 2] 1,024\n ReLU-53 [-1, 512, 2, 2] 0\n Conv2d-54 [-1, 512, 2, 2] 2,359,296\n BatchNorm2d-55 [-1, 512, 2, 2] 1,024\n Conv2d-56 [-1, 512, 2, 2] 131,072\n BatchNorm2d-57 [-1, 512, 2, 2] 1,024\n ReLU-58 [-1, 512, 2, 2] 0\n BasicBlock-59 [-1, 512, 2, 2] 0\n Conv2d-60 [-1, 512, 2, 2] 2,359,296\n BatchNorm2d-61 [-1, 512, 2, 2] 1,024\n ReLU-62 [-1, 512, 2, 2] 0\n Conv2d-63 [-1, 512, 2, 2] 2,359,296\n BatchNorm2d-64 [-1, 512, 2, 2] 1,024\n ReLU-65 [-1, 512, 2, 2] 0\n BasicBlock-66 [-1, 512, 2, 2] 0\n MaxPool2d-67 [-1, 512, 1, 1] 0\n Conv2d-68 [-1, 1024, 1, 1] 4,719,616\n BatchNorm2d-69 [-1, 1024, 1, 1] 2,048\n ReLU-70 [-1, 1024, 1, 1] 0\n_ActivatedBatchNorm-71 [-1, 1024, 1, 1] 0 AdaptiveAvgPool2d-72 [-1, 1024, 1, 1] 0\n Linear-73 [-1, 64] 65,600\n ReLU-74 [-1, 64] 0\n Linear-75 [-1, 1024] 66,560\n Conv2d-76 
[-1, 1, 1, 1] 1,024\n SCSEBlock-77 [-1, 1024, 1, 1] 0\n ConvTranspose2d-78 [-1, 512, 2, 2] 8,389,120\n DecoderUnetSCSE-79 [-1, 512, 2, 2] 0\n Conv2d-80 [-1, 1024, 2, 2] 9,438,208\n BatchNorm2d-81 [-1, 1024, 2, 2] 2,048\n ReLU-82 [-1, 1024, 2, 2] 0\n_ActivatedBatchNorm-83 [-1, 1024, 2, 2] 0 AdaptiveAvgPool2d-84 [-1, 1024, 1, 1] 0\n Linear-85 [-1, 64] 65,600\n ReLU-86 [-1, 64] 0\n Linear-87 [-1, 1024] 66,560\n Conv2d-88 [-1, 1, 2, 2] 1,024\n SCSEBlock-89 [-1, 1024, 2, 2] 0\n ConvTranspose2d-90 [-1, 256, 4, 4] 4,194,560\n DecoderUnetSCSE-91 [-1, 256, 4, 4] 0\n Conv2d-92 [-1, 512, 4, 4] 2,359,808\n BatchNorm2d-93 [-1, 512, 4, 4] 1,024\n ReLU-94 [-1, 512, 4, 4] 0\n_ActivatedBatchNorm-95 [-1, 512, 4, 4] 0 AdaptiveAvgPool2d-96 [-1, 512, 1, 1] 0\n Linear-97 [-1, 32] 16,416\n ReLU-98 [-1, 32] 0\n Linear-99 [-1, 512] 16,896\n Conv2d-100 [-1, 1, 4, 4] 512\n SCSEBlock-101 [-1, 512, 4, 4] 0\nConvTranspose2d-102 [-1, 128, 8, 8] 1,048,704 DecoderUnetSCSE-103 [-1, 128, 8, 8] 0\n Conv2d-104 [-1, 256, 8, 8] 590,080\n BatchNorm2d-105 [-1, 256, 8, 8] 512\n ReLU-106 [-1, 256, 8, 8] 0\n_ActivatedBatchNorm-107 [-1, 256, 8, 8] 0 AdaptiveAvgPool2d-108 [-1, 256, 1, 1] 0\n Linear-109 [-1, 16] 4,112\n ReLU-110 [-1, 16] 0\n Linear-111 [-1, 256] 4,352\n Conv2d-112 [-1, 1, 8, 8] 256\n SCSEBlock-113 [-1, 256, 8, 8] 0\nConvTranspose2d-114 [-1, 64, 16, 16] 262,208 DecoderUnetSCSE-115 [-1, 64, 16, 16] 0\n Conv2d-116 [-1, 128, 16, 16] 147,584\n BatchNorm2d-117 [-1, 128, 16, 16] 256\n ReLU-118 [-1, 128, 16, 16] 0\n_ActivatedBatchNorm-119 [-1, 128, 16, 16] 0 AdaptiveAvgPool2d-120 [-1, 128, 1, 1] 0\n Linear-121 [-1, 8] 1,032\n ReLU-122 [-1, 8] 0\n Linear-123 [-1, 128] 1,152\n Conv2d-124 [-1, 1, 16, 16] 128\n SCSEBlock-125 [-1, 128, 16, 16] 0\nConvTranspose2d-126 [-1, 32, 32, 32] 65,568 DecoderUnetSCSE-127 [-1, 32, 32, 32] 0\n Conv2d-128 [-1, 64, 32, 32] 55,360\n BatchNorm2d-129 [-1, 64, 32, 32] 128\n ReLU-130 [-1, 64, 32, 32] 0\n_ActivatedBatchNorm-131 [-1, 64, 32, 32] 0 AdaptiveAvgPool2d-132 [-1, 64, 
1, 1] 0\n Linear-133 [-1, 4] 260\n ReLU-134 [-1, 4] 0\n Linear-135 [-1, 64] 320\n Conv2d-136 [-1, 1, 32, 32] 64\n SCSEBlock-137 [-1, 64, 32, 32] 0\nConvTranspose2d-138 [-1, 16, 64, 64] 16,400 DecoderUnetSCSE-139 [-1, 16, 64, 64] 0\n Conv2d-140 [-1, 64, 64, 64] 31,808\n BatchNorm2d-141 [-1, 64, 64, 64] 128\n ReLU-142 [-1, 64, 64, 64] 0\n_ActivatedBatchNorm-143 [-1, 64, 64, 64] 0\n Conv2d-144 [-1, 5, 64, 64] 325\nEncoderDecoderNet-145 [-1, 5, 64, 64] 0 ================================================================ Total params= 42,813,873 Trainable params= 42,813,873 Non-trainable params= 0 ---------------------------------------------------------------- Input size (MB)= 0.05 Forward/backward pass size (MB)= 20.35 Params size (MB)= 163.32 Estimated Total Size (MB)= 183.72 ----------------------------------------------------------------" - } - } -} diff --git a/examples/item.yml b/examples/item.yml deleted file mode 100644 index 812c61e..0000000 --- a/examples/item.yml +++ /dev/null @@ -1,357 +0,0 @@ -stac_version: 1.0.0-beta.2 # schema version -stac_extensions: # stac extension required - - dl-model # deep-learning model extension - - eo # eo extension - - scientific # scientific extension required for citations - - provider # metadata about providers -id: 11234 # Some ID for this item -type: Feature # Required by STAC item -geometry: # Required by STAC item - type: Polygon - coordinates: - - - - 180.0 - - 90.0 - - - -180.0 - - -90 - - - -180.0 - - 90.0 - - - 180.0 - - 90.0 - - - 180.0 - - -90.0 -bbox: # Required by STAC item - - -180.0 - - -90.0 - - 180.0 - - 90 -collection: a eo model catalog # name of the model catalog -# -# Links and assets (part of the core specs) -# -links: - - rel: self - href: https://landsat-stac.s3.amazonaws.com/some-eo-models/example-thelper-item.json - - rel: collection - href: https://landsat-stac.s3.amazonaws.com/some-eo-models/catalog.json -assets: - - model_archive: - href: 
https://drive.google.com/file/d/1PYyZVgH95454sb9LYHSfchbg8GuT__fR/view?usp=sharing - type: application/zip - title: model archive - description: model archive as a google drive link - role: - - model artefact -properties: - # - # General properties defined in the item core schema - # - datetime: "2016-05-03T13:22:30Z" - title: resnet18+unet_scse # short name of the model - description: >- # short description - UNet architecture with a resnet18 backbone and a SCSE layer - fine-tuned on Pleiade imagery - license: MIT # license of utilisation - created: 2020-12-12T00:00:01Z - updated: 2021-01-04T00:30:55Z - providers: # optional provider information (data, etc.) - - name: Effigis Inc. - roles: - - image licensor - url: https://effigis.com/en/ - - name: Airbus Inc. - roles: - - image provider - url: https://www.intelligence-airbusds.com/ - # Section on instruments (core stac item) - platform: Pleiade - gsd: 0.50 - # eo extension fields (describe the sensor spectal bands) - eo:bands: - - name: 50-cm panchromatic - common_name: pancro - center_wavelength: 400 - - name: blue - common_name: blue - center_wavelength: 490 - - name: green - common_name: green - center_wavelength: 500 - - name: red - common_name: red - center_wavelength: 660 - - name: Near Infrared - common_name: nir - center_wavelength: 850 - # - # Scientific references - # Based on STAC scientific extension - # - sci:publications: # relevant publications - - citation: >- - Abhijit Guha Roy and Nassir Navab and Christian Wachinger (2018). 
- Concurrent Spatial and Channel Squeeze & Excitation in Fully Convolutional Networks, - arXiv 1803.02579 - # - # Runtime metadata - # Describes the runtime environment - # - dlm:runtime: - framework: PyTorch # deep learning framework used - version: 1.5 # framework version - model_handler: thelper.cli.inference_session - model_src_url: https://github.com/crim-ca/gin-model-repo - model_commit_hash: - requirement_file: - # - # Docker specifications (optional) - # - docker: - docker_file: https://github.com/crim-ca/CCCOT03/blob/main/docker/thelper-geo.dockerfile # link to the docker file - image_name: thelper-geo:latest # official image name for the inference - tag: - gpu: true # we specify if this image needs a gpu or not - working_dir: /workspace # docker instance working directory - run: thelper infer --config /workspace/config.yml --save-dir /workspace --ckpt-path /workspace/ckpt.best.pth - # - # Describe the content of the model archive - # - dlm:archive: - - name: config.yml - role: - - config file - - name: ./test_pleiade/256/input_test.tif - role: - - test set - - name: ckpt.best.pth - role: - - model weight - # - # EO data description (required) - # - dlm:data: - process_level: ortho # expected processing level - dtype: uint16 # data type (enum) - number_of_bands: 4 # number of bands in the test file - useful_bands: # describes the bands that should be loaded - - index: 2 - name: red - - index: 1 - name: green - - index: 3 - name: nir - nodata_value: 0 - test_file: input_test.tif # name of a test file in the model archive - item_examples: # points toward relevant eo items - - title: a pleiade stac item - url: https://example.com/stac/pleiade/item.json - # - # Model input description (required) - # - dlm:inputs: - name: data # expected variable name - input_tensors: - batch: 1 # batch size - channels: 3 # number of channels - height: 224 # tensor height - width: 224 # tensor width - scaling_factor: 0.003921569 # scaling factor (usually 1/255) - 
normalization:mean: # input statistical normalization (mean) - - 0.245 - - 0.34 - - 0.67 - normalization:std: # input statistical normalization (std) - - 0.1 - - 0.1 - - 0.2 - selected_bands: # selected bands (0 is the first band) - - 0 - - 1 - - 3 - pre_processing_function: - # - # Model output description (required) - # - dlm:outputs: - task: semantic segmentation # describe the ML task (enum) - number_of_classes: 5 # number of classes - dont_care_index: 0 # Index value used for the excluded segments (don't care data) - final_layer_size: # size of the output - - 1 - - 5 - - 64 - - 64 - class_name_mapping: # mapping to short class names - - 0: dontcare - - 1: Bare Exposed Rock - - 2: High density Residential - - 3: Cropland - - 4: Mixed Forest Land - - 5: Lake - post_processing_function: - # - # Describes the model architecture - # - dlm:architecture: # describe the model architecture - total_nb_parameters: 42813873 # total number of parameters - estimated_total_size_mb: 183.72 # Total memory size in MB - type: unet-resnet-18-scse # type of architecture - pretrained: imagenet - summary: >- - ---------------------------------------------------------------- - Layer (type) Output Shape Param - ================================================================ - Conv2d-1 [-1, 64, 32, 32] 9,408 - BatchNorm2d-2 [-1, 64, 32, 32] 128 - ReLU-3 [-1, 64, 32, 32] 0 - MaxPool2d-4 [-1, 64, 16, 16] 0 - Conv2d-5 [-1, 64, 16, 16] 36,864 - BatchNorm2d-6 [-1, 64, 16, 16] 128 - ReLU-7 [-1, 64, 16, 16] 0 - Conv2d-8 [-1, 64, 16, 16] 36,864 - BatchNorm2d-9 [-1, 64, 16, 16] 128 - ReLU-10 [-1, 64, 16, 16] 0 - BasicBlock-11 [-1, 64, 16, 16] 0 - Conv2d-12 [-1, 64, 16, 16] 36,864 - BatchNorm2d-13 [-1, 64, 16, 16] 128 - ReLU-14 [-1, 64, 16, 16] 0 - Conv2d-15 [-1, 64, 16, 16] 36,864 - BatchNorm2d-16 [-1, 64, 16, 16] 128 - ReLU-17 [-1, 64, 16, 16] 0 - BasicBlock-18 [-1, 64, 16, 16] 0 - Conv2d-19 [-1, 128, 8, 8] 73,728 - BatchNorm2d-20 [-1, 128, 8, 8] 256 - ReLU-21 [-1, 128, 8, 8] 0 - Conv2d-22 
[-1, 128, 8, 8] 147,456 - BatchNorm2d-23 [-1, 128, 8, 8] 256 - Conv2d-24 [-1, 128, 8, 8] 8,192 - BatchNorm2d-25 [-1, 128, 8, 8] 256 - ReLU-26 [-1, 128, 8, 8] 0 - BasicBlock-27 [-1, 128, 8, 8] 0 - Conv2d-28 [-1, 128, 8, 8] 147,456 - BatchNorm2d-29 [-1, 128, 8, 8] 256 - ReLU-30 [-1, 128, 8, 8] 0 - Conv2d-31 [-1, 128, 8, 8] 147,456 - BatchNorm2d-32 [-1, 128, 8, 8] 256 - ReLU-33 [-1, 128, 8, 8] 0 - BasicBlock-34 [-1, 128, 8, 8] 0 - Conv2d-35 [-1, 256, 4, 4] 294,912 - BatchNorm2d-36 [-1, 256, 4, 4] 512 - ReLU-37 [-1, 256, 4, 4] 0 - Conv2d-38 [-1, 256, 4, 4] 589,824 - BatchNorm2d-39 [-1, 256, 4, 4] 512 - Conv2d-40 [-1, 256, 4, 4] 32,768 - BatchNorm2d-41 [-1, 256, 4, 4] 512 - ReLU-42 [-1, 256, 4, 4] 0 - BasicBlock-43 [-1, 256, 4, 4] 0 - Conv2d-44 [-1, 256, 4, 4] 589,824 - BatchNorm2d-45 [-1, 256, 4, 4] 512 - ReLU-46 [-1, 256, 4, 4] 0 - Conv2d-47 [-1, 256, 4, 4] 589,824 - BatchNorm2d-48 [-1, 256, 4, 4] 512 - ReLU-49 [-1, 256, 4, 4] 0 - BasicBlock-50 [-1, 256, 4, 4] 0 - Conv2d-51 [-1, 512, 2, 2] 1,179,648 - BatchNorm2d-52 [-1, 512, 2, 2] 1,024 - ReLU-53 [-1, 512, 2, 2] 0 - Conv2d-54 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-55 [-1, 512, 2, 2] 1,024 - Conv2d-56 [-1, 512, 2, 2] 131,072 - BatchNorm2d-57 [-1, 512, 2, 2] 1,024 - ReLU-58 [-1, 512, 2, 2] 0 - BasicBlock-59 [-1, 512, 2, 2] 0 - Conv2d-60 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-61 [-1, 512, 2, 2] 1,024 - ReLU-62 [-1, 512, 2, 2] 0 - Conv2d-63 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-64 [-1, 512, 2, 2] 1,024 - ReLU-65 [-1, 512, 2, 2] 0 - BasicBlock-66 [-1, 512, 2, 2] 0 - MaxPool2d-67 [-1, 512, 1, 1] 0 - Conv2d-68 [-1, 1024, 1, 1] 4,719,616 - BatchNorm2d-69 [-1, 1024, 1, 1] 2,048 - ReLU-70 [-1, 1024, 1, 1] 0 - _ActivatedBatchNorm-71 [-1, 1024, 1, 1] 0 - AdaptiveAvgPool2d-72 [-1, 1024, 1, 1] 0 - Linear-73 [-1, 64] 65,600 - ReLU-74 [-1, 64] 0 - Linear-75 [-1, 1024] 66,560 - Conv2d-76 [-1, 1, 1, 1] 1,024 - SCSEBlock-77 [-1, 1024, 1, 1] 0 - ConvTranspose2d-78 [-1, 512, 2, 2] 8,389,120 - DecoderUnetSCSE-79 [-1, 512, 2, 2] 0 - 
Conv2d-80 [-1, 1024, 2, 2] 9,438,208 - BatchNorm2d-81 [-1, 1024, 2, 2] 2,048 - ReLU-82 [-1, 1024, 2, 2] 0 - _ActivatedBatchNorm-83 [-1, 1024, 2, 2] 0 - AdaptiveAvgPool2d-84 [-1, 1024, 1, 1] 0 - Linear-85 [-1, 64] 65,600 - ReLU-86 [-1, 64] 0 - Linear-87 [-1, 1024] 66,560 - Conv2d-88 [-1, 1, 2, 2] 1,024 - SCSEBlock-89 [-1, 1024, 2, 2] 0 - ConvTranspose2d-90 [-1, 256, 4, 4] 4,194,560 - DecoderUnetSCSE-91 [-1, 256, 4, 4] 0 - Conv2d-92 [-1, 512, 4, 4] 2,359,808 - BatchNorm2d-93 [-1, 512, 4, 4] 1,024 - ReLU-94 [-1, 512, 4, 4] 0 - _ActivatedBatchNorm-95 [-1, 512, 4, 4] 0 - AdaptiveAvgPool2d-96 [-1, 512, 1, 1] 0 - Linear-97 [-1, 32] 16,416 - ReLU-98 [-1, 32] 0 - Linear-99 [-1, 512] 16,896 - Conv2d-100 [-1, 1, 4, 4] 512 - SCSEBlock-101 [-1, 512, 4, 4] 0 - ConvTranspose2d-102 [-1, 128, 8, 8] 1,048,704 - DecoderUnetSCSE-103 [-1, 128, 8, 8] 0 - Conv2d-104 [-1, 256, 8, 8] 590,080 - BatchNorm2d-105 [-1, 256, 8, 8] 512 - ReLU-106 [-1, 256, 8, 8] 0 - _ActivatedBatchNorm-107 [-1, 256, 8, 8] 0 - AdaptiveAvgPool2d-108 [-1, 256, 1, 1] 0 - Linear-109 [-1, 16] 4,112 - ReLU-110 [-1, 16] 0 - Linear-111 [-1, 256] 4,352 - Conv2d-112 [-1, 1, 8, 8] 256 - SCSEBlock-113 [-1, 256, 8, 8] 0 - ConvTranspose2d-114 [-1, 64, 16, 16] 262,208 - DecoderUnetSCSE-115 [-1, 64, 16, 16] 0 - Conv2d-116 [-1, 128, 16, 16] 147,584 - BatchNorm2d-117 [-1, 128, 16, 16] 256 - ReLU-118 [-1, 128, 16, 16] 0 - _ActivatedBatchNorm-119 [-1, 128, 16, 16] 0 - AdaptiveAvgPool2d-120 [-1, 128, 1, 1] 0 - Linear-121 [-1, 8] 1,032 - ReLU-122 [-1, 8] 0 - Linear-123 [-1, 128] 1,152 - Conv2d-124 [-1, 1, 16, 16] 128 - SCSEBlock-125 [-1, 128, 16, 16] 0 - ConvTranspose2d-126 [-1, 32, 32, 32] 65,568 - DecoderUnetSCSE-127 [-1, 32, 32, 32] 0 - Conv2d-128 [-1, 64, 32, 32] 55,360 - BatchNorm2d-129 [-1, 64, 32, 32] 128 - ReLU-130 [-1, 64, 32, 32] 0 - _ActivatedBatchNorm-131 [-1, 64, 32, 32] 0 - AdaptiveAvgPool2d-132 [-1, 64, 1, 1] 0 - Linear-133 [-1, 4] 260 - ReLU-134 [-1, 4] 0 - Linear-135 [-1, 64] 320 - Conv2d-136 [-1, 1, 32, 32] 64 - 
SCSEBlock-137 [-1, 64, 32, 32] 0 - ConvTranspose2d-138 [-1, 16, 64, 64] 16,400 - DecoderUnetSCSE-139 [-1, 16, 64, 64] 0 - Conv2d-140 [-1, 64, 64, 64] 31,808 - BatchNorm2d-141 [-1, 64, 64, 64] 128 - ReLU-142 [-1, 64, 64, 64] 0 - _ActivatedBatchNorm-143 [-1, 64, 64, 64] 0 - Conv2d-144 [-1, 5, 64, 64] 325 - EncoderDecoderNet-145 [-1, 5, 64, 64] 0 - ================================================================ - Total params= 42,813,873 - Trainable params= 42,813,873 - Non-trainable params= 0 - ---------------------------------------------------------------- - Input size (MB)= 0.05 - Forward/backward pass size (MB)= 20.35 - Params size (MB)= 163.32 - Estimated Total Size (MB)= 183.72 - ---------------------------------------------------------------- diff --git a/examples/item_basic.json b/examples/item_basic.json new file mode 100644 index 0000000..c766f10 --- /dev/null +++ b/examples/item_basic.json @@ -0,0 +1,138 @@ +{ + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/mlm/v1.0.0/schema.json" + ], + "type": "Feature", + "id": "example-model", + "collection": "ml-model-examples", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -7.882190080512502, + 37.13739173208318 + ], + [ + -7.882190080512502, + 58.21798141355221 + ], + [ + 27.911651652899923, + 58.21798141355221 + ], + [ + 27.911651652899923, + 37.13739173208318 + ], + [ + -7.882190080512502, + 37.13739173208318 + ] + ] + ] + }, + "bbox": [ + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 + ], + "properties": { + "description": "Basic STAC Item with only the MLM extension and no other extension cross-references.", + "datetime": null, + "start_datetime": "1900-01-01T00:00:00Z", + "end_datetime": "9999-12-31T23:59:59Z", + "mlm:name": "example-model", + "mlm:tasks": [ + "classification" + ], + "mlm:architecture": "ResNet", + "mlm:input": [ + { + "name": "Model with RGB input that does not refer to any band.", + "bands": [], + 
"input": { + "shape": [ + -1, + 3, + 64, + 64 + ], + "dim_order": [ + "batch", + "channel", + "height", + "width" + ], + "data_type": "float32" + } + } + ], + "mlm:output": [ + { + "name": "classification", + "tasks": [ + "classification" + ], + "result": { + "shape": [ + -1, + 1 + ], + "dim_order": [ + "batch", + "class" + ], + "data_type": "uint8" + }, + "classification_classes": [ + { + "value": 0, + "name": "BACKGROUND", + "description": "Background non-city.", + "color_hint": [ + 0, + 0, + 0 + ] + }, + { + "value": 1, + "name": "CITY", + "description": "A city is detected.", + "color_hint": [ + 0, + 0, + 255 + ] + } + ] + } + ] + }, + "assets": { + "model": { + "href": "https://huggingface.co/example/model-card", + "title": "Pytorch weights checkpoint", + "description": "Example model.", + "type": "text/html", + "roles": [ + "mlm:model" + ] + } + }, + "links": [ + { + "rel": "collection", + "href": "./collection.json", + "type": "application/json" + }, + { + "rel": "self", + "href": "./item_basic.json", + "type": "application/geo+json" + } + ] +} diff --git a/examples/item_eo_bands.json b/examples/item_eo_bands.json new file mode 100644 index 0000000..f5831ec --- /dev/null +++ b/examples/item_eo_bands.json @@ -0,0 +1,573 @@ +{ + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/mlm/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.1.0/schema.json", + "https://stac-extensions.github.io/raster/v1.1.0/schema.json", + "https://stac-extensions.github.io/file/v1.0.0/schema.json", + "https://stac-extensions.github.io/ml-aoi/v0.2.0/schema.json" + ], + "type": "Feature", + "id": "resnet-18_sentinel-2_all_moco_classification", + "collection": "ml-model-examples", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -7.882190080512502, + 37.13739173208318 + ], + [ + -7.882190080512502, + 58.21798141355221 + ], + [ + 27.911651652899923, + 58.21798141355221 + ], + [ + 27.911651652899923, + 37.13739173208318 + ], 
+ [ + -7.882190080512502, + 37.13739173208318 + ] + ] + ] + }, + "bbox": [ + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 + ], + "properties": { + "description": "Sourced from torchgeo python library, identifier is ResNet18_Weights.SENTINEL2_ALL_MOCO", + "datetime": null, + "start_datetime": "1900-01-01T00:00:00Z", + "end_datetime": "9999-12-31T23:59:59Z", + "mlm:name": "Resnet-18 Sentinel-2 ALL MOCO", + "mlm:tasks": [ + "classification" + ], + "mlm:architecture": "ResNet", + "mlm:framework": "pytorch", + "mlm:framework_version": "2.1.2+cu121", + "file:size": 43000000, + "mlm:memory_size": 1, + "mlm:total_parameters": 11700000, + "mlm:pretrained_source": "EuroSat Sentinel-2", + "mlm:accelerator": "cuda", + "mlm:accelerator_constrained": false, + "mlm:accelerator_summary": "Unknown", + "mlm:batch_size_suggestion": 256, + "mlm:input": [ + { + "name": "13 Band Sentinel-2 Batch", + "bands": [ + "B01", + "B02", + "B03", + "B04", + "B05", + "B06", + "B07", + "B08", + "B8A", + "B09", + "B10", + "B11", + "B12" + ], + "input": { + "shape": [ + -1, + 13, + 64, + 64 + ], + "dim_order": [ + "batch", + "channel", + "height", + "width" + ], + "data_type": "float32" + }, + "norm_by_channel": true, + "norm_type": "z-score", + "resize_type": null, + "statistics": [ + { + "mean": 1354.40546513, + "stddev": 245.71762908 + }, + { + "mean": 1118.24399958, + "stddev": 333.00778264 + }, + { + "mean": 1042.92983953, + "stddev": 395.09249139 + }, + { + "mean": 947.62620298, + "stddev": 593.75055589 + }, + { + "mean": 1199.47283961, + "stddev": 566.4170017 + }, + { + "mean": 1999.79090914, + "stddev": 861.18399006 + }, + { + "mean": 2369.22292565, + "stddev": 1086.63139075 + }, + { + "mean": 2296.82608323, + "stddev": 1117.98170791 + }, + { + "mean": 732.08340178, + "stddev": 404.91978886 + }, + { + "mean": 12.11327804, + "stddev": 4.77584468 + }, + { + "mean": 1819.01027855, + "stddev": 1002.58768311 + }, + { + "mean": 1118.92391149, + "stddev": 
761.30323499 + }, + { + "mean": 2594.14080798, + "stddev": 1231.58581042 + } + ], + "pre_processing_function": { + "format": "python", + "expression": "torchgeo.datamodules.eurosat.EuroSATDataModule.collate_fn" + } + } + ], + "mlm:output": [ + { + "name": "classification", + "tasks": [ + "classification" + ], + "result": { + "shape": [ + -1, + 10 + ], + "dim_order": [ + "batch", + "class" + ], + "data_type": "float32" + }, + "classification_classes": [ + { + "value": 0, + "name": "Annual Crop", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 1, + "name": "Forest", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 2, + "name": "Herbaceous Vegetation", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 3, + "name": "Highway", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 4, + "name": "Industrial Buildings", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 5, + "name": "Pasture", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 6, + "name": "Permanent Crop", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 7, + "name": "Residential Buildings", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 8, + "name": "River", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 9, + "name": "SeaLake", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + } + ], + "post_processing_function": null + } + ], + "eo:bands": [ + { + "name": "B01", + "common_name": "coastal", + "description": "Coastal aerosol (band 1)", + "center_wavelength": 0.443, + "full_width_half_max": 0.027 + }, + { + "name": "B02", + 
"common_name": "blue", + "description": "Blue (band 2)", + "center_wavelength": 0.49, + "full_width_half_max": 0.098 + }, + { + "name": "B03", + "common_name": "green", + "description": "Green (band 3)", + "center_wavelength": 0.56, + "full_width_half_max": 0.045 + }, + { + "name": "B04", + "common_name": "red", + "description": "Red (band 4)", + "center_wavelength": 0.665, + "full_width_half_max": 0.038 + }, + { + "name": "B05", + "common_name": "rededge", + "description": "Red edge 1 (band 5)", + "center_wavelength": 0.704, + "full_width_half_max": 0.019 + }, + { + "name": "B06", + "common_name": "rededge", + "description": "Red edge 2 (band 6)", + "center_wavelength": 0.74, + "full_width_half_max": 0.018 + }, + { + "name": "B07", + "common_name": "rededge", + "description": "Red edge 3 (band 7)", + "center_wavelength": 0.783, + "full_width_half_max": 0.028 + }, + { + "name": "B08", + "common_name": "nir", + "description": "NIR 1 (band 8)", + "center_wavelength": 0.842, + "full_width_half_max": 0.145 + }, + { + "name": "B8A", + "common_name": "nir08", + "description": "NIR 2 (band 8A)", + "center_wavelength": 0.865, + "full_width_half_max": 0.033 + }, + { + "name": "B09", + "common_name": "nir09", + "description": "NIR 3 (band 9)", + "center_wavelength": 0.945, + "full_width_half_max": 0.026 + }, + { + "name": "B10", + "common_name": "cirrus", + "description": "SWIR - Cirrus (band 10)", + "center_wavelength": 1.375, + "full_width_half_max": 0.026 + }, + { + "name": "B11", + "common_name": "swir16", + "description": "SWIR 1 (band 11)", + "center_wavelength": 1.61, + "full_width_half_max": 0.143 + }, + { + "name": "B12", + "common_name": "swir22", + "description": "SWIR 2 (band 12)", + "center_wavelength": 2.19, + "full_width_half_max": 0.242 + } + ], + "raster:bands": [ + { + "name": "B01", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B02", + 
"nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B03", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B04", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B05", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B06", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B07", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B08", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B8A", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B09", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B10", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B11", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B12", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + } + ] + }, + 
"assets": { + "weights": { + "href": "https://huggingface.co/torchgeo/resnet18_sentinel2_all_moco/resolve/main/resnet18_sentinel2_all_moco-59bfdff9.pth", + "title": "Pytorch weights checkpoint", + "description": "A Resnet-18 classification model trained on normalized Sentinel-2 imagery with Eurosat landcover labels with torchgeo", + "type": "application/octet-stream; application=pytorch", + "roles": [ + "mlm:model", + "mlm:weights" + ], + "$comment": "Following 'eo:bands' is required to fulfil schema validation of 'eo' extension.", + "eo:bands": [ + { + "name": "coastal" + }, + { + "name": "blue" + }, + { + "name": "green" + }, + { + "name": "red" + }, + { + "name": "rededge1" + }, + { + "name": "rededge2" + }, + { + "name": "rededge3" + }, + { + "name": "nir" + }, + { + "name": "nir08" + }, + { + "name": "nir09" + }, + { + "name": "cirrus" + }, + { + "name": "swir16" + }, + { + "name": "swir22" + } + ] + }, + "source_code": { + "href": "https://github.com/microsoft/torchgeo/blob/61efd2e2c4df7ebe3bd03002ebbaeaa3cfe9885a/torchgeo/models/resnet.py#L207", + "title": "Model implementation.", + "description": "Source code to run the model.", + "type": "text/x-python", + "roles": [ + "mlm:model", + "code", + "metadata" + ] + } + }, + "links": [ + { + "rel": "collection", + "href": "./collection.json", + "type": "application/json" + }, + { + "rel": "self", + "href": "./item_eo_bands.json", + "type": "application/geo+json" + }, + { + "rel": "derived_from", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a", + "type": "application/json", + "ml-aoi:split": "train" + } + ] +} diff --git a/examples/item_multi_io.json b/examples/item_multi_io.json new file mode 100644 index 0000000..3975699 --- /dev/null +++ b/examples/item_multi_io.json @@ -0,0 +1,273 @@ +{ + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/mlm/v1.0.0/schema.json", + "https://stac-extensions.github.io/raster/v1.1.0/schema.json", + 
"https://stac-extensions.github.io/file/v1.0.0/schema.json", + "https://stac-extensions.github.io/ml-aoi/v0.2.0/schema.json" + ], + "type": "Feature", + "id": "resnet-18_sentinel-2_all_moco_classification", + "collection": "ml-model-examples", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -7.882190080512502, + 37.13739173208318 + ], + [ + -7.882190080512502, + 58.21798141355221 + ], + [ + 27.911651652899923, + 58.21798141355221 + ], + [ + 27.911651652899923, + 37.13739173208318 + ], + [ + -7.882190080512502, + 37.13739173208318 + ] + ] + ] + }, + "bbox": [ + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 + ], + "properties": { + "description": "Sourced from torchgeo python library, identifier is ResNet18_Weights.SENTINEL2_ALL_MOCO", + "datetime": null, + "start_datetime": "1900-01-01T00:00:00Z", + "end_datetime": "9999-12-31T23:59:59Z", + "mlm:name": "Resnet-18 Sentinel-2 ALL MOCO", + "mlm:tasks": [ + "classification" + ], + "mlm:architecture": "ResNet", + "mlm:framework": "pytorch", + "mlm:framework_version": "2.1.2+cu121", + "file:size": 43000000, + "mlm:memory_size": 1, + "mlm:total_parameters": 11700000, + "mlm:pretrained_source": "EuroSat Sentinel-2", + "mlm:accelerator": "cuda", + "mlm:accelerator_constrained": false, + "mlm:accelerator_summary": "Unknown", + "mlm:batch_size_suggestion": 256, + "mlm:input": [ + { + "name": "RGB", + "bands": [ + "B04", + "B03", + "B02" + ], + "input": { + "shape": [ + -1, + 3, + 64, + 64 + ], + "dim_order": [ + "batch", + "channel", + "height", + "width" + ], + "data_type": "uint16" + }, + "norm_by_channel": false, + "norm_type": null, + "resize_type": null + }, + { + "name": "NDVI", + "bands": [ + "B04", + "B08" + ], + "pre_processing_function": { + "format": "gdal-calc", + "expression": "(A - B) / (A + B)" + }, + "input": { + "shape": [ + -1, + 1, + 64, + 64 + ], + "dim_order": [ + "batch", + "ndvi", + "height", + "width" + ], + "data_type": "uint16" + } + } + ], + 
"mlm:output": [ + { + "name": "vegetation-segmentation", + "tasks": [ + "semantic-segmentation" + ], + "result": { + "shape": [ + -1, + 1 + ], + "dim_order": [ + "batch", + "class" + ], + "data_type": "uint8" + }, + "classification_classes": [ + { + "value": 0, + "name": "NON_VEGETATION", + "description": "background pixels", + "color_hint": null + }, + { + "value": 1, + "name": "VEGETATION", + "description": "pixels where vegetation was detected", + "color_hint": [ + 0, + 255, + 0 + ] + } + ], + "post_processing_function": null + }, + { + "name": "inverse-mask", + "tasks": [ + "semantic-segmentation" + ], + "result": { + "shape": [ + -1, + 1 + ], + "dim_order": [ + "batch", + "class" + ], + "data_type": "uint8" + }, + "classification_classes": [ + { + "value": 0, + "name": "NON_VEGETATION", + "description": "background pixels", + "color_hint": [ + 255, + 255, + 255 + ] + }, + { + "value": 1, + "name": "VEGETATION", + "description": "pixels where vegetation was detected", + "color_hint": [ + 0, + 0, + 0 + ] + } + ], + "post_processing_function": { + "format": "gdal-calc", + "expression": "logical_not(A)" + } + } + ], + "raster:bands": [ + { + "name": "B02 - blue", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B03 - green", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B04 - red", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B08 - nir", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + } + ] + }, + "assets": { + "weights": { + "href": 
"https://huggingface.co/torchgeo/resnet50_sentinel2_rgb_moco/blob/main/resnet50_sentinel2_rgb_moco.pth", + "title": "Pytorch weights checkpoint", + "description": "A Resnet-50 classification model trained on Sentinel-2 RGB imagery with torchgeo.", + "type": "application/octet-stream; application=pytorch", + "roles": [ + "mlm:model", + "mlm:weights" + ] + } + }, + "links": [ + { + "rel": "collection", + "href": "./collection.json", + "type": "application/json" + }, + { + "rel": "self", + "href": "./item_multi_io.json", + "type": "application/geo+json" + }, + { + "rel": "derived_from", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a", + "type": "application/json", + "ml-aoi:split": "train" + } + ] +} diff --git a/examples/item_raster_bands.json b/examples/item_raster_bands.json new file mode 100644 index 0000000..4faed91 --- /dev/null +++ b/examples/item_raster_bands.json @@ -0,0 +1,382 @@ +{ + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/mlm/v1.0.0/schema.json", + "https://stac-extensions.github.io/raster/v1.1.0/schema.json", + "https://stac-extensions.github.io/file/v1.0.0/schema.json", + "https://stac-extensions.github.io/ml-aoi/v0.2.0/schema.json" + ], + "type": "Feature", + "id": "resnet-18_sentinel-2_all_moco_classification", + "collection": "ml-model-examples", + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -7.882190080512502, + 37.13739173208318 + ], + [ + -7.882190080512502, + 58.21798141355221 + ], + [ + 27.911651652899923, + 58.21798141355221 + ], + [ + 27.911651652899923, + 37.13739173208318 + ], + [ + -7.882190080512502, + 37.13739173208318 + ] + ] + ] + }, + "bbox": [ + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 + ], + "properties": { + "description": "Sourced from torchgeo python library, identifier is ResNet18_Weights.SENTINEL2_ALL_MOCO", + "datetime": null, + "start_datetime": "1900-01-01T00:00:00Z", + "end_datetime": 
"9999-12-31T23:59:59Z", + "mlm:name": "Resnet-18 Sentinel-2 ALL MOCO", + "mlm:tasks": [ + "classification" + ], + "mlm:architecture": "ResNet", + "mlm:framework": "pytorch", + "mlm:framework_version": "2.1.2+cu121", + "file:size": 43000000, + "mlm:memory_size": 1, + "mlm:total_parameters": 11700000, + "mlm:pretrained_source": "EuroSat Sentinel-2", + "mlm:accelerator": "cuda", + "mlm:accelerator_constrained": false, + "mlm:accelerator_summary": "Unknown", + "mlm:batch_size_suggestion": 256, + "mlm:input": [ + { + "name": "13 Band Sentinel-2 Batch", + "bands": [ + "B01", + "B02", + "B03", + "B04", + "B05", + "B06", + "B07", + "B08", + "B8A", + "B09", + "B10", + "B11", + "B12" + ], + "input": { + "shape": [ + -1, + 13, + 64, + 64 + ], + "dim_order": [ + "batch", + "channel", + "height", + "width" + ], + "data_type": "float32" + }, + "norm_type": null, + "resize_type": null, + "pre_processing_function": { + "format": "python", + "expression": "torchgeo.datamodules.eurosat.EuroSATDataModule.collate_fn" + } + } + ], + "mlm:output": [ + { + "name": "classification", + "tasks": [ + "classification" + ], + "result": { + "shape": [ + -1, + 10 + ], + "dim_order": [ + "batch", + "class" + ], + "data_type": "float32" + }, + "classification_classes": [ + { + "value": 0, + "name": "Annual Crop", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 1, + "name": "Forest", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 2, + "name": "Herbaceous Vegetation", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 3, + "name": "Highway", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 4, + "name": "Industrial Buildings", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 5, + "name": "Pasture", + "description": null, + "title": null, + 
"color_hint": null, + "nodata": false + }, + { + "value": 6, + "name": "Permanent Crop", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 7, + "name": "Residential Buildings", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 8, + "name": "River", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + }, + { + "value": 9, + "name": "SeaLake", + "description": null, + "title": null, + "color_hint": null, + "nodata": false + } + ], + "post_processing_function": null + } + ], + "raster:bands": [ + { + "name": "B01", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B02", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B03", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B04", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B05", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B06", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B07", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B08", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B8A", + "nodata": 0, + 
"data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B09", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B10", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B11", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + }, + { + "name": "B12", + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": 0, + "unit": "m" + } + ] + }, + "assets": { + "weights": { + "href": "https://huggingface.co/torchgeo/resnet18_sentinel2_all_moco/resolve/main/resnet18_sentinel2_all_moco-59bfdff9.pth", + "title": "Pytorch weights checkpoint", + "description": "A Resnet-18 classification model trained on normalized Sentinel-2 imagery with Eurosat landcover labels with torchgeo", + "type": "application/octet-stream; application=pytorch", + "roles": [ + "mlm:model", + "mlm:weights" + ] + }, + "source_code": { + "href": "https://github.com/microsoft/torchgeo/blob/61efd2e2c4df7ebe3bd03002ebbaeaa3cfe9885a/torchgeo/models/resnet.py#L207", + "title": "Model implementation.", + "description": "Source code to run the model.", + "type": "text/x-python", + "roles": [ + "mlm:model", + "code", + "metadata" + ] + } + }, + "links": [ + { + "rel": "collection", + "href": "./collection.json", + "type": "application/json" + }, + { + "rel": "self", + "href": "./item_raster_bands.json", + "type": "application/geo+json" + }, + { + "rel": "derived_from", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a", + "type": "application/json", + "ml-aoi:split": "train" + } + ] +} diff --git 
a/examples/model-arch-summary.txt b/examples/model-arch-summary.txt deleted file mode 100644 index 60f0792..0000000 --- a/examples/model-arch-summary.txt +++ /dev/null @@ -1,155 +0,0 @@ - ---------------------------------------------------------------- - Layer (type) Output Shape Param - ================================================================ - Conv2d-1 [-1, 64, 32, 32] 9,408 - BatchNorm2d-2 [-1, 64, 32, 32] 128 - ReLU-3 [-1, 64, 32, 32] 0 - MaxPool2d-4 [-1, 64, 16, 16] 0 - Conv2d-5 [-1, 64, 16, 16] 36,864 - BatchNorm2d-6 [-1, 64, 16, 16] 128 - ReLU-7 [-1, 64, 16, 16] 0 - Conv2d-8 [-1, 64, 16, 16] 36,864 - BatchNorm2d-9 [-1, 64, 16, 16] 128 - ReLU-10 [-1, 64, 16, 16] 0 - BasicBlock-11 [-1, 64, 16, 16] 0 - Conv2d-12 [-1, 64, 16, 16] 36,864 - BatchNorm2d-13 [-1, 64, 16, 16] 128 - ReLU-14 [-1, 64, 16, 16] 0 - Conv2d-15 [-1, 64, 16, 16] 36,864 - BatchNorm2d-16 [-1, 64, 16, 16] 128 - ReLU-17 [-1, 64, 16, 16] 0 - BasicBlock-18 [-1, 64, 16, 16] 0 - Conv2d-19 [-1, 128, 8, 8] 73,728 - BatchNorm2d-20 [-1, 128, 8, 8] 256 - ReLU-21 [-1, 128, 8, 8] 0 - Conv2d-22 [-1, 128, 8, 8] 147,456 - BatchNorm2d-23 [-1, 128, 8, 8] 256 - Conv2d-24 [-1, 128, 8, 8] 8,192 - BatchNorm2d-25 [-1, 128, 8, 8] 256 - ReLU-26 [-1, 128, 8, 8] 0 - BasicBlock-27 [-1, 128, 8, 8] 0 - Conv2d-28 [-1, 128, 8, 8] 147,456 - BatchNorm2d-29 [-1, 128, 8, 8] 256 - ReLU-30 [-1, 128, 8, 8] 0 - Conv2d-31 [-1, 128, 8, 8] 147,456 - BatchNorm2d-32 [-1, 128, 8, 8] 256 - ReLU-33 [-1, 128, 8, 8] 0 - BasicBlock-34 [-1, 128, 8, 8] 0 - Conv2d-35 [-1, 256, 4, 4] 294,912 - BatchNorm2d-36 [-1, 256, 4, 4] 512 - ReLU-37 [-1, 256, 4, 4] 0 - Conv2d-38 [-1, 256, 4, 4] 589,824 - BatchNorm2d-39 [-1, 256, 4, 4] 512 - Conv2d-40 [-1, 256, 4, 4] 32,768 - BatchNorm2d-41 [-1, 256, 4, 4] 512 - ReLU-42 [-1, 256, 4, 4] 0 - BasicBlock-43 [-1, 256, 4, 4] 0 - Conv2d-44 [-1, 256, 4, 4] 589,824 - BatchNorm2d-45 [-1, 256, 4, 4] 512 - ReLU-46 [-1, 256, 4, 4] 0 - Conv2d-47 [-1, 256, 4, 4] 589,824 - BatchNorm2d-48 [-1, 256, 4, 4] 512 - ReLU-49 
[-1, 256, 4, 4] 0 - BasicBlock-50 [-1, 256, 4, 4] 0 - Conv2d-51 [-1, 512, 2, 2] 1,179,648 - BatchNorm2d-52 [-1, 512, 2, 2] 1,024 - ReLU-53 [-1, 512, 2, 2] 0 - Conv2d-54 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-55 [-1, 512, 2, 2] 1,024 - Conv2d-56 [-1, 512, 2, 2] 131,072 - BatchNorm2d-57 [-1, 512, 2, 2] 1,024 - ReLU-58 [-1, 512, 2, 2] 0 - BasicBlock-59 [-1, 512, 2, 2] 0 - Conv2d-60 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-61 [-1, 512, 2, 2] 1,024 - ReLU-62 [-1, 512, 2, 2] 0 - Conv2d-63 [-1, 512, 2, 2] 2,359,296 - BatchNorm2d-64 [-1, 512, 2, 2] 1,024 - ReLU-65 [-1, 512, 2, 2] 0 - BasicBlock-66 [-1, 512, 2, 2] 0 - MaxPool2d-67 [-1, 512, 1, 1] 0 - Conv2d-68 [-1, 1024, 1, 1] 4,719,616 - BatchNorm2d-69 [-1, 1024, 1, 1] 2,048 - ReLU-70 [-1, 1024, 1, 1] 0 - _ActivatedBatchNorm-71 [-1, 1024, 1, 1] 0 - AdaptiveAvgPool2d-72 [-1, 1024, 1, 1] 0 - Linear-73 [-1, 64] 65,600 - ReLU-74 [-1, 64] 0 - Linear-75 [-1, 1024] 66,560 - Conv2d-76 [-1, 1, 1, 1] 1,024 - SCSEBlock-77 [-1, 1024, 1, 1] 0 - ConvTranspose2d-78 [-1, 512, 2, 2] 8,389,120 - DecoderUnetSCSE-79 [-1, 512, 2, 2] 0 - Conv2d-80 [-1, 1024, 2, 2] 9,438,208 - BatchNorm2d-81 [-1, 1024, 2, 2] 2,048 - ReLU-82 [-1, 1024, 2, 2] 0 - _ActivatedBatchNorm-83 [-1, 1024, 2, 2] 0 - AdaptiveAvgPool2d-84 [-1, 1024, 1, 1] 0 - Linear-85 [-1, 64] 65,600 - ReLU-86 [-1, 64] 0 - Linear-87 [-1, 1024] 66,560 - Conv2d-88 [-1, 1, 2, 2] 1,024 - SCSEBlock-89 [-1, 1024, 2, 2] 0 - ConvTranspose2d-90 [-1, 256, 4, 4] 4,194,560 - DecoderUnetSCSE-91 [-1, 256, 4, 4] 0 - Conv2d-92 [-1, 512, 4, 4] 2,359,808 - BatchNorm2d-93 [-1, 512, 4, 4] 1,024 - ReLU-94 [-1, 512, 4, 4] 0 - _ActivatedBatchNorm-95 [-1, 512, 4, 4] 0 - AdaptiveAvgPool2d-96 [-1, 512, 1, 1] 0 - Linear-97 [-1, 32] 16,416 - ReLU-98 [-1, 32] 0 - Linear-99 [-1, 512] 16,896 - Conv2d-100 [-1, 1, 4, 4] 512 - SCSEBlock-101 [-1, 512, 4, 4] 0 - ConvTranspose2d-102 [-1, 128, 8, 8] 1,048,704 - DecoderUnetSCSE-103 [-1, 128, 8, 8] 0 - Conv2d-104 [-1, 256, 8, 8] 590,080 - BatchNorm2d-105 [-1, 256, 8, 8] 512 - 
ReLU-106 [-1, 256, 8, 8] 0 - _ActivatedBatchNorm-107 [-1, 256, 8, 8] 0 - AdaptiveAvgPool2d-108 [-1, 256, 1, 1] 0 - Linear-109 [-1, 16] 4,112 - ReLU-110 [-1, 16] 0 - Linear-111 [-1, 256] 4,352 - Conv2d-112 [-1, 1, 8, 8] 256 - SCSEBlock-113 [-1, 256, 8, 8] 0 - ConvTranspose2d-114 [-1, 64, 16, 16] 262,208 - DecoderUnetSCSE-115 [-1, 64, 16, 16] 0 - Conv2d-116 [-1, 128, 16, 16] 147,584 - BatchNorm2d-117 [-1, 128, 16, 16] 256 - ReLU-118 [-1, 128, 16, 16] 0 - _ActivatedBatchNorm-119 [-1, 128, 16, 16] 0 - AdaptiveAvgPool2d-120 [-1, 128, 1, 1] 0 - Linear-121 [-1, 8] 1,032 - ReLU-122 [-1, 8] 0 - Linear-123 [-1, 128] 1,152 - Conv2d-124 [-1, 1, 16, 16] 128 - SCSEBlock-125 [-1, 128, 16, 16] 0 - ConvTranspose2d-126 [-1, 32, 32, 32] 65,568 - DecoderUnetSCSE-127 [-1, 32, 32, 32] 0 - Conv2d-128 [-1, 64, 32, 32] 55,360 - BatchNorm2d-129 [-1, 64, 32, 32] 128 - ReLU-130 [-1, 64, 32, 32] 0 - ReLU-134 [-1, 4] 0 - Linear-135 [-1, 64] 320 - Conv2d-136 [-1, 1, 32, 32] 64 - SCSEBlock-137 [-1, 64, 32, 32] 0 - ConvTranspose2d-138 [-1, 16, 64, 64] 16,400 - DecoderUnetSCSE-139 [-1, 16, 64, 64] 0 - Conv2d-140 [-1, 64, 64, 64] 31,808 - BatchNorm2d-141 [-1, 64, 64, 64] 128 - ReLU-142 [-1, 64, 64, 64] 0 - _ActivatedBatchNorm-143 [-1, 64, 64, 64] 0 - Conv2d-144 [-1, 5, 64, 64] 325 - EncoderDecoderNet-145 [-1, 5, 64, 64] 0 - ================================================================ - Total params= 42,813,873 - Trainable params= 42,813,873 - Non-trainable params= 0 - ---------------------------------------------------------------- - Input size (MB)= 0.05 - Forward/backward pass size (MB)= 20.35 - Params size (MB)= 163.32 - Estimated Total Size (MB)= 183.72 - ---------------------------------------------------------------- diff --git a/json-schema/schema.json b/json-schema/schema.json index e50c214..952042b 100644 --- a/json-schema/schema.json +++ b/json-schema/schema.json @@ -1,557 +1,795 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "$id": 
"https://schemas.stacspec.org/v1.0.0-beta.3/extensions/dl-model/json-schema/schema.json", - "title": "DL Model Item", - "description": "This object represents the metadata for a Deep Learning (DL) model item in a DL Catalog.", - "allOf": [ + "$id": "https://stac-extensions.github.io/mlm/v1.0.0/schema.json", + "title": "Machine Learning Model STAC Extension Schema", + "description": "This object represents the metadata for a Machine Learning Model (MLM) used in STAC documents.", + "oneOf": [ { - "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json" - }, - { - "$ref": "#/definitions/dl-model" - }, - { - "$ref": "#/definitions/dlm:properties" - } - ], - "definitions": { - "dl-model": { - "type": "object", - "required": [ - "stac_extensions", - "properties", - "assets" - ], - "properties": { - "stac_extensions": { + "$comment": "This is the schema for STAC extension MLM in Items.", + "allOf": [ + { "type": "object", "required": [ - "stac_extensions" + "type", + "properties", + "assets" ], "properties": { - "stac_extensions": { - "type": "array", - "contains": { - "enum": [ - "dl-model", - "https://schemas.stacspec.org/v1.0.0-beta.3/extensions/dl-model/json-schema/schema.json" + "type": { + "const": "Feature" + }, + "properties": { + "allOf": [ + { + "required": [ + "mlm:name", + "mlm:architecture", + "mlm:tasks", + "mlm:input", + "mlm:output" + ] + }, + { + "$ref": "#/$defs/fields" + } + ] + }, + "assets": { + "type": "object", + "additionalProperties": { + "allOf": [ + { + "$ref": "#/$defs/fields" + }, + { + "$comment": "At least one Asset must provide the model definition.", + "$ref": "#/$defs/AssetModelRole" + } ] } } } }, - "properties": { + { + "$ref": "#/$defs/stac_extensions_mlm" + } + ] + }, + { + "$comment": "This is the schema for STAC extension MLM in Collections.", + "allOf": [ + { "type": "object", "required": [ - "dlm:inputs", - "dlm:outputs", - "dlm:runtime", - "dlm:archive", - "dlm:data" + "type" ], "properties": { - 
"dlm:inputs": { - "$ref": "#/definitions/dlm:inputs" - }, - "dlm:outputs": { - "$ref": "#/definitions/dlm:outputs" + "type": { + "const": "Collection" }, - "dlm:runtime": { - "$ref": "#/definitions/dlm:runtime" - }, - "dlm:architecture": { - "$ref": "#/definitions/dlm:architecture" - }, - "dlm:archive": { - "$ref": "#/definitions/dlm:archive" + "summaries": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/fields" + } }, - "dlm:data": { - "$ref": "#/definitions/dlm:data" + "assets": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/fields" + } } } + }, + { + "$ref": "#/$defs/stac_extensions_mlm" } - }, - "patternProperties": { - "^(?!dlm:)": {} - }, - "additionalProperties": false + ] + } + ], + "$defs": { + "stac_extensions_mlm": { + "type": "object", + "required": [ + "stac_extensions" + ], + "properties": { + "stac_extensions": { + "type": "array", + "contains": { + "const": "https://stac-extensions.github.io/mlm/v1.0.0/schema.json" + } + } + } }, - "dlm:runtime": { - "title": "Execution environment", - "description": "Describe the execution environment", + "stac_extensions_eo": { "type": "object", "required": [ - "framework", - "version", - "model_handler", - "model_src_url", - "requirement_file" + "stac_extensions" ], "properties": { - "framework": { - "title": "Name of the deep learning framework used", - "type": "string" - }, - "framework_version": { - "title": "Framework version", - "type": "string" - }, - "model_handler": { - "title": "Model handling function", - "type": "string" - }, - "model_src_url": { - "title": "Model source repository", - "type": "string" - }, - "model_commit_hash": { - "title": "Hash value for the Model source", - "type": "string" - }, - "requirement_file": { - "title": "Requirement file", - "type": "string" - }, - "docker": { - "title": "Docker runtime specifications", - "type": "object", - "anyOf": [ - { - "required": [ - "docker_file", - "gpu", - "working_dir", - "run" - ] - }, - { - 
"required": [ - "image_name", - "gpu", - "working_dir", - "run" - ] - } - ], - "properties": { - "docker_runtime": { - "anyOf": [ - { - "docker_file": { - "title": "Docker file url", - "type": "string" - }, - "docker_image": { - "title": "Docker image url", - "type": "string" - } - } - ] - }, - "gpu": { - "title": "Docker runtime requires a gpu", - "type": "boolean" - }, - "image_name": { - "title": "Docker image name", - "type": "string" - }, - "tag": { - "title": "Docker image tag", - "type": "string" - }, - "working_dir": { - "title": "Docker container working dir", - "type": "string" - }, - "run": { - "title": "Docker run parameters", - "type": "string" - } + "stac_extensions": { + "type": "array", + "contains": { + "type": "string", + "pattern": "https://stac-extensions\\.github\\.io/eo/v1(\\.[0-9]+){2}/schema\\.json" } } } }, - "dlm:architecture": { - "title": "Model architecture description", - "description": "Describe the model architecture", + "stac_extensions_raster": { "type": "object", "required": [ - "total_nb_parameters", - "estimated_total_size_mb", - "type", - "pretrained" + "stac_extensions" ], "properties": { - "total_nb_parameters": { - "title": "Total number of parameters", - "type": "integer" - }, - "estimated_total_size_mb": { - "title": "Estimated memory size in MB", - "type": "number" - }, - "type": { - "title": "Type of architecture", - "type": "string" - }, - "summary": { - "title": "Summary of the architecture", - "type": "string", - "examples": [ - { - "$ref": "https://raw.githubusercontent.com/crim-ca/dlm-extension/main/examples/model-arch-summary.txt" - } - ] - }, - "pretrained": { - "title": "Pre-training", - "type": "string" + "stac_extensions": { + "type": "array", + "contains": { + "type": "string", + "pattern": "https://stac-extensions\\.github\\.io/raster/v1(\\.[0-9]+){2}/schema\\.json" + } } } }, - "dlm:inputs": { - "title": "Description of the input tensor", + "stac_version_1.1": { + "$comment": "Requirement for STAC 1.1 or 
above.", "type": "object", - "description": "Describe the inputs required by the model", "required": [ - "name", - "scaling_factor", - "normalization:mean", - "normalization:std", - "selected_bands", - "input_tensors" + "stac_version" ], "properties": { - "name": { - "title": "Python name of the tensor", - "type": "string" + "stac_version": { + "pattern": "1\\.[1-9][0-9]*\\.[0-9]+(-.*)?" + } + } + }, + "fields": { + "type": "object", + "properties": { + "mlm:name": { + "$ref": "#/$defs/mlm:name" }, - "scaling_factor": { - "title": "Scaling factor", - "description": "Scaling factor to be applied on the data in order to bring the range of values between 0 and 1", - "type": "number", - "exclusiveMinimum": 0 + "mlm:architecture": { + "$ref": "#/$defs/mlm:architecture" }, - "normalization:mean": { - "title": "Statistical mean", - "type": "array", - "minItems": 1, - "items": { - "type": "number" - } + "mlm:tasks": { + "$ref": "#/$defs/mlm:tasks" }, - "normalization:std": { - "title": "Statistical standard-deviation", - "type": "array", - "minItems": 1, - "items": { - "type": "number", - "exclusiveMinimum": 0 - } + "mlm:framework": { + "$ref": "#/$defs/mlm:framework" }, - "selected_bands": { - "title": "Selected bands", - "type": "array", - "minItems": 1, - "items": { - "type": "integer" - } + "mlm:framework_version": { + "$ref": "#/$defs/mlm:framework_version" }, - "pre_processing_function": { - "title": "Pre-processing function", - "description": "Pre-processing Python function transforming the EO data to a ML-ready tensor", - "type": "string" + "mlm:memory_size": { + "$ref": "#/$defs/mlm:memory_size" }, - "input_tensors": { - "title": "Shape of the input tensor", - "description": "Describe the dimensions of the input tensors", - "type": "array", - "minItems": 1, - "items": { + "mlm:total_parameters": { + "$ref": "#/$defs/mlm:total_parameters" + }, + "mlm:pretrained": { + "$ref": "#/$defs/mlm:pretrained" + }, + "mlm:pretrained_source": { + "$ref": 
"#/$defs/mlm:pretrained_source" + }, + "mlm:batch_size_suggestion": { + "$ref": "#/$defs/mlm:batch_size_suggestion" + }, + "mlm:accelerator": { + "$ref": "#/$defs/mlm:accelerator" + }, + "mlm:accelerator_constrained": { + "$ref": "#/$defs/mlm:accelerator_constrained" + }, + "mlm:accelerator_summary": { + "$ref": "#/$defs/mlm:accelerator_summary" + }, + "mlm:accelerator_count": { + "$ref": "#/$defs/mlm:accelerator_count" + }, + "mlm:input": { + "$ref": "#/$defs/mlm:input" + }, + "mlm:output": { + "$ref": "#/$defs/mlm:output" + }, + "mlm:hyperparameters": { + "$ref": "#/$defs/mlm:hyperparameters" + } + }, + "$comment": "Allow properties not defined by MLM prefix to allow combination with other extensions.", + "patternProperties": { + "^(?!dlm:)": {} + }, + "additionalProperties": false + }, + "mlm:name": { + "type": "string", + "pattern": "^[a-zA-Z][a-zA-Z0-9_.\\-\\s]+[a-zA-Z0-9]$" + }, + "mlm:architecture": { + "type": "string", + "title": "Model Architecture", + "description": "A descriptive name of the model architecture, typically a common name from the literature.", + "examples": [ + "ResNet", + "VGG", + "GAN", + "Vision Transformer" + ] + }, + "mlm:framework": { + "title": "Name of the machine learning framework used.", + "anyOf": [ + { + "$comment": "Add more entries here as needed, and repeat them in the README.", + "description": "Notable predefined framework names.", + "type": "string", + "enum": [ + "PyTorch", + "TensorFlow", + "Scikit-learn", + "Huggingface", + "Keras", + "ONNX", + "rgee", + "spatialRF", + "JAX", + "MXNet", + "Caffe", + "PyMC", + "Weka" + ] + }, + { + "type": "string", + "minLength": 1, + "description": "Any other framework name to allow extension. Enum names should be preferred when possible to allow better portability." 
+ } + ] + }, + "mlm:framework_version": { + "title": "Framework version", + "type": "string", + "pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + }, + "mlm:tasks": { + "type": "array", + "uniqueItems": true, + "items": { + "type": "string", + "enum": [ + "regression", + "classification", + "scene-classification", + "detection", + "object-detection", + "segmentation", + "semantic-segmentation", + "instance-segmentation", + "panoptic-segmentation", + "similarity-search", + "generative", + "image-captioning", + "super-resolution" + ] + } + }, + "mlm:memory_size": { + "description": "Memory size (in bytes) required to load the model with the specified accelerator.", + "type": "integer", + "minimum": 0 + }, + "mlm:total_parameters": { + "description": "Total number of model parameters (weights).", + "type": "integer", + "minimum": 0 + }, + "mlm:pretrained": { + "type": "boolean", + "$comment": "If trained from scratch, the source should be explicitly 'null'. 
However, omitting the source if pretrained is allowed.", + "if": { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", "properties": { - "batch": { - "title": "Batch size", - "type": "integer", - "minimum": 1 - }, - "dim": { - "title": "Number of channels", - "type": "integer", - "minimum": 1 - }, - "height": { - "title": "Height", - "type": "integer", - "minimum": 1 - }, - "width": { - "title": "Width", - "type": "integer", - "minimum": 1 + "$comment": "Required MLM pretraining reference.", + "mlm:pretrained": { + "const": false + } + } + } + } + }, + "then": { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", + "required": ["mlm:pretrained_source"], + "properties": { + "$comment": "Required MLM pretraining reference.", + "mlm:pretrained_source": { + "const": null } } } } } }, - "dlm:outputs": { - "title": "Description of the outputs of the model", - "type": "object", - "description": "Describe the outputs of the model", - "required": [ - "task", - "number_of_classes", - "final_layer_size", - "class_name_mapping" - ], - "properties": { - "task": { - "title": "Task name", + "mlm:pretrained_source": { + "description": "Pre-training dataset reference or training from scratch definition.", + "oneOf": [ + { "type": "string", - "enum": [ - "semantic segmentation", - "classification", - "object detection", - "object segmentation" + "description": "The name or URI of the dataset used for pretraining the model.", + "examples": [ + "ImageNet", + "EuroSAT" ] }, - "number_of_classes": { - "title": "number of classes", - "type": "integer", - "minimum": 1 + { 
+ "type": "null", + "description": "Explicit mention that the model is trained from scratch." + } + ] + }, + "mlm:batch_size_suggestion": { + "description": "Recommended batch size to employ the model with the accelerator.", + "type": "integer", + "minimum": 0 + }, + "mlm:accelerator": { + "oneOf": [ + { + "type": "string", + "enum": [ + "amd64", + "cuda", + "xla", + "amd-rocm", + "intel-ipex-cpu", + "intel-ipex-gpu", + "macos-arm" + ] }, - "final_layer_size": { - "title": "Output size", - "description": "Size of the tensor from the top layer", - "type": "array", - "minItems": 1, - "items": { - "type": "integer", - "exclusiveMinimum": 0 + { + "type": "null" + } + ], + "default": null + }, + "mlm:accelerator_constrained": { + "type": "boolean", + "default": false + }, + "mlm:accelerator_summary": { + "type": "string" + }, + "mlm:accelerator_count": { + "type": "integer", + "minimum": 1 + }, + "mlm:input": { + "type": "array", + "items": { + "title": "Model Input Object", + "type": "object", + "required": [ + "name", + "bands", + "input" + ], + "properties": { + "name": { + "type": "string", + "minLength": 1 + }, + "bands": { + "$ref": "#/$defs/ModelBands" + }, + "input": { + "$ref": "#/$defs/InputStructure" + }, + "norm_by_channel": { + "type": "boolean" + }, + "norm_type": { + "$ref": "#/$defs/NormalizeType" + }, + "norm_clip": { + "$ref": "#/$defs/NormalizeClip" + }, + "resize_type": { + "$ref": "#/$defs/ResizeType" + }, + "statistics": { + "$ref": "#/$defs/InputStatistics" + }, + "pre_processing_function": { + "$ref": "#/$defs/ProcessingExpression" } - }, - "dont_care_index": { - "title": "Index of 'dont-care' class", - "description": "In case a 'dont-care' class is used", - "type": "integer" - }, - "post_processing_function": { - "title": "Name of the post-processing file", - "description": "Name of the python file containing a post-processing function", - "type": "string" - }, - "class_name_mapping": { - "description": "This is a lookup table mapping the model 
output (index) to a class name", - "oneOf": [ - { - "$ref": "#/definitions/dlm:class_name_listing" - }, - { - "$ref": "#/definitions/dlm:class_name_mapping" - } - ] } } }, - "dlm:class_name_listing": { - "deprecated": true, + "mlm:output": { "type": "array", - "minItems": 1, "items": { - "oneOf": [ - { - "type": "object", - "properties": { - "index": { - "title": "Class index", - "type": "integer", - "minimum": 0 - }, - "class_name": { - "title": "Class name", - "type": "string" - } - } - }, - { - "$ref": "#/definitions/dlm:class_name_mapping" - } - ] - } - }, - "dlm:class_name_mapping": { + "title": "Model Output Object", + "type": "object", + "required": [ + "name", + "tasks", + "result" + ], + "properties": { + "name": { + "type": "string", + "minLength": 1 + }, + "tasks": { + "$ref": "#/$defs/mlm:tasks" + }, + "result": { + "$ref": "#/$defs/ResultStructure" + }, + "classification:classes": { + "$ref": "#/$defs/ClassificationClasses" + }, + "post_processing_function": { + "$ref": "#/$defs/ProcessingExpression" + } + } + } + }, + "mlm:hyperparameters": { "type": "object", + "minProperties": 1, "patternProperties": { - "^I_": { - "description": "Class name to map the index.", - "type": "string" - } + "^[0-9a-zA-Z_.-]+$": true }, "additionalProperties": false }, - "dlm:data": { - "title": "Description of the data requirements", + "InputStructure": { + "title": "Input Structure Object", "type": "object", - "description": "Describe the eo data compatible with the model", "required": [ - "process_level", - "data_type", - "number_of_bands", - "class_name_mapping" + "shape", + "dim_order", + "data_type" ], "properties": { - "process_ level": { - "title": "Data processing level", - "description": "Describe the processing level expected", - "type": "string", - "enum": [ - "raw", - "ortho", - "L0", - "L1", - "L2", - "L3" - ] + "shape": { + "$ref": "#/$defs/DimensionShape" }, - "nodata": { - "title": "no data value", - "description": "Sometimes datasets have no data value, 
this value should be ignored", - "type": "number" - }, - "item_examples": { - "title": "item examples", - "description": "Link to additional data records or stac items", - "type": "array", - "minItems": 1, - "items": { - "properties": { - "url": { - "title": "Link toward an item", - "type": "string" - }, - "title": { - "title": "item description", - "type": "string" - } - } - } + "dim_order": { + "$ref": "#/$defs/DimensionOrder" }, - "number_of_bands": { - "title": "number of bands", - "description": "Number of spectral bands expected in the eo data", - "type": "number", - "minimum": 1 + "data_type": { + "$ref": "#/$defs/DataType" + } + } + }, + "ResultStructure": { + "title": "Result Structure Object", + "type": "object", + "required": [ + "shape", + "dim_order", + "data_type" + ], + "properties": { + "shape": { + "$ref": "#/$defs/DimensionShape" }, - "useful_bands": { - "title": "Useful bands", - "description": "Describe the spectral bands required by the model", - "type": "array", - "minItems": 1, - "items": { - "properties": { - "index": { - "title": "Index of the spectral band", - "description": "Index of the band in the original dataset", - "type": "integer", - "minimum": 0 - }, - "name": { - "title": "Short name of the band", - "type": "string" - } - } - } + "dim_order": { + "$ref": "#/$defs/DimensionOrder" }, "data_type": { - "title": "Data type", - "description": "Data type according to numpy", + "$ref": "#/$defs/DataType" + } + } + }, + "DimensionShape": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "minimum": -1 + } + }, + "DimensionOrder": { + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "type": "string", + "minLength": 1, + "pattern": "^[a-z-_]+$", + "examples": [ + "batch", + "channel", + "time", + "height", + "width", + "depth", + "token", + "class", + "score", + "confidence" + ] + } + }, + "NormalizeType": { + "oneOf": [ + { "type": "string", "enum": [ - "byte", - "short", - "intc", - "int_", - 
"longlong", - "int8", - "int16", - "int32", - "int64", - "ubyte", - "ushort", - "uintc", - "uint", - "ulonglong", - "uint8", - "uint16", - "uint32", - "uint64", - "half", - "double", - "float_", - "longfloat", - "float16", - "float32", - "float64", - "float96", - "float128" + "min-max", + "z-score", + "l1", + "l2", + "l2sqr", + "hamming", + "hamming2", + "type-mask", + "relative", + "inf" ] }, - "test_file": { - "title": "Test file", - "type": "string", - "description": "this test file is a data sample" + { + "type": "null" } + ] + }, + "NormalizeClip": { + "type": "array", + "minItems": 1, + "items": { + "type": "number" } }, - "dlm:archive": { - "title": "Description of the archive content", - "description": "Describe the model archive content", + "ResizeType": { + "oneOf": [ + { + "type": "string", + "enum": [ + "crop", + "pad", + "interpolation-nearest", + "interpolation-linear", + "interpolation-cubic", + "interpolation-area", + "interpolation-lanczos4", + "interpolation-max", + "wrap-fill-outliers", + "wrap-inverse-map" + ] + }, + { + "type": "null" + } + ] + }, + "ClassificationClasses": { + "$comment": "Must allow empty array for outputs that provide other predictions than classes.", + "oneOf": [ + { + "$ref": "https://stac-extensions.github.io/classification/v1.1.0/schema.json#/definitions/fields/properties/classification:classes" + }, + { + "type": "array", + "maxItems": 0 + } + ] + }, + "InputStatistics": { + "$comment": "MLM statistics for the specific input relevant for normalization for ML features.", "type": "array", "minItems": 1, "items": { - "properties": { - "name": { - "title": "File name", - "type": "string" - }, - "role": { - "title": "Role of the file", + "$ref": "https://stac-extensions.github.io/raster/v1.1.0/schema.json#/definitions/bands/items/properties/statistics" + } + }, + "ProcessingExpression": { + "oneOf": [ + { + "$ref": 
"https://stac-extensions.github.io/processing/v1.1.0/schema.json#/definitions/fields/properties/processing:expression" + }, + { + "type": "null" + } + ] + }, + "DataType": { + "$ref": "https://stac-extensions.github.io/raster/v1.1.0/schema.json#/definitions/bands/items/properties/data_type" + }, + "AssetModelRole": { + "required": ["roles"], + "properties": { + "roles": { + "contains": { "type": "string", - "enum": [ - "dependency", - "handling function", - "model weight", - "config file", - "test set", - "other" - ] + "const": "mlm:model" } } } }, - "dlm:properties": { - "type": "object", - "required": [ - "properties" - ], - "properties": { + "ModelBands": { + "allOf": [ + { + "$comment": "No 'minItems' here since to support model inputs not using any band (other data source).", + "type": "array", + "items": { + "type": "string", + "minLength": 1 + } + }, + { + "$comment": "However, if any band is indicated, a 'bands'-compliant section should describe them.", + "FIXME_$ref": "#/$defs/AnyBandsRef" + } + ] + }, + "AnyBandsRef": { + "$comment": "This definition ensures that, if at least 1 named MLM 'bands' is provided, at least 1 of the supported references from EO, Raster or STAC Core 1.1 are provided as well.", + "if": { + "$comment": "This is the JSON-object 'properties' definition.", "properties": { - "$comment": "Optional metadata that provides more details about provenance.", - "anyOf": [ - { - "$ref": "https://schemas.stacspec.org/v1.0.0-beta.2/item-spec/json-schema/instrument.json" - }, - { - "$ref": "https://schemas.stacspec.org/v1.0.0-beta.2/item-spec/json-schema/licensing.json" - }, - { - "$ref": "https://schemas.stacspec.org/v1.0.0-beta.2/item-spec/json-schema/provider.json" - }, - { - "$ref": "https://schemas.stacspec.org/v1.0.0-beta.2/item-spec/json-schema/datetime.json" + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "required": [ + "mlm:input" + ], + "$comment": "This is the JSON-object 'properties' definition for the 
STAC Item 'properties' field.", + "properties": { + "$comment": "Required MLM bands listing referring to at least one band name.", + "mlm:input": { + "type": "array", + "items": { + "required": [ + "bands" + ], + "$comment": "This is the 'Model Input Object' properties.", + "properties": { + "bands": { + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "$comment": "This 'minItems' is the purpose of this whole 'if/then' block." + } + } + } + } + } } - ] + } + } + }, + "then": { + "$comment": "Need at least one 'bands', but multiple is allowed.", + "anyOf": [ + { + "allOf": [ + { + "$ref": "#/$defs/stac_extensions_raster" + }, + { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "required": ["raster:bands"], + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", + "properties": { + "$comment": "https://github.com/stac-extensions/raster#item-asset-fields", + "raster:bands": { + "type": "array", + "minItems": 1, + "items": { + "type": "object" + } + } + } + } + } + } + ] + }, + { + "allOf": [ + { + "$ref": "#/$defs/stac_extensions_eo" + }, + { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "required": ["eo:bands"], + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", + "properties": { + "$comment": "https://github.com/stac-extensions/eo#item-properties-or-asset-fields", + "eo:bands": { + "type": "array", + "minItems": 1, + "items": { + "type": "object" + } + } + } + } + } + } + ] + }, + { + "allOf": [ + { + "$ref": "#/$defs/stac_version_1.1" + }, + { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "required": 
["bands"], + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", + "properties": { + "$comment": "https://github.com/radiantearth/stac-spec/blob/bands/item-spec/common-metadata.md#bands", + "bands": { + "type": "array", + "minItems": 1, + "items": { + "type": "object" + } + } + } + } + } + } + ] + } + ] + }, + "else": { + "$comment": "This is the JSON-object 'properties' definition.", + "properties": { + "$comment": "This is the STAC-Item 'properties' field.", + "properties": { + "required": [ + "mlm:input" + ], + "$comment": "This is the JSON-object 'properties' definition for the STAC Item 'properties' field.", + "properties": { + "$comment": "Required MLM bands listing referring to at least one band name.", + "mlm:input": { + "type": "array", + "items": { + "$comment": "This is the 'Model Input Object' properties.", + "properties": { + "bands": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "array", + "maxItems": 0 + } + ] + } + } + } + } + } + } } } } diff --git a/package.json b/package.json index 46d280a..24e9f20 100644 --- a/package.json +++ b/package.json @@ -3,14 +3,16 @@ "version": "1.0.0", "scripts": { "test": "npm run check-markdown && npm run check-examples", - "check-markdown": "remark . -f -r .github/remark.yaml", - "check-examples": "stac-node-validator . --lint --verbose --schemaMap https://stac-extensions.github.io/template/v1.0.0/schema.json=./json-schema/schema.json", - "format-examples": "stac-node-validator . --format --schemaMap https://stac-extensions.github.io/template/v1.0.0/schema.json=./json-schema/schema.json" + "check-markdown": "remark . -f -r .github/remark.yaml -i .remarkignore", + "format-markdown": "remark . -f -r .github/remark.yaml -i .remarkignore -o", + "check-examples": "stac-node-validator . --lint --verbose --schemaMap https://stac-extensions.github.io/mlm/v1.0.0/schema.json=./json-schema/schema.json", + "format-examples": "stac-node-validator . 
--format --schemaMap https://stac-extensions.github.io/mlm/v1.0.0/schema.json=./json-schema/schema.json" }, "dependencies": { "remark-cli": "^8.0.0", "remark-lint": "^7.0.0", "remark-lint-no-html": "^2.0.0", + "remark-gfm": "^4.0.0", "remark-preset-lint-consistent": "^3.0.0", "remark-preset-lint-markdown-style-guide": "^3.0.0", "remark-preset-lint-recommended": "^4.0.0", diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..8c1513c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1549 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "bandit" +version = "1.7.8" +description = "Security oriented 
static analyser for python code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bandit-1.7.8-py3-none-any.whl", hash = "sha256:509f7af645bc0cd8fd4587abc1a038fc795636671ee8204d502b933aee44f381"}, + {file = "bandit-1.7.8.tar.gz", hash = "sha256:36de50f720856ab24a24dbaa5fee2c66050ed97c1477e0a1159deab1775eab6b"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +PyYAML = ">=5.3.1" +rich = "*" +stevedore = ">=1.20.0" + +[package.extras] +baseline = ["GitPython (>=3.1.30)"] +sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] +toml = ["tomli (>=1.1.0)"] +yaml = ["PyYAML"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = 
"coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + 
{file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "docstring-parser-fork" +version = "0.0.5" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser_fork-0.0.5-py3-none-any.whl", hash = "sha256:d521dea9b9cc6c60ab5569fa0c1115e3b84a83e6413266fb111a7c81cb935997"}, + {file = "docstring_parser_fork-0.0.5.tar.gz", hash = "sha256:395ae8ee6a359e268670ebc4fe9a40dab917a94f6decd7cda8e86f9bea5c9456"}, +] + +[[package]] +name = "dparse" +version = "0.6.3" +description = "A parser for Python dependency files" +optional = false 
+python-versions = ">=3.6" +files = [ + {file = "dparse-0.6.3-py3-none-any.whl", hash = "sha256:0d8fe18714056ca632d98b24fbfc4e9791d4e47065285ab486182288813a5318"}, + {file = "dparse-0.6.3.tar.gz", hash = "sha256:27bb8b4bcaefec3997697ba3f6e06b2447200ba273c0b085c3d012a04571b528"}, +] + +[package.dependencies] +packaging = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv (<=2022.12.19)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.13.4" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.4-py3-none-any.whl", hash = "sha256:404e5e9253aa60ad457cae1be07c0f0ca90a63931200a47d9b6a6af84fd7b45f"}, + {file = "filelock-3.13.4.tar.gz", hash = "sha256:d13f466618bfde72bd2c18255e269f72542c6e70e7bac83a0232d6b1cc5c8cf4"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "identify" +version = "2.5.35" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonschema" +version = "4.21.1" +description = "An 
implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"}, + {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mypy" +version = "1.0.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71a808334d3f41ef011faa5a5cd8153606df5fc0b56de5b2e89566c8093a0c9a"}, + {file = "mypy-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:920169f0184215eef19294fa86ea49ffd4635dedfdea2b57e45cb4ee85d5ccaf"}, + {file = "mypy-1.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27a0f74a298769d9fdc8498fcb4f2beb86f0564bcdb1a37b58cbbe78e55cf8c0"}, + {file = "mypy-1.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:65b122a993d9c81ea0bfde7689b3365318a88bde952e4dfa1b3a8b4ac05d168b"}, + {file = "mypy-1.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:5deb252fd42a77add936b463033a59b8e48eb2eaec2976d76b6878d031933fe4"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2013226d17f20468f34feddd6aae4635a55f79626549099354ce641bc7d40262"}, + {file = "mypy-1.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:48525aec92b47baed9b3380371ab8ab6e63a5aab317347dfe9e55e02aaad22e8"}, + {file = "mypy-1.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96b8a0c019fe29040d520d9257d8c8f122a7343a8307bf8d6d4a43f5c5bfcc8"}, + {file = "mypy-1.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:448de661536d270ce04f2d7dddaa49b2fdba6e3bd8a83212164d4174ff43aa65"}, + {file = "mypy-1.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:d42a98e76070a365a1d1c220fcac8aa4ada12ae0db679cb4d910fabefc88b994"}, + {file = "mypy-1.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64f48c6176e243ad015e995de05af7f22bbe370dbb5b32bd6988438ec873919"}, + {file = "mypy-1.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd63e4f50e3538617887e9aee91855368d9fc1dea30da743837b0df7373bc4"}, + {file = "mypy-1.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dbeb24514c4acbc78d205f85dd0e800f34062efcc1f4a4857c57e4b4b8712bff"}, + {file = "mypy-1.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a2948c40a7dd46c1c33765718936669dc1f628f134013b02ff5ac6c7ef6942bf"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bc8d6bd3b274dd3846597855d96d38d947aedba18776aa998a8d46fabdaed76"}, + {file = "mypy-1.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:17455cda53eeee0a4adb6371a21dd3dbf465897de82843751cf822605d152c8c"}, + {file = "mypy-1.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e831662208055b006eef68392a768ff83596035ffd6d846786578ba1714ba8f6"}, + {file = 
"mypy-1.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e60d0b09f62ae97a94605c3f73fd952395286cf3e3b9e7b97f60b01ddfbbda88"}, + {file = "mypy-1.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:0af4f0e20706aadf4e6f8f8dc5ab739089146b83fd53cb4a7e0e850ef3de0bb6"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24189f23dc66f83b839bd1cce2dfc356020dfc9a8bae03978477b15be61b062e"}, + {file = "mypy-1.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93a85495fb13dc484251b4c1fd7a5ac370cd0d812bbfc3b39c1bafefe95275d5"}, + {file = "mypy-1.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f546ac34093c6ce33f6278f7c88f0f147a4849386d3bf3ae193702f4fe31407"}, + {file = "mypy-1.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6c2ccb7af7154673c591189c3687b013122c5a891bb5651eca3db8e6c6c55bd"}, + {file = "mypy-1.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:15b5a824b58c7c822c51bc66308e759243c32631896743f030daf449fe3677f3"}, + {file = "mypy-1.0.1-py3-none-any.whl", hash = "sha256:eda5c8b9949ed411ff752b9a01adda31afe7eae1e53e946dbdf9db23865e66c4"}, + {file = "mypy-1.0.1.tar.gz", hash = "sha256:28cea5a6392bb43d266782983b5a4216c25544cd7d80be681a155ddcdafd152d"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.4" +description = "Experimental type system extensions for programs checked with the mypy typechecker." 
+optional = false +python-versions = ">=2.7" +files = [ + {file = "mypy_extensions-0.4.4.tar.gz", hash = "sha256:c8b707883a96efe9b4bb3aaf0dcc07e7e217d7d8368eec4db4049ee9e142f4fd"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = 
"sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = 
"numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pbr" +version = "6.0.0" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +files = [ + {file = "pbr-6.0.0-py2.py3-none-any.whl", hash = "sha256:4a7317d5e3b17a3dccb6a8cfe67dab65b20551404c52c8ed41279fa4f0cb4cda"}, + {file = "pbr-6.0.0.tar.gz", hash = "sha256:d1377122a5a00e2f940ee482999518efe16d745d423a670c27773dfbc3c9a7d9"}, +] + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +description = "Get CPU info with pure Python" +optional = false +python-versions = "*" +files = [ + {file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"}, + {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, +] + +[[package]] +name = "pydantic" +version = "2.7.0" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"}, + {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.1" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"}, + {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"}, + {file = 
"pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"}, + {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"}, + {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"}, + {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"}, + {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"}, + {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"}, + {file = 
"pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"}, + {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"}, + {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"}, + {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"}, + {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"}, + {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"}, + {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"}, + {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"}, + {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"}, + {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"}, + {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"}, + {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"}, + {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"}, + {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"}, + {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"}, + {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"}, + {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"}, + 
{file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"}, + {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"}, + {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"}, + {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"}, + {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"}, + {file = 
"pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"}, + {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"}, + {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"}, + {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydoclint" +version = "0.3.10" +description = "A Python docstring linter that checks arguments, returns, yields, and raises sections" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydoclint-0.3.10-py2.py3-none-any.whl", hash = "sha256:aef97818334503693f5e291580b71432f39d1688eabdd4aeb3df0367472af39c"}, + {file = "pydoclint-0.3.10.tar.gz", hash = "sha256:d078e521939e222f605e27b409383c9fc4ce64d805ca224612cdfb1040054e00"}, +] + +[package.dependencies] +click = ">=8.0.0" +docstring-parser-fork = ">=0.0.5" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +flake8 = ["flake8 (>=4)"] + +[[package]] +name = "pydocstyle" +version = "6.3.0" +description = "Python docstring style checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, +] + +[package.dependencies] +snowballstemmer = ">=2.2.0" +tomli = {version = ">=1.2.3", optional = true, markers = "python_version < \"3.11\" and extra == \"toml\""} + 
+[package.extras] +toml = ["tomli (>=1.2.3)"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pystac" +version = "1.10.0" +description = "Python library for working with the SpatioTemporal Asset Catalog (STAC) specification" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pystac-1.10.0-py3-none-any.whl", hash = "sha256:2d1eb969abc7e13e2bdb4bb5ae1a68780da1e06f30f66fcf0d4143f51eb03f38"}, + {file = "pystac-1.10.0.tar.gz", hash = "sha256:e2762a700953ae9bab914137116cea31e08378f6c7024d805d651009a6341e20"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" + +[package.extras] +bench = ["asv (>=0.6.0,<0.7.0)", "packaging (>=24.0,<25.0)", "virtualenv (>=20.22,<21.0)"] +docs = ["Sphinx (>=6.2,<7.0)", "boto3 (>=1.28,<2.0)", "ipython (>=8.12,<9.0)", "jinja2 (<4.0)", "jupyter (>=1.0,<2.0)", "nbsphinx (>=0.9.0,<0.10.0)", "pydata-sphinx-theme (>=0.13,<1.0)", "rasterio (>=1.3,<2.0)", "shapely (>=2.0,<3.0)", "sphinx-autobuild (==2024.2.4)", "sphinx-design (>=0.5.0,<0.6.0)", "sphinxcontrib-fulltoc (>=1.2,<2.0)"] +jinja2 = ["jinja2 (<4.0)"] +orjson = ["orjson (>=3.5)"] +test = ["black (>=24.0,<25.0)", "codespell (>=2.2,<3.0)", "coverage (>=7.2,<8.0)", "doc8 (>=1.1,<2.0)", "html5lib (>=1.1,<2.0)", "jinja2 (<4.0)", "jsonschema (>=4.18,<5.0)", "mypy (>=1.2,<2.0)", "orjson (>=3.8,<4.0)", "pre-commit (>=3.2,<4.0)", "pytest (>=8.0,<9.0)", "pytest-cov (>=5.0,<6.0)", "pytest-mock (>=3.10,<4.0)", "pytest-recording (>=0.13.0,<0.14.0)", "requests-mock 
(>=1.11,<2.0)", "ruff (==0.3.4)", "types-html5lib (>=1.1,<2.0)", "types-jsonschema (>=4.18,<5.0)", "types-orjson (>=3.6,<4.0)", "types-python-dateutil (>=2.8,<3.0)", "types-urllib3 (>=1.26,<2.0)"] +urllib3 = ["urllib3 (>=1.26)"] +validation = ["jsonschema (>=4.18,<5.0)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-benchmark" +version = "4.0.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"}, + {file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=3.8" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs"] + +[[package]] +name = "pytest-click" +version = "1.1.0" +description = "Pytest plugin for Click" +optional = false +python-versions = "*" +files = [ + {file = "pytest_click-1.1.0-py3-none-any.whl", hash = "sha256:eade4742c2f02c345e78a32534a43e8db04acf98d415090539dacc880b7cd0e9"}, + {file = "pytest_click-1.1.0.tar.gz", hash = "sha256:fdd9f6721f877dda021e7c5dc73e70aecd37e5ed23ec6820f8a7b3fd7b4f8d30"}, +] + +[package.dependencies] +click = ">=6.0" +pytest = ">=5.0" + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-pikachu" +version = "1.0.0" +description = "Show surprise when tests are passing" +optional = false +python-versions = "*" +files = [ + {file = "pytest-pikachu-1.0.0.tar.gz", hash = "sha256:8acd13fdc51491e86aff5106cfaa31f80f4584ac41dcc3ae512d471c18333fd7"}, + {file = "pytest_pikachu-1.0.0-py3-none-any.whl", hash = "sha256:c20cfe20a84978e11e69af24f7a9d07beb90cbca805ae5011e2061c14a486eb6"}, +] + +[package.dependencies] +pytest = "*" + +[[package]] +name = "pytest-sugar" +version = "0.9.7" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46"}, + {file = "pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "referencing" +version = "0.34.0" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.34.0-py3-none-any.whl", hash = "sha256:d53ae300ceddd3169f1ffa9caf2cb7b769e92657e4fafb23d34b93679116dfd4"}, + {file = "referencing-0.34.0.tar.gz", hash = "sha256:5773bd84ef41799a5a8ca72dc34590c041eb01bf9aa02632b4a973fb0181a844"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = 
"rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = 
"rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = 
"rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = 
"rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.6" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, + {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = 
["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" +description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + 
{file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = 
"ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "ruff" +version = "0.2.2" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6"}, + {file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001"}, + {file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e"}, + {file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9"}, + {file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325"}, + {file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d"}, + {file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd"}, + {file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d"}, +] + +[[package]] +name = "safety" +version = "2.3.4" +description = "Checks installed dependencies for known vulnerabilities and licenses." +optional = false +python-versions = "*" +files = [ + {file = "safety-2.3.4-py3-none-any.whl", hash = "sha256:6224dcd9b20986a2b2c5e7acfdfba6bca42bb11b2783b24ed04f32317e5167ea"}, + {file = "safety-2.3.4.tar.gz", hash = "sha256:b9e74e794e82f54d11f4091c5d820c4d2d81de9f953bf0b4f33ac8bc402ae72c"}, +] + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "setuptools" +version = "69.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = 
"sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shapely" +version = "2.0.4" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:011b77153906030b795791f2fdfa2d68f1a8d7e40bce78b029782ade3afe4f2f"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9831816a5d34d5170aa9ed32a64982c3d6f4332e7ecfe62dc97767e163cb0b17"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c4849916f71dc44e19ed370421518c0d86cf73b26e8656192fcfcda08218fbd"}, + {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841f93a0e31e4c64d62ea570d81c35de0f6cea224568b2430d832967536308e6"}, + {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b4431f522b277c79c34b65da128029a9955e4481462cbf7ebec23aab61fc58"}, + {file = "shapely-2.0.4-cp310-cp310-win32.whl", hash = "sha256:92a41d936f7d6743f343be265ace93b7c57f5b231e21b9605716f5a47c2879e7"}, + {file = "shapely-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:30982f79f21bb0ff7d7d4a4e531e3fcaa39b778584c2ce81a147f95be1cd58c9"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de0205cb21ad5ddaef607cda9a3191eadd1e7a62a756ea3a356369675230ac35"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d56ce3e2a6a556b59a288771cf9d091470116867e578bebced8bfc4147fbfd7"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58b0ecc505bbe49a99551eea3f2e8a9b3b24b3edd2a4de1ac0dc17bc75c9ec07"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:790a168a808bd00ee42786b8ba883307c0e3684ebb292e0e20009588c426da47"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4310b5494271e18580d61022c0857eb85d30510d88606fa3b8314790df7f367d"}, + {file = "shapely-2.0.4-cp311-cp311-win32.whl", hash = "sha256:63f3a80daf4f867bd80f5c97fbe03314348ac1b3b70fb1c0ad255a69e3749879"}, + {file = "shapely-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:c52ed79f683f721b69a10fb9e3d940a468203f5054927215586c5d49a072de8d"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5bbd974193e2cc274312da16b189b38f5f128410f3377721cadb76b1e8ca5328"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:41388321a73ba1a84edd90d86ecc8bfed55e6a1e51882eafb019f45895ec0f65"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0776c92d584f72f1e584d2e43cfc5542c2f3dd19d53f70df0900fda643f4bae6"}, + {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c75c98380b1ede1cae9a252c6dc247e6279403fae38c77060a5e6186c95073ac"}, 
+ {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e700abf4a37b7b8b90532fa6ed5c38a9bfc777098bc9fbae5ec8e618ac8f30"}, + {file = "shapely-2.0.4-cp312-cp312-win32.whl", hash = "sha256:4f2ab0faf8188b9f99e6a273b24b97662194160cc8ca17cf9d1fb6f18d7fb93f"}, + {file = "shapely-2.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:03152442d311a5e85ac73b39680dd64a9892fa42bb08fd83b3bab4fe6999bfa0"}, + {file = "shapely-2.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:994c244e004bc3cfbea96257b883c90a86e8cbd76e069718eb4c6b222a56f78b"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05ffd6491e9e8958b742b0e2e7c346635033d0a5f1a0ea083547fcc854e5d5cf"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbdc1140a7d08faa748256438291394967aa54b40009f54e8d9825e75ef6113"}, + {file = "shapely-2.0.4-cp37-cp37m-win32.whl", hash = "sha256:5af4cd0d8cf2912bd95f33586600cac9c4b7c5053a036422b97cfe4728d2eb53"}, + {file = "shapely-2.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:464157509ce4efa5ff285c646a38b49f8c5ef8d4b340f722685b09bb033c5ccf"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:489c19152ec1f0e5c5e525356bcbf7e532f311bff630c9b6bc2db6f04da6a8b9"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b79bbd648664aa6f44ef018474ff958b6b296fed5c2d42db60078de3cffbc8aa"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:674d7baf0015a6037d5758496d550fc1946f34bfc89c1bf247cabdc415d7747e"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cd4ccecc5ea5abd06deeaab52fcdba372f649728050c6143cc405ee0c166679"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5cdcbbe3080181498931b52a91a21a781a35dcb859da741c0345c6402bf00c"}, + {file = "shapely-2.0.4-cp38-cp38-win32.whl", 
hash = "sha256:55a38dcd1cee2f298d8c2ebc60fc7d39f3b4535684a1e9e2f39a80ae88b0cea7"}, + {file = "shapely-2.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec555c9d0db12d7fd777ba3f8b75044c73e576c720a851667432fabb7057da6c"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9103abd1678cb1b5f7e8e1af565a652e036844166c91ec031eeb25c5ca8af0"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:263bcf0c24d7a57c80991e64ab57cba7a3906e31d2e21b455f493d4aab534aaa"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddf4a9bfaac643e62702ed662afc36f6abed2a88a21270e891038f9a19bc08fc"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:485246fcdb93336105c29a5cfbff8a226949db37b7473c89caa26c9bae52a242"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de4578e838a9409b5b134a18ee820730e507b2d21700c14b71a2b0757396acc"}, + {file = "shapely-2.0.4-cp39-cp39-win32.whl", hash = "sha256:9dab4c98acfb5fb85f5a20548b5c0abe9b163ad3525ee28822ffecb5c40e724c"}, + {file = "shapely-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:31c19a668b5a1eadab82ff070b5a260478ac6ddad3a5b62295095174a8d26398"}, + {file = "shapely-2.0.4.tar.gz", hash = "sha256:5dc736127fac70009b8d309a0eeb74f3e08979e530cf7017f2f507ef62e6cfb8"}, +] + +[package.dependencies] +numpy = ">=1.14,<3" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] + 
+[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "stevedore" +version = "5.2.0" +description = "Manage dynamic plugins for Python applications" +optional = false +python-versions = ">=3.8" +files = [ + {file = "stevedore-5.2.0-py3-none-any.whl", hash = "sha256:1c15d95766ca0569cad14cb6272d4d31dae66b011a929d7c18219c176ea1b5c9"}, + {file = "stevedore-5.2.0.tar.gz", hash = "sha256:46b93ca40e1114cea93d738a6c1e365396981bb6bb78c27045b7587c9473544d"}, +] + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "termcolor" +version = "2.4.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.8" +files = [ + {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, + {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = 
">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typer" +version = "0.9.4" +description = "Typer, build great CLIs. Easy to code. Based on Python type hints." +optional = false +python-versions = ">=3.6" +files = [ + {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, + {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, +] + +[package.dependencies] +click = ">=7.1.1,<9.0.0" +colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""} +rich = {version = ">=10.11.0,<14.0.0", optional = true, markers = "extra == \"all\""} +shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""} +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] +doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = 
"sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "virtualenv" +version = "20.25.3" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.3-py3-none-any.whl", hash = "sha256:8aac4332f2ea6ef519c648d0bc48a5b1d324994753519919bddbb1aff25a104e"}, + {file = "virtualenv-20.25.3.tar.gz", hash = "sha256:7bb554bbdfeaacc3349fa614ea5bff6ac300fc7c335e9facf3a3bcfc703f45be"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = 
"06fb206fe0cc4eafcb151f75a9276436489a3423ca0cf59b6a2e479b51e4d934" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..4a50ef6 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,220 @@ + +# Poetry pyproject.toml: https://python-poetry.org/docs/pyproject/ +[build-system] +requires = ["poetry_core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + + +[tool.poetry] +name = "stac-model" +version = "0.1.1.alpha4" +description = "A PydanticV2 validation and serialization libary for the STAC ML Model Extension" +readme = "README.md" +authors = ["Ryan Avery "] +license = "Apache Software License 2.0" +repository = "https://github.com/rbavery/stac-model" +homepage = "https://github.com/rbavery/stac-model" +packages = [ + {include = "stac_model"} +] + + +# Keywords description https://python-poetry.org/docs/pyproject/#keywords +keywords = [] # UPDATEME with relevant keywords + + +# Pypi classifiers: https://pypi.org/classifiers/ +classifiers = [ + "Development Status :: 3 - Alpha", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Framework :: Pydantic", + "Framework :: Pydantic :: 2", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: Science/Research", + "Topic :: File Formats :: JSON :: JSON Schema", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Scientific/Engineering :: GIS", + "Topic :: Scientific/Engineering :: Image Processing", + "Topic :: Scientific/Engineering :: Image Recognition", +] + + +[tool.poetry.scripts] +# Entry points for the package https://python-poetry.org/docs/pyproject/#scripts +"stac-model" = "stac_model.__main__:app" + + +[tool.poetry.dependencies] +python = "^3.10" + +typer = {extras = ["all"], 
version = "^0.9.0"} +rich = "^13.7.0" +pydantic = "^2.6.3" # bug in post 2.3 https://github.com/pydantic/pydantic/issues/7720 +pydantic-core = "^2" +pystac = "^1.9.0" +shapely = "^2" +jsonschema = "^4.21.1" + +[tool.poetry.group.dev.dependencies] +mypy = "^1.0.0" +mypy-extensions = "^0.4.3" +pre-commit = "^2.21.0" +bandit = "^1.7.5" +safety = "^2.3.4" +pystac = "^1.10.0" # custom validator required (https://github.com/stac-utils/pystac/pull/1320) + +pydocstyle = {extras = ["toml"], version = "^6.2.0"} +pydoclint = "^0.3.0" + +pytest = "^7.2.1" +pytest-cov = "^4.1.0" +pytest-mock = "^3.10.0" +pytest-timeout = "^2.2.0" +pytest-benchmark = "^4.0.0" +pytest-sugar = "^0.9.7" +pytest-click = "^1.1.0" +pytest-pikachu = "^1.0.0" +coverage = "^7.3.0" +ruff = "^0.2.2" + +[tool.ruff] +exclude = [ + ".git", + "__pycache__", + ".mypy_cache", + ".tox", + ".venv", + "_build", + "buck-out", + "build", + "dist", + "env", + "venv", + "node_modules", +] +respect-gitignore = true +line-length = 120 +show-fixes = true + +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", +] + +[tool.mypy] +# https://github.com/python/mypy +# https://mypy.readthedocs.io/en/latest/config_file.html#using-a-pyproject-toml-file +python_version = "3.10" +pretty = true +show_traceback = true +color_output = true + +allow_redefinition = false +check_untyped_defs = true +disallow_any_generics = true +disallow_incomplete_defs = true +ignore_missing_imports = true +implicit_reexport = false +no_implicit_optional = true +show_column_numbers = true +show_error_codes = true +show_error_context = true +strict_equality = true +strict_optional = true +warn_no_return = true +warn_redundant_casts = true +warn_return_any = true +warn_unreachable = true +warn_unused_configs = true +warn_unused_ignores = true + +plugins = [ + "pydantic.mypy" +] + + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = 
true +warn_required_dynamic_aliases = true + +[tool.pydocstyle] +# https://github.com/PyCQA/pydocstyle +# http://www.pydocstyle.org/en/stable/usage.html#available-options +convention = "google" +match_dir = "^(stac_model|tests)" +# ignore missing documentation, just validate provided ones +add_ignore = "D100,D101,D102,D103,D104,D105,D107,D200,D202,D204,D212,D401" + +[tool.pydoclint] +# https://github.com/jsh9/pydoclint +# https://jsh9.github.io/pydoclint/how_to_config.html +style = "google" +exclude = '\.git|\.hg|\.mypy_cache|\.tox|.?v?env|__pycache__|_build|buck-out|dist|node_modules' +# don't require type hints, since we have them in the signature instead (don't duplicate) +arg-type-hints-in-docstring = false +arg-type-hints-in-signature = true +check-return-types = false + +[tool.pytest.ini_options] +# https://github.com/pytest-dev/pytest +# https://docs.pytest.org/en/6.2.x/customize.html#pyproject-toml +# Directories that are not visited by pytest collector: +norecursedirs =[ + "hooks", + "*.egg", + ".eggs", + "dist", + "build", + "docs", + ".tox", + ".git", + "__pycache__", + "node_modules", +] +doctest_optionflags = ["NUMBER", "NORMALIZE_WHITESPACE", "IGNORE_EXCEPTION_DETAIL"] +timeout = 1000 + +# Extra options: +addopts = [ + "--strict-markers", + "--tb=short", + "--doctest-modules", + "--doctest-continue-on-failure", + "--pikachu" +] + + +[tool.coverage.run] +source = ["tests"] +branch = true + + +[tool.coverage.report] +exclude_also = [ + "def main", + "if __name__ == .__main__.:" +] +fail_under = 50 +show_missing = true + + +[tool.coverage.paths] +source = ["stac_model"] diff --git a/stac_model/__init__.py b/stac_model/__init__.py new file mode 100644 index 0000000..cb1e8e2 --- /dev/null +++ b/stac_model/__init__.py @@ -0,0 +1,10 @@ +""" +A PydanticV2/PySTAC validation and serialization library for the STAC Machine Learning Model Extension. 
+""" + +from importlib import metadata + +try: + __version__ = metadata.version("stac-model") +except metadata.PackageNotFoundError: + __version__ = "unknown" diff --git a/stac_model/__main__.py b/stac_model/__main__.py new file mode 100644 index 0000000..220730c --- /dev/null +++ b/stac_model/__main__.py @@ -0,0 +1,45 @@ +import json + +import typer +from rich.console import Console + +from stac_model import __version__ +from stac_model.examples import eurosat_resnet +from stac_model.schema import ItemMLModelExtension + +app = typer.Typer( + name="stac-model", + help="A PydanticV2 validation and serialization library for the STAC Machine Learning Model Extension", + add_completion=False, +) +console = Console() + + +def version_callback(print_version: bool) -> None: + """Print the version of the package.""" + if print_version: + console.print(f"[yellow]stac-model[/] version: [bold blue]{__version__}[/]") + raise typer.Exit() + + +@app.command(name="") +def main( + print_version: bool = typer.Option( + None, + "-v", + "--version", + callback=version_callback, + is_eager=True, + help="Prints the version of the stac-model package.", + ), +) -> ItemMLModelExtension: + """Generate example spec.""" + ml_model_meta = eurosat_resnet() + with open("example.json", "w") as json_file: + json.dump(ml_model_meta.item.to_dict(), json_file, indent=4) + print("Example model metadata written to ./example.json.") + return ml_model_meta + + +if __name__ == "__main__": + app() diff --git a/stac_model/base.py b/stac_model/base.py new file mode 100644 index 0000000..4e8cc6b --- /dev/null +++ b/stac_model/base.py @@ -0,0 +1,122 @@ +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List, Literal, TypeAlias, Union + +from pydantic import BaseModel, ConfigDict, model_serializer + +Number: TypeAlias = Union[int, float] +JSON: TypeAlias = Union[ + Dict[str, "JSON"], + List["JSON"], + Number, + bool, + str, + None, +] + + +@dataclass +class _OmitIfNone: + 
pass + + +OmitIfNone = _OmitIfNone() + + +class MLMBaseModel(BaseModel): + """ + Allows wrapping any field with an annotation to drop it entirely if unset. + + ```python + field: Annotated[Optional[], OmitIfNone] = None + # or + field: Annotated[Optional[], OmitIfNone] = Field(default=None) + ``` + + Since `OmitIfNone` implies that the value could be `None` (even though it would be dropped), + the `Optional` annotation must be specified to corresponding typings to avoid `mypy` lint issues. + + It is important to use `MLMBaseModel`, otherwise the serializer will not be called and applied. + + Reference: https://github.com/pydantic/pydantic/discussions/5461#discussioncomment-7503283 + """ + + @model_serializer + def model_serialize(self): + omit_if_none_fields = { + key: field + for key, field in self.model_fields.items() + if any(isinstance(m, _OmitIfNone) for m in field.metadata) + } + values = { + self.__fields__[key].alias or key: val # use the alias if specified + for key, val in self + if key not in omit_if_none_fields or val is not None + } + return values + + model_config = ConfigDict( + populate_by_name=True, + ) + + +DataType: TypeAlias = Literal[ + "uint8", + "uint16", + "uint32", + "uint64", + "int8", + "int16", + "int32", + "int64", + "float16", + "float32", + "float64", + "cint16", + "cint32", + "cfloat32", + "cfloat64", + "other", +] + + +class TaskEnum(str, Enum): + REGRESSION = "regression" + CLASSIFICATION = "classification" + SCENE_CLASSIFICATION = "scene-classification" + DETECTION = "detection" + OBJECT_DETECTION = "object-detection" + SEGMENTATION = "segmentation" + SEMANTIC_SEGMENTATION = "semantic-segmentation" + INSTANCE_SEGMENTATION = "instance-segmentation" + PANOPTIC_SEGMENTATION = "panoptic-segmentation" + SIMILARITY_SEARCH = "similarity-search" + GENERATIVE = "generative" + IMAGE_CAPTIONING = "image-captioning" + SUPER_RESOLUTION = "super-resolution" + + +ModelTaskNames: TypeAlias = Literal[ + "regression", + "classification", + 
"scene-classification", + "detection", + "object-detection", + "segmentation", + "semantic-segmentation", + "instance-segmentation", + "panoptic-segmentation", + "similarity-search", + "generative", + "image-captioning", + "super-resolution", +] + + +ModelTask = Union[ModelTaskNames, TaskEnum] + + +class ProcessingExpression(BaseModel): + # FIXME: should use 'pystac' reference, but 'processing' extension is not implemented yet! + format: str + expression: Any diff --git a/stac_model/examples.py b/stac_model/examples.py new file mode 100644 index 0000000..c781a5b --- /dev/null +++ b/stac_model/examples.py @@ -0,0 +1,219 @@ +from typing import cast + +import pystac +import shapely +from dateutil.parser import parse as parse_dt +from pystac.extensions.file import FileExtension + +from stac_model.base import ProcessingExpression +from stac_model.input import InputStructure, MLMStatistic, ModelInput +from stac_model.output import MLMClassification, ModelOutput, ModelResult +from stac_model.schema import ItemMLModelExtension, MLModelExtension, MLModelProperties + + +def eurosat_resnet() -> ItemMLModelExtension: + input_struct = InputStructure( + shape=[-1, 13, 64, 64], + dim_order=["batch", "channel", "height", "width"], + data_type="float32", + ) + band_names = [ + "B01", + "B02", + "B03", + "B04", + "B05", + "B06", + "B07", + "B08", + "B8A", + "B09", + "B10", + "B11", + "B12", + ] + stats_mean = [ + 1354.40546513, + 1118.24399958, + 1042.92983953, + 947.62620298, + 1199.47283961, + 1999.79090914, + 2369.22292565, + 2296.82608323, + 732.08340178, + 12.11327804, + 1819.01027855, + 1118.92391149, + 2594.14080798, + ] + stats_stddev = [ + 245.71762908, + 333.00778264, + 395.09249139, + 593.75055589, + 566.4170017, + 861.18399006, + 1086.63139075, + 1117.98170791, + 404.91978886, + 4.77584468, + 1002.58768311, + 761.30323499, + 1231.58581042, + ] + stats = [ + MLMStatistic(mean=mean, stddev=stddev) + for mean, stddev in zip(stats_mean, stats_stddev) + ] + model_input = 
ModelInput( + name="13 Band Sentinel-2 Batch", + bands=band_names, + input=input_struct, + norm_by_channel=True, + norm_type="z-score", + resize_type=None, + statistics=stats, + pre_processing_function=ProcessingExpression( + format="python", + expression="torchgeo.datamodules.eurosat.EuroSATDataModule.collate_fn", + ), # noqa: E501 + ) + result_struct = ModelResult( + shape=[-1, 10], + dim_order=["batch", "class"], + data_type="float32" + ) + class_map = { + "Annual Crop": 0, + "Forest": 1, + "Herbaceous Vegetation": 2, + "Highway": 3, + "Industrial Buildings": 4, + "Pasture": 5, + "Permanent Crop": 6, + "Residential Buildings": 7, + "River": 8, + "SeaLake": 9, + } + class_objects = [ + MLMClassification(value=class_value, name=class_name) + for class_name, class_value in class_map.items() + ] + model_output = ModelOutput( + name="classification", + tasks={"classification"}, + classes=class_objects, + result=result_struct, + post_processing_function=None, + ) + assets = { + "model": pystac.Asset( + title="Pytorch weights checkpoint", + description=( + "A Resnet-18 classification model trained on normalized Sentinel-2 " + "imagery with Eurosat landcover labels with torchgeo." 
+ ), + href="https://huggingface.co/torchgeo/resnet18_sentinel2_all_moco/resolve/main/resnet18_sentinel2_all_moco-59bfdff9.pth", + media_type="application/octet-stream; application=pytorch", + roles=[ + "mlm:model", + "mlm:weights", + "data" + ] + ), + "source_code": pystac.Asset( + title="Model implementation.", + description="Source code to run the model.", + href="https://github.com/microsoft/torchgeo/blob/61efd2e2c4df7ebe3bd03002ebbaeaa3cfe9885a/torchgeo/models/resnet.py#L207", + media_type="text/x-python", + roles=[ + "mlm:model", + "code" + ] + ) + } + + ml_model_size = 43000000 + ml_model_meta = MLModelProperties( + name="Resnet-18 Sentinel-2 ALL MOCO", + architecture="ResNet-18", + tasks={"classification"}, + framework="pytorch", + framework_version="2.1.2+cu121", + accelerator="cuda", + accelerator_constrained=False, + accelerator_summary="Unknown", + file_size=ml_model_size, + memory_size=1, + pretrained=True, + pretrained_source="EuroSat Sentinel-2", + total_parameters=11_700_000, + input=[model_input], + output=[model_output], + ) + # TODO, this can't be serialized but pystac.item calls for a datetime + # in docs. start_datetime=datetime.strptime("1900-01-01", "%Y-%m-%d") + # Is this a problem that we don't do date validation if we supply as str? + start_datetime_str = "1900-01-01" + end_datetime_str = "9999-01-01" # cannot be None, invalid against STAC Core! 
+ start_datetime = parse_dt(start_datetime_str).isoformat() + "Z" + end_datetime = parse_dt(end_datetime_str).isoformat() + "Z" + bbox = [ + -7.882190080512502, + 37.13739173208318, + 27.911651652899923, + 58.21798141355221 + ] + geometry = shapely.geometry.Polygon.from_bounds(*bbox).__geo_interface__ + item_name = "item_basic" + col_name = "ml-model-examples" + item = pystac.Item( + id=item_name, + collection=col_name, + geometry=geometry, + bbox=bbox, + datetime=None, + properties={ + "start_datetime": start_datetime, + "end_datetime": end_datetime, + "description": ( + "Sourced from torchgeo python library, identifier is ResNet18_Weights.SENTINEL2_ALL_MOCO" + ), + }, + assets=assets, + ) + + # note: cannot use 'item.add_derived_from' since it expects a 'Item' object, but we refer to a 'Collection' here + # item.add_derived_from("https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a") + item.add_link( + pystac.Link( + target="https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a", + rel=pystac.RelType.DERIVED_FROM, + media_type=pystac.MediaType.JSON, + ) + ) + + # define more link references + col = pystac.Collection( + id=col_name, + title="Machine Learning Model examples", + description="Collection of items contained in the Machine Learning Model examples.", + extent=pystac.Extent( + temporal=pystac.TemporalExtent([[parse_dt(start_datetime), parse_dt(end_datetime)]]), + spatial=pystac.SpatialExtent([bbox]), + ) + ) + col.set_self_href("./examples/collection.json") + col.add_item(item) + item.set_self_href(f"./examples/{item_name}.json") + + model_asset = cast( + FileExtension[pystac.Asset], + pystac.extensions.file.FileExtension.ext(assets["model"], add_if_missing=True) + ) + model_asset.apply(size=ml_model_size) + + item_mlm = MLModelExtension.ext(item, add_if_missing=True) + item_mlm.apply(ml_model_meta.model_dump(by_alias=True, exclude_unset=True, exclude_defaults=True)) + return item_mlm diff --git a/stac_model/input.py 
b/stac_model/input.py new file mode 100644 index 0000000..19c6e13 --- /dev/null +++ b/stac_model/input.py @@ -0,0 +1,63 @@ +from typing import Annotated, List, Literal, Optional, TypeAlias, Union + +from pydantic import Field + +from stac_model.base import DataType, MLMBaseModel, Number, OmitIfNone, ProcessingExpression + + +class InputStructure(MLMBaseModel): + shape: List[Union[int, float]] = Field(min_items=1) + dim_order: List[str] = Field(min_items=1) + data_type: DataType + + +class MLMStatistic(MLMBaseModel): # FIXME: add 'Statistics' dep from raster extension (cases required to be triggered) + minimum: Annotated[Optional[Number], OmitIfNone] = None + maximum: Annotated[Optional[Number], OmitIfNone] = None + mean: Annotated[Optional[Number], OmitIfNone] = None + stddev: Annotated[Optional[Number], OmitIfNone] = None + count: Annotated[Optional[int], OmitIfNone] = None + valid_percent: Annotated[Optional[Number], OmitIfNone] = None + + +NormalizeType: TypeAlias = Optional[ + Literal[ + "min-max", + "z-score", + "l1", + "l2", + "l2sqr", + "hamming", + "hamming2", + "type-mask", + "relative", + "inf" + ] +] + +ResizeType: TypeAlias = Optional[ + Literal[ + "crop", + "pad", + "interpolation-nearest", + "interpolation-linear", + "interpolation-cubic", + "interpolation-area", + "interpolation-lanczos4", + "interpolation-max", + "wrap-fill-outliers", + "wrap-inverse-map", + ] +] + + +class ModelInput(MLMBaseModel): + name: str + bands: List[str] # order is critical here (same index as dim shape), allow duplicate if the model needs it somehow + input: InputStructure + norm_by_channel: Annotated[Optional[bool], OmitIfNone] = None + norm_type: Annotated[Optional[NormalizeType], OmitIfNone] = None + norm_clip: Annotated[Optional[List[Union[float, int]]], OmitIfNone] = None + resize_type: Annotated[Optional[ResizeType], OmitIfNone] = None + statistics: Annotated[Optional[List[MLMStatistic]], OmitIfNone] = None + pre_processing_function: Optional[ProcessingExpression] = 
None diff --git a/stac_model/output.py b/stac_model/output.py new file mode 100644 index 0000000..0e25ec7 --- /dev/null +++ b/stac_model/output.py @@ -0,0 +1,97 @@ +from typing import Annotated, Any, Dict, List, Optional, Set, Union, cast + +from pydantic import AliasChoices, ConfigDict, Field, model_serializer +from pystac.extensions.classification import Classification + +from stac_model.base import DataType, MLMBaseModel, ModelTask, OmitIfNone, ProcessingExpression + + +class ModelResult(MLMBaseModel): + shape: List[Union[int, float]] = Field(..., min_items=1) + dim_order: List[str] = Field(..., min_items=1) + data_type: DataType + + +# MLMClassification: TypeAlias = Annotated[ +# Classification, +# PlainSerializer( +# lambda x: x.to_dict(), +# when_used="json", +# return_type=TypedDict( +# "Classification", +# { +# "value": int, +# "name": str, +# "description": NotRequired[str], +# "color_hint": NotRequired[str], +# } +# ) +# ) +# ] + + +class MLMClassification(MLMBaseModel, Classification): + @model_serializer() + def model_dump(self, *_: Any, **__: Any) -> Dict[str, Any]: + return self.to_dict() # type: ignore[call-arg] + + def __init__( + self, + value: int, + description: Optional[str] = None, + name: Optional[str] = None, + color_hint: Optional[str] = None, + ) -> None: + Classification.__init__(self, {}) + if not name and not description: + raise ValueError("Class name or description is required!") + self.apply( + value=value, + name=name or description, + description=cast(str, description or name), + color_hint=color_hint, + ) + + def __hash__(self) -> int: + return sum(map(hash, self.to_dict().items())) + + def __setattr__(self, key: str, value: Any) -> None: + if key == "properties": + Classification.__setattr__(self, key, value) + else: + MLMBaseModel.__setattr__(self, key, value) + + model_config = ConfigDict( + populate_by_name=True, + arbitrary_types_allowed=True, + ) + + +# class ClassObject(BaseModel): +# value: int +# name: str +# description: 
Optional[str] = None +# title: Optional[str] = None +# color_hint: Optional[str] = None +# nodata: Optional[bool] = False + + +class ModelOutput(MLMBaseModel): + name: str + tasks: Set[ModelTask] + result: ModelResult + + # NOTE: + # Although it is preferable to have 'Set' to avoid duplicate, + # it is more important to keep the order in this case, + # which we would lose with 'Set'. + # We also get some unhashable errors with 'Set', although 'MLMClassification' implements '__hash__'. + classes: Annotated[List[MLMClassification], OmitIfNone] = Field( + alias="classification:classes", + validation_alias=AliasChoices("classification:classes", "classification_classes", "classes"), + ) + post_processing_function: Optional[ProcessingExpression] = None + + model_config = ConfigDict( + populate_by_name=True, + ) diff --git a/stac_model/runtime.py b/stac_model/runtime.py new file mode 100644 index 0000000..9104fa6 --- /dev/null +++ b/stac_model/runtime.py @@ -0,0 +1,49 @@ +from enum import Enum +from typing import Annotated, Literal, Optional, Union + +from pydantic import AliasChoices, Field + +from stac_model.base import MLMBaseModel, OmitIfNone + + +class AcceleratorEnum(str, Enum): + amd64 = "amd64" + cuda = "cuda" + xla = "xla" + amd_rocm = "amd-rocm" + intel_ipex_cpu = "intel-ipex-cpu" + intel_ipex_gpu = "intel-ipex-gpu" + macos_arm = "macos-arm" + + def __str__(self): + return self.value + + +AcceleratorName = Literal[ + "amd64", + "cuda", + "xla", + "amd-rocm", + "intel-ipex-cpu", + "intel-ipex-gpu", + "macos-arm", +] + +AcceleratorType = Union[AcceleratorName, AcceleratorEnum] + + +class Runtime(MLMBaseModel): + framework: Annotated[Optional[str], OmitIfNone] = Field(default=None) + framework_version: Annotated[Optional[str], OmitIfNone] = Field(default=None) + file_size: Annotated[Optional[int], OmitIfNone] = Field( + alias="file:size", + validation_alias=AliasChoices("file_size", "file:size"), + default=None, + ) + memory_size: Annotated[Optional[int], 
OmitIfNone] = Field(default=None) + batch_size_suggestion: Annotated[Optional[int], OmitIfNone] = Field(default=None) + + accelerator: Optional[AcceleratorType] = Field(default=None) + accelerator_constrained: bool = Field(default=False) + accelerator_summary: Annotated[Optional[str], OmitIfNone] = Field(default=None) + accelerator_count: Annotated[Optional[int], OmitIfNone] = Field(default=None, minimum=1) diff --git a/stac_model/schema.py b/stac_model/schema.py new file mode 100644 index 0000000..38c35fc --- /dev/null +++ b/stac_model/schema.py @@ -0,0 +1,281 @@ +import json +from typing import ( + Annotated, + Any, + Generic, + Iterable, + List, + Literal, + Optional, + Set, + TypeVar, + Union, + cast, + get_args, + overload, +) + +import pystac +from pydantic import ConfigDict, Field +from pydantic.fields import FieldInfo +from pystac.extensions.base import ( + ExtensionManagementMixin, + PropertiesExtension, + SummariesExtension, +) + +from stac_model.base import ModelTask, OmitIfNone +from stac_model.input import ModelInput +from stac_model.output import ModelOutput +from stac_model.runtime import Runtime + +T = TypeVar( + "T", + pystac.Collection, + pystac.Item, + pystac.Asset, # item_assets.AssetDefinition, +) + +SchemaName = Literal["mlm"] +SCHEMA_URI: str = "https://stac-extensions.github.io/mlm/v1.0.0/schema.json" +PREFIX = f"{get_args(SchemaName)[0]}:" + + +def mlm_prefix_adder(field_name: str) -> str: + return "mlm:" + field_name + + +class MLModelProperties(Runtime): + name: str = Field(min_length=1) + architecture: str = Field(min_length=1) + tasks: Set[ModelTask] + input: List[ModelInput] + output: List[ModelOutput] + + total_parameters: int + pretrained: Annotated[Optional[bool], OmitIfNone] = Field(default=True) + pretrained_source: Annotated[Optional[str], OmitIfNone] = None + + model_config = ConfigDict(alias_generator=mlm_prefix_adder, populate_by_name=True, extra="ignore") + + +class MLModelExtension( + Generic[T], + PropertiesExtension, + # 
FIXME: resolve typing incompatibility? + # 'pystac.Asset' does not derive from STACObject + # therefore, it technically cannot be used in 'ExtensionManagementMixin[T]' + # however, this makes our extension definition much easier and avoids lots of code duplication + ExtensionManagementMixin[ # type: ignore[type-var] + Union[ + pystac.Collection, + pystac.Item, + pystac.Asset, + ] + ], +): + @property + def name(self) -> SchemaName: + return cast(SchemaName, get_args(SchemaName)[0]) + + def apply( + self, + properties: Union[MLModelProperties, dict[str, Any]], + ) -> None: + """ + Applies Machine Learning Model Extension properties to the extended :mod:`~pystac` object. + """ + if isinstance(properties, dict): + properties = MLModelProperties(**properties) + data_json = json.loads(properties.model_dump_json(by_alias=True)) + for prop, val in data_json.items(): + self._set_property(prop, val) + + @classmethod + def get_schema_uri(cls) -> str: + return SCHEMA_URI + + @overload + @classmethod + def ext(cls, obj: pystac.Asset, add_if_missing: bool = False) -> "AssetMLModelExtension": ... + + @overload + @classmethod + def ext(cls, obj: pystac.Item, add_if_missing: bool = False) -> "ItemMLModelExtension": ... + + @overload + @classmethod + def ext(cls, obj: pystac.Collection, add_if_missing: bool = False) -> "CollectionMLModelExtension": ... + + # @overload + # @classmethod + # def ext(cls, obj: item_assets.AssetDefinition, add_if_missing: bool = False) -> "ItemAssetsMLModelExtension": + # ... + + @classmethod + def ext( + cls, + obj: Union[pystac.Collection, pystac.Item, pystac.Asset], # item_assets.AssetDefinition + add_if_missing: bool = False, + ) -> Union[ + "CollectionMLModelExtension", + "ItemMLModelExtension", + "AssetMLModelExtension", + ]: + """ + Extends the given STAC Object with properties from the :stac-ext:`Machine Learning Model Extension `. + + This extension can be applied to instances of :class:`~pystac.Item` or :class:`~pystac.Asset`. 
+ + Args: + obj: STAC Object to extend with the MLM extension fields. + add_if_missing: Add the MLM extension schema URI to the object if not already in `stac_extensions`. + + Returns: + Extended object. + + Raises: + pystac.ExtensionTypeError : If an invalid object type is passed. + """ + if isinstance(obj, pystac.Collection): + cls.ensure_has_extension(obj, add_if_missing) + return CollectionMLModelExtension(obj) + elif isinstance(obj, pystac.Item): + cls.ensure_has_extension(obj, add_if_missing) + return ItemMLModelExtension(obj) + elif isinstance(obj, pystac.Asset): + cls.ensure_owner_has_extension(obj, add_if_missing) + return AssetMLModelExtension(obj) + # elif isinstance(obj, item_assets.AssetDefinition): + # cls.ensure_owner_has_extension(obj, add_if_missing) + # return ItemAssetsMLModelExtension(obj) + else: + raise pystac.ExtensionTypeError(cls._ext_error_message(obj)) + + @classmethod + def summaries(cls, obj: pystac.Collection, add_if_missing: bool = False) -> "SummariesMLModelExtension": + """Returns the extended summaries object for the given collection.""" + cls.ensure_has_extension(obj, add_if_missing) + return SummariesMLModelExtension(obj) + + +class SummariesMLModelExtension(SummariesExtension): + """ + Summaries annotated with the Machine Learning Model Extension. + + A concrete implementation of :class:`~SummariesExtension` that extends + the ``summaries`` field of a :class:`~pystac.Collection` to include properties + defined in the :stac-ext:`Machine Learning Model `. 
+ """ + + def _check_mlm_property(self, prop: str) -> FieldInfo: + try: + return MLModelProperties.model_fields[prop] + except KeyError as err: + raise AttributeError(f"Name '{prop}' is not a valid MLM property.") from err + + def _validate_mlm_property(self, prop: str, summaries: list[Any]) -> None: + # ignore mypy issue when combined with Annotated + # - https://github.com/pydantic/pydantic/issues/6713 + # - https://github.com/pydantic/pydantic/issues/5190 + model = MLModelProperties.model_construct() # type: ignore[call-arg] + validator = MLModelProperties.__pydantic_validator__ + for value in summaries: + validator.validate_assignment(model, prop, value) + + def get_mlm_property(self, prop: str) -> Optional[list[Any]]: + self._check_mlm_property(prop) + return self.summaries.get_list(prop) + + def set_mlm_property(self, prop: str, summaries: list[Any]) -> None: + self._check_mlm_property(prop) + self._validate_mlm_property(prop, summaries) + self._set_summary(prop, summaries) + + def __getattr__(self, prop): + return self.get_mlm_property(prop) + + def __setattr__(self, prop, value): + self.set_mlm_property(prop, value) + + +class ItemMLModelExtension(MLModelExtension[pystac.Item]): + """ + Item annotated with the Machine Learning Model Extension. + + A concrete implementation of :class:`MLModelExtension` on an + :class:`~pystac.Item` that extends the properties of the Item to + include properties defined in the :stac-ext:`Machine Learning Model + Extension `. + + This class should generally not be instantiated directly. Instead, call + :meth:`MLModelExtension.ext` on an :class:`~pystac.Item` to extend it. 
+ """ + + def __init__(self, item: pystac.Item): + self.item = item + self.properties = item.properties + + def __repr__(self) -> str: + return f"" + + +# class ItemAssetsMLModelExtension(MLModelExtension[item_assets.AssetDefinition]): +# properties: dict[str, Any] +# asset_defn: item_assets.AssetDefinition +# +# def __init__(self, item_asset: item_assets.AssetDefinition): +# self.asset_defn = item_asset +# self.properties = item_asset.properties + + +class AssetMLModelExtension(MLModelExtension[pystac.Asset]): + """ + Asset annotated with the Machine Learning Model Extension. + + A concrete implementation of :class:`MLModelExtension` on an + :class:`~pystac.Asset` that extends the Asset fields to include + properties defined in the :stac-ext:`Machine Learning Model + Extension `. + + This class should generally not be instantiated directly. Instead, call + :meth:`MLModelExtension.ext` on an :class:`~pystac.Asset` to extend it. + """ + + asset_href: str + """The ``href`` value of the :class:`~pystac.Asset` being extended.""" + + properties: dict[str, Any] + """The :class:`~pystac.Asset` fields, including extension properties.""" + + additional_read_properties: Optional[Iterable[dict[str, Any]]] = None + """If present, this will be a list containing 1 dictionary representing the + properties of the owning :class:`~pystac.Item`.""" + + def __init__(self, asset: pystac.Asset): + self.asset_href = asset.href + self.properties = asset.extra_fields + if asset.owner and isinstance(asset.owner, pystac.Item): + self.additional_read_properties = [asset.owner.properties] + + def __repr__(self) -> str: + return f"" + + +class CollectionMLModelExtension(MLModelExtension[pystac.Collection]): + def __init__(self, collection: pystac.Collection): + self.collection = collection + + +# __all__ = [ +# "MLModelExtension", +# "ModelInput", +# "InputArray", +# "Band", +# "Statistics", +# "ModelOutput", +# "Asset", +# "Runtime", +# "Container", +# "Asset", +# ] diff --git 
a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..0092fe0 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,56 @@ +import json +import os +from typing import TYPE_CHECKING, Any, Dict, cast + +import pystac +import pytest + +from stac_model.base import JSON +from stac_model.examples import eurosat_resnet as make_eurosat_resnet +from stac_model.schema import SCHEMA_URI + +if TYPE_CHECKING: + from _pytest.fixtures import SubRequest + +TEST_DIR = os.path.dirname(__file__) +EXAMPLES_DIR = os.path.abspath(os.path.join(TEST_DIR, "../examples")) +JSON_SCHEMA_DIR = os.path.abspath(os.path.join(TEST_DIR, "../json-schema")) + + +@pytest.fixture(scope="session") +def mlm_schema() -> JSON: + with open(os.path.join(JSON_SCHEMA_DIR, "schema.json")) as schema_file: + data = json.load(schema_file) + return cast(JSON, data) + + +@pytest.fixture(scope="session") +def mlm_validator( + request: "SubRequest", + mlm_schema: Dict[str, Any], +) -> pystac.validation.stac_validator.JsonSchemaSTACValidator: + """ + Update the :class:`pystac.validation.RegisteredValidator` with the local MLM JSON schema definition. + + Because the schema is *not yet* uploaded to the expected STAC schema URI, + any call to :func:`pystac.validation.validate` or :meth:`pystac.stac_object.STACObject.validate` results + in ``GetSchemaError`` when the schema retrieval is attempted by the validator. By adding the schema to the + mapping beforehand, remote resolution can be bypassed temporarily. 
+ """ + validator = pystac.validation.RegisteredValidator.get_validator() + validator = cast(pystac.validation.stac_validator.JsonSchemaSTACValidator, validator) + validator.schema_cache[SCHEMA_URI] = mlm_schema + pystac.validation.RegisteredValidator.set_validator(validator) # apply globally to allow 'STACObject.validate()' + return validator + + +@pytest.fixture +def mlm_example(request: "SubRequest") -> JSON: + with open(os.path.join(EXAMPLES_DIR, request.param)) as example_file: + data = json.load(example_file) + return cast(JSON, data) + + +@pytest.fixture(name="eurosat_resnet") +def eurosat_resnet(): + return make_eurosat_resnet() diff --git a/tests/test_schema.py b/tests/test_schema.py new file mode 100644 index 0000000..a3e9899 --- /dev/null +++ b/tests/test_schema.py @@ -0,0 +1,42 @@ +from typing import Any, Dict, cast + +import pystac +import pytest +from pystac.validation.stac_validator import STACValidator + +from stac_model.base import JSON +from stac_model.schema import SCHEMA_URI + + +@pytest.mark.parametrize( + "mlm_example", # value passed to 'mlm_example' fixture + [ + "item_basic.json", + "item_raster_bands.json", + "item_eo_bands.json", + "item_multi_io.json", + ], + indirect=True, +) +def test_mlm_schema( + mlm_validator: STACValidator, + mlm_example: JSON, +) -> None: + mlm_item = pystac.Item.from_dict(cast(Dict[str, Any], mlm_example)) + validated = pystac.validation.validate(mlm_item, validator=mlm_validator) + assert len(validated) >= len(mlm_item.stac_extensions) # extra STAC core schemas + assert SCHEMA_URI in validated + + +def test_model_metadata_to_dict(eurosat_resnet): + assert eurosat_resnet.item.to_dict() + + +def test_validate_model_metadata(eurosat_resnet): + assert pystac.read_dict(eurosat_resnet.item.to_dict()) + + +def test_validate_model_against_schema(eurosat_resnet, mlm_validator): + mlm_item = pystac.read_dict(eurosat_resnet.item.to_dict()) + validated = pystac.validation.validate(mlm_item, validator=mlm_validator) + assert 
SCHEMA_URI in validated