From bd9c75798eede1a4b7d7ecd6203179d3cb5e54dd Mon Sep 17 00:00:00 2001 From: Slawek Bierwiaczonek <119700507+Domejko@users.noreply.github.com> Date: Fri, 3 May 2024 15:35:34 +0200 Subject: [PATCH] Prettier formatter (#2018) * Formatted with Prettier * Added package-lock.json * Removed the .idea folder * Removed version * Added .prettierignore with tests/data/* * Added npm updates * Updated prettier * Update style.yaml Co-authored-by: Adam J. Stewart * Updated linters section * Added Prettier hook * Added Prettier hook * Added exclude regex * Update .prettierignore Co-authored-by: Adam J. Stewart * Added package.json and package-lock.json * Added npm ci flag * Formatted with Prettier * Updated .gitignore * Reverted changes in data folder * Added .idea to ignored entries * Delete package-lock.json from root * Added cache dependency path * Formatted with Prettier * Updated tests.yaml * Updated regex * Updated actions/checkout to v4.1.4 * Updated SECURITY and dependabot * Removed .idea and requirements/node_modules * Updated package files * Changed .yaml files format * Added EOL * Updated contributing.rst * Updated style.yaml * Updated style CI * Added node_modules to ignored entries * Updated npm install command * Added *.json root files to ignored entries * Updated prettier version range * Updated prettier install command * Relocated # Node stuff --------- Co-authored-by: Adam J. Stewart --- .devcontainer/devcontainer.json | 2 +- .github/CONTRIBUTING.md | 12 +- .github/ISSUE_TEMPLATE/bug-report.yml | 100 +++++------ .github/ISSUE_TEMPLATE/documentation.yml | 40 ++--- .github/ISSUE_TEMPLATE/feature-request.yml | 84 +++++----- .github/SECURITY.md | 20 +-- .github/SUPPORT.md | 26 +-- .github/dependabot.yml | 4 + .github/labeler.yml | 68 ++++---- .github/workflows/labeler.yml | 14 +- .github/workflows/release.yaml | 142 ++++++++-------- .github/workflows/style.yaml | 116 +++++++------ .github/workflows/tests.yaml | 158 +++++++++--------- .github/workflows/tutorials.yaml | 50 +++--- .gitignore | 4 + .pre-commit-config.yaml | 9 + .prettierignore | 2 + .readthedocs.yaml | 12 +- CITATION.cff | 32 ++-- README.md | 19 ++- docs/_static/button-width.css | 4 +- docs/_static/notebook-prompt.css | 2 +- docs/user/contributing.rst | 18 ++ experiments/README.md | 4 +- experiments/ssl4eo/landsat/README.md | 18 +- .../conf/ssl4eo_benchmark_etm_sr_cdl.yaml | 20 ++- .../conf/ssl4eo_benchmark_etm_sr_nlcd.yaml | 16 +- .../conf/ssl4eo_benchmark_etm_toa_cdl.yaml | 20 ++- .../conf/ssl4eo_benchmark_etm_toa_nlcd.yaml | 16 +- .../conf/ssl4eo_benchmark_oli_sr_cdl.yaml | 20 ++- .../conf/ssl4eo_benchmark_oli_sr_nlcd.yaml | 16 +- .../ssl4eo_benchmark_oli_tirs_toa_cdl.yaml | 20 ++- .../ssl4eo_benchmark_oli_tirs_toa_nlcd.yaml | 16 +- .../conf/ssl4eo_benchmark_tm_toa_cdl.yaml | 20 ++- .../conf/ssl4eo_benchmark_tm_toa_nlcd.yaml | 16 +- requirements/package-lock.json | 20 +++ requirements/package.json | 7 + tests/conf/chesapeake_cvpr_5.yaml | 6 +- tests/conf/chesapeake_cvpr_7.yaml | 6 +- tests/conf/chesapeake_cvpr_prior_byol.yaml | 6 +- tests/conf/chesapeake_cvpr_prior_moco.yaml | 6 +- tests/conf/chesapeake_cvpr_prior_simclr.yaml | 6 +- tests/conf/seco_byol_1.yaml | 6 +- tests/conf/seco_byol_2.yaml | 6 +- tests/conf/seco_moco_1.yaml | 6 +- tests/conf/seco_moco_2.yaml | 6 +- tests/conf/seco_simclr_1.yaml | 6 +- tests/conf/seco_simclr_2.yaml | 6 +- .../conf/sentinel2_south_america_soybean.yaml | 30 ++-- tests/conf/southafricacroptype.yaml | 2 +- tests/conf/ssl4eo_l_moco_1.yaml | 4 +- tests/data/README.md | 2 + 52 files changed, 760 
insertions(+), 511 deletions(-) create mode 100644 .prettierignore create mode 100644 requirements/package-lock.json create mode 100644 requirements/package.json diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index eabf335d4..87cafc979 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -38,7 +38,7 @@ "python.editor.codeActionsOnSave": { "source.organizeImports": true }, - "python.diffEditor.wordWrap": "off", + "python.diffEditor.wordWrap": "off" } } } diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 2d57aa4b2..17952dcdb 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -2,12 +2,12 @@ TorchGeo is an open-source project built by contributors like you from around the world. If you find a bug or would like to add a new feature, please open a pull request. For further information on how to contribute, including topics like: -* using git, -* licensing requirements, -* writing and running unit tests, -* running linters, -* building the documentation, and -* adding new datasets, +- using git, +- licensing requirements, +- writing and running unit tests, +- running linters, +- building the documentation, and +- adding new datasets, please see our [Contributing Guide](https://torchgeo.readthedocs.io/en/stable/user/contributing.html). diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml index b7e709122..c49b0822d 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.yml +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -3,56 +3,56 @@ description: Create a report to help us reproduce and fix a bug labels: [bug] body: -- type: textarea - attributes: - label: Description - description: > - Please provide a clear and concise description of the bug. - validations: - required: true -- type: textarea - attributes: - label: Steps to reproduce - description: > - We can't solve your issue if we can't reproduce it. Please provide a - [minimal reproducible example](https://stackoverflow.com/help/minimal-reproducible-example) - that shows how to reproduce the bug. If the bug requires any additional files to - reproduce, please upload those or provide a download link. Your code should be - runnable and include all relevant imports. - placeholder: | - 1. Download any Landsat 8 scene from https://earthexplorer.usgs.gov/ - 2. Run the following code to reproduce the error + - type: textarea + attributes: + label: Description + description: > + Please provide a clear and concise description of the bug. + validations: + required: true + - type: textarea + attributes: + label: Steps to reproduce + description: > + We can't solve your issue if we can't reproduce it. Please provide a + [minimal reproducible example](https://stackoverflow.com/help/minimal-reproducible-example) + that shows how to reproduce the bug. If the bug requires any additional files to + reproduce, please upload those or provide a download link. Your code should be + runnable and include all relevant imports. + placeholder: | + 1. Download any Landsat 8 scene from https://earthexplorer.usgs.gov/ + 2. Run the following code to reproduce the error - ```python - from torchgeo.datasets import Landsat8 + ```python + from torchgeo.datasets import Landsat8 - ds = Landsat8("/path/to/downloads/directory") - ... - ``` - validations: - required: true -- type: input - attributes: - label: Version - description: | - What version of TorchGeo are you using? This can be found using the following code. 
- ```console - $ python - >>> import torchgeo - >>> torchgeo.__version__ - X.Y.Z - ``` - If you are using a development release (ends with ".dev0") please also include the specific git commit. + ds = Landsat8("/path/to/downloads/directory") + ... + ``` + validations: + required: true + - type: input + attributes: + label: Version + description: | + What version of TorchGeo are you using? This can be found using the following code. + ```console + $ python + >>> import torchgeo + >>> torchgeo.__version__ + X.Y.Z + ``` + If you are using a development release (ends with ".dev0") please also include the specific git commit. - Whenever possible, try to reproduce your issue with the latest commit from `main`. You never know, someone may have already fixed your bug! - placeholder: | - 1.2.3.dev0 (e1285e6cc6b65080e82bdbf7de9dea3f647d8b3b) - validations: - required: true -- type: markdown - attributes: - value: > - Thanks for taking the time to report this bug! TorchGeo is an open-source project - maintained by its users. If you're Python savvy and want to contribute a pull - request to fix this bug, we'll be happy to review it. If not, we'll try to fix it - as long as we can reproduce it. + Whenever possible, try to reproduce your issue with the latest commit from `main`. You never know, someone may have already fixed your bug! + placeholder: | + 1.2.3.dev0 (e1285e6cc6b65080e82bdbf7de9dea3f647d8b3b) + validations: + required: true + - type: markdown + attributes: + value: > + Thanks for taking the time to report this bug! TorchGeo is an open-source project + maintained by its users. If you're Python savvy and want to contribute a pull + request to fix this bug, we'll be happy to review it. If not, we'll try to fix it + as long as we can reproduce it. diff --git a/.github/ISSUE_TEMPLATE/documentation.yml b/.github/ISSUE_TEMPLATE/documentation.yml index c0847bd02..3fe553025 100644 --- a/.github/ISSUE_TEMPLATE/documentation.yml +++ b/.github/ISSUE_TEMPLATE/documentation.yml @@ -3,23 +3,23 @@ description: Issues or suggestions related to documentation labels: [documentation] body: -- type: textarea - attributes: - label: Issue - description: > - A clear and concise description of what is wrong with or missing from the - documentation. - validations: - required: true -- type: textarea - attributes: - label: Fix - description: > - Tell us how we could improve the documentation! -- type: markdown - attributes: - value: > - Thanks for taking the time to improve the documentation! TorchGeo is an - open-source project maintained by its users. If you're rST savvy and want - to contribute a pull request to improve the docs, we'll be happy to review - it. If not, we'll try to fix it when a get a chance. + - type: textarea + attributes: + label: Issue + description: > + A clear and concise description of what is wrong with or missing from the + documentation. + validations: + required: true + - type: textarea + attributes: + label: Fix + description: > + Tell us how we could improve the documentation! + - type: markdown + attributes: + value: > + Thanks for taking the time to improve the documentation! TorchGeo is an + open-source project maintained by its users. If you're rST savvy and want + to contribute a pull request to improve the docs, we'll be happy to review + it. If not, we'll try to fix it when a get a chance. 
diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml index 906e1a331..03da42401 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.yml +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -3,45 +3,45 @@ description: Submit a proposal/request for a new TorchGeo feature labels: [feature] body: -- type: markdown - attributes: - value: > - TorchGeo is a PyTorch domain library for _geospatial_ data. If the feature you - are suggesting is not specific to working with geospatial data or multispectral - satellite imagery and may be of interest to the broader computer vision - community, consider contributing it to - [torchvision](https://github.com/pytorch/vision) or - [kornia](https://github.com/kornia/kornia) instead. -- type: textarea - attributes: - label: Summary - description: > - A clear and concise summary of your suggestion. -- type: textarea - attributes: - label: Rationale - description: > - Why is this feature important? Is it related to a problem you are experiencing? -- type: textarea - attributes: - label: Implementation - description: > - If you've thought about how to implement this feature, describe your proposed - solution. -- type: textarea - attributes: - label: Alternatives - description: > - Are there any alternatives to the solution you've proposed? -- type: textarea - attributes: - label: Additional information - description: > - Any additional information that might be relevant to the proposed feature. -- type: markdown - attributes: - value: > - Thanks for suggesting this awesome new feature! TorchGeo is an open-source project - maintained by its users. If you're Python savvy and want to contribute a pull - request to implement this feature, we'll be happy to review it. If not, we'll try - to implement your feature when we get a chance. + - type: markdown + attributes: + value: > + TorchGeo is a PyTorch domain library for _geospatial_ data. If the feature you + are suggesting is not specific to working with geospatial data or multispectral + satellite imagery and may be of interest to the broader computer vision + community, consider contributing it to + [torchvision](https://github.com/pytorch/vision) or + [kornia](https://github.com/kornia/kornia) instead. + - type: textarea + attributes: + label: Summary + description: > + A clear and concise summary of your suggestion. + - type: textarea + attributes: + label: Rationale + description: > + Why is this feature important? Is it related to a problem you are experiencing? + - type: textarea + attributes: + label: Implementation + description: > + If you've thought about how to implement this feature, describe your proposed + solution. + - type: textarea + attributes: + label: Alternatives + description: > + Are there any alternatives to the solution you've proposed? + - type: textarea + attributes: + label: Additional information + description: > + Any additional information that might be relevant to the proposed feature. + - type: markdown + attributes: + value: > + Thanks for suggesting this awesome new feature! TorchGeo is an open-source project + maintained by its users. If you're Python savvy and want to contribute a pull + request to implement this feature, we'll be happy to review it. If not, we'll try + to implement your feature when we get a chance. 
diff --git a/.github/SECURITY.md b/.github/SECURITY.md index 926b8ae40..9ea576f72 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -4,7 +4,7 @@ Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). -If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below. +If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383%28v=technet.10%29), please report it to us as described below. ## Reporting Security Issues @@ -12,19 +12,19 @@ If you believe you have found a security vulnerability in any Microsoft-owned re Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report). -If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc). +If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc). -You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). +You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc). Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: - * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) - * Full paths of source file(s) related to the manifestation of the issue - * The location of the affected source code (tag/branch/commit or direct URL) - * Any special configuration required to reproduce the issue - * Step-by-step instructions to reproduce the issue - * Proof-of-concept or exploit code (if possible) - * Impact of the issue, including how an attacker might exploit the issue +- Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 
+- Full paths of source file(s) related to the manifestation of the issue +- The location of the affected source code (tag/branch/commit or direct URL) +- Any special configuration required to reproduce the issue +- Step-by-step instructions to reproduce the issue +- Proof-of-concept or exploit code (if possible) +- Impact of the issue, including how an attacker might exploit the issue This information will help us triage your report more quickly. diff --git a/.github/SUPPORT.md b/.github/SUPPORT.md index e1c23259f..bac17f63b 100644 --- a/.github/SUPPORT.md +++ b/.github/SUPPORT.md @@ -1,13 +1,13 @@ -# Support - -## How to file issues and get help - -This project uses GitHub Issues to track bugs and feature requests. Please search the existing -issues before filing new issues to avoid duplicates. For new issues, file your bug or -feature request as a new Issue. - -For help and questions about using this project, please open an Issue. - -## Microsoft Support Policy - -Support for TorchGeo is limited to the resources listed above. +# Support + +## How to file issues and get help + +This project uses GitHub Issues to track bugs and feature requests. Please search the existing +issues before filing new issues to avoid duplicates. For new issues, file your bug or +feature request as a new Issue. + +For help and questions about using this project, please open an Issue. + +## Microsoft Support Policy + +Support for TorchGeo is limited to the resources listed above. diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7f521949c..eb0571076 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -29,3 +29,7 @@ updates: versions: ">=6" # segmentation-models-pytorch pins timm, must update in unison - dependency-name: "timm" + - package-ecosystem: "npm" + directory: "/requirements" + schedule: + interval: "weekly" diff --git a/.github/labeler.yml b/.github/labeler.yml index 99380f2c4..9e366aa77 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -1,48 +1,48 @@ # TorchGeo modules datamodules: -- changed-files: - - any-glob-to-any-file: "torchgeo/datamodules/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/datamodules/**" datasets: -- changed-files: - - any-glob-to-any-file: "torchgeo/datasets/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/datasets/**" losses: -- changed-files: - - any-glob-to-any-file: "torchgeo/losses/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/losses/**" models: -- changed-files: - - any-glob-to-any-file: "torchgeo/models/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/models/**" samplers: -- changed-files: - - any-glob-to-any-file: "torchgeo/samplers/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/samplers/**" trainers: -- changed-files: - - any-glob-to-any-file: "torchgeo/trainers/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/trainers/**" transforms: -- changed-files: - - any-glob-to-any-file: "torchgeo/transforms/**" + - changed-files: + - any-glob-to-any-file: "torchgeo/transforms/**" # Other dependencies: -- changed-files: - - any-glob-to-any-file: - - "pyproject.toml" - - "requirements/**" - - ".github/dependabot.yml" + - changed-files: + - any-glob-to-any-file: + - "pyproject.toml" + - "requirements/**" + - ".github/dependabot.yml" documentation: -- changed-files: - - any-glob-to-any-file: - - "docs/**" - - "*.md" - - ".github/*.md" - - ".readthedocs.yaml" + - changed-files: + - any-glob-to-any-file: + - "docs/**" + - "*.md" + - ".github/*.md" + - ".readthedocs.yaml" 
scripts: -- changed-files: - - any-glob-to-any-file: - - "torchgeo/__main__.py" - - "torchgeo/main.py" - - "experiments/**" + - changed-files: + - any-glob-to-any-file: + - "torchgeo/__main__.py" + - "torchgeo/main.py" + - "experiments/**" testing: -- changed-files: - - any-glob-to-any-file: - - "tests/**" - - ".github/workflows/**" + - changed-files: + - any-glob-to-any-file: + - "tests/**" + - ".github/workflows/**" diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index da9211ed6..75ea17f8f 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -1,6 +1,6 @@ name: "labeler" on: -- pull_request_target + - pull_request_target jobs: labeler: permissions: @@ -8,9 +8,9 @@ jobs: pull-requests: write runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Add label - uses: actions/labeler@v5.0.0 - with: - sync-labels: true + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Add label + uses: actions/labeler@v5.0.0 + with: + sync-labels: true diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4746620c3..ba35c1dff 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -2,92 +2,92 @@ name: release on: push: branches: - - release** + - release** pull_request: branches: - - release** + - release** jobs: datasets: name: datasets runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-datasets - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install .[tests] - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run pytest checks - run: | - pytest --cov=torchgeo --cov-report=xml --durations=10 - python -m torchgeo --help - torchgeo --help + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-datasets + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install .[tests] + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run pytest checks + run: | + pytest --cov=torchgeo --cov-report=xml --durations=10 + python -m torchgeo --help + torchgeo --help integration: name: integration runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-integration - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install .[datasets,tests] - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run integration checks - run: | - pytest -m slow --durations=10 - python -m torchgeo --help - torchgeo --help + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 
+ with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-integration + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install .[datasets,tests] + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run integration checks + run: | + pytest -m slow --durations=10 + python -m torchgeo --help + torchgeo --help notebooks: name: notebooks runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-tutorials - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install .[docs,tests] planetary_computer pystac - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run notebook checks - run: pytest --nbmake --durations=10 docs/tutorials + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('pyproject.toml') }}-tutorials + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install .[docs,tests] planetary_computer pystac + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run notebook checks + run: pytest --nbmake --durations=10 docs/tutorials concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.head.label || github.head_ref || github.ref }} cancel-in-progress: true diff --git a/.github/workflows/style.yaml b/.github/workflows/style.yaml index 6af89e540..4d2b5acde 100644 --- a/.github/workflows/style.yaml +++ b/.github/workflows/style.yaml @@ -2,65 +2,85 @@ name: style on: push: branches: - - main - - release** + - main + - release** pull_request: branches: - - main - - release** + - main + - release** jobs: mypy: name: mypy runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/datasets.txt') }}-${{ hashFiles('requirements/style.txt') }}-${{ hashFiles('requirements/tests.txt') }} - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install -r requirements/required.txt -r requirements/datasets.txt -r requirements/style.txt -r requirements/tests.txt - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run mypy checks - run: mypy . 
+ - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/datasets.txt') }}-${{ hashFiles('requirements/style.txt') }}-${{ hashFiles('requirements/tests.txt') }} + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install -r requirements/required.txt -r requirements/datasets.txt -r requirements/style.txt -r requirements/tests.txt + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run mypy checks + run: mypy . ruff: name: ruff runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/style.txt') }} - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install -r requirements/style.txt - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run ruff checks - run: | - ruff check --output-format=github --no-fix . - ruff format --diff + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/style.txt') }} + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install -r requirements/style.txt + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run ruff checks + run: | + ruff check --output-format=github --no-fix . + ruff format --diff + prettier: + name: prettier + runs-on: ubuntu-latest + steps: + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up nodejs + uses: actions/setup-node@v4.0.2 + with: + node-version: "20" + cache: "npm" + cache-dependency-path: "requirements/package-lock.json" + - name: Installing prettier + run: | + npm install requirements/ + npm cache clean --force + - name: List npm dependencies + run: npm ls --all + - name: Run prettier formatting + run: npx prettier . 
--check concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.head.label || github.head_ref || github.ref }} cancel-in-progress: true diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 0badf940d..c572be463 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -2,12 +2,12 @@ name: tests on: push: branches: - - main - - release** + - main + - release** pull_request: branches: - - main - - release** + - main + - release** jobs: latest: name: latest @@ -17,88 +17,88 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.10', '3.11', '3.12'] + python-version: ["3.10", "3.11", "3.12"] steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: ${{ matrix.python-version }} - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/datasets.txt') }}-${{ hashFiles('requirements/tests.txt') }} - if: ${{ runner.os != 'macOS' }} - - name: Setup headless display for pyvista - uses: pyvista/setup-headless-display-action@v2 - - name: Install apt dependencies (Linux) - run: | - sudo apt-get update - sudo apt-get install unrar - if: ${{ runner.os == 'Linux' }} - - name: Install brew dependencies (macOS) - run: brew install rar - if: ${{ runner.os == 'macOS' }} - - name: Install choco dependencies (Windows) - run: choco install 7zip - if: ${{ runner.os == 'Windows' }} - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install -r requirements/required.txt -r requirements/datasets.txt -r requirements/tests.txt - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run pytest checks - run: | - pytest --cov=torchgeo --cov-report=xml --durations=10 - python3 -m torchgeo --help - - name: Report coverage - uses: codecov/codecov-action@v4.3.0 - with: - token: ${{ secrets.CODECOV_TOKEN }} + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: ${{ matrix.python-version }} + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/datasets.txt') }}-${{ hashFiles('requirements/tests.txt') }} + if: ${{ runner.os != 'macOS' }} + - name: Setup headless display for pyvista + uses: pyvista/setup-headless-display-action@v2 + - name: Install apt dependencies (Linux) + run: | + sudo apt-get update + sudo apt-get install unrar + if: ${{ runner.os == 'Linux' }} + - name: Install brew dependencies (macOS) + run: brew install rar + if: ${{ runner.os == 'macOS' }} + - name: Install choco dependencies (Windows) + run: choco install 7zip + if: ${{ runner.os == 'Windows' }} + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install -r requirements/required.txt -r requirements/datasets.txt -r requirements/tests.txt + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run pytest checks + run: | + pytest --cov=torchgeo --cov-report=xml --durations=10 + python3 -m torchgeo --help + - name: Report coverage + uses: codecov/codecov-action@v4.3.0 + with: + token: ${{ secrets.CODECOV_TOKEN }} minimum: name: minimum 
runs-on: ubuntu-latest env: MPLBACKEND: Agg steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.10' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/min-reqs.old') }}-${{ hashFiles('requirements/mins-cons.old') }} - - name: Setup headless display for pyvista - uses: pyvista/setup-headless-display-action@v2 - - name: Install apt dependencies (Linux) - run: | - sudo apt-get update - sudo apt-get install unrar - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install -r requirements/min-reqs.old -c requirements/min-cons.old - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run pytest checks - run: | - pytest --cov=torchgeo --cov-report=xml --durations=10 - python3 -m torchgeo --help - - name: Report coverage - uses: codecov/codecov-action@v4.3.0 - with: - token: ${{ secrets.CODECOV_TOKEN }} + - name: Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.10" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/min-reqs.old') }}-${{ hashFiles('requirements/mins-cons.old') }} + - name: Setup headless display for pyvista + uses: pyvista/setup-headless-display-action@v2 + - name: Install apt dependencies (Linux) + run: | + sudo apt-get update + sudo apt-get install unrar + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install -r requirements/min-reqs.old -c requirements/min-cons.old + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run pytest checks + run: | + pytest --cov=torchgeo --cov-report=xml --durations=10 + python3 -m torchgeo --help + - name: Report coverage + uses: codecov/codecov-action@v4.3.0 + with: + token: ${{ secrets.CODECOV_TOKEN }} concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.head.label || github.head_ref || github.ref }} cancel-in-progress: true diff --git a/.github/workflows/tutorials.yaml b/.github/workflows/tutorials.yaml index de8008205..8159dbba0 100644 --- a/.github/workflows/tutorials.yaml +++ b/.github/workflows/tutorials.yaml @@ -2,40 +2,40 @@ name: tutorials on: push: branches: - - main + - main paths: - - docs/tutorials/** + - docs/tutorials/** pull_request: branches: - - main + - main paths: - - docs/tutorials/** + - docs/tutorials/** jobs: notebooks: name: notebooks runs-on: ubuntu-latest steps: - - name: Clone repo - uses: actions/checkout@v4.1.4 - - name: Set up python - uses: actions/setup-python@v5.1.0 - with: - python-version: '3.12' - - name: Cache dependencies - uses: actions/cache@v4.0.2 - id: cache - with: - path: ${{ env.pythonLocation }} - key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/docs.txt') }}-${{ hashFiles('requirements/tests.txt') }}-tutorials - - name: Install pip dependencies - if: steps.cache.outputs.cache-hit != 'true' - run: | - pip install -r requirements/required.txt -r requirements/docs.txt -r requirements/tests.txt planetary_computer pystac - pip cache purge - - name: List pip dependencies - run: pip list - - name: Run notebook checks - run: pytest --nbmake --durations=10 docs/tutorials + - name: 
Clone repo + uses: actions/checkout@v4.1.4 + - name: Set up python + uses: actions/setup-python@v5.1.0 + with: + python-version: "3.12" + - name: Cache dependencies + uses: actions/cache@v4.0.2 + id: cache + with: + path: ${{ env.pythonLocation }} + key: ${{ env.pythonLocation }}-${{ hashFiles('requirements/required.txt') }}-${{ hashFiles('requirements/docs.txt') }}-${{ hashFiles('requirements/tests.txt') }}-tutorials + - name: Install pip dependencies + if: steps.cache.outputs.cache-hit != 'true' + run: | + pip install -r requirements/required.txt -r requirements/docs.txt -r requirements/tests.txt planetary_computer pystac + pip cache purge + - name: List pip dependencies + run: pip list + - name: Run notebook checks + run: pytest --nbmake --durations=10 docs/tutorials concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.head.label || github.head_ref || github.ref }} cancel-in-progress: true diff --git a/.gitignore b/.gitignore index 29e2b022a..3a017d0ee 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,10 @@ /results/ *.aux.xml +# Node stuff: +node_modules/ +/*.json + # Spack .spack-env/ spack.lock diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a090a0085..5d768ae04 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,3 +33,12 @@ repos: - torch>=2.3 - torchmetrics>=0.10 exclude: (build|data|dist|logo|logs|output)/ + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 + hooks: + - id: prettier + language: node + language_version: 22.0.0 + args: + - --write + exclude: ^tests/data/[^/]+/$ diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..790f9904d --- /dev/null +++ b/.prettierignore @@ -0,0 +1,2 @@ +# Ignore artifacts: +tests/data/*/** diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 3804a40b8..2c11def62 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -12,12 +12,12 @@ build: # Configuration of the Python environment to be used python: - install: - - requirements: requirements/required.txt - - requirements: requirements/docs.txt - - requirements: docs/requirements.txt + install: + - requirements: requirements/required.txt + - requirements: requirements/docs.txt + - requirements: docs/requirements.txt # Configuration for Sphinx documentation sphinx: - configuration: docs/conf.py - fail_on_warning: true + configuration: docs/conf.py + fail_on_warning: true diff --git a/CITATION.cff b/CITATION.cff index e1d0db6a6..64524eaa2 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -1,22 +1,6 @@ # https://github.com/citation-file-format/citation-file-format/blob/main/schema-guide.md # Can be validated using `cffconvert --validate` authors: -- family-names: "Stewart" - given-names: "Adam J." -- family-names: "Robinson" - given-names: "Caleb" -- family-names: "Corley" - given-names: "Isaac A." -- family-names: "Ortiz" - given-names: "Anthony" -- family-names: "Lavista Ferres" - given-names: "Juan M." -- family-names: "Banerjee" - given-names: "Arindam" -cff-version: "1.2.0" -message: "If you use this software, please cite it using the metadata from this file." -preferred-citation: - authors: - family-names: "Stewart" given-names: "Adam J." - family-names: "Robinson" @@ -29,6 +13,22 @@ preferred-citation: given-names: "Juan M." - family-names: "Banerjee" given-names: "Arindam" +cff-version: "1.2.0" +message: "If you use this software, please cite it using the metadata from this file." +preferred-citation: + authors: + - family-names: "Stewart" + given-names: "Adam J." 
+ - family-names: "Robinson" + given-names: "Caleb" + - family-names: "Corley" + given-names: "Isaac A." + - family-names: "Ortiz" + given-names: "Anthony" + - family-names: "Lavista Ferres" + given-names: "Juan M." + - family-names: "Banerjee" + given-names: "Arindam" collection-title: "Proceedings of the 30th International Conference on Advances in Geographic Information Systems" collection-type: "proceedings" conference: diff --git a/README.md b/README.md index f1e1a37f7..aff785bdd 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ from torchgeo.trainers import SemanticSegmentationTask ### Geospatial datasets and samplers -Many remote sensing applications involve working with [*geospatial datasets*](https://torchgeo.readthedocs.io/en/stable/api/datasets.html#geospatial-datasets)—datasets with geographic metadata. These datasets can be challenging to work with due to the sheer variety of data. Geospatial imagery is often multispectral with a different number of spectral bands and spatial resolution for every satellite. In addition, each file may be in a different coordinate reference system (CRS), requiring the data to be reprojected into a matching CRS. +Many remote sensing applications involve working with [_geospatial datasets_](https://torchgeo.readthedocs.io/en/stable/api/datasets.html#geospatial-datasets)—datasets with geographic metadata. These datasets can be challenging to work with due to the sheer variety of data. Geospatial imagery is often multispectral with a different number of spectral bands and spatial resolution for every satellite. In addition, each file may be in a different coordinate reference system (CRS), requiring the data to be reprojected into a matching CRS. Example application in which we combine Landsat and CDL and sample from both @@ -84,7 +84,7 @@ cdl = CDL(root="...", download=True, checksum=True) dataset = landsat & cdl ``` -This dataset can now be used with a PyTorch data loader. Unlike benchmark datasets, geospatial datasets often include very large images. For example, the CDL dataset consists of a single image covering the entire continental United States. In order to sample from these datasets using geospatial coordinates, TorchGeo defines a number of [*samplers*](https://torchgeo.readthedocs.io/en/stable/api/samplers.html). In this example, we'll use a random sampler that returns 256 x 256 pixel images and 10,000 samples per epoch. We also use a custom collation function to combine each sample dictionary into a mini-batch of samples. +This dataset can now be used with a PyTorch data loader. Unlike benchmark datasets, geospatial datasets often include very large images. For example, the CDL dataset consists of a single image covering the entire continental United States. In order to sample from these datasets using geospatial coordinates, TorchGeo defines a number of [_samplers_](https://torchgeo.readthedocs.io/en/stable/api/samplers.html). In this example, we'll use a random sampler that returns 256 x 256 pixel images and 10,000 samples per epoch. We also use a custom collation function to combine each sample dictionary into a mini-batch of samples. ```python sampler = RandomGeoSampler(dataset, size=256, length=10000) @@ -103,19 +103,19 @@ for batch in dataloader: Many applications involve intelligently composing datasets based on geospatial metadata like this. 
For example, users may want to: -* Combine datasets for multiple image sources and treat them as equivalent (e.g., Landsat 7 and 8) -* Combine datasets for disparate geospatial locations (e.g., Chesapeake NY and PA) +- Combine datasets for multiple image sources and treat them as equivalent (e.g., Landsat 7 and 8) +- Combine datasets for disparate geospatial locations (e.g., Chesapeake NY and PA) These combinations require that all queries are present in at least one dataset, and can be created using a `UnionDataset`. Similarly, users may want to: -* Combine image and target labels and sample from both simultaneously (e.g., Landsat and CDL) -* Combine datasets for multiple image sources for multimodal learning or data fusion (e.g., Landsat and Sentinel) +- Combine image and target labels and sample from both simultaneously (e.g., Landsat and CDL) +- Combine datasets for multiple image sources for multimodal learning or data fusion (e.g., Landsat and Sentinel) These combinations require that all queries are present in both datasets, and can be created using an `IntersectionDataset`. TorchGeo automatically composes these datasets for you when you use the intersection (`&`) and union (`|`) operators. ### Benchmark datasets -TorchGeo includes a number of [*benchmark datasets*](https://torchgeo.readthedocs.io/en/stable/api/datasets.html#non-geospatial-datasets)—datasets that include both input images and target labels. This includes datasets for tasks like image classification, regression, semantic segmentation, object detection, instance segmentation, change detection, and more. +TorchGeo includes a number of [_benchmark datasets_](https://torchgeo.readthedocs.io/en/stable/api/datasets.html#non-geospatial-datasets)—datasets that include both input images and target labels. This includes datasets for tasks like image classification, regression, semantic segmentation, object detection, instance segmentation, change detection, and more. If you've used [torchvision](https://pytorch.org/vision) before, these datasets should seem very familiar. In this example, we'll create a dataset for the Northwestern Polytechnical University (NWPU) very-high-resolution ten-class ([VHR-10](https://github.com/chaozhong2010/VHR-10_dataset_coco)) geospatial object detection dataset. This dataset can be automatically downloaded, checksummed, and extracted, just like with torchvision. @@ -168,7 +168,7 @@ These weights can also directly be used in TorchGeo Lightning modules that are s ### Reproducibility with Lightning -In order to facilitate direct comparisons between results published in the literature and further reduce the boilerplate code needed to run experiments with datasets in TorchGeo, we have created Lightning [*datamodules*](https://torchgeo.readthedocs.io/en/stable/api/datamodules.html) with well-defined train-val-test splits and [*trainers*](https://torchgeo.readthedocs.io/en/stable/api/trainers.html) for various tasks like classification, regression, and semantic segmentation. These datamodules show how to incorporate augmentations from the kornia library, include preprocessing transforms (with pre-calculated channel statistics), and let users easily experiment with hyperparameters related to the data itself (as opposed to the modeling process). Training a semantic segmentation model on the [Inria Aerial Image Labeling](https://project.inria.fr/aerialimagelabeling/) dataset is as easy as a few imports and four lines of code. 
+In order to facilitate direct comparisons between results published in the literature and further reduce the boilerplate code needed to run experiments with datasets in TorchGeo, we have created Lightning [_datamodules_](https://torchgeo.readthedocs.io/en/stable/api/datamodules.html) with well-defined train-val-test splits and [_trainers_](https://torchgeo.readthedocs.io/en/stable/api/trainers.html) for various tasks like classification, regression, and semantic segmentation. These datamodules show how to incorporate augmentations from the kornia library, include preprocessing transforms (with pre-calculated channel statistics), and let users easily experiment with hyperparameters related to the data itself (as opposed to the modeling process). Training a semantic segmentation model on the [Inria Aerial Image Labeling](https://project.inria.fr/aerialimagelabeling/) dataset is as easy as a few imports and four lines of code. ```python datamodule = InriaAerialImageLabelingDataModule(root="...", batch_size=64, num_workers=6) @@ -213,6 +213,7 @@ torchgeo fit --data.help EuroSAT100DataModule ``` Using the following config file: + ```yaml trainer: max_epochs: 20 @@ -231,6 +232,7 @@ data: ``` we can see the script in action: + ```console # Train and validate a model torchgeo fit --config config.yaml @@ -253,6 +255,7 @@ See the [Lightning documentation](https://lightning.ai/docs/pytorch/stable/cli/l ## Citation If you use this software in your work, please cite our [paper](https://dl.acm.org/doi/10.1145/3557915.3560953): + ```bibtex @inproceedings{Stewart_TorchGeo_Deep_Learning_2022, address = {Seattle, Washington}, diff --git a/docs/_static/button-width.css b/docs/_static/button-width.css index f93be06c1..edf1c62c9 100644 --- a/docs/_static/button-width.css +++ b/docs/_static/button-width.css @@ -1,4 +1,4 @@ .colabbadge { - height: 50px !important; - width: auto !important; + height: 50px !important; + width: auto !important; } diff --git a/docs/_static/notebook-prompt.css b/docs/_static/notebook-prompt.css index 9c5fe4a9c..abb5e320e 100644 --- a/docs/_static/notebook-prompt.css +++ b/docs/_static/notebook-prompt.css @@ -1,4 +1,4 @@ .nbinput .prompt, .nboutput .prompt { - display: none; + display: none; } diff --git a/docs/user/contributing.rst b/docs/user/contributing.rst index 8fa274051..5e44212d8 100644 --- a/docs/user/contributing.rst +++ b/docs/user/contributing.rst @@ -92,6 +92,7 @@ In order to remain `PEP-8 `_ compliant and ma * `ruff `_ for code formatting * `mypy `_ for static type analysis +* `prettier `_ for code formatting These tools should be used from the root of the project to ensure that our configuration files are found. Ruff is relatively easy to use, and will automatically fix most issues it encounters: @@ -110,6 +111,23 @@ Mypy won't fix your code for you, but will warn you about potential issues with If you've never used mypy before or aren't familiar with `Python type hints `_, this check can be particularly daunting. Don't hesitate to ask for help with resolving any of these warnings on your pull request. +Prettier is a code formatter that helps to ensure consistent code style across a project. It supports various languages. Follow these steps to install Prettier: + +1. Install Node.js: Prettier is a Node.js module, so you need to have Node.js installed on your system. You can download and install Node.js from the `Node.js official website `_. +2. Install Prettier: Use the following command to install the Prettier module in your project: + +.. 
code-block:: console + + $ npm install prettier --no-save + + +3. Run Prettier: Use the following command to run Prettier formatting: + +.. code-block:: console + + $ npx prettier . --write + + You can also use `git pre-commit hooks `_ to automatically run these checks before each commit. pre-commit is a tool that automatically runs linters locally, so that you don't have to remember to run them manually and then have your code flagged by CI. You can set up pre-commit with: .. code-block:: console diff --git a/experiments/README.md b/experiments/README.md index 8813af5e3..77131c161 100644 --- a/experiments/README.md +++ b/experiments/README.md @@ -1,4 +1,4 @@ This directory contains scripts used to reproduce the results of the following papers: -* torchgeo: https://doi.org/10.1145/3557915.3560953 -* ssl4eo: https://proceedings.neurips.cc/paper_files/paper/2023/hash/bbf7ee04e2aefec136ecf60e346c2e61-Abstract-Datasets_and_Benchmarks.html +- torchgeo: https://doi.org/10.1145/3557915.3560953 +- ssl4eo: https://proceedings.neurips.cc/paper_files/paper/2023/hash/bbf7ee04e2aefec136ecf60e346c2e61-Abstract-Datasets_and_Benchmarks.html diff --git a/experiments/ssl4eo/landsat/README.md b/experiments/ssl4eo/landsat/README.md index d681ea289..5c517642e 100644 --- a/experiments/ssl4eo/landsat/README.md +++ b/experiments/ssl4eo/landsat/README.md @@ -14,9 +14,9 @@ $ bash sample_conus.sh # for benchmark datasets The first section of these scripts includes user-specific parameters that can be modified to change the behavior of the scripts. Of particular importance are: -* `SAVE_PATH`: controls where the sampling location CSV is saved to -* `START_INDEX`: index to start from (usually 0, can be increased to append more locations) -* `END_INDEX`: index to stop at (start with ~500K) +- `SAVE_PATH`: controls where the sampling location CSV is saved to +- `START_INDEX`: index to start from (usually 0, can be increased to append more locations) +- `END_INDEX`: index to stop at (start with ~500K) These scripts will download world city data and write `sampled_locations.csv` files to be used for downloading. @@ -35,12 +35,12 @@ $ bash download_oli_sr.sh These scripts contain the following variables you may want to modify: -* `ROOT_DIR`: root directory containing all subdirectories -* `SAVE_PATH`: where the downloaded data is saved -* `MATCH_FILE`: the CSV created in the previous step -* `NUM_WOKERS`: number of parallel workers -* `START_INDEX`: index from which to start downloading -* `END_INDEX`: index at which to stop downloading +- `ROOT_DIR`: root directory containing all subdirectories +- `SAVE_PATH`: where the downloaded data is saved +- `MATCH_FILE`: the CSV created in the previous step +- `NUM_WOKERS`: number of parallel workers +- `START_INDEX`: index from which to start downloading +- `END_INDEX`: index at which to stop downloading These scripts are designed for downloading the pre-training datasets. Each script can be easily modified to instead download the benchmarking datasets by changing the `MATCH_FILE`, `YEAR`, and `--dates` passed in to the download script. For ETM+ TOA, you'll also want to set a `--default-value` since you'll need to include nodata pixels due to SLC-off. 
diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_cdl.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_cdl.yaml index 30b41c1cb..93062f994 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_cdl.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_cdl.yaml @@ -22,4 +22,22 @@ data: root: "data/ssl4eo_benchmark" sensor: "etm_sr" product: "cdl" - classes: [0, 1, 5, 24, 36, 37, 61, 111, 121, 122, 131, 141, 142, 143, 152, 176, 190, 195] + classes: + - 0 + - 1 + - 5 + - 24 + - 36 + - 37 + - 61 + - 111 + - 121 + - 122 + - 131 + - 141 + - 142 + - 143 + - 152 + - 176 + - 190 + - 195 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_nlcd.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_nlcd.yaml index 51e1732e9..718b3281e 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_nlcd.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_sr_nlcd.yaml @@ -22,4 +22,18 @@ data: root: "data/ssl4eo_benchmark" sensor: "etm_sr" product: "nlcd" - classes: [0, 11, 21, 22, 31, 41, 42, 43, 52, 71, 81, 82, 90, 95] + classes: + - 0 + - 11 + - 21 + - 22 + - 31 + - 41 + - 42 + - 43 + - 52 + - 71 + - 81 + - 82 + - 90 + - 95 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_cdl.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_cdl.yaml index 7205bc6e2..f10a0508f 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_cdl.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_cdl.yaml @@ -22,4 +22,22 @@ data: root: "data/ssl4eo_benchmark" sensor: "etm_toa" product: "cdl" - classes: [0, 1, 5, 24, 36, 37, 61, 111, 121, 122, 131, 141, 142, 143, 152, 176, 190, 195] + classes: + - 0 + - 1 + - 5 + - 24 + - 36 + - 37 + - 61 + - 111 + - 121 + - 122 + - 131 + - 141 + - 142 + - 143 + - 152 + - 176 + - 190 + - 195 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_nlcd.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_nlcd.yaml index 10b9ea0a2..52a6107c0 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_nlcd.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_etm_toa_nlcd.yaml @@ -22,4 +22,18 @@ data: root: "data/ssl4eo_benchmark" sensor: "etm_toa" product: "nlcd" - classes: [0, 11, 21, 22, 31, 41, 42, 43, 52, 71, 81, 82, 90, 95] + classes: + - 0 + - 11 + - 21 + - 22 + - 31 + - 41 + - 42 + - 43 + - 52 + - 71 + - 81 + - 82 + - 90 + - 95 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_cdl.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_cdl.yaml index f32a344cd..669e12219 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_cdl.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_cdl.yaml @@ -22,4 +22,22 @@ data: root: "data/ssl4eo_benchmark" sensor: "oli_sr" product: "cdl" - classes: [0, 1, 5, 24, 36, 37, 61, 111, 121, 122, 131, 141, 142, 143, 152, 176, 190, 195] + classes: + - 0 + - 1 + - 5 + - 24 + - 36 + - 37 + - 61 + - 111 + - 121 + - 122 + - 131 + - 141 + - 142 + - 143 + - 152 + - 176 + - 190 + - 195 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_nlcd.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_nlcd.yaml index 4fa805965..81f3283f5 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_nlcd.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_sr_nlcd.yaml @@ -22,4 +22,18 @@ data: root: "data/ssl4eo_benchmark" sensor: "oli_sr" product: "nlcd" - classes: [0, 11, 
21, 22, 31, 41, 42, 43, 52, 71, 81, 82, 90, 95] + classes: + - 0 + - 11 + - 21 + - 22 + - 31 + - 41 + - 42 + - 43 + - 52 + - 71 + - 81 + - 82 + - 90 + - 95 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_cdl.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_cdl.yaml index 2beacfd67..876e25184 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_cdl.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_cdl.yaml @@ -22,4 +22,22 @@ data: root: "data/ssl4eo_benchmark" sensor: "oli_tirs_toa" product: "cdl" - classes: [0, 1, 5, 24, 36, 37, 61, 111, 121, 122, 131, 141, 142, 143, 152, 176, 190, 195] + classes: + - 0 + - 1 + - 5 + - 24 + - 36 + - 37 + - 61 + - 111 + - 121 + - 122 + - 131 + - 141 + - 142 + - 143 + - 152 + - 176 + - 190 + - 195 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_nlcd.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_nlcd.yaml index d14dbb6c6..77f3f6eae 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_nlcd.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_oli_tirs_toa_nlcd.yaml @@ -22,4 +22,18 @@ data: root: "data/ssl4eo_benchmark" sensor: "oli_tirs_toa" product: "nlcd" - classes: [0, 11, 21, 22, 31, 41, 42, 43, 52, 71, 81, 82, 90, 95] + classes: + - 0 + - 11 + - 21 + - 22 + - 31 + - 41 + - 42 + - 43 + - 52 + - 71 + - 81 + - 82 + - 90 + - 95 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_cdl.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_cdl.yaml index d64d0ab65..7adf1e46f 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_cdl.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_cdl.yaml @@ -22,4 +22,22 @@ data: root: "data/ssl4eo_benchmark" sensor: "tm_toa" product: "cdl" - classes: [0, 1, 5, 24, 36, 37, 61, 111, 121, 122, 131, 141, 142, 143, 152, 176, 190, 195] + classes: + - 0 + - 1 + - 5 + - 24 + - 36 + - 37 + - 61 + - 111 + - 121 + - 122 + - 131 + - 141 + - 142 + - 143 + - 152 + - 176 + - 190 + - 195 diff --git a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_nlcd.yaml b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_nlcd.yaml index c1f9d0bde..b52fee6c6 100644 --- a/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_nlcd.yaml +++ b/experiments/ssl4eo/landsat/conf/ssl4eo_benchmark_tm_toa_nlcd.yaml @@ -22,4 +22,18 @@ data: root: "data/ssl4eo_benchmark" sensor: "tm_toa" product: "nlcd" - classes: [0, 11, 21, 22, 31, 41, 42, 43, 52, 71, 81, 82, 90, 95] + classes: + - 0 + - 11 + - 21 + - 22 + - 31 + - 41 + - 42 + - 43 + - 52 + - 71 + - 81 + - 82 + - 90 + - 95 diff --git a/requirements/package-lock.json b/requirements/package-lock.json new file mode 100644 index 000000000..d2d97509f --- /dev/null +++ b/requirements/package-lock.json @@ -0,0 +1,20 @@ +{ + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/prettier": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", + "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "bin": { + "prettier": "bin/prettier.cjs" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" + } + } + } +} diff --git a/requirements/package.json b/requirements/package.json new file mode 100644 index 000000000..ebfe31dde --- /dev/null +++ b/requirements/package.json @@ -0,0 +1,7 @@ +{ + "name": "torchgeo", + 
"private": "true", + "dependencies": { + "prettier": ">=3.0.0" + } +} diff --git a/tests/conf/chesapeake_cvpr_5.yaml b/tests/conf/chesapeake_cvpr_5.yaml index 2db9c5ae0..2b499a7fa 100644 --- a/tests/conf/chesapeake_cvpr_5.yaml +++ b/tests/conf/chesapeake_cvpr_5.yaml @@ -12,11 +12,11 @@ data: class_path: ChesapeakeCVPRDataModule init_args: train_splits: - - "de-test" + - "de-test" val_splits: - - "de-test" + - "de-test" test_splits: - - "de-test" + - "de-test" batch_size: 2 patch_size: 64 class_set: 5 diff --git a/tests/conf/chesapeake_cvpr_7.yaml b/tests/conf/chesapeake_cvpr_7.yaml index cc475cbb3..a5e2e5bb5 100644 --- a/tests/conf/chesapeake_cvpr_7.yaml +++ b/tests/conf/chesapeake_cvpr_7.yaml @@ -12,11 +12,11 @@ data: class_path: ChesapeakeCVPRDataModule init_args: train_splits: - - "de-test" + - "de-test" val_splits: - - "de-test" + - "de-test" test_splits: - - "de-test" + - "de-test" batch_size: 2 patch_size: 64 class_set: 7 diff --git a/tests/conf/chesapeake_cvpr_prior_byol.yaml b/tests/conf/chesapeake_cvpr_prior_byol.yaml index 1819c87a0..5018f0e4c 100644 --- a/tests/conf/chesapeake_cvpr_prior_byol.yaml +++ b/tests/conf/chesapeake_cvpr_prior_byol.yaml @@ -7,11 +7,11 @@ data: class_path: ChesapeakeCVPRDataModule init_args: train_splits: - - "de-test" + - "de-test" val_splits: - - "de-test" + - "de-test" test_splits: - - "de-test" + - "de-test" batch_size: 2 patch_size: 64 class_set: 5 diff --git a/tests/conf/chesapeake_cvpr_prior_moco.yaml b/tests/conf/chesapeake_cvpr_prior_moco.yaml index 87179a28e..918288d90 100644 --- a/tests/conf/chesapeake_cvpr_prior_moco.yaml +++ b/tests/conf/chesapeake_cvpr_prior_moco.yaml @@ -7,11 +7,11 @@ data: class_path: ChesapeakeCVPRDataModule init_args: train_splits: - - "de-test" + - "de-test" val_splits: - - "de-test" + - "de-test" test_splits: - - "de-test" + - "de-test" batch_size: 2 patch_size: 64 class_set: 5 diff --git a/tests/conf/chesapeake_cvpr_prior_simclr.yaml b/tests/conf/chesapeake_cvpr_prior_simclr.yaml index 96acc8528..9f21527cf 100644 --- a/tests/conf/chesapeake_cvpr_prior_simclr.yaml +++ b/tests/conf/chesapeake_cvpr_prior_simclr.yaml @@ -10,11 +10,11 @@ data: class_path: ChesapeakeCVPRDataModule init_args: train_splits: - - "de-test" + - "de-test" val_splits: - - "de-test" + - "de-test" test_splits: - - "de-test" + - "de-test" batch_size: 2 patch_size: 64 class_set: 5 diff --git a/tests/conf/seco_byol_1.yaml b/tests/conf/seco_byol_1.yaml index 9d2680fde..9744f31a2 100644 --- a/tests/conf/seco_byol_1.yaml +++ b/tests/conf/seco_byol_1.yaml @@ -12,6 +12,6 @@ data: seasons: 1 # https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/seco_byol_2.yaml b/tests/conf/seco_byol_2.yaml index f3b51c002..cb87dc1db 100644 --- a/tests/conf/seco_byol_2.yaml +++ b/tests/conf/seco_byol_2.yaml @@ -12,6 +12,6 @@ data: seasons: 2 # https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/seco_moco_1.yaml b/tests/conf/seco_moco_1.yaml index 164f13a7a..979722457 100644 --- a/tests/conf/seco_moco_1.yaml +++ b/tests/conf/seco_moco_1.yaml @@ -17,6 +17,6 @@ data: seasons: 1 # https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/seco_moco_2.yaml b/tests/conf/seco_moco_2.yaml index fee827289..2e98abb6f 100644 --- a/tests/conf/seco_moco_2.yaml +++ b/tests/conf/seco_moco_2.yaml @@ -20,6 +20,6 @@ data: seasons: 2 # 
https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/seco_simclr_1.yaml b/tests/conf/seco_simclr_1.yaml index b23653e0c..5f8ec279c 100644 --- a/tests/conf/seco_simclr_1.yaml +++ b/tests/conf/seco_simclr_1.yaml @@ -18,6 +18,6 @@ data: seasons: 1 # https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/seco_simclr_2.yaml b/tests/conf/seco_simclr_2.yaml index 1b06c5d5c..9af2632de 100644 --- a/tests/conf/seco_simclr_2.yaml +++ b/tests/conf/seco_simclr_2.yaml @@ -18,6 +18,6 @@ data: seasons: 2 # https://github.com/Lightning-AI/lightning/issues/18616 bands: - - "B4" - - "B3" - - "B2" + - "B4" + - "B3" + - "B2" diff --git a/tests/conf/sentinel2_south_america_soybean.yaml b/tests/conf/sentinel2_south_america_soybean.yaml index 1d1b91ba0..7fe957049 100644 --- a/tests/conf/sentinel2_south_america_soybean.yaml +++ b/tests/conf/sentinel2_south_america_soybean.yaml @@ -1,17 +1,17 @@ model: - class_path: SemanticSegmentationTask - init_args: - loss: "ce" - model: "deeplabv3+" - backbone: "resnet18" - in_channels: 13 - num_classes: 2 - num_filters: 1 + class_path: SemanticSegmentationTask + init_args: + loss: "ce" + model: "deeplabv3+" + backbone: "resnet18" + in_channels: 13 + num_classes: 2 + num_filters: 1 data: - class_path: Sentinel2SouthAmericaSoybeanDataModule - init_args: - batch_size: 2 - patch_size: 16 - dict_kwargs: - south_america_soybean_paths: "tests/data/south_america_soybean" - sentinel2_paths: "tests/data/sentinel2" + class_path: Sentinel2SouthAmericaSoybeanDataModule + init_args: + batch_size: 2 + patch_size: 16 + dict_kwargs: + south_america_soybean_paths: "tests/data/south_america_soybean" + sentinel2_paths: "tests/data/sentinel2" diff --git a/tests/conf/southafricacroptype.yaml b/tests/conf/southafricacroptype.yaml index 2cc5d39fe..cfd4f8dfe 100644 --- a/tests/conf/southafricacroptype.yaml +++ b/tests/conf/southafricacroptype.yaml @@ -14,4 +14,4 @@ data: batch_size: 2 patch_size: 16 dict_kwargs: - paths: "tests/data/south_africa_crop_type" \ No newline at end of file + paths: "tests/data/south_africa_crop_type" diff --git a/tests/conf/ssl4eo_l_moco_1.yaml b/tests/conf/ssl4eo_l_moco_1.yaml index 1486d29bf..023f1ff9b 100644 --- a/tests/conf/ssl4eo_l_moco_1.yaml +++ b/tests/conf/ssl4eo_l_moco_1.yaml @@ -12,8 +12,8 @@ model: class_path: kornia.augmentation.RandomResizedCrop init_args: size: - - 224 - - 224 + - 224 + - 224 data: class_path: SSL4EOLDataModule init_args: diff --git a/tests/data/README.md b/tests/data/README.md index c2d654a17..1d95c728d 100644 --- a/tests/data/README.md +++ b/tests/data/README.md @@ -23,7 +23,9 @@ with rio.open(os.path.join(ROOT, FILENAME), "r") as src: for i in dst.profile.indexes: dst.write(Z, i) ``` + Optionally, if the dataset has a colormap, this can be copied like so: + ```python cmap = src.colormap(1) dst.write_colormap(1, cmap)
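Beyond the local hook, the contributing guide added above warns that un-formatted code will be flagged by CI, and the new `requirements/package.json` pins `prettier >=3.0.0` for that purpose. A minimal sketch of how such a check could be wired into a GitHub Actions job is shown below; the workflow name, action versions, and Node version are assumptions for illustration, not the workflow this patch actually modifies:

```yaml
# Illustrative Prettier CI check -- names and versions are assumptions,
# not the repository's actual workflow.
name: prettier
on: [push, pull_request]
jobs:
  prettier:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: "20"
      # Same install command the contributing guide recommends locally
      - run: npm install prettier --no-save
      # --check lists files Prettier would rewrite, without modifying them
      - run: npx prettier . --check
```

Running `npx prettier . --check` locally before pushing gives the same pass/fail signal as this job.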