From 10c63025ffb9538973e61f15fb19436eda28938e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 6 Aug 2024 10:35:24 -0500 Subject: [PATCH 01/47] Remove PROJ/pyproj warnings when generating areas previews in docs --- doc/source/conf.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 62156e9760..248c9f7a07 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -29,6 +29,7 @@ _read_yaml_area_file_content, generate_area_def_rst_list, ) +from pyresample.utils.proj4 import ignore_pyproj_proj_warnings from reader_table import generate_reader_table, rst_table_header, rst_table_row # noqa: E402 import satpy # noqa: E402 @@ -98,8 +99,9 @@ def __getattr__(cls, name): if not hasattr(area, "_repr_html_"): continue - area_table.append(rst_table_row([f"`{aname}`_", area.description, - area.proj_dict.get("proj")])) + with ignore_pyproj_proj_warnings(): + area_proj = area.proj_dict.get("proj") + area_table.append(rst_table_row([f"`{aname}`_", area.description, area_proj])) with open("area_def_list.rst", mode="w") as f: f.write("".join(area_table)) From 6055c4415f5ca2638bcb8577ede9c0e717dfcf54 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 2 May 2024 15:03:15 +0200 Subject: [PATCH 02/47] start draft reader for MCD12Q1 MODIS Land Cover L3 --- satpy/etc/readers/mcd12q1.yaml | 15 ++++ satpy/readers/hdfeos_base.py | 10 ++- satpy/readers/mcd12q1.py | 141 +++++++++++++++++++++++++++++++++ 3 files changed, 164 insertions(+), 2 deletions(-) create mode 100644 satpy/etc/readers/mcd12q1.yaml create mode 100644 satpy/readers/mcd12q1.py diff --git a/satpy/etc/readers/mcd12q1.yaml b/satpy/etc/readers/mcd12q1.yaml new file mode 100644 index 0000000000..3dc88347cf --- /dev/null +++ b/satpy/etc/readers/mcd12q1.yaml @@ -0,0 +1,15 @@ +reader: + name: mcd12q1 + short_name: MCD12Q1 + long_name: MODIS Level 3 (mcd12Q1) data in HDF-EOS format + description: MODIS HDF-EOS MCD12Q1 L3 Reader + status: Beta + 
supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [modis] + +file_types: + modis_mcd12q1_hdf_eos: + file_patterns: ['MCD12Q1.A{start_time:%Y%j}.{tile_id}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'] + file_reader: !!python/name:satpy.readers.mcd12q1.MCD12Q1HDFFileHandler + diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 7c25e1d09a..7be1045878 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -183,7 +183,10 @@ def start_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) - return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + if len(date) == 19: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") + else: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -196,7 +199,10 @@ def end_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) - return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + if len(date) == 19: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") + else: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py new file mode 100644 index 0000000000..f05c0652e7 --- /dev/null +++ b/satpy/readers/mcd12q1.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""MCD12Q1 hdf-eos format reader. + +Introduction +------------ + +The ``mcd12q1`` reader reads MCD12Q1 products in HDF-EOS format. + +The 500m product is provided on a sinusoidal grid. + +The reader has been tested with: + - MCD12Q1: Land cover data. + +To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. + +""" +import logging +from typing import Iterable + +from pyresample import geometry + +from satpy.readers.hdfeos_base import HDFEOSGeoReader + +logger = logging.getLogger(__name__) + + +class MCD12Q1HDFFileHandler(HDFEOSGeoReader): + """File handler for MCD12Q1 HDF-EOS 500m granules.""" + def available_datasets(self, configured_datasets=None): + """Automatically determine datasets provided by this file.""" + # Initialise set of variable names to carry through code + handled_var_names = set() + + ds_dict = self.sd.datasets() + + for is_avail, ds_info in (configured_datasets or []): + file_key = ds_info.get("file_key", ds_info["name"]) + # we must add all variables here even if another file handler has + # claimed the variable. It could be another instance of this file + # type, and we don't want to add that variable dynamically if the + # other file handler defined it by the YAML definition. 
+ handled_var_names.add(file_key) + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + if self.file_type_matches(ds_info["file_type"]) is None: + # this is not the file type for this dataset + yield None, ds_info + continue + yield file_key in ds_dict.keys(), ds_info + + yield from self._dynamic_variables_from_file(handled_var_names) + + def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: + res = 500 # TODO + for var_name in self.sd.datasets().keys(): + if var_name in handled_var_names: + # skip variables that YAML had configured + continue + common = {"file_type": "mcd12q1_500m_hdf", + "resolution": res, + "name": var_name} + yield True, common + + + def _get_res(self): + """Compute the resolution from the file metadata.""" + gridname = self.metadata["GridStructure"]["GRID_1"]["GridName"] + if "MCD12Q1" not in gridname: + raise ValueError("Only MCD12Q1 grids are supported") + + # Get the grid resolution from the grid name + pos = gridname.rfind("_") + 1 + pos2 = gridname.rfind("Deg") + + # Initialise number of rows and columns + # Some products don't have resolution listed. + if pos < 0 or pos2 < 0: + return 360. 
/ self.metadata["GridStructure"]["GRID_1"]["XDim"] + else: + return float(gridname[pos:pos2]) + + def get_dataset(self, dataset_id, dataset_info): + """Get DataArray for specified dataset.""" + dataset_name = dataset_id["name"] + # xxx + dataset = self.sd.select(dataset_name) + #dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) + self._add_satpy_metadata(dataset_id, dataset) + + return dataset + + def _get_area_extent(self): + """Get the grid properties.""" + # Now compute the data extent + upperleft = self.metadata["GridStructure"]["GRID_1"]["UpperLeftPointMtrs"] + lowerright = self.metadata["GridStructure"]["GRID_1"]["LowerRightMtrs"] + + return upperleft[0], lowerright[1], lowerright[0], upperleft[1] + + def get_area_def(self, dsid): + """Get the area definition. + + This is fixed, but not defined in the file. So we must + generate it ourselves with some assumptions. + """ + proj_param = "EPSG:4326" + + # Get the size of the dataset + nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] + ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] + + # Construct the area definition + area = geometry.AreaDefinition("SIN MODIS", + "A gridded L3 MODIS area", + "sinu", + proj_param, + ncols, + nrows, + self._get_area_extent()) + + return area From 14ff97ef08b84e7978bcf6531d3af6f25f68f766 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 3 May 2024 11:04:09 +0200 Subject: [PATCH 03/47] add dataset entry in yaml file + fix loading --- satpy/etc/readers/mcd12q1.yaml | 6 ++++++ satpy/readers/mcd12q1.py | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/mcd12q1.yaml b/satpy/etc/readers/mcd12q1.yaml index 3dc88347cf..d347c340c0 100644 --- a/satpy/etc/readers/mcd12q1.yaml +++ b/satpy/etc/readers/mcd12q1.yaml @@ -13,3 +13,9 @@ file_types: file_patterns: ['MCD12Q1.A{start_time:%Y%j}.{tile_id}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'] file_reader: 
!!python/name:satpy.readers.mcd12q1.MCD12Q1HDFFileHandler +datasets: + LC_Prop1: + name: LC_Prop1 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index f05c0652e7..eb0604a1de 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -103,8 +103,8 @@ def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] # xxx - dataset = self.sd.select(dataset_name) - #dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) + dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) + self._add_satpy_metadata(dataset_id, dataset) return dataset From 6857fcb1e197c2a52383263be8116d6398919388 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 3 May 2024 11:47:16 +0200 Subject: [PATCH 04/47] use HDFEOSBaseFileReader and fix proj_param proj_param is now set to World_Sinusoidal --- satpy/readers/mcd12q1.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index eb0604a1de..3471abccc1 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -35,12 +35,12 @@ from pyresample import geometry -from satpy.readers.hdfeos_base import HDFEOSGeoReader +from satpy.readers.hdfeos_base import HDFEOSBaseFileReader logger = logging.getLogger(__name__) -class MCD12Q1HDFFileHandler(HDFEOSGeoReader): +class MCD12Q1HDFFileHandler(HDFEOSBaseFileReader): """File handler for MCD12Q1 HDF-EOS 500m granules.""" def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" @@ -124,7 +124,7 @@ def get_area_def(self, dsid): generate it ourselves with some assumptions. 
""" proj_param = "EPSG:4326" - + proj_param = "World_Sinusoidal" # Get the size of the dataset nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] From decfc9c294c622abc8c87eefa0ceb14c17fd2010 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Fri, 3 May 2024 14:33:26 +0200 Subject: [PATCH 05/47] fix proj_param for AreaDefinition --- satpy/readers/mcd12q1.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index 3471abccc1..e3126fdcb5 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -122,17 +122,19 @@ def get_area_def(self, dsid): This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. + + The proj_param string comes from https://lpdaac.usgs.gov/documents/101/MCD12_User_Guide_V6.pdf """ - proj_param = "EPSG:4326" - proj_param = "World_Sinusoidal" + proj_param = "proj=sinu +a=6371007.181 +b=6371007.181 +units=m" + # Get the size of the dataset nrows = self.metadata["GridStructure"]["GRID_1"]["YDim"] ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Construct the area definition - area = geometry.AreaDefinition("SIN MODIS", + area = geometry.AreaDefinition("SIN MODIS Tiled", "A gridded L3 MODIS area", - "sinu", + "SIN MODIS", proj_param, ncols, nrows, From 67d93c53550abfd3c2f7df523eb3d72218ca1cb1 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Sun, 5 May 2024 09:58:06 +0200 Subject: [PATCH 06/47] Extend datasets for mcd12q1 reader --- satpy/etc/readers/mcd12q1.yaml | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/satpy/etc/readers/mcd12q1.yaml b/satpy/etc/readers/mcd12q1.yaml index d347c340c0..202eea781a 100644 --- a/satpy/etc/readers/mcd12q1.yaml +++ b/satpy/etc/readers/mcd12q1.yaml @@ -14,8 +14,36 @@ file_types: file_reader: !!python/name:satpy.readers.mcd12q1.MCD12Q1HDFFileHandler datasets: + 
LC_Type1: + name: LC_Type1 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + + LC_Type2: + name: LC_Type2 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + LC_Type3: + name: LC_Type3 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + LC_Type4: + name: LC_Type4 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + LC_Type5: + name: LC_Type5 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos LC_Prop1: name: LC_Prop1 resolution: 500 file_type: modis_mcd12q1_hdf_eos - + LC_Prop2: + name: LC_Prop2 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos + LC_Prop3: + name: LC_Prop3 + resolution: 500 + file_type: modis_mcd12q1_hdf_eos From 4cb9cb2c4b30bee2e6b92473da9dfb900be3b609 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 8 May 2024 22:29:26 +0200 Subject: [PATCH 07/47] add reference documents --- satpy/readers/mcd12q1.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index e3126fdcb5..723abf39be 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -24,6 +24,11 @@ The 500m product is provided on a sinusoidal grid. +Reference documents and links: + - MODIS land products grid: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html + - User guide: https://lpdaac.usgs.gov/documents/101/MCD12_User_Guide_V6.pdf + - MCD12Q1 v061: MODIS/Terra+Aqua Land Cover Type Yearly L3 Global 500 m SIN Grid + The reader has been tested with: - MCD12Q1: Land cover data. 
From a0e79dc5f9f0e9fa3bde23ec56d9255e42c50211 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 8 May 2024 22:29:38 +0200 Subject: [PATCH 08/47] improve grid definition --- satpy/readers/mcd12q1.py | 22 +++++++++------------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index 723abf39be..28d56bdf83 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -76,7 +76,7 @@ def available_datasets(self, configured_datasets=None): yield from self._dynamic_variables_from_file(handled_var_names) def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]: - res = 500 # TODO + res = self._get_res() for var_name in self.sd.datasets().keys(): if var_name in handled_var_names: # skip variables that YAML had configured @@ -93,16 +93,12 @@ def _get_res(self): if "MCD12Q1" not in gridname: raise ValueError("Only MCD12Q1 grids are supported") - # Get the grid resolution from the grid name - pos = gridname.rfind("_") + 1 - pos2 = gridname.rfind("Deg") - - # Initialise number of rows and columns - # Some products don't have resolution listed. - if pos < 0 or pos2 < 0: - return 360. 
/ self.metadata["GridStructure"]["GRID_1"]["XDim"] + resolution_string = self.metadata["ARCHIVEDMETADATA"]["NADIRDATARESOLUTION"]["VALUE"] + if resolution_string[-1] == 'm': + return int(resolution_string.removesuffix('m')) else: - return float(gridname[pos:pos2]) + raise ValueError("Cannot parse resolution of MCD12Q1 grid") + def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" @@ -137,9 +133,9 @@ def get_area_def(self, dsid): ncols = self.metadata["GridStructure"]["GRID_1"]["XDim"] # Construct the area definition - area = geometry.AreaDefinition("SIN MODIS Tiled", - "A gridded L3 MODIS area", - "SIN MODIS", + area = geometry.AreaDefinition("sinusoidal_modis", + "Tiled sinusoidal L3 MODIS area", + "sinusoidal", proj_param, ncols, nrows, From c514fe8ca48ecf0017dd462844abe02e82b5988a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 27 Aug 2024 14:10:39 -0500 Subject: [PATCH 09/47] Move area list generation to separate script And to use bokeh --- .readthedocs.yml | 3 + doc/rtd_environment.yml | 5 +- doc/source/conf.py | 30 +----- doc/source/generate_area_def_list.py | 152 +++++++++++++++++++++++++++ 4 files changed, 159 insertions(+), 31 deletions(-) create mode 100644 doc/source/generate_area_def_list.py diff --git a/.readthedocs.yml b/.readthedocs.yml index 9f3d7bd1b5..1eeb6b2332 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -18,5 +18,8 @@ build: - git fetch --tags pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py + pre_build: + - cd doc/source + - python generate_area_def_list.py conda: environment: doc/rtd_environment.yml diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index abd8add616..2ffcceb8d4 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -16,7 +16,7 @@ dependencies: - numpy - pillow - pooch - - pyresample +# - pyresample - pytest - python-eccodes - python-geotiepoints @@ -31,8 +31,9 @@ dependencies: - xarray - zarr - 
xarray-datatree - - cartopy + - geoviews - pip: - graphviz - pytest-lazy-fixtures + - git+https://github.com/djhoese/pyresample.git@feat-html-bokeh - .. # relative path to the satpy project diff --git a/doc/source/conf.py b/doc/source/conf.py index 248c9f7a07..2e41793c15 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -24,16 +24,9 @@ sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) -from pyresample.area_config import ( # noqa: E402 - _create_area_def_from_dict, - _read_yaml_area_file_content, - generate_area_def_rst_list, -) -from pyresample.utils.proj4 import ignore_pyproj_proj_warnings -from reader_table import generate_reader_table, rst_table_header, rst_table_row # noqa: E402 +from reader_table import generate_reader_table # noqa: E402 import satpy # noqa: E402 -from satpy.resample import get_area_file # noqa: E402 # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -87,27 +80,6 @@ def __getattr__(cls, name): with open("reader_table.rst", mode="w") as f: f.write(generate_reader_table()) -# create table from area definition yaml file -area_file = get_area_file()[0] - -area_dict = _read_yaml_area_file_content(area_file) -area_table = [rst_table_header("Area Definitions", header=["Name", "Description", "Projection"], - widths="auto", class_name="area-table")] - -for aname, params in area_dict.items(): - area = _create_area_def_from_dict(aname, params) - if not hasattr(area, "_repr_html_"): - continue - - with ignore_pyproj_proj_warnings(): - area_proj = area.proj_dict.get("proj") - area_table.append(rst_table_row([f"`{aname}`_", area.description, area_proj])) - -with open("area_def_list.rst", mode="w") as f: - f.write("".join(area_table)) - f.write("\n\n") - f.write(generate_area_def_rst_list(area_file)) - # -- General configuration ----------------------------------------------------- # Add any 
Sphinx extension module names here, as strings. They can be extensions diff --git a/doc/source/generate_area_def_list.py b/doc/source/generate_area_def_list.py new file mode 100644 index 0000000000..2c5d8d17b7 --- /dev/null +++ b/doc/source/generate_area_def_list.py @@ -0,0 +1,152 @@ +"""Generate the area definition list restructuredtext document. + +This should be run once before generating the sphinx documentation to +produce the ``area_def_list.rst`` file referenced by ``satpy/resample.py``. + +""" +import logging +import pathlib +import sys +from datetime import datetime + +import bokeh +import geoviews as gv +import geoviews.feature as gf +from bokeh.embed import components +from jinja2 import Template +from pyresample._formatting_html import _load_static_files +from pyresample.area_config import area_repr, load_area +from pyresample.utils.proj4 import ignore_pyproj_proj_warnings +from reader_table import rst_table_header, rst_table_row + +from satpy.resample import get_area_file + +logger = logging.getLogger(__name__) + +gv.extension("bokeh") + + +TEMPLATE = ''' + +{{ table_header }} +{% for area_name, area_def in areas.items() if area_def._repr_html_ is defined %} +{{ create_table_row(area_name, area_def) }} +{% endfor %} + + +.. raw:: html + + {{ resources }} + {{ pyr_icons_svg | indent(5) }} + + {{ script | indent(5)}} + +{% for area_name, area_div in area_divs_dict.items() %} + +{{ area_name }} +{{ rst_underline('^', area_name|length) }} + +.. raw:: html + + {{ area_repr(areas[area_name], map_content=area_div, include_header=False, include_static_files=False) | + indent(5) }} +
+ +{% endfor %} +''' # noqa: Q001 + + +def main(): + """Parse CLI arguments and generate area definition list file.""" + from argparse import ArgumentParser + + parser = ArgumentParser(description="Generate restructuredtext area definition list for sphinx documentation") + parser.add_argument("--area-file", + help="Input area YAML file to read") + parser.add_argument("-o", "--output-file", + type=pathlib.Path, + help="HTML or restructuretext filename to create. " + "Defaults to 'area_def_list.rst' in the " + "documentation source directory.") + args = parser.parse_args() + logging.basicConfig(level=logging.INFO) + + if args.output_file is None: + args.output_file = str(pathlib.Path(__file__).resolve().parent / "area_def_list.rst") + area_file = args.area_file + if area_file is None: + area_file = get_area_file()[0] + + area_list = load_area(area_file) + areas_dict = {_area_name(area): area for area in area_list} + logger.info(f"Generating bokeh plots ({datetime.now()})...") + script, divs_dict = _generate_html_map_divs(areas_dict) + logger.info(f"Done generating bokeh plots ({datetime.now()})") + + def rst_underline(ch, num_chars): + return ch * num_chars + + template = Template(TEMPLATE) + icons_svg, css_style = _load_static_files() + logger.info(f"Rendering document ({datetime.now()})...") + res = template.render( + resources=bokeh.resources.CDN.render(), + script=script, + area_divs_dict=divs_dict, + areas=areas_dict, + rst_underline=rst_underline, + area_repr=area_repr, + pyr_icons_svg=icons_svg, + pyr_css_style=css_style, + table_header=rst_table_header("Area Definitions", header=["Name", "Description", "Projection"], + widths="auto", class_name="area-table"), + create_table_row=_area_table_row, + ) + logger.info(f"Done rendering document ({datetime.now()})") + + with open(args.output_file, mode="w") as f: + f.write(res) + + +def _area_name(area_def) -> str: + if hasattr(area_def, "attrs"): + # pyresample 2 + return area_def.attrs["name"] + # pyresample 1 + 
return area_def.area_id + + +def _area_table_row(area_name, area_def): + with ignore_pyproj_proj_warnings(): + area_proj = area_def.proj_dict.get("proj") + return rst_table_row([f"`{area_name}`_", area_def.description, area_proj]) + + +def _generate_html_map_divs(areas_dict: dict) -> tuple[str, dict]: + areas_bokeh_models = {} + for area_name, area_def in areas_dict.items(): + if not hasattr(area_def, "to_cartopy_crs"): + logger.info(f"Skipping {area_name} because it can't be converted to cartopy CRS") + continue + crs = area_def.to_cartopy_crs() + + features = gv.Overlay([gf.ocean, gf.land, gf.borders, gf.coastline]) + f = gv.render( + features.opts( + toolbar=None, + default_tools=[], + projection=crs, + xlim=crs.bounds[:2], + ylim=crs.bounds[2:], + ), + backend="bokeh") + areas_bokeh_models[area_name] = f + + script, divs_dict = components(areas_bokeh_models) + return script, divs_dict + + +if __name__ == "__main__": + sys.exit(main()) From ff2fa34eb1c2e8965e5f21b89287be5d1ea06da2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 27 Aug 2024 14:21:55 -0500 Subject: [PATCH 10/47] Remove non-working pyresample dev install in RTD --- doc/rtd_environment.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 2ffcceb8d4..5bc7dabe95 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -16,7 +16,7 @@ dependencies: - numpy - pillow - pooch -# - pyresample + - pyresample - pytest - python-eccodes - python-geotiepoints @@ -35,5 +35,4 @@ dependencies: - pip: - graphviz - pytest-lazy-fixtures - - git+https://github.com/djhoese/pyresample.git@feat-html-bokeh - .. 
# relative path to the satpy project From 56e4d5b313867f24812594e98512c81f51d0c157 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 29 Aug 2024 09:07:34 -0500 Subject: [PATCH 11/47] Fix readthedocs running area def script --- .readthedocs.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 1eeb6b2332..b786e6ebce 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -19,7 +19,6 @@ build: pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py pre_build: - - cd doc/source - - python generate_area_def_list.py + - cd doc/source && python generate_area_def_list.py conda: environment: doc/rtd_environment.yml From fe930206e3ffa48ebb88117e4a8db473c4d9ed60 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 29 Aug 2024 09:20:08 -0500 Subject: [PATCH 12/47] Add docutils config to workaround long bokeh lines --- doc/source/docutils.conf | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 doc/source/docutils.conf diff --git a/doc/source/docutils.conf b/doc/source/docutils.conf new file mode 100644 index 0000000000..70bcbbb2e7 --- /dev/null +++ b/doc/source/docutils.conf @@ -0,0 +1,2 @@ +[parsers] +line_length_limit=30000000 From e804b7068fd94ecd0e18385c889ac0d808ef2f76 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 29 Aug 2024 10:03:01 -0500 Subject: [PATCH 13/47] Allow conda unstable remove to always succeed --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9d9e552da5..f7b5f447c4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -80,7 +80,7 @@ jobs: numpy \ pandas \ scipy - conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa + conda remove --force-remove -y pykdtree pyresample python-geotiepoints pyhdf netcdf4 h5py cftime astropy pyerfa || true python -m pip install 
--upgrade --no-deps --pre --no-build-isolation \ pyerfa \ git+https://github.com/storpipfugl/pykdtree \ From 084b1144b2019a86d47c0dc9e5f2f62752a8c565 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 29 Aug 2024 11:19:16 -0500 Subject: [PATCH 14/47] Add documentation about generating area list docs --- doc/source/dev_guide/index.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst index bbd9bedaf4..0df47a866c 100644 --- a/doc/source/dev_guide/index.rst +++ b/doc/source/dev_guide/index.rst @@ -128,6 +128,15 @@ additional packages are needed. These can be installed with :: pip install -e ".[doc]" +Generating the documentation requires a one-time script to generate a list +of previews of all of the AreaDefinition objects used by the documentation. +This script can take 2+ minutes to execute so it is run separately from the +normal documentation build process. To run it:: + + cd doc/source/ + python generate_area_def_list.py + cd ../../ + After editing the source files there the documentation can be generated locally:: cd doc From d25ec20ace9f72bb38cdd515c9ea56e5ce90d9bd Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 1 Sep 2024 09:55:47 -0500 Subject: [PATCH 15/47] Turn off area def list generation if not latest/stable RTD --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index b786e6ebce..f6f9662c1c 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -19,6 +19,6 @@ build: pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py pre_build: - - cd doc/source && python generate_area_def_list.py + - cd doc/source && if [[ $READTHEDOCS_VERSION =~ latest|stable ]]; then python generate_area_def_list.py; else touch area_def_list.rst; fi conda: environment: doc/rtd_environment.yml From 5b2ac6855afd2aaaefc46754f8c15d66b51d88ab Mon Sep 17 00:00:00 2001 From: David Hoese Date: Sun, 1 
Sep 2024 10:05:27 -0500 Subject: [PATCH 16/47] Switch to 'sh' compatible syntax in RTD config --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index f6f9662c1c..8404e7da46 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -19,6 +19,6 @@ build: pre_install: - git update-index --assume-unchanged doc/rtd_environment.yml doc/source/conf.py pre_build: - - cd doc/source && if [[ $READTHEDOCS_VERSION =~ latest|stable ]]; then python generate_area_def_list.py; else touch area_def_list.rst; fi + - cd doc/source && if [ "$READTHEDOCS_VERSION" = "latest" ] || [ "$READTHEDOCS_VERSION" = "stable" ]; then python generate_area_def_list.py; else touch area_def_list.rst; fi conda: environment: doc/rtd_environment.yml From 8e22d866f1cbebd126ec44b4bc1572ac754a4c9b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 2 Sep 2024 11:03:42 +0300 Subject: [PATCH 17/47] Fix AAPP L1b reader not to up-cast data to float64 --- satpy/readers/aapp_l1b.py | 36 +++++++++++------------ satpy/tests/reader_tests/test_aapp_l1b.py | 25 ++++++++++------ 2 files changed, 34 insertions(+), 27 deletions(-) diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index 6e3072b4d0..aed70ae0fe 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -38,7 +38,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_chunk_size_limit -CHANNEL_DTYPE = np.float64 +CHANNEL_DTYPE = np.float32 def get_avhrr_lac_chunks(shape, dtype): @@ -239,7 +239,6 @@ def available_datasets(self, configured_datasets=None): def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" sunz, satz, azidiff = self._get_all_interpolated_angles() - name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff))) return create_xarray(name_to_variable[angle_id]) @@ -248,9 +247,10 @@ def _get_all_interpolated_angles_uncached(self): return self._interpolate_arrays(sunz40km, satz40km, 
azidiff40km) def _get_tiepoint_angles_in_degrees(self): - sunz40km = self._data["ang"][:, :, 0] * 1e-2 - satz40km = self._data["ang"][:, :, 1] * 1e-2 - azidiff40km = self._data["ang"][:, :, 2] * 1e-2 + angles = self._data["ang"].astype(np.float32) + sunz40km = angles[:, :, 0] * 1e-2 + satz40km = angles[:, :, 1] * 1e-2 + azidiff40km = angles[:, :, 2] * 1e-2 return sunz40km, satz40km, azidiff40km def _interpolate_arrays(self, *input_arrays, geolocation=False): @@ -299,8 +299,10 @@ def _get_all_interpolated_coordinates_uncached(self): return self._interpolate_arrays(lons40km, lats40km, geolocation=True) def _get_coordinates_in_degrees(self): - lons40km = self._data["pos"][:, :, 1] * 1e-4 - lats40km = self._data["pos"][:, :, 0] * 1e-4 + position_data = self._data["pos"].astype(np.float32) + lons40km = position_data[:, :, 1] * 1e-4 + lats40km = position_data[:, :, 0] * 1e-4 + return lons40km, lats40km def calibrate(self, @@ -586,14 +588,11 @@ def _vis_calibrate(data, slope2 = da.from_array(calib_coeffs[2], chunks=line_chunks) intercept2 = da.from_array(calib_coeffs[3], chunks=line_chunks) else: - slope1 = da.from_array(data["calvis"][:, chn, coeff_idx, 0], - chunks=line_chunks) * 1e-10 - intercept1 = da.from_array(data["calvis"][:, chn, coeff_idx, 1], - chunks=line_chunks) * 1e-7 - slope2 = da.from_array(data["calvis"][:, chn, coeff_idx, 2], - chunks=line_chunks) * 1e-10 - intercept2 = da.from_array(data["calvis"][:, chn, coeff_idx, 3], - chunks=line_chunks) * 1e-7 + calvis = data["calvis"].astype(np.float32) + slope1 = da.from_array(calvis[:, chn, coeff_idx, 0] * 1e-10, chunks=line_chunks) + intercept1 = da.from_array(calvis[:, chn, coeff_idx, 1] * 1e-7, chunks=line_chunks) + slope2 = da.from_array(calvis[:, chn, coeff_idx, 2] * 1e-10, chunks=line_chunks) + intercept2 = da.from_array(calvis[:, chn, coeff_idx, 3] * 1e-7, chunks=line_chunks) # In the level 1b file, the visible coefficients are stored as 4-byte integers. 
Scaling factors then convert # them to real numbers which are applied to the measured counts. The coefficient is different depending on @@ -632,9 +631,10 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= count != 0 count = count.astype(CHANNEL_DTYPE) - k1_ = da.from_array(data["calir"][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 - k2_ = da.from_array(data["calir"][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 - k3_ = da.from_array(data["calir"][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 + calir = data["calir"].astype(np.float32) + k1_ = da.from_array(calir[:, irchn, 0, 0] * 1.0e-9, chunks=line_chunks) + k2_ = da.from_array(calir[:, irchn, 0, 1] * 1.0e-6, chunks=line_chunks) + k3_ = da.from_array(calir[:, irchn, 0, 2] * 1.0e-6, chunks=line_chunks) # Count to radiance conversion: rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None] diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index a9997f7a7e..ca4cf7eb81 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -106,6 +106,7 @@ def test_read(self): for name in ["1", "2", "3a"]: key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) + assert res.dtype == np.float32 assert res.min() == 0 assert res.max() >= 100 mins.append(res.min().values) @@ -116,14 +117,13 @@ def test_read(self): for name in ["3b", "4", "5"]: key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) + assert res.dtype == np.float32 mins.append(res.min().values) maxs.append(res.max().values) if name == "3b": assert np.all(np.isnan(res[2:, :])) - - np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) - np.testing.assert_allclose(maxs, [108.40391775, 107.68545158, 106.80061233, - 337.71416096, 355.15898219, 350.87182166]) + np.testing.assert_allclose(mins, [0., 0., 0., 204.1018, 103.24155, 106.426704]) + 
np.testing.assert_allclose(maxs, [108.40393, 107.68546, 106.80061, 337.71414, 355.15897, 350.87186]) def test_angles(self): """Test reading the angles.""" @@ -136,6 +136,7 @@ def test_angles(self): info = {} key = make_dataid(name="solar_zenith_angle") res = fh.get_dataset(key, info) + assert res.dtype == np.float32 assert np.all(res == 0) def test_navigation(self): @@ -149,9 +150,11 @@ def test_navigation(self): info = {} key = make_dataid(name="longitude") res = fh.get_dataset(key, info) + assert res.dtype == np.float32 assert np.all(res == 0) key = make_dataid(name="latitude") res = fh.get_dataset(key, info) + assert res.dtype == np.float32 assert np.all(res == 0) def test_interpolation(self): @@ -188,7 +191,7 @@ def test_interpolation(self): -176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416, 178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635, 173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734, - 166.3931, 164.9982, 163.4507]]) + 166.3931, 164.9982, 163.4507]], dtype=np.float32) lats40km = np.array([ [78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202, 78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145, @@ -213,11 +216,12 @@ def test_interpolation(self): 75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207, 73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882, 71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116, - 67.4012, 66.2872]]) + 67.4012, 66.2872]], dtype=np.float32) fh._get_coordinates_in_degrees = mock.MagicMock() fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() + assert lon_data.dtype == np.float32 assert (np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata assert np.all(np.abs(lon_data) > 110) @@ -242,7 +246,7 @@ def test_interpolation_angles(self): 116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54, 114.36, 114.17, 113.98, 113.78, 113.57, 
113.36, 113.14, 112.91, 112.67, 112.42, 112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46, 108.87, 108.17, - 107.32]]) + 107.32]], dtype=np.float32) satz40km = np.array( [[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, @@ -259,7 +263,7 @@ def test_interpolation_angles(self): 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, - 6.290e+01, 6.633e+01]]) + 6.290e+01, 6.633e+01]], dtype=np.float32) azidiff40km = np.array([ [56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59, 53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1, @@ -272,10 +276,13 @@ def test_interpolation_angles(self): 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96, 129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08, 130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89, - 132.19]]) + 132.19]], dtype=np.float32) fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() + assert sunz.dtype == np.float32 + assert satz.dtype == np.float32 + assert azidiff.dtype == np.float32 assert (np.max(sunz) <= 123) assert (np.max(satz) <= 70) From b0458c84e66893e6de676c4deeff83dfc7813da5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 2 Sep 2024 11:35:54 +0300 Subject: [PATCH 18/47] Convert IR calibration variables to float32 --- satpy/readers/aapp_l1b.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index aed70ae0fe..f22c552b25 100644 --- a/satpy/readers/aapp_l1b.py +++ 
b/satpy/readers/aapp_l1b.py @@ -646,15 +646,17 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= rad > 0.0 return da.where(mask, rad, np.nan) + radtempcnv = header["radtempcnv"].astype(np.float32) + # Central wavenumber: - cwnum = header["radtempcnv"][0, irchn, 0] + cwnum = radtempcnv[0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 - bandcor_2 = header["radtempcnv"][0, irchn, 1] / 1e5 - bandcor_3 = header["radtempcnv"][0, irchn, 2] / 1e6 + bandcor_2 = radtempcnv[0, irchn, 1] / 1e5 + bandcor_3 = radtempcnv[0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 From 15a080099a7bb4735f9e78777f71ad043ac25769 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 2 Sep 2024 12:30:26 +0300 Subject: [PATCH 19/47] Do not convert memmap to in-memory variable --- satpy/readers/aapp_l1b.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index f22c552b25..08058c6f50 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -300,7 +300,7 @@ def _get_all_interpolated_coordinates_uncached(self): def _get_coordinates_in_degrees(self): position_data = self._data["pos"].astype(np.float32) - lons40km = position_data[:, :, 1] * 1e-4 + lons40km = position_data[:, :, 1] * 1e-4 lats40km = position_data[:, :, 0] * 1e-4 return lons40km, lats40km @@ -588,11 +588,11 @@ def _vis_calibrate(data, slope2 = da.from_array(calib_coeffs[2], chunks=line_chunks) intercept2 = da.from_array(calib_coeffs[3], chunks=line_chunks) else: - calvis = data["calvis"].astype(np.float32) - slope1 = da.from_array(calvis[:, chn, coeff_idx, 0] * 1e-10, chunks=line_chunks) - intercept1 = da.from_array(calvis[:, chn, coeff_idx, 1] * 1e-7, chunks=line_chunks) - slope2 = da.from_array(calvis[:, chn, coeff_idx, 2] * 1e-10, chunks=line_chunks) - intercept2 = da.from_array(calvis[:, chn, coeff_idx, 3] * 1e-7, chunks=line_chunks) + calvis = 
data["calvis"] + slope1 = da.from_array(calvis[:, chn, coeff_idx, 0], chunks=line_chunks).astype(np.float32) * 1e-10 + intercept1 = da.from_array(calvis[:, chn, coeff_idx, 1], chunks=line_chunks).astype(np.float32) * 1e-7 + slope2 = da.from_array(calvis[:, chn, coeff_idx, 2], chunks=line_chunks).astype(np.float32) * 1e-10 + intercept2 = da.from_array(calvis[:, chn, coeff_idx, 3], chunks=line_chunks).astype(np.float32) * 1e-7 # In the level 1b file, the visible coefficients are stored as 4-byte integers. Scaling factors then convert # them to real numbers which are applied to the measured counts. The coefficient is different depending on @@ -631,10 +631,10 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= count != 0 count = count.astype(CHANNEL_DTYPE) - calir = data["calir"].astype(np.float32) - k1_ = da.from_array(calir[:, irchn, 0, 0] * 1.0e-9, chunks=line_chunks) - k2_ = da.from_array(calir[:, irchn, 0, 1] * 1.0e-6, chunks=line_chunks) - k3_ = da.from_array(calir[:, irchn, 0, 2] * 1.0e-6, chunks=line_chunks) + calir = data["calir"] + k1_ = da.from_array(calir[:, irchn, 0, 0], chunks=line_chunks).astype(np.float32) * 1.0e-9 + k2_ = da.from_array(calir[:, irchn, 0, 1], chunks=line_chunks).astype(np.float32) * 1.0e-6 + k3_ = da.from_array(calir[:, irchn, 0, 2], chunks=line_chunks).astype(np.float32) * 1.0e-6 # Count to radiance conversion: rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None] From 593b30985d160a11a3ee11d392d5ec8cee279f16 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 2 Sep 2024 21:53:00 +0000 Subject: [PATCH 20/47] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.5.6 → v0.6.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.5.6...v0.6.3) - [github.com/pre-commit/mirrors-mypy: v1.11.1 → 
v1.11.2](https://github.com/pre-commit/mirrors-mypy/compare/v1.11.1...v1.11.2) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fbd244eab8..a6c4b2b3d2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.5.6' + rev: 'v0.6.3' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.11.1' # Use the sha / tag you want to point at + rev: 'v1.11.2' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From eb5cb990c1c8609a5208f16c8faa0eb048b1d822 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 4 Sep 2024 09:48:00 -0500 Subject: [PATCH 21/47] Fix PT001 parentheses issues --- satpy/tests/cf_tests/test_area.py | 2 +- satpy/tests/cf_tests/test_coords.py | 2 +- satpy/tests/cf_tests/test_decoding.py | 4 +- satpy/tests/cf_tests/test_encoding.py | 4 +- satpy/tests/compositor_tests/test_viirs.py | 8 +-- satpy/tests/conftest.py | 2 +- .../enhancement_tests/test_enhancements.py | 2 +- satpy/tests/modifier_tests/test_parallax.py | 10 +-- satpy/tests/multiscene_tests/test_blend.py | 20 +++--- satpy/tests/multiscene_tests/test_misc.py | 8 +-- .../reader_tests/gms/test_gms5_vissr_l1b.py | 66 +++++++++---------- .../gms/test_gms5_vissr_navigation.py | 24 +++---- .../modis_tests/_modis_fixtures.py | 2 +- satpy/tests/reader_tests/test_abi_l1b.py | 10 +-- satpy/tests/reader_tests/test_ahi_hsd.py | 2 +- satpy/tests/reader_tests/test_ami_l1b.py | 6 +- satpy/tests/reader_tests/test_atms_l1b_nc.py | 6 +- satpy/tests/reader_tests/test_cmsaf_claas.py | 22 +++---- satpy/tests/reader_tests/test_epic_l1b_h5.py | 2 +- satpy/tests/reader_tests/test_fci_l1c_nc.py | 18 ++--- 
satpy/tests/reader_tests/test_gld360_ualf2.py | 6 +- .../reader_tests/test_goes_imager_nc_noaa.py | 12 ++-- satpy/tests/reader_tests/test_hrit_base.py | 8 +-- satpy/tests/reader_tests/test_iasi_l2.py | 4 +- satpy/tests/reader_tests/test_ici_l1b_nc.py | 6 +- satpy/tests/reader_tests/test_mws_l1b_nc.py | 4 +- satpy/tests/reader_tests/test_nwcsaf_nc.py | 10 +-- .../reader_tests/test_oceancolorcci_l3_nc.py | 6 +- satpy/tests/reader_tests/test_satpy_cf_nc.py | 4 +- satpy/tests/reader_tests/test_seviri_base.py | 4 +- .../reader_tests/test_seviri_l1b_native.py | 4 +- .../tests/reader_tests/test_seviri_l1b_nc.py | 2 +- .../tests/reader_tests/test_viirs_compact.py | 4 +- .../reader_tests/test_viirs_vgac_l1c_nc.py | 2 +- satpy/tests/scene_tests/test_conversions.py | 4 +- satpy/tests/test_composites.py | 18 ++--- satpy/tests/test_config.py | 8 +-- satpy/tests/test_readers.py | 4 +- satpy/tests/test_yaml_reader.py | 10 +-- satpy/tests/writer_tests/test_cf.py | 12 ++-- satpy/tests/writer_tests/test_ninjogeotiff.py | 2 +- 41 files changed, 177 insertions(+), 177 deletions(-) diff --git a/satpy/tests/cf_tests/test_area.py b/satpy/tests/cf_tests/test_area.py index ee24d0e10d..370a23ed3c 100644 --- a/satpy/tests/cf_tests/test_area.py +++ b/satpy/tests/cf_tests/test_area.py @@ -25,7 +25,7 @@ from satpy.cf.area import _add_grid_mapping, area2cf -@pytest.fixture() +@pytest.fixture def input_data_arr() -> xr.DataArray: """Create a data array.""" return xr.DataArray( diff --git a/satpy/tests/cf_tests/test_coords.py b/satpy/tests/cf_tests/test_coords.py index 68ba319741..6a0d65de17 100644 --- a/satpy/tests/cf_tests/test_coords.py +++ b/satpy/tests/cf_tests/test_coords.py @@ -177,7 +177,7 @@ def test_is_projected(self, caplog): assert _is_projected(da) assert "Failed to tell if data are projected." 
in caplog.text - @pytest.fixture() + @pytest.fixture def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] diff --git a/satpy/tests/cf_tests/test_decoding.py b/satpy/tests/cf_tests/test_decoding.py index 51c1bfecaf..6309b62263 100644 --- a/satpy/tests/cf_tests/test_decoding.py +++ b/satpy/tests/cf_tests/test_decoding.py @@ -28,7 +28,7 @@ class TestDecodeAttrs: """Test decoding of CF-encoded attributes.""" - @pytest.fixture() + @pytest.fixture def attrs(self): """Get CF-encoded attributes.""" return { @@ -41,7 +41,7 @@ def attrs(self): "my_dict": '{"a": {"b": [1, 2, 3]}, "c": {"d": "2000-01-01 12:15:33.123456"}}' } - @pytest.fixture() + @pytest.fixture def expected(self): """Get expected decoded results.""" return { diff --git a/satpy/tests/cf_tests/test_encoding.py b/satpy/tests/cf_tests/test_encoding.py index 53ce8b6c8f..103736120d 100644 --- a/satpy/tests/cf_tests/test_encoding.py +++ b/satpy/tests/cf_tests/test_encoding.py @@ -25,7 +25,7 @@ class TestUpdateEncoding: """Test update of dataset encodings.""" - @pytest.fixture() + @pytest.fixture def fake_ds(self): """Create fake data for testing.""" ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), @@ -35,7 +35,7 @@ def fake_ds(self): "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds - @pytest.fixture() + @pytest.fixture def fake_ds_digit(self): """Create fake data for testing.""" ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 95ff3e0d39..eef88753d2 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -30,7 +30,7 @@ class TestVIIRSComposites: """Test various VIIRS-specific composites.""" - @pytest.fixture() + @pytest.fixture def area(self): """Return fake area for use with DNB tests.""" rows = 5 @@ -43,7 +43,7 @@ def area(self): (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area - 
@pytest.fixture() + @pytest.fixture def dnb(self, area): """Return fake channel 1 data for DNB tests.""" dnb = np.zeros(area.shape) + 0.25 @@ -56,7 +56,7 @@ def dnb(self, area): "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c01 - @pytest.fixture() + @pytest.fixture def sza(self, area): """Return fake sza dataset for DNB tests.""" # data changes by row, sza changes by col for testing @@ -70,7 +70,7 @@ def sza(self, area): "start_time": dt.datetime(2020, 1, 1, 12, 0, 0)}) return c02 - @pytest.fixture() + @pytest.fixture def lza(self, area): """Return fake lunal zenith angle dataset for DNB tests.""" lza = np.zeros(area.shape) + 70.0 diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 754b11ffcd..aac24ff10f 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -50,7 +50,7 @@ def _clear_function_caches(): load_compositor_configs_for_sensor.cache_clear() -@pytest.fixture() +@pytest.fixture def include_test_etc(): """Tell Satpy to use the config 'etc' directory from the tests directory.""" with satpy.config.set(config_path=[TEST_ETC_DIR]): diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 747d6fc0cd..b30a073968 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -518,7 +518,7 @@ def func(dask_array): assert res.shape == arr.shape -@pytest.fixture() +@pytest.fixture def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index b769e45608..276ff0ebd4 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -40,7 +40,7 @@ # - request -@pytest.fixture() +@pytest.fixture def fake_tle(): """Produce fake Two Line Element (TLE) object from pyorbital.""" return pyorbital.tlefile.Tle( @@ -566,7 +566,7 @@ 
def test_parallax_modifier_interface_with_cloud(self): # do so after parallax correction assert not (res.diff("x") < 0).any() - @pytest.fixture() + @pytest.fixture def test_area(self, request): """Produce test area for parallax correction unit tests. @@ -713,12 +713,12 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar class TestParallaxCorrectionSceneLoad: """Test that scene load interface works as expected.""" - @pytest.fixture() + @pytest.fixture def yaml_code(self): """Return YAML code for parallax_corrected_VIS006.""" return _test_yaml_code - @pytest.fixture() + @pytest.fixture def conf_file(self, yaml_code, tmp_path): """Produce a fake configuration file.""" conf_file = tmp_path / "test.yaml" @@ -726,7 +726,7 @@ def conf_file(self, yaml_code, tmp_path): fp.write(yaml_code) return conf_file - @pytest.fixture() + @pytest.fixture def fake_scene(self, yaml_code): """Produce fake scene and prepare fake composite config.""" from satpy import Scene diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index c003106dea..250d4450e3 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -59,7 +59,7 @@ def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: return expected -@pytest.fixture() +@pytest.fixture def test_area(): """Get area definition used by test DataArrays.""" return _create_test_area() @@ -77,7 +77,7 @@ def image_mode(request): return request.param -@pytest.fixture() +@pytest.fixture def cloud_type_data_array1(test_area, data_type, image_mode): """Get DataArray for cloud type in the first test Scene.""" dsid1 = make_dataid( @@ -107,7 +107,7 @@ def cloud_type_data_array1(test_area, data_type, image_mode): return data_arr -@pytest.fixture() +@pytest.fixture def cloud_type_data_array2(test_area, data_type, image_mode): """Get DataArray for cloud type in the second test Scene.""" dsid1 = make_dataid( @@ 
-133,7 +133,7 @@ def cloud_type_data_array2(test_area, data_type, image_mode): return data_arr -@pytest.fixture() +@pytest.fixture def scene1_with_weights(cloud_type_data_array1, test_area): """Create first test scene with a dataset of weights.""" from satpy import Scene @@ -160,7 +160,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): return scene, [wgt1, wgt2] -@pytest.fixture() +@pytest.fixture def scene2_with_weights(cloud_type_data_array2, test_area): """Create second test scene.""" from satpy import Scene @@ -183,7 +183,7 @@ def scene2_with_weights(cloud_type_data_array2, test_area): return scene, [wgt1, wgt2] -@pytest.fixture() +@pytest.fixture def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): """Create small multi-scene for testing.""" from satpy import MultiScene @@ -193,7 +193,7 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): return MultiScene([scene1, scene2]), [weights1, weights2] -@pytest.fixture() +@pytest.fixture def groups(): """Get group definitions for the MultiScene.""" return { @@ -277,7 +277,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr assert result.attrs["start_time"] == dt.datetime(2023, 1, 16, 11, 9, 17) assert result.attrs["end_time"] == dt.datetime(2023, 1, 16, 11, 28, 1, 900000) - @pytest.fixture() + @pytest.fixture def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) @@ -389,7 +389,7 @@ def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: class TestTemporalRGB: """Test the temporal RGB blending method.""" - @pytest.fixture() + @pytest.fixture def nominal_data(self): """Return the input arrays for the nominal use case.""" da1 = xr.DataArray([1, 0, 0], attrs={"start_time": dt.datetime(2023, 5, 22, 9, 0, 0)}) @@ -398,7 +398,7 @@ def nominal_data(self): return [da1, da2, da3] - @pytest.fixture() + @pytest.fixture def expected_result(self): 
"""Return the expected result arrays.""" return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 9f6e400e31..1a029cd703 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -131,7 +131,7 @@ def test_from_files(self): class TestMultiSceneGrouping: """Test dataset grouping in MultiScene.""" - @pytest.fixture() + @pytest.fixture def scene1(self): """Create first test scene.""" from satpy import Scene @@ -152,7 +152,7 @@ def scene1(self): scene[dsid2] = _create_test_dataset(name="ds2") return scene - @pytest.fixture() + @pytest.fixture def scene2(self): """Create second test scene.""" from satpy import Scene @@ -173,13 +173,13 @@ def scene2(self): scene[dsid2] = _create_test_dataset(name="ds4") return scene - @pytest.fixture() + @pytest.fixture def multi_scene(self, scene1, scene2): """Create small multi scene for testing.""" from satpy import MultiScene return MultiScene([scene1, scene2]) - @pytest.fixture() + @pytest.fixture def groups(self): """Get group definitions for the MultiScene.""" return { diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index 76b4b72bf2..f10109e763 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -116,12 +116,12 @@ def with_compression(self, request): """Enable compression.""" return request.param - @pytest.fixture() + @pytest.fixture def open_function(self, with_compression): """Get open function for writing test files.""" return gzip.open if with_compression else open - @pytest.fixture() + @pytest.fixture def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): """Get test VISSR file.""" filename = tmp_path / "vissr_file" @@ -130,7 +130,7 @@ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): writer.write(filename, 
file_contents) return filename - @pytest.fixture() + @pytest.fixture def file_contents(self, control_block, image_parameters, image_data): """Get VISSR file contents.""" return { @@ -139,7 +139,7 @@ def file_contents(self, control_block, image_parameters, image_data): "image_data": image_data, } - @pytest.fixture() + @pytest.fixture def control_block(self, dataset_id): """Get VISSR control block.""" block_size = {"IR1": 16, "VIS": 4} @@ -148,7 +148,7 @@ def control_block(self, dataset_id): ctrl_block["available_block_size_of_image_data"] = 2 return ctrl_block - @pytest.fixture() + @pytest.fixture def image_parameters(self, mode_block, cal_params, nav_params): """Get VISSR image parameters.""" image_params = {"mode": mode_block} @@ -156,7 +156,7 @@ def image_parameters(self, mode_block, cal_params, nav_params): image_params.update(nav_params) return image_params - @pytest.fixture() + @pytest.fixture def nav_params( self, coordinate_conversion, @@ -170,7 +170,7 @@ def nav_params( nav_params.update(coordinate_conversion) return nav_params - @pytest.fixture() + @pytest.fixture def cal_params( self, vis_calibration, @@ -186,7 +186,7 @@ def cal_params( "wv_calibration": wv_calibration, } - @pytest.fixture() + @pytest.fixture def mode_block(self): """Get VISSR mode block.""" mode = np.zeros(1, dtype=fmt.MODE_BLOCK) @@ -201,7 +201,7 @@ def mode_block(self): mode["vis_frame_parameters"]["number_of_pixels"] = 2 return mode - @pytest.fixture() + @pytest.fixture def coordinate_conversion(self, coord_conv, simple_coord_conv_table): """Get all coordinate conversion parameters.""" return { @@ -209,7 +209,7 @@ def coordinate_conversion(self, coord_conv, simple_coord_conv_table): "simple_coordinate_conversion_table": simple_coord_conv_table } - @pytest.fixture() + @pytest.fixture def coord_conv(self): """Get parameters for coordinate conversions. 
@@ -255,14 +255,14 @@ def coord_conv(self): conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 return conv - @pytest.fixture() + @pytest.fixture def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION return {"attitude_prediction": att_pred} - @pytest.fixture() + @pytest.fixture def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): """Get predictions of orbital parameters.""" return { @@ -270,21 +270,21 @@ def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): "orbit_prediction_2": orbit_prediction_2 } - @pytest.fixture() + @pytest.fixture def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred - @pytest.fixture() + @pytest.fixture def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred - @pytest.fixture() + @pytest.fixture def vis_calibration(self): """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) @@ -292,7 +292,7 @@ def vis_calibration(self): table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal - @pytest.fixture() + @pytest.fixture def ir1_calibration(self): """Get IR1 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) @@ -300,32 +300,32 @@ def ir1_calibration(self): table[0, 0:4] = np.array([0, 100, 200, 300]) return cal - @pytest.fixture() + @pytest.fixture def ir2_calibration(self): """Get IR2 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture() + @pytest.fixture def wv_calibration(self): """Get WV calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture() + @pytest.fixture def simple_coord_conv_table(self): """Get 
simple coordinate conversion table.""" table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table - @pytest.fixture() + @pytest.fixture def image_data(self, dataset_id, image_data_ir1, image_data_vis): """Get VISSR image data.""" data = {"IR1": image_data_ir1, "VIS": image_data_vis} return data[dataset_id["name"]] - @pytest.fixture() + @pytest.fixture def image_data_ir1(self): """Get IR1 image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) @@ -336,7 +336,7 @@ def image_data_ir1(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture() + @pytest.fixture def image_data_vis(self): """Get VIS image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) @@ -347,7 +347,7 @@ def image_data_vis(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture() + @pytest.fixture def vissr_file_like(self, vissr_file, with_compression): """Get file-like object for VISSR test file.""" if with_compression: @@ -355,14 +355,14 @@ def vissr_file_like(self, vissr_file, with_compression): return FSFile(open_file) return vissr_file - @pytest.fixture() + @pytest.fixture def file_handler(self, vissr_file_like, mask_space): """Get file handler to be tested.""" return vissr.GMS5VISSRFileHandler( vissr_file_like, {}, {}, mask_space=mask_space ) - @pytest.fixture() + @pytest.fixture def vis_refl_exp(self, mask_space, lons_lats_exp): """Get expected VIS reflectance.""" lons, lats = lons_lats_exp @@ -384,7 +384,7 @@ def vis_refl_exp(self, mask_space, lons_lats_exp): }, ) - @pytest.fixture() + @pytest.fixture def ir1_counts_exp(self, lons_lats_exp): """Get expected IR1 counts.""" lons, lats = lons_lats_exp @@ -402,7 +402,7 @@ def ir1_counts_exp(self, lons_lats_exp): }, ) - @pytest.fixture() + @pytest.fixture def ir1_bt_exp(self, lons_lats_exp): """Get expected IR1 brightness temperature.""" lons, lats = lons_lats_exp @@ -420,7 +420,7 @@ def ir1_bt_exp(self, 
lons_lats_exp): }, ) - @pytest.fixture() + @pytest.fixture def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. @@ -456,7 +456,7 @@ def lons_lats_exp(self, dataset_id): lats = xr.DataArray(exp["lats"], dims=("y", "x")) return lons, lats - @pytest.fixture() + @pytest.fixture def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) @@ -473,7 +473,7 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): } return expectations[dataset_id] - @pytest.fixture() + @pytest.fixture def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": @@ -507,7 +507,7 @@ def area_def_exp(self, dataset_id): height=size, ) - @pytest.fixture() + @pytest.fixture def attrs_exp(self, area_def_exp): """Get expected dataset attributes.""" return { @@ -546,7 +546,7 @@ def test_time_attributes(self, file_handler, attrs_exp): class TestCorruptFile: """Test reading corrupt files.""" - @pytest.fixture() + @pytest.fixture def file_contents(self): """Get corrupt file contents (all zero).""" control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) @@ -557,7 +557,7 @@ def file_contents(self): "image_data": image_data, } - @pytest.fixture() + @pytest.fixture def corrupt_file(self, file_contents, tmp_path): """Write corrupt VISSR file to disk.""" filename = tmp_path / "my_vissr_file" diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 3b93748007..066e36f1c7 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -329,7 +329,7 @@ def test_normalize_vector(self): class TestImageNavigation: """Test navigation of an entire image.""" - @pytest.fixture() + @pytest.fixture def expected(self): """Get expected coordinates.""" exp = { @@ 
-418,12 +418,12 @@ def test_interpolate_attitude_prediction( attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) _assert_namedtuple_close(attitude, attitude_expected) - @pytest.fixture() + @pytest.fixture def obs_time(self): """Get observation time.""" return 2.5 - @pytest.fixture() + @pytest.fixture def orbit_expected(self): """Get expected orbit.""" return nav.Orbit( @@ -440,7 +440,7 @@ def orbit_expected(self): nutation_precession=1.6 * np.identity(3), ) - @pytest.fixture() + @pytest.fixture def attitude_expected(self): """Get expected attitude.""" return nav.Attitude( @@ -450,13 +450,13 @@ def attitude_expected(self): ) -@pytest.fixture() +@pytest.fixture def sampling_angle(): """Get sampling angle.""" return 0.000095719995443 -@pytest.fixture() +@pytest.fixture def scan_params(sampling_angle): """Get scanning parameters.""" return nav.ScanningParameters( @@ -467,7 +467,7 @@ def scan_params(sampling_angle): ) -@pytest.fixture() +@pytest.fixture def attitude_prediction(): """Get attitude prediction.""" return nav.AttitudePrediction( @@ -480,7 +480,7 @@ def attitude_prediction(): ) -@pytest.fixture() +@pytest.fixture def orbit_prediction(): """Get orbit prediction.""" return nav.OrbitPrediction( @@ -506,7 +506,7 @@ def orbit_prediction(): ) -@pytest.fixture() +@pytest.fixture def proj_params(sampling_angle): """Get projection parameters.""" return nav.ProjectionParameters( @@ -526,19 +526,19 @@ def proj_params(sampling_angle): ) -@pytest.fixture() +@pytest.fixture def static_nav_params(proj_params, scan_params): """Get static navigation parameters.""" return nav.StaticNavigationParameters(proj_params, scan_params) -@pytest.fixture() +@pytest.fixture def predicted_nav_params(attitude_prediction, orbit_prediction): """Get predicted navigation parameters.""" return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) -@pytest.fixture() +@pytest.fixture def navigation_params(static_nav_params, predicted_nav_params): 
"""Get image navigation parameters.""" return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index d663f7b9d9..eaf527eb1a 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -409,7 +409,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: return [full_path] -@pytest.fixture() +@pytest.fixture def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py index ac82512a2a..321e62f1a5 100644 --- a/satpy/tests/reader_tests/test_abi_l1b.py +++ b/satpy/tests/reader_tests/test_abi_l1b.py @@ -137,7 +137,7 @@ def generate_l1b_filename(chan_name: str) -> str: return f"OR_ABI-L1b-RadC-M4{chan_name}_G16_s20161811540362_e20161811545170_c20161811545230_suffix.nc" -@pytest.fixture() +@pytest.fixture def c01_refl(tmp_path) -> xr.DataArray: """Load c01 reflectances.""" with _apply_dask_chunk_size(): @@ -145,7 +145,7 @@ def c01_refl(tmp_path) -> xr.DataArray: return reader.load(["C01"])["C01"] -@pytest.fixture() +@pytest.fixture def c01_rad(tmp_path) -> xr.DataArray: """Load c01 radiances.""" with _apply_dask_chunk_size(): @@ -153,7 +153,7 @@ def c01_rad(tmp_path) -> xr.DataArray: return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] -@pytest.fixture() +@pytest.fixture def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: """Load c01 radiances through h5netcdf.""" shape = RAD_SHAPE[1000] @@ -176,7 +176,7 @@ def c01_rad_h5netcdf(tmp_path) -> xr.DataArray: return reader.load([DataQuery(name="C01", calibration="radiance")])["C01"] -@pytest.fixture() +@pytest.fixture def c01_counts(tmp_path) -> xr.DataArray: 
"""Load c01 counts.""" with _apply_dask_chunk_size(): @@ -184,7 +184,7 @@ def c01_counts(tmp_path) -> xr.DataArray: return reader.load([DataQuery(name="C01", calibration="counts")])["C01"] -@pytest.fixture() +@pytest.fixture def c07_bt_creator(tmp_path) -> Callable: """Create a loader for c07 brightness temperatures.""" def _load_data_array( diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index fbb0857734..f62c646d6c 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -194,7 +194,7 @@ def test_segment(self, fromfile, np2str): 5500000.035542117, -2200000.0142168473)) -@pytest.fixture() +@pytest.fixture def hsd_file_jp01(tmp_path): """Create a jp01 hsd file.""" from satpy.readers.ahi_hsd import ( # _IRCAL_INFO_TYPE, diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 7c80a376ee..bd8d5fb8fb 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -117,7 +117,7 @@ def _fake_reader(counts_data: xr.DataArray) -> Iterator[AMIL1bNetCDF]: {"file_type": "ir087"}) -@pytest.fixture() +@pytest.fixture def fake_vis_reader(): """Create fake reader for loading visible data.""" attrs = _fake_vis_attrs() @@ -146,7 +146,7 @@ def _fake_vis_attrs(): } -@pytest.fixture() +@pytest.fixture def fake_ir_reader(): """Create fake reader for loading IR data.""" attrs = _fake_ir_attrs() @@ -175,7 +175,7 @@ def _fake_ir_attrs(): } -@pytest.fixture() +@pytest.fixture def fake_ir_reader2(): """Create fake reader for testing radiance clipping.""" counts_arr = FAKE_IR_DATA.copy() diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py index f1f729311a..9dc7b210d3 100644 --- a/satpy/tests/reader_tests/test_atms_l1b_nc.py +++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py @@ -28,7 +28,7 @@ # - tmp_path -@pytest.fixture() +@pytest.fixture def 
reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( @@ -38,7 +38,7 @@ def reader(l1b_file): ) -@pytest.fixture() +@pytest.fixture def l1b_file(tmp_path, atms_fake_dataset): """Return file path to level1b file.""" l1b_file_path = tmp_path / "test_file_atms_l1b.nc" @@ -46,7 +46,7 @@ def l1b_file(tmp_path, atms_fake_dataset): return l1b_file_path -@pytest.fixture() +@pytest.fixture def atms_fake_dataset(): """Return fake ATMS dataset.""" atrack = 2 diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 4615662b32..5f7c63fec5 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -41,13 +41,13 @@ def start_time(request): return request.param -@pytest.fixture() +@pytest.fixture def start_time_str(start_time): """Get string representation of the start time.""" return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") -@pytest.fixture() +@pytest.fixture def fake_dataset(start_time_str): """Create a CLAAS-like test dataset.""" cph = xr.DataArray( @@ -81,7 +81,7 @@ def fake_dataset(start_time_str): ) -@pytest.fixture() +@pytest.fixture def encoding(): """Dataset encoding.""" return { @@ -89,7 +89,7 @@ def encoding(): } -@pytest.fixture() +@pytest.fixture def fake_file(fake_dataset, encoding, tmp_path): """Write a fake dataset to file.""" filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" @@ -97,7 +97,7 @@ def fake_file(fake_dataset, encoding, tmp_path): return filename -@pytest.fixture() +@pytest.fixture def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ @@ -109,7 +109,7 @@ def fake_files(fake_dataset, encoding, tmp_path): return filenames -@pytest.fixture() +@pytest.fixture def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths @@ -137,14 +137,14 @@ def test_file_pattern(reader): class TestCLAAS2MultiFile: 
"""Test reading multiple CLAAS-2 files.""" - @pytest.fixture() + @pytest.fixture def multi_file_reader(self, reader, fake_files): """Create a multi-file reader.""" loadables = reader.select_files_from_pathnames(fake_files) reader.create_filehandlers(loadables) return reader - @pytest.fixture() + @pytest.fixture def multi_file_dataset(self, multi_file_reader): """Load datasets from multiple files.""" ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] @@ -177,20 +177,20 @@ def test_number_of_datasets(self, multi_file_dataset): class TestCLAAS2SingleFile: """Test reading a single CLAAS2 file.""" - @pytest.fixture() + @pytest.fixture def file_handler(self, fake_file): """Return a CLAAS-2 file handler.""" from satpy.readers.cmsaf_claas2 import CLAAS2 return CLAAS2(fake_file, {}, {}) - @pytest.fixture() + @pytest.fixture def area_extent_exp(self, start_time): """Get expected area extent.""" if start_time < datetime.datetime(2017, 12, 6): return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) - @pytest.fixture() + @pytest.fixture def area_exp(self, area_extent_exp): """Get expected area definition.""" proj_dict = { diff --git a/satpy/tests/reader_tests/test_epic_l1b_h5.py b/satpy/tests/reader_tests/test_epic_l1b_h5.py index 9861535b3f..ac690a1b1e 100644 --- a/satpy/tests/reader_tests/test_epic_l1b_h5.py +++ b/satpy/tests/reader_tests/test_epic_l1b_h5.py @@ -36,7 +36,7 @@ mas_data = RANDOM_GEN.choice([0, 1], size=(100, 100)) -@pytest.fixture() +@pytest.fixture def setup_hdf5_file(tmp_path): """Create temp hdf5 files.""" fn = tmp_path / "epic_1b_20150613120251_03.h5" diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index f7037752f3..aa98990df3 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -534,7 +534,7 @@ class 
FakeFCIFileHandlerAF(FakeFCIFileHandlerBase): # Fixtures preparation ------------------------------- # ---------------------------------------------------- -@pytest.fixture() +@pytest.fixture def reader_configs(): """Return reader configs for FCI.""" from satpy._config import config_search_paths @@ -585,7 +585,7 @@ def mocked_basefilehandler(filehandler): yield -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerFDHSI_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): @@ -597,7 +597,7 @@ def FakeFCIFileHandlerFDHSI_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerFDHSIError_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): @@ -609,7 +609,7 @@ def FakeFCIFileHandlerFDHSIError_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerFDHSIIQTI_fixture(): """Get a fixture for the fake FDHSI IQTI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSIIQTI): @@ -621,7 +621,7 @@ def FakeFCIFileHandlerFDHSIIQTI_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerFDHSIQ4_fixture(): """Get a fixture for the fake FDHSI Q4 filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): @@ -633,7 +633,7 @@ def FakeFCIFileHandlerFDHSIQ4_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for the fake HRFI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): @@ -645,7 +645,7 @@ def FakeFCIFileHandlerHRFI_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerHRFIIQTI_fixture(): """Get a fixture for the fake HRFI IQTI filehandler, including channel 
and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFIIQTI): @@ -657,7 +657,7 @@ def FakeFCIFileHandlerHRFIIQTI_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerHRFIQ4_fixture(): """Get a fixture for the fake HRFI Q4 filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): @@ -669,7 +669,7 @@ def FakeFCIFileHandlerHRFIQ4_fixture(): yield param_dict -@pytest.fixture() +@pytest.fixture def FakeFCIFileHandlerAF_fixture(channel, resolution): """Get a fixture for the fake AF filehandler, it contains only one channel and one resolution.""" chan_patterns = {channel.split("_")[0] + "_{:>02d}": {"channels": [int(channel.split("_")[1])], diff --git a/satpy/tests/reader_tests/test_gld360_ualf2.py b/satpy/tests/reader_tests/test_gld360_ualf2.py index 73443ff944..cbe3d4bffc 100644 --- a/satpy/tests/reader_tests/test_gld360_ualf2.py +++ b/satpy/tests/reader_tests/test_gld360_ualf2.py @@ -29,7 +29,7 @@ TEST_END_TIME = TEST_START_TIME + dt.timedelta(hours=1) -@pytest.fixture() +@pytest.fixture def fake_file(tmp_path): """Create UALF2 file for the tests.""" fname = tmp_path / "2021.01.04.08.00.txt" @@ -48,7 +48,7 @@ def fake_file(tmp_path): return fname -@pytest.fixture() +@pytest.fixture def fake_filehandler(fake_file): """Create FileHandler for the tests.""" filename_info = {} @@ -297,7 +297,7 @@ def test_column_names_length(): np.testing.assert_equal(actual, expected) -@pytest.fixture() +@pytest.fixture def fake_scn(fake_file): """Create fake file for tests.""" from satpy import Scene diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 994f1336fd..1049d814dd 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -209,7 +209,7 @@ def _apply_yaw_flip(self, data_array, yaw_flip): data_array.data = np.flipud(data_array.data) return 
data_array - @pytest.fixture() + @pytest.fixture def lons_lats(self, yaw_flip): """Get longitudes and latitudes.""" lon = xr.DataArray( @@ -227,7 +227,7 @@ def lons_lats(self, yaw_flip): self._apply_yaw_flip(lat, yaw_flip) return lon, lat - @pytest.fixture() + @pytest.fixture def dataset(self, lons_lats, channel_id): """Create a fake dataset.""" lon, lat = lons_lats @@ -253,7 +253,7 @@ def dataset(self, lons_lats, channel_id): attrs={"Satellite Sensor": "G-15"} ) - @pytest.fixture() + @pytest.fixture def earth_mask(self, yaw_flip): """Get expected earth mask.""" earth_mask = xr.DataArray( @@ -265,7 +265,7 @@ def earth_mask(self, yaw_flip): self._apply_yaw_flip(earth_mask, yaw_flip) return earth_mask - @pytest.fixture() + @pytest.fixture def geometry(self, channel_id, yaw_flip): """Get expected geometry.""" shapes = { @@ -278,7 +278,7 @@ def geometry(self, channel_id, yaw_flip): "shape": shapes[channel_id] } - @pytest.fixture() + @pytest.fixture def expected(self, geometry, earth_mask, yaw_flip): """Define expected metadata.""" proj_dict = { @@ -311,7 +311,7 @@ def expected(self, geometry, earth_mask, yaw_flip): "nadir_col": 1 } - @pytest.fixture() + @pytest.fixture def mocked_file_handler(self, dataset): """Mock file handler to load the given fake dataset.""" from satpy.readers.goes_imager_nc import FULL_DISC, GOESNCFileHandler diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index cae8f771a6..a6c6472e40 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -132,7 +132,7 @@ def new_get_hd_compressed(instance, hdr_info): instance.mda["data_field_length"] = 1578312 -@pytest.fixture() +@pytest.fixture def stub_hrit_file(tmp_path): """Create a stub hrit file.""" filename = tmp_path / "some_hrit_file" @@ -156,7 +156,7 @@ def create_stub_hrit(filename, open_fun=open, meta=mda): return filename -@pytest.fixture() +@pytest.fixture def stub_bzipped_hrit_file(tmp_path): 
"""Create a stub bzipped hrit file.""" filename = tmp_path / "some_hrit_file.bz2" @@ -164,7 +164,7 @@ def stub_bzipped_hrit_file(tmp_path): return filename -@pytest.fixture() +@pytest.fixture def stub_gzipped_hrit_file(tmp_path): """Create a stub gzipped hrit file.""" filename = tmp_path / "some_hrit_file.gz" @@ -172,7 +172,7 @@ def stub_gzipped_hrit_file(tmp_path): return filename -@pytest.fixture() +@pytest.fixture def stub_compressed_hrit_file(tmp_path): """Create a stub compressed hrit file.""" filename = tmp_path / "some_hrit_file.C_" diff --git a/satpy/tests/reader_tests/test_iasi_l2.py b/satpy/tests/reader_tests/test_iasi_l2.py index 39382314d3..fb1ab5a63f 100644 --- a/satpy/tests/reader_tests/test_iasi_l2.py +++ b/satpy/tests/reader_tests/test_iasi_l2.py @@ -316,7 +316,7 @@ def test_form_datetimes(self): self.check_sensing_times(times) -@pytest.fixture() +@pytest.fixture def fake_iasi_l2_cdr_nc_dataset(): """Create minimally fake IASI L2 CDR NC dataset.""" shp = (3, 4, 5) @@ -371,7 +371,7 @@ def fake_iasi_l2_cdr_nc_dataset(): "pressure_levels": pres}) -@pytest.fixture() +@pytest.fixture def fake_iasi_l2_cdr_nc_file(fake_iasi_l2_cdr_nc_dataset, tmp_path): """Write a NetCDF file with minimal fake IASI L2 CDR NC data.""" fn = ("W_XX-EUMETSAT-Darmstadt,HYPERSPECT+SOUNDING,METOPA+PW3+" diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index ab8bad2527..550d964008 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -44,7 +44,7 @@ N_183 = 3 -@pytest.fixture() +@pytest.fixture def reader(fake_file): """Return reader of ici level1b data.""" return IciL1bNCFileHandler( @@ -69,7 +69,7 @@ def reader(fake_file): ) -@pytest.fixture() +@pytest.fixture def fake_file(tmp_path): """Return file path to level1b file.""" file_path = tmp_path / "test_file_ici_l1b_nc.nc" @@ -78,7 +78,7 @@ def fake_file(tmp_path): return file_path -@pytest.fixture() +@pytest.fixture def 
dataset_info(): """Return dataset info.""" return { diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index 52a894bd00..07066346b9 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -43,7 +43,7 @@ N_PRTS = 6 -@pytest.fixture() +@pytest.fixture def reader(fake_file): """Return reader of mws level-1b data.""" return MWSL1BFile( @@ -70,7 +70,7 @@ def reader(fake_file): ) -@pytest.fixture() +@pytest.fixture def fake_file(tmp_path): """Return file path to level-1b file.""" file_path = tmp_path / "test_file_mws_l1b.nc" diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index d4f6dffedc..ef7e7353d4 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -110,7 +110,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs_geo): return filename -@pytest.fixture() +@pytest.fixture def nwcsaf_geo_ct_filehandler(nwcsaf_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_geo_ct_filename, {}, {}) @@ -161,13 +161,13 @@ def create_ctth_file(path, attrs=global_attrs): return filename -@pytest.fixture() +@pytest.fixture def nwcsaf_pps_cmic_filehandler(nwcsaf_pps_cmic_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_cmic_filename, {}, {"file_key_prefix": "cmic_"}) -@pytest.fixture() +@pytest.fixture def nwcsaf_pps_ctth_filehandler(nwcsaf_pps_ctth_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_ctth_filename, {}, {}) @@ -223,7 +223,7 @@ def create_ctth_alti_pal_variable_with_fill_value_color(nc_file, var_name): var.attrs["_FillValue"] = 65535 -@pytest.fixture() +@pytest.fixture def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): """Create a CPP filehandler.""" return NcNWCSAF(nwcsaf_pps_cpp_filename, {}, {"file_key_prefix": "cpp_"}) @@ -238,7 +238,7 @@ def 
nwcsaf_old_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) -@pytest.fixture() +@pytest.fixture def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_old_geo_ct_filename, {}, {}) diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 0293a88fe3..5dea12d31e 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -32,7 +32,7 @@ # - tmp_path -@pytest.fixture() +@pytest.fixture def fake_dataset(): """Create a CLAAS-like test dataset.""" adg = xr.DataArray( @@ -95,7 +95,7 @@ def fake_dataset(): ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] -@pytest.fixture() +@pytest.fixture def fake_file_dict(fake_dataset, tmp_path): """Write a fake dataset to file.""" fdict = {} @@ -150,7 +150,7 @@ def _create_reader_for_resolutions(self, filename): assert reader.file_handlers return reader - @pytest.fixture() + @pytest.fixture def area_exp(self): """Get expected area definition.""" proj_dict = {"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index fb4fd6831b..56acccdeb9 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -262,7 +262,7 @@ def cf_scene(datasets, common_attrs): return scene -@pytest.fixture() +@pytest.fixture def nc_filename(tmp_path): """Create an nc filename for viirs m band.""" now = dt.datetime.utcnow() @@ -270,7 +270,7 @@ def nc_filename(tmp_path): return str(tmp_path / filename) -@pytest.fixture() +@pytest.fixture def nc_filename_i(tmp_path): """Create an nc filename for viirs i band.""" now = dt.datetime.utcnow() diff --git a/satpy/tests/reader_tests/test_seviri_base.py 
b/satpy/tests/reader_tests/test_seviri_base.py index f705796521..1b81aa1599 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -250,7 +250,7 @@ def test_get_padding_area_int(): class TestSatellitePosition: """Test locating the satellite.""" - @pytest.fixture() + @pytest.fixture def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( @@ -269,7 +269,7 @@ def orbit_polynomial(self): ) ) - @pytest.fixture() + @pytest.fixture def time(self): """Get scan timestamp for testing.""" return dt.datetime(2006, 1, 1, 12, 15, 9, 304888) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index eab987d41f..e7a5d0f5f3 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -892,7 +892,7 @@ def test_calibrate( class TestNativeMSGDataset: """Tests for getting the dataset.""" - @pytest.fixture() + @pytest.fixture def file_handler(self): """Create a file handler for testing.""" trailer = { @@ -1133,7 +1133,7 @@ def test_padder_fes_hrv(self): class TestNativeMSGFilenames: """Test identification of Native format filenames.""" - @pytest.fixture() + @pytest.fixture def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index d77933b9a0..cbd66b10be 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -192,7 +192,7 @@ def _get_fake_dataset(self, counts, h5netcdf): return ds - @pytest.fixture() + @pytest.fixture def h5netcdf(self): """Fixture for xr backend choice.""" return False diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index f27d9d6f32..1e282297b1 100644 --- 
a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -31,7 +31,7 @@ # - tmp_path -@pytest.fixture() +@pytest.fixture def fake_dnb(): """Create fake DNB content.""" fake_dnb = { @@ -2419,7 +2419,7 @@ def fake_dnb(): return fake_dnb -@pytest.fixture() +@pytest.fixture def fake_dnb_file(fake_dnb, tmp_path): """Create an hdf5 file in viirs_compact format with DNB data in it.""" filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5" diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index e0f89212eb..2dd52786b2 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -31,7 +31,7 @@ from netCDF4 import Dataset -@pytest.fixture() +@pytest.fixture def nc_filename(tmp_path): """Create an nc test data file and return its filename.""" now = dt.datetime.utcnow() diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 2b9193f187..11db6b1252 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -149,7 +149,7 @@ def test_with_empty_scene(self): assert len(ds.variables) == 0 assert len(ds.coords) == 0 - @pytest.fixture() + @pytest.fixture def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition @@ -164,7 +164,7 @@ def single_area_scn(self): scn["var1"] = data_array return scn - @pytest.fixture() + @pytest.fixture def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 508533e7e4..2af010e9ac 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -394,14 +394,14 @@ def test_bad_areas_diff(self): comp((self.ds1, self.ds2_big)) 
-@pytest.fixture() +@pytest.fixture def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2)) -@pytest.fixture() +@pytest.fixture def fake_dataset_pair(fake_area): """Return a fake pair of 2×2 datasets.""" ds1 = xr.DataArray(da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area}) @@ -1586,7 +1586,7 @@ def test_multiple_sensors(self): class TestMaskingCompositor: """Test case for the simple masking compositor.""" - @pytest.fixture() + @pytest.fixture def conditions_v1(self): """Masking conditions with string values.""" return [{"method": "equal", @@ -1596,7 +1596,7 @@ def conditions_v1(self): "value": "Cloud-free_sea", "transparency": 50}] - @pytest.fixture() + @pytest.fixture def conditions_v2(self): """Masking conditions with numerical values.""" return [{"method": "equal", @@ -1606,12 +1606,12 @@ def conditions_v2(self): "value": 2, "transparency": 50}] - @pytest.fixture() + @pytest.fixture def test_data(self): """Test data to use with masking compositors.""" return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) - @pytest.fixture() + @pytest.fixture def test_ct_data(self): """Test 2D CT data array.""" flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] @@ -1624,18 +1624,18 @@ def test_ct_data(self): ct_data.attrs["flag_values"] = flag_values return ct_data - @pytest.fixture() + @pytest.fixture def test_ct_data_v3(self, test_ct_data): """Set ct data to NaN where it originally is 1.""" return test_ct_data.where(test_ct_data == 1) - @pytest.fixture() + @pytest.fixture def reference_data(self, test_data, test_ct_data): """Get reference data to use in masking compositor tests.""" # The data are set to NaN where ct is `1` return test_data.where(test_ct_data > 1) - @pytest.fixture() + @pytest.fixture def reference_alpha(self): """Get reference alpha to use in masking compositor tests.""" ref_alpha = 
da.array([[0, 0.5, 0.5], diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index df33436b45..81005e435f 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -154,7 +154,7 @@ def _fake_importlib_files(module_name: str) -> Path: return _fake_importlib_files -@pytest.fixture() +@pytest.fixture def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake compositor YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -182,7 +182,7 @@ def _write_fake_composite_yaml(yaml_filename: str) -> None: """) -@pytest.fixture() +@pytest.fixture def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake reader YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -205,7 +205,7 @@ def _write_fake_reader_yaml(yaml_filename: str) -> None: """) -@pytest.fixture() +@pytest.fixture def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake writer YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -226,7 +226,7 @@ def _write_fake_writer_yaml(yaml_filename: str) -> None: """) -@pytest.fixture() +@pytest.fixture def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake enhancement YAML configure files. 
diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 6971efca41..eb8983d3cf 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -74,7 +74,7 @@ real_import = builtins.__import__ -@pytest.fixture() +@pytest.fixture def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" @@ -85,7 +85,7 @@ def viirs_file(tmp_path, monkeypatch): return filename -@pytest.fixture() +@pytest.fixture def atms_file(tmp_path, monkeypatch): """Create a dummy atms file.""" filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 699f6619b6..9e9bc7fa00 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -1261,7 +1261,7 @@ def test_find_missing_segments(self): assert proj is projectable -@pytest.fixture() +@pytest.fixture @patch.object(yr.GEOVariableSegmentYAMLReader, "__init__", lambda x: None) def GVSYReader(): """Get a fixture of the GEOVariableSegmentYAMLReader.""" @@ -1272,28 +1272,28 @@ def GVSYReader(): return reader -@pytest.fixture() +@pytest.fixture def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" with patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh -@pytest.fixture() +@pytest.fixture def fake_xr(): """Get a fixture of the patched xarray.""" with patch("satpy.readers.yaml_reader.xr") as xr: yield xr -@pytest.fixture() +@pytest.fixture def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss -@pytest.fixture() +@pytest.fixture def fake_adef(): """Get a fixture of the patched AreaDefinition.""" with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: diff --git a/satpy/tests/writer_tests/test_cf.py 
b/satpy/tests/writer_tests/test_cf.py index 18a3682fc7..1a72894108 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -460,7 +460,7 @@ def test_global_attr_history_and_Conventions(self): class TestNetcdfEncodingKwargs: """Test netCDF compression encodings.""" - @pytest.fixture() + @pytest.fixture def scene(self): """Create a fake scene.""" scn = Scene() @@ -476,7 +476,7 @@ def compression_on(self, request): """Get compression options.""" return request.param - @pytest.fixture() + @pytest.fixture def encoding(self, compression_on): """Get encoding.""" enc = { @@ -492,19 +492,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) return enc - @pytest.fixture() + @pytest.fixture def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @pytest.fixture() + @pytest.fixture def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @pytest.fixture() + @pytest.fixture def expected(self, complevel_exp): """Get expectated file contents.""" return { @@ -552,7 +552,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestNetcdfEncodingKwargs): """Test CF writer with 'encoding' dataset attribute.""" - @pytest.fixture() + @pytest.fixture def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index e05150a571..31fcb25efc 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -463,7 +463,7 @@ def ntg_latlon(test_image_latlon): SatelliteNameID=654321) -@pytest.fixture() +@pytest.fixture def _patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: 
https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 From 99697f7b0eee263bc2aa1671278252af56e35f74 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 10 Sep 2024 10:37:02 +0300 Subject: [PATCH 22/47] Switch from Mambaforge to Miniforge --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f7b5f447c4..c9f5aa1f73 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -38,11 +38,11 @@ jobs: - name: Setup Conda Environment uses: conda-incubator/setup-miniconda@v3 with: - miniforge-variant: Mambaforge miniforge-version: latest - use-mamba: true python-version: ${{ matrix.python-version }} activate-environment: test-environment + mamba-version: "*" + channels: conda-forge - name: Set cache environment variables shell: bash -l {0} From 29f8e43f7148931c772551a0c4b1df57c2745f69 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Sep 2024 11:34:22 -0500 Subject: [PATCH 23/47] Add OCI L2 BGC reader --- satpy/etc/readers/oci_l2_bgc.yaml | 37 +++++++++++ satpy/readers/seadas_l2.py | 7 +- satpy/tests/reader_tests/test_oci_l2_bgc.py | 72 +++++++++++++++++++++ 3 files changed, 115 insertions(+), 1 deletion(-) create mode 100644 satpy/etc/readers/oci_l2_bgc.yaml create mode 100644 satpy/tests/reader_tests/test_oci_l2_bgc.py diff --git a/satpy/etc/readers/oci_l2_bgc.yaml b/satpy/etc/readers/oci_l2_bgc.yaml new file mode 100644 index 0000000000..74c0d21131 --- /dev/null +++ b/satpy/etc/readers/oci_l2_bgc.yaml @@ -0,0 +1,37 @@ +reader: + name: oci_l2_bgc + short_name: PACE OCI L2 BGC + long_name: PACE OCI L2 Biogeochemical in NetCDF format + description: PACE OCI L2 Biogeochemical Reader + status: Beta + supports_fsspec: false + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [oci] + +file_types: + bgc_nc: + file_patterns: + # Example: PACE_OCI.20240907T191809.L2.OC_BGC.V2_0.NRT.nc4 + - 
'{platform:s}_{sensor:s}.{start_time:%Y%m%dT%H%M%S}.L2.OC_BGC.V{sw_version:s}.{processing_type:s}nc4' + file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler + geo_resolution: 1000 + +datasets: + longitude: + name: longitude + file_type: [bgc_nc] + file_key: ["navigation_data/longitude", "longitude"] + resolution: 1000 + + latitude: + name: latitude + file_type: [bgc_nc] + file_key: ["navigation_data/latitude", "latitude"] + resolution: 1000 + + chlor_a: + name: chlor_a + file_type: [bgc_nc] + file_key: ["geophysical_data/chlor_a", "chlor_a"] + resolution: 1000 + coordinates: [longitude, latitude] diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 24ee429fda..d26a432f7c 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -54,6 +54,8 @@ def _rows_per_scan(self): return 10 if "viirs" in self.sensor_names: return 16 + if "oci" in self.sensor_names: + return 0 raise ValueError(f"Don't know how to read data for sensors: {self.sensor_names}") def _platform_name(self): @@ -82,7 +84,10 @@ def sensor_names(self): sensor_name = self[self.sensor_attr_name].lower() if sensor_name.startswith("modis"): return {"modis"} - return {"viirs"} + if sensor_name.startswith("viirs"): + return {"viirs"} + # Example: OCI + return {sensor_name} def get_dataset(self, data_id, dataset_info): """Get DataArray for the specified DataID.""" diff --git a/satpy/tests/reader_tests/test_oci_l2_bgc.py b/satpy/tests/reader_tests/test_oci_l2_bgc.py new file mode 100644 index 0000000000..80a3d8ca56 --- /dev/null +++ b/satpy/tests/reader_tests/test_oci_l2_bgc.py @@ -0,0 +1,72 @@ +# Copyright (c) 2024 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. 
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Tests for the 'oci_l2_bgc' reader."""
+
+import numpy as np
+import pytest
+from pyresample.geometry import SwathDefinition
+
+from satpy import Scene, available_readers
+
+from .test_seadas_l2 import _create_seadas_chlor_a_netcdf_file
+
+# NOTE:
+# The following fixtures are not defined in this file, but are used and injected by Pytest:
+# - tmp_path_factory
+
+
+@pytest.fixture(scope="module")
+def oci_l2_bgc_netcdf(tmp_path_factory):
+    """Create a PACE OCI L2 BGC SEADAS-style NetCDF file."""
+    filename = "PACE_OCI.20211118T175853.L2.OC_BGC.V2_0.NRT.nc4"
+    full_path = str(tmp_path_factory.mktemp("oci_l2_bgc") / filename)
+    return _create_seadas_chlor_a_netcdf_file(full_path, "PACE", "OCI")
+
+
+class TestSEADAS:
+    """Test the OCI L2 file reader."""
+
+    def test_available_reader(self):
+        """Test that OCI L2 reader is available."""
+        assert "oci_l2_bgc" in available_readers()
+
+    def test_scene_available_datasets(self, oci_l2_bgc_netcdf):
+        """Test that datasets are available."""
+        scene = Scene(reader="oci_l2_bgc", filenames=oci_l2_bgc_netcdf)
+        available_datasets = scene.all_dataset_names()
+        assert len(available_datasets) > 0
+        assert "chlor_a" in available_datasets
+
+    @pytest.mark.parametrize("apply_quality_flags", [False, True])
+    def test_load_chlor_a(self, oci_l2_bgc_netcdf, apply_quality_flags):
+        """Test that we can load 'chlor_a'."""
+        reader_kwargs = {"apply_quality_flags": apply_quality_flags}
+        scene = Scene(reader="oci_l2_bgc", filenames=oci_l2_bgc_netcdf, reader_kwargs=reader_kwargs)
+        scene.load(["chlor_a"])
+        data_arr = scene["chlor_a"]
+        assert data_arr.dims == ("y", "x")
+        assert data_arr.attrs["platform_name"] == "PACE"
+ assert data_arr.attrs["sensor"] == {"oci"} + assert data_arr.attrs["units"] == "mg m^-3" + assert data_arr.dtype.type == np.float32 + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert data_arr.attrs["rows_per_scan"] == 0 + data = data_arr.data.compute() + if apply_quality_flags: + assert np.isnan(data[2, 2]) + assert np.count_nonzero(np.isnan(data)) == 1 + else: + assert np.count_nonzero(np.isnan(data)) == 0 From 8f93b73ad69b565115ac8e40ed30225544588fc2 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Sep 2024 11:55:58 -0500 Subject: [PATCH 24/47] Try closing seadas test NetCDF before using it in tests --- satpy/tests/reader_tests/test_seadas_l2.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index 8343abbef2..eed0713366 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ b/satpy/tests/reader_tests/test_seadas_l2.py @@ -190,6 +190,7 @@ def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor): geophys_group = nc.createGroup("geophysical_data") _add_variable_to_netcdf_file(geophys_group, "chlor_a", chlor_a_info) _add_variable_to_netcdf_file(geophys_group, "l2_flags", l2_flags_info) + nc.close() return [full_path] From f310ac5fedefdc098a4f104593318d10624fcb72 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Sep 2024 13:27:29 -0500 Subject: [PATCH 25/47] Add enhancement for chlor_a of the oci_l2_bgc reader --- satpy/etc/enhancements/generic.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 04da1cfdd3..5d17154aab 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1192,6 +1192,18 @@ enhancements: factor: 21.0 min_stretch: 0.0 max_stretch: 20.0 + chlor_a_bgc: + name: chlor_a + reader: oci_l2_bgc + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: 
+ stretch: log + base: "10" + factor: 21.0 + min_stretch: 0.0 + max_stretch: 20.0 cimss_cloud_type: standard_name: cimss_cloud_type From fb447298d0f767f5421c0a66fd1d93499a95fa52 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Fri, 13 Sep 2024 10:30:48 +0200 Subject: [PATCH 26/47] implement pyPublicDecompWT --- pyproject.toml | 2 +- satpy/readers/hrit_base.py | 95 ++++++++++---------------------------- 2 files changed, 25 insertions(+), 72 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bac66423af..196ae6a462 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -55,7 +55,7 @@ msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"] abi_l1b = ["h5netcdf"] -seviri_l1b_hrit = ["pyorbital >= 1.3.1"] +seviri_l1b_hrit = ["pyorbital >= 1.3.1", "pyPublicDecompWT"] seviri_l1b_native = ["pyorbital >= 1.3.1"] seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] seviri_l2_bufr = ["eccodes"] diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index d0b9ee44db..b4a2918dce 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -33,14 +33,13 @@ import logging import os from contextlib import contextmanager, nullcontext -from io import BytesIO -from subprocess import PIPE, Popen # nosec B404 import dask import dask.array as da import numpy as np import xarray as xr from pyresample import geometry +from pyPublicDecompWT import xRITDecompress import satpy.readers.utils as utils from satpy import config @@ -96,61 +95,17 @@ } -def get_xritdecompress_cmd(): - """Find a valid binary for the xRITDecompress command.""" - cmd = os.environ.get("XRIT_DECOMPRESS_PATH", None) - if not cmd: - raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") - - question = ("Did you set the environment variable XRIT_DECOMPRESS_PATH correctly?") - if not os.path.exists(cmd): - 
raise IOError(str(cmd) + " does not exist!\n" + question) - elif os.path.isdir(cmd): - raise IOError(str(cmd) + " is a directory!\n" + question) - - return cmd - - -def get_xritdecompress_outfile(stdout): - """Analyse the output of the xRITDecompress command call and return the file.""" - outfile = b"" - for line in stdout: - try: - k, v = [x.strip() for x in line.split(b":", 1)] - except ValueError: - break - if k == b"Decompressed file": - outfile = v - break - - return outfile - - -def decompress(infile, outdir="."): - """Decompress an XRIT data file and return the path to the decompressed file. - - It expect to find Eumetsat's xRITDecompress through the environment variable - XRIT_DECOMPRESS_PATH. +def decompress(infile): + """ + Decompress an XRIT data file and return the decompressed buffer """ - cmd = get_xritdecompress_cmd() - infile = os.path.abspath(infile) - cwd = os.getcwd() - os.chdir(outdir) - - p = Popen([cmd, infile], stdout=PIPE) # nosec B603 - stdout = BytesIO(p.communicate()[0]) - status = p.returncode - os.chdir(cwd) - - if status != 0: - raise IOError("xrit_decompress '%s', failed, status=%d" % (infile, status)) - - outfile = get_xritdecompress_outfile(stdout) - if not outfile: - raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) + # decompress in-memory + with open(infile, mode="rb") as fh: + xrit = xRITDecompress() + xrit.decompress(fh.read()) - return os.path.join(outdir, outfile.decode("utf-8")) + return xrit.data() def get_header_id(fp): @@ -343,18 +298,6 @@ def _read_data(filename, mda): return HRITSegment(filename, mda).read_data() -@contextmanager -def decompressed(filename): - """Decompress context manager.""" - try: - new_filename = decompress(filename, config["tmp_dir"]) - except IOError as err: - logger.error("Unpacking failed: %s", str(err)) - raise - yield new_filename - os.remove(new_filename) - - class HRITSegment: """An HRIT segment with data.""" @@ -389,11 +332,21 @@ def 
_read_data_from_disk(self): # For reading the image data, unzip_context is faster than generic_open dtype, shape = self._get_input_info() with utils.unzip_context(self.filename) as fn: - with decompressed(fn) if self.compressed else nullcontext(fn) as filename: - return np.fromfile(filename, - offset=self.offset, - dtype=dtype, - count=np.prod(shape)) + + if self.compressed: + return np.frombuffer( + decompress(fn), + offset=self.offset, + dtype=dtype, + count=np.prod(shape) + ) + else: + return np.fromfile( + nullcontext(fn), + offset=self.offset, + dtype=dtype, + count=np.prod(shape) + ) def _read_file_like(self): # filename is likely to be a file-like object, already in memory From 3b5462d94e257f151a5c9d9f267bc241992ea166 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Fri, 13 Sep 2024 10:36:31 +0200 Subject: [PATCH 27/47] updated authors --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index d7e78eecae..c5f8607c85 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -76,6 +76,7 @@ The following people have made contributions to this project: - Marco Sassi - meteoswiss - [Stefan Scheiblauer (StefanSnippetCoder)](https://github.com/StefanSnippetCoder) - [Ronald Scheirer](https://github.com/) +- [Michael Schmutz (Graenni)](https://github.com/Graenni) - Meteotest AG - [Hauke Schulz (observingClouds)](https://github.com/observingClouds) - [Jakub Seidl (seidlj)](https://github.com/seidlj) - [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn) From 37ea808dcb9ab7505039118bba346fc17458b0d1 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Fri, 13 Sep 2024 10:44:28 +0200 Subject: [PATCH 28/47] remove obsolete doc string; add dependency on pyPublicDecompWT to project --- pyproject.toml | 3 ++- satpy/readers/hrit_base.py | 7 ++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 196ae6a462..eeceb99ee7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ 
dependencies = [ "pykdtree", "pyorbital", "pyproj>=2.2", + "pyPublicDecompWT", "pyresample>=1.24.0", "pyyaml>=5.1", "trollimage>=1.24", @@ -55,7 +56,7 @@ msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"] abi_l1b = ["h5netcdf"] -seviri_l1b_hrit = ["pyorbital >= 1.3.1", "pyPublicDecompWT"] +seviri_l1b_hrit = ["pyorbital >= 1.3.1"] seviri_l1b_native = ["pyorbital >= 1.3.1"] seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] seviri_l2_bufr = ["eccodes"] diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index b4a2918dce..b747f6c644 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -21,11 +21,8 @@ This module is the base module for all HRIT-based formats. Here, you will find the common building blocks for hrit reading. -One of the features here is the on-the-fly decompression of hrit files. It needs -a path to the xRITDecompress binary to be provided through the environment -variable called XRIT_DECOMPRESS_PATH. When compressed hrit files are then -encountered (files finishing with `.C_`), they are decompressed to the system's -temporary directory for reading. +One of the features here is the on-the-fly decompression of hrit files when +compressed hrit files are encountered (files finishing with `.C_`). """ From 43a1531a55379b0ab867b9498b1c6b0b586c4a71 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 13 Sep 2024 09:23:36 -0500 Subject: [PATCH 29/47] Add rows_per_scan information to reader metadata docs --- doc/source/reading.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/source/reading.rst b/doc/source/reading.rst index 0dbea74046..2f274e9b3f 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -305,6 +305,13 @@ time etc. The following attributes are standardized across all readers: should happen and when they actually do. 
See :ref:`time_metadata` below for details. * ``raw_metadata``: Raw, unprocessed metadata from the reader. +* ``rows_per_scan``: Optional integer indicating how many rows of data + represent a single scan of the instrument. This is primarily used by + some resampling algorithms (ex. EWA) to produce better results and only + makes sense for swath-based (usually polar-orbiting) instruments. For + example, MODIS 1km data has 10 rows of data per scan. If an instrument + does not have multiple rows per scan this should usually be set to 0 rather + than 1 to indicate that the entire swath should be treated as a whole. Note that the above attributes are not necessarily available for each dataset. From ae4876a9bbaee9685820bf586fd2f54fd4634b96 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Mon, 16 Sep 2024 11:45:43 +0200 Subject: [PATCH 30/47] fix formatting errors; remove obsolete tests --- satpy/readers/hrit_base.py | 8 +--- satpy/tests/reader_tests/test_hrit_base.py | 55 +--------------------- 2 files changed, 3 insertions(+), 60 deletions(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index b747f6c644..20253d44ec 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -29,7 +29,7 @@ import datetime as dt import logging import os -from contextlib import contextmanager, nullcontext +from contextlib import nullcontext import dask import dask.array as da @@ -39,7 +39,6 @@ from pyPublicDecompWT import xRITDecompress import satpy.readers.utils as utils -from satpy import config from satpy.readers import FSFile from satpy.readers.eum_base import time_cds_short from satpy.readers.file_handlers import BaseFileHandler @@ -93,10 +92,7 @@ def decompress(infile): - """ - Decompress an XRIT data file and return the decompressed buffer - """ - + """Decompress an XRIT data file and return the decompressed buffer.""" # decompress in-memory with open(infile, mode="rb") as fh: xrit = xRITDecompress() diff --git 
a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index a6c6472e40..7fbf51498e 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -29,7 +29,7 @@ import pytest from satpy.readers import FSFile -from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile +from satpy.readers.hrit_base import HRITFileHandler, decompress from satpy.tests.utils import RANDOM_GEN # NOTE: @@ -37,59 +37,6 @@ # - tmp_path -class TestHRITDecompress(unittest.TestCase): - """Test the on-the-fly decompression.""" - - def test_xrit_cmd(self): - """Test running the xrit decompress command.""" - old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) - - os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" - with pytest.raises(IOError, match=".* does not exist!"): - get_xritdecompress_cmd() - - os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() - with pytest.raises(IOError, match=".* is a directory!.*"): - get_xritdecompress_cmd() - - with NamedTemporaryFile() as fd: - os.environ["XRIT_DECOMPRESS_PATH"] = fd.name - fname = fd.name - res = get_xritdecompress_cmd() - - if old_env is not None: - os.environ["XRIT_DECOMPRESS_PATH"] = old_env - else: - os.environ.pop("XRIT_DECOMPRESS_PATH") - - assert fname == res - - def test_xrit_outfile(self): - """Test the right decompression filename is used.""" - stdout = [b"Decompressed file: bla.__\n"] - outfile = get_xritdecompress_outfile(stdout) - assert outfile == b"bla.__" - - @mock.patch("satpy.readers.hrit_base.Popen") - def test_decompress(self, popen): - """Test decompression works.""" - popen.return_value.returncode = 0 - popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] - - old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) - - with NamedTemporaryFile() as fd: - os.environ["XRIT_DECOMPRESS_PATH"] = fd.name - res = decompress("bla.C_") - - if old_env is not None: - 
os.environ["XRIT_DECOMPRESS_PATH"] = old_env - else: - os.environ.pop("XRIT_DECOMPRESS_PATH") - - assert res == os.path.join(".", "bla.__") - - # From a compressed msg hrit file. # uncompressed data field length 17223680 # compressed data field length 1578312 From 9971291b9837e8deae340318d55a567144ac5b43 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Mon, 16 Sep 2024 12:00:26 +0200 Subject: [PATCH 31/47] remove unused imports --- satpy/tests/reader_tests/test_hrit_base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 7fbf51498e..9695ffccdb 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -21,15 +21,13 @@ import datetime as dt import gzip import os -import unittest -from tempfile import NamedTemporaryFile, gettempdir from unittest import mock import numpy as np import pytest from satpy.readers import FSFile -from satpy.readers.hrit_base import HRITFileHandler, decompress +from satpy.readers.hrit_base import HRITFileHandler from satpy.tests.utils import RANDOM_GEN # NOTE: From 6fbf545418bf446482f0bf0ccea410f56ecbe078 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Mon, 16 Sep 2024 12:15:37 +0200 Subject: [PATCH 32/47] fix isort --- satpy/readers/hrit_base.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index 20253d44ec..a84aff9a9b 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -23,7 +23,6 @@ One of the features here is the on-the-fly decompression of hrit files when compressed hrit files are encountered (files finishing with `.C_`). 
- """ import datetime as dt @@ -35,8 +34,8 @@ import dask.array as da import numpy as np import xarray as xr -from pyresample import geometry from pyPublicDecompWT import xRITDecompress +from pyresample import geometry import satpy.readers.utils as utils from satpy.readers import FSFile From 9b6e660df46b3992b44f2e6f7356aa00a2b85294 Mon Sep 17 00:00:00 2001 From: Michael Date: Tue, 17 Sep 2024 13:59:16 +0200 Subject: [PATCH 33/47] Update satpy/readers/hrit_base.py Co-authored-by: Martin Raspaud --- satpy/readers/hrit_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index a84aff9a9b..5099473c8d 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -334,7 +334,7 @@ def _read_data_from_disk(self): ) else: return np.fromfile( - nullcontext(fn), + fn, offset=self.offset, dtype=dtype, count=np.prod(shape) From 1fdf6abc2beaf9e18e11f95fe020cb704c3c82a5 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Tue, 17 Sep 2024 14:05:15 +0200 Subject: [PATCH 34/47] update dependencies --- continuous_integration/environment.yaml | 1 + pyproject.toml | 3 +-- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index d91ff2ced3..ec5668c8e4 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -64,3 +64,4 @@ dependencies: - trollimage>=1.24 - pyspectral - pyorbital + - pyPublicDecompWT diff --git a/pyproject.toml b/pyproject.toml index eeceb99ee7..196ae6a462 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,6 @@ dependencies = [ "pykdtree", "pyorbital", "pyproj>=2.2", - "pyPublicDecompWT", "pyresample>=1.24.0", "pyyaml>=5.1", "trollimage>=1.24", @@ -56,7 +55,7 @@ msi_safe = ["rioxarray", "bottleneck", "python-geotiepoints", "defusedxml"] nc_nwcsaf_msg = ["netCDF4 >= 1.1.8"] sar_c = ["python-geotiepoints >= 1.1.7", "rasterio", 
"rioxarray", "defusedxml"] abi_l1b = ["h5netcdf"] -seviri_l1b_hrit = ["pyorbital >= 1.3.1"] +seviri_l1b_hrit = ["pyorbital >= 1.3.1", "pyPublicDecompWT"] seviri_l1b_native = ["pyorbital >= 1.3.1"] seviri_l1b_nc = ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"] seviri_l2_bufr = ["eccodes"] From 82dcd40c6a31946214fd51e132ed5399324592cb Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Tue, 17 Sep 2024 14:16:42 +0200 Subject: [PATCH 35/47] lint --- satpy/readers/hrit_base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index 5099473c8d..2bc3c89698 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -28,7 +28,6 @@ import datetime as dt import logging import os -from contextlib import nullcontext import dask import dask.array as da From aad7eba5a69a5364971f9ba0d08819096a8c5f22 Mon Sep 17 00:00:00 2001 From: Michael Schmutz Date: Tue, 17 Sep 2024 14:34:01 +0200 Subject: [PATCH 36/47] make dependencies for decompression optional --- satpy/readers/hrit_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index 2bc3c89698..665fb1b7c7 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -33,7 +33,6 @@ import dask.array as da import numpy as np import xarray as xr -from pyPublicDecompWT import xRITDecompress from pyresample import geometry import satpy.readers.utils as utils @@ -91,6 +90,8 @@ def decompress(infile): """Decompress an XRIT data file and return the decompressed buffer.""" + from pyPublicDecompWT import xRITDecompress + # decompress in-memory with open(infile, mode="rb") as fh: xrit = xRITDecompress() From 7375f7f5e5d234dab839e6c91c11ec42547de24b Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 18 Sep 2024 12:21:10 +0200 Subject: [PATCH 37/47] add test for MCD12Q1 MODIS reader --- .../modis_tests/_modis_fixtures.py | 61 +++++++++++++- .../modis_tests/test_modis_l3_mcd12q1.py 
| 79 +++++++++++++++++++ 2 files changed, 138 insertions(+), 2 deletions(-) create mode 100644 satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index d663f7b9d9..04704630e4 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -215,6 +215,23 @@ def _get_l1b_geo_variable_info(filename: str, variables_info.update(_get_angles_variable_info(geo_resolution)) return variables_info +def _get_l3_land_cover_info() -> dict: + + lc_data = np.zeros((2400, 2400), dtype=np.uint8) + + variables_info = \ + { + "LC_Type1": {"data": lc_data, + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": ["YDim:MCD12Q1", "XDim:MCD12Q1"], + }, + } + } + + return variables_info + def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" @@ -331,19 +348,30 @@ def _create_struct_metadata_cmg(ftype: str) -> str: gridline = 'GridName="MOD09CMG"\n' upleft = "UpperLeftPointMtrs=(-180000000.000000,90000000.000000)\n" upright = "LowerRightMtrs=(180000000.000000,-90000000.000000)\n" + XDim=7200 + YDim=3600 + # Case of a MCD12Q1 file + elif ftype == "MCD12Q1": + gridline = 'GridName="MCD12Q1"\n' + upleft = "UpperLeftPointMtrs=(-8895604.157333,-1111950.519667)\n" + upright = "LowerRightMtrs=(-7783653.637667,-2223901.039333)\n" + XDim=2400 + YDim=2400 # Case of a MCD43 file else: gridline = 'GridName="MCD_CMG_BRDF_0.05Deg"\n' upleft = "UpperLeftPointMtrs=(-180.000000,90.000000)\n" upright = "LowerRightMtrs=(180.000000,-90.000000)\n" + XDim=7200 + YDim=3600 struct_metadata_header = ("GROUP=SwathStructure\n" "END_GROUP=SwathStructure\n" "GROUP=GridStructure\n" "GROUP=GRID_1\n" f"{gridline}\n" - "XDim=7200\n" - "YDim=3600\n" + f"XDim={XDim}\n" + f"YDim={YDim}\n" f"{upleft}\n" f"{upright}\n" "END_GROUP=GRID_1\n" @@ -598,6 +626,10 
@@ def generate_nasa_l2_filename(prefix: str) -> str:
     now = dt.datetime.now()
     return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf"
 
+def generate_nasa_l3_tile_filename(prefix: str) -> str:
+    """Generate a file name that follows MODIS sinusoidal grid tile pattern."""
+    now = dt.datetime.now()
+    return f"{prefix}.A{now:%Y}001.h34v07.061.{now:%Y%j%H%M%S}.hdf"
 
 @pytest.fixture(scope="session")
 def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]:
@@ -613,6 +645,31 @@ def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]:
                                 _create_header_metadata())
     return [full_path]
 
+@pytest.fixture(scope="session")
+def modis_l3_nasa_mcd12q1_file(tmpdir_factory) -> list[str]:
+    """Create a single MCD12Q1 L3 HDF4 file with headers."""
+    filename = generate_nasa_l3_tile_filename("MCD12Q1")
+    full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename))
+    variable_infos = _get_l3_land_cover_info()
+    archive_header = \
+"""GROUP = ARCHIVEDMETADATA
+  GROUPTYPE = MASTERGROUP
+
+  OBJECT = NADIRDATARESOLUTION
+    NUM_VAL = 1
+    VALUE = "500m"
+  END_OBJECT = NADIRDATARESOLUTION
+
+END_GROUP = ARCHIVEDMETADATA
+
+END
+"""
+    create_hdfeos_test_file(full_path,
+                            variable_infos,
+                            _create_struct_metadata_cmg("MCD12Q1"),
+                            _create_core_metadata("MCD12Q1"),
+                            archive_header)
+    return [full_path]
 
 def generate_nasa_l3_filename(prefix: str) -> str:
     """Generate a file name that follows MODIS 09 L3 convention in a temporary directory."""
diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py
new file mode 100644
index 0000000000..c02d16cbec
--- /dev/null
+++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Unit tests for the MODIS L3 MCD12Q1 HDF reader."""
+
+from __future__ import annotations
+
+import dask
+import dask.array as da
+import numpy as np
+import pytest
+from pytest_lazy_fixtures import lf as lazy_fixture
+
+from satpy import Scene, available_readers
+from satpy.tests.utils import CustomScheduler, make_dataid
+
+from ._modis_fixtures import _shape_for_resolution, modis_l3_nasa_mcd12q1_file
+
+# NOTE:
+# The following fixtures are not defined in this file, but are used and injected by Pytest:
+# - modis_l2_imapp_mask_byte1_file
+# - modis_l2_imapp_mask_byte1_geo_files
+# - modis_l2_imapp_snowmask_file
+# - modis_l2_imapp_snowmask_geo_files
+# - modis_l2_nasa_mod06_file
+# - modis_l2_nasa_mod35_file
+# - modis_l2_nasa_mod35_mod03_files
+
+
+def _check_shared_metadata(data_arr, expect_area=False):
+    assert data_arr.attrs["sensor"] == "modis"
+    assert data_arr.attrs["platform_name"] == "EOS-Terra"
+    assert "rows_per_scan" in data_arr.attrs
+    assert isinstance(data_arr.attrs["rows_per_scan"], int)
+    assert data_arr.attrs["reader"] == "mcd12q1"
+    if expect_area:
+        assert data_arr.attrs.get("area") is not None
+    else:
+        assert "area" not in data_arr.attrs
+
+
+class TestModisL3MCD12Q1:
+    """Test MODIS L3 MCD12Q1 reader."""
+
+    def test_available_reader(self):
+        """Test that MODIS L3 reader is available."""
+        assert "mcd12q1" in available_readers()
+
+    def
test_scene_available_datasets(self, modis_l3_nasa_mcd12q1_file): + """Test that datasets are available.""" + scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert "LC_Type1" in available_datasets + + def test_load_l3_dataset(self, modis_l3_nasa_mcd12q1_file): + """Load and check an L2 variable.""" + scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) + ds_name = 'LC_Type1' + scene.load([ds_name]) + assert ds_name in scene + data_arr = scene[ds_name] + assert isinstance(data_arr.data, da.Array) + assert data_arr.attrs.get("resolution") == 500 + From 839900ae0c7caaf38c42db636d427c53e5187545 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 18 Sep 2024 13:36:20 +0200 Subject: [PATCH 38/47] pre-commit checks --- satpy/readers/mcd12q1.py | 6 +++--- .../modis_tests/test_modis_l3_mcd12q1.py | 18 ++---------------- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/satpy/readers/mcd12q1.py b/satpy/readers/mcd12q1.py index 28d56bdf83..ad3d336d50 100644 --- a/satpy/readers/mcd12q1.py +++ b/satpy/readers/mcd12q1.py @@ -94,8 +94,8 @@ def _get_res(self): raise ValueError("Only MCD12Q1 grids are supported") resolution_string = self.metadata["ARCHIVEDMETADATA"]["NADIRDATARESOLUTION"]["VALUE"] - if resolution_string[-1] == 'm': - return int(resolution_string.removesuffix('m')) + if resolution_string[-1] == "m": + return int(resolution_string.removesuffix("m")) else: raise ValueError("Cannot parse resolution of MCD12Q1 grid") @@ -103,7 +103,7 @@ def _get_res(self): def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" dataset_name = dataset_id["name"] - # xxx + # xxx dataset = self.load_dataset(dataset_name, dataset_info.pop("category", False)) self._add_satpy_metadata(dataset_id, dataset) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py 
b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py index c02d16cbec..21bb14f8b6 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py @@ -19,26 +19,13 @@ from __future__ import annotations -import dask import dask.array as da -import numpy as np -import pytest -from pytest_lazy_fixtures import lf as lazy_fixture from satpy import Scene, available_readers -from satpy.tests.utils import CustomScheduler, make_dataid - -from ._modis_fixtures import _shape_for_resolution, modis_l3_nasa_mcd12q1_file # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: -# - modis_l2_imapp_mask_byte1_file -# - modis_l2_imapp_mask_byte1_geo_files -# - modis_l2_imapp_snowmask_file -# - modis_l2_imapp_snowmask_geo_files -# - modis_l2_nasa_mod06_file -# - modis_l2_nasa_mod35_file -# - modis_l2_nasa_mod35_mod03_files +# - modis_l3_nasa_mcd12q1_file def _check_shared_metadata(data_arr, expect_area=False): @@ -70,10 +57,9 @@ def test_scene_available_datasets(self, modis_l3_nasa_mcd12q1_file): def test_load_l3_dataset(self, modis_l3_nasa_mcd12q1_file): """Load and check an L2 variable.""" scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) - ds_name = 'LC_Type1' + ds_name = "LC_Type1" scene.load([ds_name]) assert ds_name in scene data_arr = scene[ds_name] assert isinstance(data_arr.data, da.Array) assert data_arr.attrs.get("resolution") == 500 - From 65b9283d75829f55b8df2bf548574a33996c7504 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 18 Sep 2024 13:43:07 +0200 Subject: [PATCH 39/47] Fix two "Multiple spaces after operator. 
(E222)" errors --- satpy/readers/hdfeos_base.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 7be1045878..46cb9c6fb4 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -183,7 +183,7 @@ def start_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) - if len(date) == 19: + if len(date) == 19: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") else: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") @@ -199,7 +199,7 @@ def end_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) - if len(date) == 19: + if len(date) == 19: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") else: return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") From 87e91094bcadc0b3998b37e3e462c31d64aafbc8 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 18 Sep 2024 14:55:12 +0200 Subject: [PATCH 40/47] Fix hrit base test --- satpy/tests/reader_tests/test_hrit_base.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 9695ffccdb..b6949561b8 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -87,6 +87,14 @@ def stub_hrit_file(tmp_path): def create_stub_hrit(filename, open_fun=open, meta=mda): """Create a stub hrit file.""" + stub_hrit_data = create_stub_hrit_data(meta) + + with open_fun(filename, mode="wb") as fd: + fd.write(stub_hrit_data) + return filename + +def create_stub_hrit_data(meta): + """Create the data for the stub hrit.""" nbits = meta["number_of_bits_per_pixel"] lines = 
meta["number_of_lines"] cols = meta["number_of_columns"] @@ -94,11 +102,10 @@ def create_stub_hrit(filename, open_fun=open, meta=mda): arr = RANDOM_GEN.integers(0, 256, size=int(total_bits / 8), dtype=np.uint8) - with open_fun(filename, mode="wb") as fd: - fd.write(b" " * meta["total_header_length"]) - bytes_data = arr.tobytes() - fd.write(bytes_data) - return filename + header_data = b" " * meta["total_header_length"] + bytes_data = arr.tobytes() + stub_hrit_data = header_data + bytes_data + return stub_hrit_data @pytest.fixture @@ -221,10 +228,9 @@ def test_start_end_time(self): assert self.reader.end_time == self.reader.observation_end_time -def fake_decompress(infile, outdir="."): +def fake_decompress(): """Fake decompression.""" - filename = os.fspath(infile)[:-3] - return create_stub_hrit(filename) + return create_stub_hrit_data(mda) class TestHRITFileHandlerCompressed: From 862c558d87591eacc0aeb73608a1c5085d395479 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 18 Sep 2024 15:17:57 +0200 Subject: [PATCH 41/47] Fix test --- satpy/tests/reader_tests/test_hrit_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index b6949561b8..899ebf74f4 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -228,8 +228,9 @@ def test_start_end_time(self): assert self.reader.end_time == self.reader.observation_end_time -def fake_decompress(): +def fake_decompress(filename): """Fake decompression.""" + del filename return create_stub_hrit_data(mda) From bff96ff46fe0da7b7bd8e33208a36f6ce4ffc1e4 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 18 Sep 2024 22:49:54 +0200 Subject: [PATCH 42/47] add import of fixture for pytest --- satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py index 21bb14f8b6..204e4affae 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py @@ -23,9 +23,12 @@ from satpy import Scene, available_readers +from ._modis_fixtures import modis_l3_nasa_mcd12q1_file as _ # noqa: F401 + # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: # - modis_l3_nasa_mcd12q1_file +# The import above is necessary for pytest to know about the fixture def _check_shared_metadata(data_arr, expect_area=False): From e7f79ac5dc5910dc372aaf6b5bb01b927c487583 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Wed, 18 Sep 2024 22:50:13 +0200 Subject: [PATCH 43/47] add function _modis_date --- satpy/readers/hdfeos_base.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 46cb9c6fb4..0f563efe2c 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -89,6 +89,12 @@ def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_res logger.debug("Interpolating from {} to {}".format(src_resolution, dst_resolution)) return interpolation_function(*args) +def _modis_date(date): + """Transform a date and time string into a datetime object.""" + if len(date) == 19: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") + else: + return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") class HDFEOSBaseFileReader(BaseFileHandler): """Base file handler for HDF EOS data for both L1b and L2 products.""" @@ -183,10 +189,7 @@ def start_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) - if len(date) == 19: - return 
dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") - else: - return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return _modis_date(date) except KeyError: return self._start_time_from_filename() @@ -199,10 +202,7 @@ def end_time(self): try: date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) - if len(date) == 19: - return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S") - else: - return dt.datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") + return _modis_date(date) except KeyError: return self.start_time From db3b2decfe0d5565cbf75fd9731294d2a7e682a3 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 19 Sep 2024 09:43:36 +0200 Subject: [PATCH 44/47] Add modis_l3_nasa_mcd12q1_file to conftest.py --- satpy/tests/reader_tests/modis_tests/conftest.py | 1 + satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 309b16321f..5a1c0778bd 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -29,6 +29,7 @@ modis_l2_imapp_mask_byte1_geo_files, modis_l2_imapp_snowmask_file, modis_l2_imapp_snowmask_geo_files, + modis_l3_nasa_mcd12q1_file, modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py index 204e4affae..21bb14f8b6 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py @@ -23,12 +23,9 @@ from satpy import Scene, available_readers -from ._modis_fixtures import modis_l3_nasa_mcd12q1_file as _ # noqa: F401 - # NOTE: # The following fixtures are not 
defined in this file, but are used and injected by Pytest: # - modis_l3_nasa_mcd12q1_file -# The import above is necessary for pytest to know about the fixture def _check_shared_metadata(data_arr, expect_area=False): From 3ab2fdc3eaa68d42402e76f4c6dee09a5f2a7486 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 19 Sep 2024 10:30:09 +0200 Subject: [PATCH 45/47] fix isort --- satpy/tests/reader_tests/modis_tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 5a1c0778bd..1b47007e63 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -29,10 +29,10 @@ modis_l2_imapp_mask_byte1_geo_files, modis_l2_imapp_snowmask_file, modis_l2_imapp_snowmask_geo_files, - modis_l3_nasa_mcd12q1_file, modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, + modis_l3_nasa_mcd12q1_file, modis_l3_nasa_mod09_file, modis_l3_nasa_mod43_file, ) From 097a655bb3105ebdcb49e3e24f10d06bba482f39 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 19 Sep 2024 12:17:39 +0200 Subject: [PATCH 46/47] improve coverage --- .../modis_tests/_modis_fixtures.py | 9 ++++++++- .../modis_tests/test_modis_l3_mcd12q1.py | 20 ++++++++----------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 04704630e4..b47b63c18c 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -227,7 +227,14 @@ def _get_l3_land_cover_info() -> dict: "attrs": { "dim_labels": ["YDim:MCD12Q1", "XDim:MCD12Q1"], }, - } + }, + "LC_Type2": {"data": lc_data, + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": ["YDim:MCD12Q1", "XDim:MCD12Q1"], + }, + }, } return variables_info diff 
--git a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py index 21bb14f8b6..c34ec438e5 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py @@ -28,18 +28,6 @@ # - modis_l3_nasa_mcd12q1_file -def _check_shared_metadata(data_arr, expect_area=False): - assert data_arr.attrs["sensor"] == "modis" - assert data_arr.attrs["platform_name"] == "EOS-Terra" - assert "rows_per_scan" in data_arr.attrs - assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs["reader"] == "mcd12q1" - if expect_area: - assert data_arr.attrs.get("area") is not None - else: - assert "area" not in data_arr.attrs - - class TestModisL3MCD12Q1: """Test MODIS L3 MCD12Q1 reader.""" @@ -47,6 +35,14 @@ def test_available_reader(self): """Test that MODIS L3 reader is available.""" assert "mcd12q1" in available_readers() + def test_metadata(self, modis_l3_nasa_mcd12q1_file): + """Test some basic metadata that should exist in the file.""" + scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) + ds_name = "LC_Type2" + scene.load([ds_name]) + assert scene[ds_name].attrs['area'].description == 'Tiled sinusoidal L3 MODIS area' + assert scene[ds_name].attrs['sensor'] == 'modis' + def test_scene_available_datasets(self, modis_l3_nasa_mcd12q1_file): """Test that datasets are available.""" scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) From 9e3b3424fcb7e4ee4ecb83a6efb89d53f7e19774 Mon Sep 17 00:00:00 2001 From: Pierre de Buyl Date: Thu, 19 Sep 2024 13:35:17 +0200 Subject: [PATCH 47/47] fix quotes --- satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py index c34ec438e5..12729ea83c 100644 
--- a/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3_mcd12q1.py @@ -40,8 +40,8 @@ def test_metadata(self, modis_l3_nasa_mcd12q1_file): scene = Scene(reader="mcd12q1", filenames=modis_l3_nasa_mcd12q1_file) ds_name = "LC_Type2" scene.load([ds_name]) - assert scene[ds_name].attrs['area'].description == 'Tiled sinusoidal L3 MODIS area' - assert scene[ds_name].attrs['sensor'] == 'modis' + assert scene[ds_name].attrs["area"].description == "Tiled sinusoidal L3 MODIS area" + assert scene[ds_name].attrs["sensor"] == "modis" def test_scene_available_datasets(self, modis_l3_nasa_mcd12q1_file): """Test that datasets are available."""