diff --git a/.github/workflows/build_docker_and_trigger_metplus.yml b/.github/workflows/build_docker_and_trigger_metplus.yml index 7d1ab738d8..27def1339e 100644 --- a/.github/workflows/build_docker_and_trigger_metplus.yml +++ b/.github/workflows/build_docker_and_trigger_metplus.yml @@ -4,6 +4,7 @@ on: push: branches: - develop + - 'main_v[0-9]+.[0-9]+' paths-ignore: - 'docs/**' @@ -43,24 +44,6 @@ jobs: runs-on: ubuntu-latest needs: build_met_docker steps: - - name: Print GitHub values for reference - env: - GITHUB_CONTEXT: ${{ toJson(github) }} - run: echo "$GITHUB_CONTEXT" - - uses: actions/github-script@v7 + - uses: dtcenter/metplus-action-trigger-use-cases@v1 with: - github-token: ${{ secrets.METPLUS_BOT_TOKEN }} - script: | - await github.rest.actions.createWorkflowDispatch({ - owner: 'dtcenter', - repo: 'METplus', - workflow_id: 'testing.yml', - ref: 'develop', - inputs: { - repository: '${{ github.repository }}', - ref: '${{ github.ref }}', - actor: '${{ github.actor }}', - sha: '${{ github.sha }}', - pusher_email: '${{ github.event.pusher.email }}' - }, - }); + token: ${{ secrets.METPLUS_BOT_TOKEN }} diff --git a/docs/Flowchart/MET_flowchart.pptx b/docs/Flowchart/MET_flowchart.pptx index cc62a9a960..64c3a64d54 100644 Binary files a/docs/Flowchart/MET_flowchart.pptx and b/docs/Flowchart/MET_flowchart.pptx differ diff --git a/docs/Flowchart/MET_flowchart_v12.0.0.png b/docs/Flowchart/MET_flowchart_v12.0.0.png new file mode 100644 index 0000000000..d34a9fefe5 Binary files /dev/null and b/docs/Flowchart/MET_flowchart_v12.0.0.png differ diff --git a/docs/Users_Guide/appendixA.rst b/docs/Users_Guide/appendixA.rst index 384422af1f..5f7a861ae0 100644 --- a/docs/Users_Guide/appendixA.rst +++ b/docs/Users_Guide/appendixA.rst @@ -139,8 +139,7 @@ Q. What types of NetCDF files can MET read? the wrf_interp utility 3. 
NetCDF data following the `climate-forecast (CF) convention - `_ + `_ Lastly, users can write python scripts to pass data that's gridded to the MET tools in memory. If the data doesn't fall into one of those categories, diff --git a/docs/Users_Guide/data_io.rst b/docs/Users_Guide/data_io.rst index 03d7ab02e6..0221cc7888 100644 --- a/docs/Users_Guide/data_io.rst +++ b/docs/Users_Guide/data_io.rst @@ -22,19 +22,19 @@ Tropical cyclone forecasts and observations are typically provided in a specific Requirements for CF Compliant NetCDF ------------------------------------ -The MET tools use following attributes and variables for input CF Compliant NetCDF data. +The MET tools use following attributes and variables for input "`CF Compliant NetCDF data `_". 1. The global attribute "Conventions". -2. The "`standard_name `_" and "`units `_" attributes for coordinate variables. The "`axis `_" attribute ("T" or "time") must exist as the time variable if the "standard_name" attribute does not exist. +2. The "`standard_name `_" and "`units `_" attributes for coordinate variables. The "`axis `_" attribute ("T" or "time") must exist as the time variable if the "standard_name" attribute does not exist. -3. The "`coordinates `_" attribute for the data variables. It contains the coordinate variable names. +3. The "`coordinates `_" attribute for the data variables. It contains the coordinate variable names. -4. The "`grid_mapping `_" attribute for the data variables for projections and the matching grid mapping variable (optional for the latitude_longitude projection). +4. The "`grid_mapping `_" attribute for the data variables for projections and the matching grid mapping variable (optional for latitude_longitude and rotated_latitude_longitude projections). 5. The gridded data should be evenly spaced horizontally and vertically. -6. (Optional) the "`forecast_reference_time `_" variable for init_time. +6. (Optional) the "`forecast_reference_time `_" variable for init_time. 
MET processes the CF-Compliant gridded NetCDF files with the projection information. The CF-Compliant NetCDF is defined by the global attribute "Conventions" whose value begins with "CF-" ("CF-"). The global attribute "Conventions" is mandatory. MET accepts the variation of this attribute ("conventions" and "CONVENTIONS"). The value should be started with "CF-" and followed by the version number. MET accepts the attribute value that begins with "CF " ("CF" and a space instead of a hyphen) or "COARDS". @@ -78,7 +78,7 @@ Here are examples for the grid mapping variable ("edr" is the data variable): grid_mapping:GRIB_earth_shape = "spherical" ; grid_mapping:GRIB_earth_shape_code = 0 ; -When the grid mapping variable is not available, MET detects the latitude_longitude projection in following order: +When the grid mapping variable is not available, MET can detect either a latitude_longitude or rotated_latitude_longitude projection. It detects the latitude_longitude projection in the following order: 1. the lat/lon projection from the dimensions @@ -98,6 +98,16 @@ MET gets the valid time from the time variable and the "forecast_reference_time" 2. TRMM_3B42_daily_filename (3B42_daily...
.7.G3.nc) +For rotated_latitude_longitude projections, MET detects the projection using the following logic: + +1. Looking for a variable that has the same name as a dimension. + +2. If a variable is found, checking to make sure the variable has a standard name attribute and that that attribute is not empty. + +3. Checking to see if the standard name attribute is called grid_latitude for latitude variables and grid_longitude for the longitude variable. + +The latitude and longitude variables must be one dimensional and with their size matching the corresponding dimension for latitude_longitude and rotated_latitude_longitude grids. + .. list-table:: Valid strings for the "units" attribute. :widths: auto :header-rows: 1 diff --git a/docs/Users_Guide/ensemble-stat.rst b/docs/Users_Guide/ensemble-stat.rst index 1680164d70..7ee5dada75 100644 --- a/docs/Users_Guide/ensemble-stat.rst +++ b/docs/Users_Guide/ensemble-stat.rst @@ -533,10 +533,10 @@ The format of the STAT and ASCII output of the Ensemble-Stat tool are described - Version number * - 2 - MODEL - - User provided text string designating model name + - User-provided text string designating model name * - 3 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format diff --git a/docs/Users_Guide/figure/overview-figure.png b/docs/Users_Guide/figure/overview-figure.png index b7b11f32c2..d34a9fefe5 100644 Binary files a/docs/Users_Guide/figure/overview-figure.png and b/docs/Users_Guide/figure/overview-figure.png differ diff --git a/docs/Users_Guide/grid-stat.rst b/docs/Users_Guide/grid-stat.rst index 631afbdaf2..a70159c4e0 100644 --- a/docs/Users_Guide/grid-stat.rst +++ b/docs/Users_Guide/grid-stat.rst @@ -509,10 +509,10 @@ The format of the STAT and ASCII output of the Grid-Stat tool are the same as th - Version number * - 2 - MODEL - - User provided text string designating model name + - 
User-provided text string designating model name * - 3 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format @@ -551,7 +551,7 @@ The format of the STAT and ASCII output of the Grid-Stat tool are the same as th - Selected Vertical level for observations * - 16 - OBTYPE - - User provided text string designating the observation type + - User-provided text string designating the observation type * - 17 - VX_MASK - Verifying masking region indicating the masking grid or polyline region applied diff --git a/docs/Users_Guide/masking.rst b/docs/Users_Guide/masking.rst index 5dd8fe72d8..9d75704820 100644 --- a/docs/Users_Guide/masking.rst +++ b/docs/Users_Guide/masking.rst @@ -1,15 +1,15 @@ .. _masking: -******************************************* -Regional Verification using Spatial Masking -******************************************* +*************** +Spatial Masking +*************** -Verification over a particular region or area of interest may be performed using "masking". Defining a masking region is simply selecting the desired set of grid points to be used. The Gen-Vx-Mask tool automates this process and replaces the Gen-Poly-Mask and Gen-Circle-Mask tools from previous releases. It may be run to create a bitmap verification masking region to be used by many of the statistical tools. This tool enables the user to generate a masking region once for a domain and apply it to many cases. It has been enhanced to support additional types of masking region definition (e.g. tropical-cyclone track over water only). An iterative approach may be used to define complex areas by combining multiple masking regions together. +Verification over a particular region or area of interest may be performed using "masking". Defining a masking region is simply selecting the desired set of grid points to be used. 
The Gen-Vx-Mask tool automates this process and replaces the Gen-Poly-Mask and Gen-Circle-Mask tools from previous releases. It may be run to create a bitmap verification masking region to be used by many of the statistical tools. This tool enables the user to generate a masking region once for a domain and apply it to many cases. It supports multiple methods for defining regional spatial masks, as described below. In addition, Gen-Vx-Mask can be run iteratively, passing the output from one run as input to the next, to combine multiple masking regions and define a complex area of interest. Gen-Vx-Mask Tool ================ -The Gen-Vx-Mask tool may be run to create a bitmap verification masking region to be used by the MET statistics tools. This tool enables the user to generate a masking region once for a domain and apply it to many cases. While the MET statistics tools can define some masking regions on the fly using polylines, doing so can be slow, especially for complex polylines containing hundreds of vertices. Using the Gen-Vx-Mask tool to create a bitmap masking region before running the other MET tools will make them run more efficiently. +The Gen-Vx-Mask tool may be run to create a bitmap verification masking region to be used by the MET statistics tools. This tool enables the user to generate a masking region once for a domain and apply it to many cases. While the MET statistics tools can define some masking regions on the fly using pre-defined grids and polylines, doing so can be slow, especially for complex polylines containing hundreds of vertices. Using the Gen-Vx-Mask tool to create a bitmap masking region before running the other MET tools will make them run more efficiently. gen_vx_mask Usage ----------------- @@ -38,61 +38,69 @@ The usage statement for the Gen-Vx-Mask tool is shown below: [-v level] [-compress level] -gen_vx_mask has four required arguments and can take optional ones. 
Note that **-type string** (masking type) was previously optional but is now required. +gen_vx_mask has four required arguments and can take optional ones. Note that **-type string** (masking type) was optional in prior versions but is now required. Required Arguments for gen_vx_mask ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -1. The **input_grid** argument is a named grid, the path to a gridded data file, or an explicit grid specification string (see :numref:`App_B-grid_specification_strings`) which defines the grid for which a mask is to be defined. If set to a gen_vx_mask output file, automatically read mask data as the **input_field**. +1. The **input_grid** is a named grid, the path to a gridded data file, or an explicit grid specification string (see :numref:`App_B-grid_specification_strings`) which defines the grid for which a mask is to be defined. If set to a gen_vx_mask output file, automatically read mask data as the **input_field**. -2. The **mask_file** argument defines the masking information, see below. +2. The **mask_file** defines the masking information, see below. • For "poly", "poly_xy", "box", "circle", and "track" masking, specify an ASCII Lat/Lon file. Refer to :ref:`Types_of_masking_gen_vx_mask` for details on how to construct the ASCII Lat/Lon file for each type of mask. -• For "grid" and "data" masking, specify a gridded data file. +• For "grid" masking, specify a named grid, the path to a gridded data file, or an explicit grid specification. -• For "solar_alt" and "solar_azi" masking, specify a gridded data file or a time string in YYYYMMDD[_HH[MMSS]] format. +• For "data" masking, specify a gridded data file. -• For "lat" and "lon" masking, no "mask_file" needed, simply repeat the path for "input_file". +• For "solar_alt", "solar_azi", and "solar_time" masking, specify a gridded data file or a time string in YYYYMMDD[_HH[MMSS]] UTC format. -• For "shape" masking, specify an ESRI shapefile (.shp). 
+• For "lat" and "lon" masking, no "mask_file" is needed, simply repeat "input_grid". -3. The **out_file** argument is the output NetCDF mask file to be written. +• For "shape" masking, specify a shapefile (suffix ".shp"). + +3. The **out_file** is the output NetCDF mask file to be written. + +4. The **-type string** is a comma-separated list of masking types to be applied. The application will print an error message and exit if "-type string" is not specified at least once on the command line. Use multiple times for multiple mask types. See a list of supported masking types described below. + +.. note:: + + While multiple **-type** mask types can be requested in a single run, all requested masking types must use the same **mask_file** setting. -4. The **-type string** is required to set the masking type. The application will give an error message and exit if "-type string" is not specified on the command line. See the description of supported types below. - Optional Arguments for gen_vx_mask ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -5. The **-input_field string** option can be used to read existing mask data from "input_file". +5. The **-input_field string** option initializes the "input_grid" with values from this field. -6. The **-mask_field string** option can be used to define the field from "mask_file" to be used for "data" masking. +6. The **-mask_field string** option defines the field from "mask_file" to be used for "data" masking. Use multiple times for multiple mask types. 7. The **-complement** option can be used to compute the complement of the area defined by "mask_file". -8. The **-union | -intersection | -symdiff** option can be used to specify how to combine the masks from "input_file" and "mask_file". +8. The **-union | -intersection | -symdiff** options specify how to combine multiple binary masks. Applies to masks read from the "input_field" and those generated during the current run. -9. 
The **-thresh string** option can be used to define the threshold to be applied. +9. The **-thresh string** option is a comma-separated list of thresholds to be applied. Use multiple times for multiple mask types. • For "circle" and "track" masking, threshold the distance (km). • For "data" masking, threshold the values of "mask_field". -• For "solar_alt" and "solar_azi" masking, threshold the computed solar values. +• For "solar_alt" and "solar_azi" masking, threshold the computed solar values (deg). + +• For "solar_time" masking, threshold the solar time (hr). -• For "lat" and "lon" masking, threshold the latitude and longitude values. +• For "lat" and "lon" masking, threshold the latitude and longitude values (deg). -10. The **-height n** and **-width n** options set the size in grid units for "box" masking. +10. The **-height n** and **-width n** options specify the dimensions in grid units for "box" masking. 11. The **-shapeno n** option is only used for shapefile masking. See the description of shapefile masking below. 12. The **-shape_str name string** option is only used for shapefile masking. See the description of shapefile masking below. -13. The **-value n** option can be used to override the default output mask data value (1). +13. The **-value n** option overrides the default output mask data value (1). -14. The **-name string** option can be used to specify the output variable name for the mask. +14. The **-name string** option specifies the output variable name for the mask. -15. The **-log file** option directs output and errors to the specified log file. All messages will be written to that file as well as standard out and error. Thus, users can save the messages without having to redirect the output on the command line. The default behavior is no log file. +15. The **-log file** option writes log messages to the specified log file. All messages will be written to that file as well as standard out and error. 
Thus, users can save the messages without having to redirect the output on the command line. The default behavior is no log file. 16. The **-v level** option indicates the desired level of verbosity. The value of "level" will override the default setting of 2. Setting the verbosity to 0 will make the tool run with no log messages, while increasing the verbosity will increase the amount of logging. @@ -118,11 +126,13 @@ The Gen-Vx-Mask tool supports the following types of masking region definition s 7. Data (**data**) masking reads an input gridded data file, extracts the field specified using the **-mask_field** command line option, thresholds the data using the **-thresh** command line option, and selects grid points which meet that threshold criteria. The option is useful when thresholding topography to define a mask based on elevation or when threshold land use to extract a particular category. -8. Solar altitude (**solar_alt**) and solar azimuth (**solar_azi**) masking computes the solar altitude and azimuth values at each grid point for the time defined by the **mask_file** setting. **mask_file** may either be set to an explicit time string in YYYYMMDD[_HH[MMSS]] format or to a gridded data file. If set to a gridded data file, the **-mask_field** command line option specifies the field of data whose valid time should be used. If the **-thresh** command line option is not used, the raw solar altitude or azimuth value for each grid point will be written to the output. If it is used, the resulting binary mask field will be written. This option is useful when defining a day/night mask. +8. Solar altitude (**solar_alt**) and solar azimuth (**solar_azi**) masking computes the solar altitude and azimuth values in degrees at each grid point for the time defined by the **mask_file** setting. **mask_file** may either be set to an explicit time string in YYYYMMDD[_HH[MMSS]] UTC format or to a gridded data file. 
If set to a gridded data file, the **-mask_field** command line option specifies the field of data whose valid time should be used. If the **-thresh** command line option is not used, the raw solar altitude or azimuth degrees for each grid point will be written to the output. If it is used, the resulting binary mask field will be written. This option is useful when defining a day/night mask. -9. Latitude (**lat**) and longitude (**lon**) masking computes the latitude and longitude value at each grid point. This logic only requires the definition of the grid, specified by the **input_file**. Technically, the **mask_file** is not needed, but a value must be specified for the command line to parse correctly. Users are advised to simply repeat the **input_file** setting twice. If the **-thresh** command line option is not used, the raw latitude or longitude values for each grid point will be written to the output. This option is useful when defining latitude or longitude bands over which to compute statistics. +9. Solar time (**solar_time**) masking computes the solar time in decimal hours at each grid point for the time defined by the **mask_file** setting, as described above. The solar hours of the day range from 0 to 24, with a value of 12 indicating solar noon. Note that solar time is based only on longitude. If the **-thresh** command line option is not used, the raw solar time hours will be written to the output. -10. Shapefile (**shape**) masking uses closed polygons taken from an ESRI shapefile to define the masking region. Gen-Vx-Mask reads the shapefile with the ".shp" suffix and extracts the latitude and longitudes of the vertices. The shapefile must consist of closed polygons rather than polylines, points, or any of the other data types that shapefiles support. When the **-shape_str** command line option is used, Gen-Vx-Mask also reads metadata from the corresponding dBASE file with the ".dbf" suffix. +10. 
Latitude (**lat**) and longitude (**lon**) masking computes the latitude and longitude value at each grid point. This logic only requires the definition of the grid, specified by the **input_file**. Technically, the **mask_file** is not needed, but a value must be specified for the command line to parse correctly. Users are advised to simply repeat the **input_file** setting twice. If the **-thresh** command line option is not used, the raw latitude or longitude values for each grid point will be written to the output. This option is useful when defining latitude or longitude bands over which to compute statistics. + +11. Shapefile (**shape**) masking uses closed polygons taken from an ESRI shapefile to define the masking region. Gen-Vx-Mask reads the shapefile with the ".shp" suffix and extracts the latitude and longitudes of the vertices. The shapefile must consist of closed polygons rather than polylines, points, or any of the other data types that shapefiles support. When the **-shape_str** command line option is used, Gen-Vx-Mask also reads metadata from the corresponding dBASE file with the ".dbf" suffix. Shapefiles usually contain more than one polygon, and the user must select which of these shapes should be used. The **-shapeno n** and **-shape_str name string** command line options enable the user to select one or more polygons from the shapefile. For **-shapeno n**, **n** is a comma-separated list of integer shape indices to be used. Note that these values are zero-based. So the first polygon in the shapefile is shape number 0, the second polygon in the shapefile is shape number 1, etc. For example, **-shapeno 0,1,2** uses the first three shapes in the shapefile. When multiple shapes are specified, the mask is defined as their union. So all grid points falling inside at least one of the specified shapes are included in the mask. 
@@ -130,50 +140,76 @@ The Gen-Vx-Mask tool supports the following types of masking region definition s The polyline, polyline XY, box, circle, and track masking methods all read an ASCII file containing Lat/Lon locations. Those files must contain a string, which defines the name of the masking region, followed by a series of whitespace-separated latitude (degrees north) and longitude (degree east) values. +Logic for gen_vx_mask +^^^^^^^^^^^^^^^^^^^^^ + The Gen-Vx-Mask tool performs three main steps, described below. -1. Determine the **input_field** and grid definition. +1. Determine the input grid definition. -• Read the **input_file** to determine the grid over which the mask should be defined. +• Read the **input_grid** to determine the grid over which the mask should be defined. -• By default, initialize the **input_field** at each grid point to a value of zero. +• By default, initialize the input field value at each grid point to zero. -• If the **-input_field** option was specified, initialize the **input_field** at each grid point to the value of that field. +• If the **-input_field** option was specified, initialize each input field value using the values from that field. -• If the **input_file** is the output from a previous run of Gen-Vx-Mask, automatically initialize each grid point with the **input_field** value. +• If the **input_grid** is the output from a previous run of Gen-Vx-Mask, automatically initialize each input field value with the previously-generated mask value. -2. Determine the **mask_field**. +2. Process each of the requested masking regions. -• Read the **mask_file**, process it based on the **-type** setting (as described above), and define the **mask_field** value for each grid point to specify whether or not it is included in the mask. +• For each **-type** mask type option requested, process the **mask_file** setting. 
+ +• Read the **mask_file**, process it based on the **-type** setting (as described above), and define the masking region value for each grid point to specify whether or not it is included in the mask. • By default, store the mask value as 1 unless the **-value** option was specified to override that default value. -• If the **-complement** option was specified, the opposite of the masking area is selected. +• If the **-complement** option was specified, select the opposite of the masking area. + +• Apply logic to combine the newly generated masking region with those defined by previous **-type** mask type options to create a **mask_field**. -3. Apply logic to combine the **input_field** and **mask_field** and write the **out_file**. + • By default, compute the **-union** of multiple masks, unless **-intersection** or **-symdiff** were specified to override this default. -• By default, the output value at each grid point is set to the value of **mask_field** if included in the mask, or the value of **input_field** if not included. +3. Apply logic to combine the input field and current masking region and write the **out_file**. -• If the **-union, -intersection**, or **-symdiff** option was specified, apply that logic to the **input_field** and **mask_field** values at each grid point to determine the output value. +• By default, the output value at each grid point is set to the value of current masking region if included in the mask, or the value of **input_field** if not included. + +• If the **-union, -intersection**, or **-symdiff** option was specified, apply that logic to the input field and current masking region values at each grid point to determine the output value. • Write the output value for each grid point to the **out_file**. -This three step process enables the Gen-Vx-Mask tool to be run iteratively on its own output to generate complex masking areas. 
Additionally, the **-union, -intersection**, and **-symdiff** options control the logic for combining the input data value and current mask value at each grid point. For example, one could define a complex masking region by selecting grid points with an elevation greater than 1000 meters within a specified geographic region by doing the following: +Examples for gen_vx_mask +^^^^^^^^^^^^^^^^^^^^^^^^ + +An example of defining the northwest hemisphere of the earth, as defined by latitudes >= 0 and longitudes < 0, in a single run is shown below: + +.. code-block:: none + + gen_vx_mask G004 G004 northwest_hemisphere.nc \ + -type lat,lon -thresh ge0,lt0 \ + -intersection -name nw_hemisphere + -• Run the Gen-Vx-Mask tool to apply data masking by thresholding a field of topography greater than 1000 meters. +The Gen-Vx-Mask tool can be run iteratively on its own output using different **mask_file** settings to generate complex masking areas. The **-union, -intersection**, and **-symdiff** options control the logic for combining the input field and current mask values at each grid point. For example, one could define a complex masking region by selecting grid points with an elevation greater than 1000 meters within a Contiguous United States geographic region by doing the following: -• Rerun the Gen-Vx-Mask tool passing in the output of the first call and applying polyline masking to define the geographic area of interest. +• Run Gen-Vx-Mask to apply data masking by thresholding a field of topography greater than 1000 meters. - - Use the **-intersection** option to only select grid points whose value is non-zero in both the input field and the current mask. +• Run Gen-Vx-Mask a second time on the output from the first call and apply polyline masking to define the geographic area of interest. Use the **-intersection** option to only select grid points whose value is non-zero in both the input field and the current mask. 
-An example of the gen_vx_mask calling sequence is shown below: +An example of this Gen-Vx-Mask calling sequence is shown below: .. code-block:: none - gen_vx_mask sample_fcst.grb \ - CONUS.poly CONUS_poly.nc + gen_vx_mask fcst.grib fcst.grib TOPO_mask.nc \ + -type data \ + -mask_field 'name="TOPO"; level="L0";' \ + -thresh '>1000' + + gen_vx_mask TOPO_mask.nc CONUS.poly TOPO_CONUS_mask.nc \ + -type poly \ + -intersection -name TOPO_CONUS_mask + -In this example, the Gen-Vx-Mask tool will read the ASCII Lat/Lon file named **CONUS.poly** and apply the default polyline masking method to the domain on which the data in the file **sample_fcst.grib** resides. It will create a NetCDF file containing a bitmap for the domain with a value of 1 for all grid points inside the CONUS polyline and a value of 0 for all grid points outside. It will write an output NetCDF file named **CONUS_poly.nc**. +Here, Gen-Vx-Mask uses the **data** masking type to read topography data (**TOPO**) from a GRIB file and thresholds the values **>1000** to define a topography mask. The second run of Gen-Vx-Mask uses the **poly** masking type to read the ASCII Lat/Lon file named **CONUS.poly** and select all grid points within that region to define a polyline mask. When reading its own output, Gen-Vx-Mask automatically reads the topography mask as the **input_field** and applies the **intersection** logic to combine it with the polyline mask, selecting grid points where both conditions are true. The resulting complex mask is written to the output NetCDF file named **TOPO_CONUS_mask.nc**. 
Feature-Relative Methods ======================== diff --git a/docs/Users_Guide/mode-td.rst b/docs/Users_Guide/mode-td.rst index f7dd558a7c..70c594f430 100644 --- a/docs/Users_Guide/mode-td.rst +++ b/docs/Users_Guide/mode-td.rst @@ -397,10 +397,10 @@ The contents of the OBJECT_ID and OBJECT_CAT columns identify the objects using - Version number * - 2 - MODEL - - User provided text string giving model name + - User-provided text string giving model name * - 3 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format @@ -482,40 +482,22 @@ The contents of the OBJECT_ID and OBJECT_CAT columns identify the objects using - Time index of slice * - 26 - AREA - - 2D cross-sectional area - * - 27 - - CENTROID_X - - x coordinate of centroid - * - 28 - - CENTROID_Y - - y coordinate of centroid - * - 29 - - CENTROID_LAT - - Latitude of centroid - * - 30 - - CENTROID_LON - - Longitude of centroid + - 2D cross-sectional area (in grid squares) + * - 27-28 + - CENTROID_X,_Y + - Location of centroid (in grid units) + * - 29-30 + - CENTROID_LAT,_LON + - Location of centroid (in lat/lon degrees) * - 31 - AXIS_ANG - - Angle that the axis makes with the grid x direction - * - 32 - - INTENSITY_10 - - :math:`\text{10}^{th}` percentile intensity in time slice - * - 33 - - INTENSITY_25 - - :math:`\text{25}^{th}` percentile intensity in time slice - * - 34 - - INTENSITY_50 - - :math:`\text{60}^{th}` percentile intensity in time slice - * - 35 - - INTENSITY_75 - - :math:`\text{75}^{th}` percentile intensity in time slice - * - 36 - - INTENSITY_90 - - :math:`\text{90}^{th}` percentile intensity in time slice + - Angle that the axis makes with the grid x direction (in degrees) + * - 32-36 + - INTENSITY_10,_25,_50,_75,_90 + - 10th, 25th, 50th, 75th, and 90th percentile intensity in time slice (various units) * - 37 - - INTENSITY_* - - User-specified percentile 
intensity in time slice + - INTENSITY_NN + - User-specified percentile intensity in time slice (various units) .. _table_mtd-3D-single-attributes: @@ -535,60 +517,42 @@ The contents of the OBJECT_ID and OBJECT_CAT columns identify the objects using * - 24 - OBJECT_CAT - Object category - * - 25 - - CENTROID_X - - x coordinate of centroid - * - 26 - - CENTROID_Y - - y coordinate of centroid + * - 25-26 + - CENTROID_X,_Y + - Location of the centroid (in grid units) * - 27 - CENTROID_T - - t coordinate of centroid - * - 28 - - CENTROID_LAT - - Latitude of centroid - * - 29 - - CENTROID_LON - - Longitude of centroid + - Time coordinate of centroid (in time steps) + * - 28-29 + - CENTROID_LAT,_LON + - Location of the centroid (in lat/lon degrees) * - 30 - X_DOT - - x component of object velocity + - X component of object velocity (in grid units per time step) * - 31 - Y_DOT - - y component of object velocity + - Y component of object velocity (in grid units per time step) * - 32 - AXIS_ANG - - Angle that the axis plane of an object makes with the grid x direction + - Angle that the axis plane of an object makes with the grid x direction (in degrees) * - 33 - VOLUME - - Integer count of the number of 3D "cells" in an object + - 3D object volume (integer count of 3D spacetime cells) * - 34 - START_TIME - - Object start time + - Object start time (in time steps) * - 35 - END_TIME - - Object end time + - Object end time (in time steps) * - 36 - CDIST_TRAVELLED - - Total great circle distance travelled by the 2D spatial centroid over the lifetime of the 3D object - * - 37 - - INTENSITY_10 - - :math:`\text{10}^{th}` percentile intensity inside object - * - 38 - - INTENSITY_25 - - :math:`\text{25}^{th}` percentile intensity inside object - * - 39 - - INTENSITY_50 - - :math:`\text{50}^{th}` percentile intensity inside object - * - 40 - - INTENSITY_75 - - :math:`\text{75}^{th}` percentile intensity inside object - * - 41 - - INTENSITY_90 - - :math:`\text{90}^{th}` percentile 
intensity inside object + - Total great circle distance travelled by the 2D spatial centroid over the lifetime of the 3D object (in kilometers) + * - 37-41 + - INTENSITY_10,_25,_50,_75,_90 + - 10th, 25th, 50th, 75th, and 90th percentile intensity in spacetime object (various units) * - 42 - - INTENSITY_* - - User-specified percentile intensity inside object + - INTENSITY_NN + - User-specified percentile intensity in spacetime object (various units) .. _table_mtd-3D-Pair-Attribute: @@ -610,37 +574,37 @@ The contents of the OBJECT_ID and OBJECT_CAT columns identify the objects using - Object category * - 25 - SPACE_CENTROID_DIST - - Spatial distance between :math:`(x,y)` coordinates of object spacetime centroid + - Cartesian distance between :math:`(x,y)` coordinates of object spacetime centroids (in grid units) * - 26 - TIME_CENTROID_DELTA - - Difference in *t* index of object spacetime centroid + - Observation minus forecast difference in *t* temporal index of object spacetime centroid (in time steps) * - 27 - AXIS_DIFF - - Difference in spatial axis plane angles + - Angle between the spatial axis plane angles (in degrees, from 0 to 90) * - 28 - SPEED_DELTA - - Difference in object speeds + - Forecast minus observation speed difference (in grid units per time step) * - 29 - DIRECTION_DIFF - - Difference in object direction of movement + - Difference in object direction of movement (in degrees, from 0 to 180) * - 30 - VOLUME_RATIO - - Forecast object volume divided by observation object volume + - Forecast 3D object volume divided by observation 3D object volume (unitless) * - 31 - START_TIME_DELTA - - Difference in object starting time steps + - Forecast minus observation starting time step (in time steps) * - 32 - END_TIME_DELTA - - Difference in object ending time steps + - Forecast minus observation ending time step (in time steps) * - 33 - INTERSECTION_VOLUME - - "Volume" of object intersection + - 3D object intersection volume (integer count of 3D spacetime 
cells) * - 34 - DURATION_DIFF - - Difference in the lifetimes of the two objects + - Forecast minus observation difference in object lifetimes (in time steps) * - 35 - INTEREST - - Total interest for this object pair + - Total interest for this object pair (unitless) **NetCDF File** diff --git a/docs/Users_Guide/mode.rst b/docs/Users_Guide/mode.rst index 2dc4bc3e96..6088e993ea 100644 --- a/docs/Users_Guide/mode.rst +++ b/docs/Users_Guide/mode.rst @@ -590,16 +590,16 @@ The MODE tool creates two ASCII output files. The first ASCII file contains cont - Version number * - 2 - MODEL - - User provided text string designating model name + - User-provided text string giving model name * - 3 - N_VALID - Number of valid data points * - 4 - GRID_RES - - User provided nominal grid resolution + - User-provided nominal grid resolution * - 5 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 6 - FCST_LEAD - Forecast lead time in HHMMSS format @@ -620,13 +620,13 @@ The MODE tool creates two ASCII output files. The first ASCII file contains cont - Observation accumulation time in HHMMSS format * - 12 - FCST_RAD - - Forecast convolution radius in grid squares + - Forecast convolution radius in grid units * - 13 - FCST_THR - Forecast convolution threshold * - 14 - OBS_RAD - - Observation convolution radius in grid squares + - Observation convolution radius in grid units * - 15 - OBS_THR - Observation convolution threshold @@ -650,7 +650,7 @@ The MODE tool creates two ASCII output files. 
The first ASCII file contains cont - Observation vertical level * - 22 - OBTYPE - - User provided observation type + - User-provided observation type * - 23 - FIELD - Field type for this line:* RAW for the raw input fields * OBJECT for the resolved object fields @@ -790,16 +790,16 @@ The contents of the columns in this ASCII file are summarized in :numref:`MODE_o - Version number * - 2 - MODEL - - User provided text string designating model name + - User-provided text string designating model name * - 3 - N_VALID - Number of valid data points * - 4 - GRID_RES - - User provided nominal grid resolution + - User-provided nominal grid resolution * - 5 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 6 - FCST_LEAD - Forecast lead time in HHMMSS format @@ -850,7 +850,7 @@ The contents of the columns in this ASCII file are summarized in :numref:`MODE_o - Observation vertical level * - 22 - OBTYPE - - User provided observation type + - User-provided observation type * - 23 - OBJECT_ID - Object numbered from 1 to the number of objects in each field diff --git a/docs/Users_Guide/point-stat.rst b/docs/Users_Guide/point-stat.rst index 41e154ac8c..edf3315c3a 100644 --- a/docs/Users_Guide/point-stat.rst +++ b/docs/Users_Guide/point-stat.rst @@ -539,10 +539,10 @@ The first set of header columns are common to all of the output files generated - Version number * - 2 - MODEL - - User provided text string designating model name + - User-provided text string designating model name * - 3 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format diff --git a/docs/Users_Guide/release-notes.rst b/docs/Users_Guide/release-notes.rst index 19e635afbe..867f73aa0d 100644 --- a/docs/Users_Guide/release-notes.rst +++ b/docs/Users_Guide/release-notes.rst @@ -9,6 +9,32 @@ When 
applicable, release notes are followed by the GitHub issue number which des enhancement, or new feature (`MET GitHub issues `_). Important issues are listed **in bold** for emphasis. +MET Version 12.0.0-rc1 Release Notes (20241114) +----------------------------------------------- + + .. dropdown:: Bugfixes + + * Fix the wind-based diagnostics computations in TC-Diag + (`#2729 `_). + + .. dropdown:: Enhancements + + * Update ndbc_stations.xml after 7-character buoy ids are introduced in Aug/Sept 2023 + (`#2631 `_). + * Update GRIB tables in MET based on wgrib2 versions 3.1.4 and 3.4.0 + (`#2780 `_). + * Enhance MET to parse the set_attr options prior to reading data from gridded data files + (`#2826 `_). + * Eliminate Point2Grid warning when no valid output data is found + (`#3000 `_). + + .. dropdown:: Documentation + + * Enhance MTD documentation so that tables 21.3 and 21.4 have units + (`#2750 `_). + * Update documentation about parsing grid information from CF-compliant NetCDF files + (`#3009 `_). + MET Version 12.0.0-beta6 Release Notes (20241018) ------------------------------------------------- @@ -205,3 +231,4 @@ MET Version 12.0.0 Upgrade Instructions * The optional `ecKit `_ library dependency was added in the 12.0.0-beta2 development cycle (`#2574 `_). * Note that the `#2833 `_ bugfix affects all previously generated output from the TC-Diag and TC-RMW tools. +* Note that the GRIB table updates in `#2780 `_ modified some existing units strings for consistency and clarity. diff --git a/docs/Users_Guide/tc-gen.rst b/docs/Users_Guide/tc-gen.rst index 762fc0069f..acecd3853c 100644 --- a/docs/Users_Guide/tc-gen.rst +++ b/docs/Users_Guide/tc-gen.rst @@ -506,7 +506,7 @@ TC-Gen produces output in STAT and, optionally, ASCII and NetCDF formats. 
The AS - Current ATCF Technique name * - 3 - DESC - - User provided text string describing the "filter" options + - User-provided text string describing the "filter" options * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format diff --git a/docs/Users_Guide/tc-rmw.rst b/docs/Users_Guide/tc-rmw.rst index 5f226cc76a..ae84a0a8d0 100644 --- a/docs/Users_Guide/tc-rmw.rst +++ b/docs/Users_Guide/tc-rmw.rst @@ -7,7 +7,7 @@ TC-RMW Tool Introduction ============ -The TC-RMW tool regrids tropical cyclone model data onto a moving range-azimuth grid centered on points along the storm track provided in ATCF format, most likely the adeck generated from the file. The radial grid spacing can be defined in kilometers or as a factor of the radius of maximum winds (RMW). The azimuthal grid spacing is defined in degrees clockwise from due east. If wind vector fields are specified in the configuration file, the radial and tangential wind components will be computed. Any regridding method available in MET can be used to interpolate data on the model output grid to the specified range-azimuth grid. The regridding will be done separately on each vertical level. The model data files must coincide with track points in a user provided ATCF formatted track file. +The TC-RMW tool regrids tropical cyclone model data onto a moving range-azimuth grid centered on points along the storm track provided in ATCF format, most likely the adeck generated from the file. The radial grid spacing can be defined in kilometers or as a factor of the radius of maximum winds (RMW). The azimuthal grid spacing is defined in degrees clockwise from due east. If wind vector fields are specified in the configuration file, the radial and tangential wind components will be computed. Any regridding method available in MET can be used to interpolate data on the model output grid to the specified range-azimuth grid. The regridding will be done separately on each vertical level. 
The model data files must coincide with track points in a user-provided ATCF formatted track file. Practical Information ===================== diff --git a/docs/Users_Guide/wavelet-stat.rst b/docs/Users_Guide/wavelet-stat.rst index fdfcfe95bc..33b038c6aa 100644 --- a/docs/Users_Guide/wavelet-stat.rst +++ b/docs/Users_Guide/wavelet-stat.rst @@ -345,10 +345,10 @@ The format of the STAT and ASCII output of the Wavelet-Stat tool is similar to t - Version number * - 2 - MODEL - - User provided text string designating model name + - User-provided text string designating model name * - 3 - DESC - - User provided text string describing the verification task + - User-provided text string describing the verification task * - 4 - FCST_LEAD - Forecast lead time in HHMMSS format @@ -387,7 +387,7 @@ The format of the STAT and ASCII output of the Wavelet-Stat tool is similar to t - Selected Vertical level for observations * - 16 - OBTYPE - - User provided text string designating the observation type + - User-provided text string designating the observation type * - 17 - VX_MASK - Verifying masking region indicating the masking grid or polyline region applied diff --git a/docs/conf.py b/docs/conf.py index 659a095b96..86beea82f9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,11 +20,11 @@ project = 'MET' author = 'UCAR/NCAR, NOAA, CSU/CIRA, and CU/CIRES' author_list = 'Prestopnik, J., H. Soh, L. Goodrich, B. Brown, R. Bullock, J. Halley Gotway, K. Newman, J. Opatz, T. 
Jensen' -version = '12.0.0-beta6' +version = '12.0.0-rc1' verinfo = version release = f'{version}' release_year = '2024' -release_date = f'{release_year}-10-18' +release_date = f'{release_year}-11-14' copyright = f'{release_year}, {author}' # -- General configuration --------------------------------------------------- diff --git a/internal/scripts/installation/config/install_met_env.acorn b/internal/scripts/installation/config/install_met_env.acorn index 930323e5c8..48cd970fc1 100644 --- a/internal/scripts/installation/config/install_met_env.acorn +++ b/internal/scripts/installation/config/install_met_env.acorn @@ -12,19 +12,20 @@ module load gsl/2.7 module load g2c/1.6.4 module load proj/7.1.0 module use /apps/dev/lmodules/intel/19.1.3.304 -module load atlas/0.30.0 -module load eckit/1.20.2 +#module load fckit/0.11.0 +module load atlas/0.35.0 +module load eckit/1.24.4 export FC=ifort export F77=ifort export F90=ifort export CC=icc export CXX=icpc -export TEST_BASE=/apps/sw_review/emc/MET/12.0.0 +export TEST_BASE=/apps/sw_review/emc/MET/12.0.0-beta6 export LIB_DIR=${TEST_BASE}/external_libs export COMPILER=intel_19.1.3.304 export MET_SUBDIR=${TEST_BASE} -export MET_TARBALL=v12.0.0.tar.gz +export MET_TARBALL=v12.0.0-beta6.tar.gz export USE_MODULES=TRUE export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64 export PYTHON_MODULE=python_3.10.4 @@ -38,13 +39,14 @@ export MET_BUFRLIB=/apps/ops/prod/libs/intel/19.1.3.304/bufr/11.5.0/lib64 export MET_GRIB2CLIB=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/lib64 export MET_GRIB2CINC=/apps/ops/prod/libs/intel/19.1.3.304/g2c/1.6.4/include export MET_GSL=/apps/spack/gsl/2.7/intel/19.1.3.304/xks7dxbowrdxhjck5zxc4rompopocevb -export MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq/ -export MET_ATLAS=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/atlas/0.30.0 -export MET_ECKIT=/apps/dev/intel-19.1.3.304/cray-mpich-8.1.9/eckit/1.20.2 +export 
MET_PROJ=/apps/spack/proj/7.1.0/intel/19.1.3.304/cjbmc7tacv5qcfatslqmcrzo5kb4raaq +export MET_ATLASLIB=/apps/prod/atlas/install-0.35.0/lib64 +export MET_ATLASINC=/apps/prod/atlas/install-0.35.0/include/atlas +export MET_ECKITLIB=/apps/prod/eckit/install-1.24.4/lib64 +export MET_ECKITINC=/apps/prod/eckit/install-1.24.4/include/eckit export BUFRLIB_NAME=-lbufr_4 export GRIB2CLIB_NAME=-lg2c export LIB_JASPER=/apps/spack/jasper/2.0.25/intel/19.1.3.304/sjib74krrorkyczqpqah4tvewmlnqdx4/lib64 export LIB_LIBPNG=/apps/spack/libpng/1.6.37/intel/19.1.3.304/4ohkronuhlyherusoszzrmur5ewvlwzh/lib export LIB_Z=/apps/spack/zlib/1.2.11/intel/19.1.3.304/hjotqkckeoyt6j6tibalwzrlfljcjtdh/lib -export export MAKE_ARGS=-j -export CXXFLAGS="-std=c++11" +export MAKE_ARGS=-j diff --git a/internal/scripts/installation/config/install_met_env.hera b/internal/scripts/installation/config/install_met_env.hera index 2b0b054190..65282af1d4 100755 --- a/internal/scripts/installation/config/install_met_env.hera +++ b/internal/scripts/installation/config/install_met_env.hera @@ -2,11 +2,11 @@ module load gnu/9.2.0 module load intel/2022.1.2 module load cmake/3.28.1 -export FC=ifort -export F77=ifort -export F90=ifort -export CC=icc -export CXX=icpc +export FC=ifx +export F77=ifx +export F90=ifx +export CC=icx +export CXX=icpx export PATH=/scratch1/BMC/dtc/miniconda/miniconda3/envs/metplus_v5.1_py3.10/bin:${PATH} export TEST_BASE=/contrib/met/12.0.0 export COMPILER=intel_2022.1.2 diff --git a/internal/scripts/installation/config/install_met_env.jet b/internal/scripts/installation/config/install_met_env.jet index 15b5563ac2..6f997b506f 100644 --- a/internal/scripts/installation/config/install_met_env.jet +++ b/internal/scripts/installation/config/install_met_env.jet @@ -1,11 +1,11 @@ module load intel/2022.1.2 module load cmake/3.28.1 -export FC=ifort -export F77=ifort -export F90=ifort -export CC=icc -export CXX=icpc +export FC=ifx +export F77=ifx +export F90=ifx +export CC=icx +export CXX=icpx export 
TEST_BASE=/contrib/met/12.0.0 export COMPILER=intel_2022.1.2 export MET_SUBDIR=${TEST_BASE} diff --git a/internal/test_unit/config/GridStatConfig_SEEPS b/internal/test_unit/config/GridStatConfig_SEEPS index 808e5a10f3..c9c1d80f77 100644 --- a/internal/test_unit/config/GridStatConfig_SEEPS +++ b/internal/test_unit/config/GridStatConfig_SEEPS @@ -198,6 +198,7 @@ nc_pairs_flag = { diff = TRUE; climo = TRUE; climo_cdp = FALSE; + seeps = TRUE; weight = FALSE; nbrhd = FALSE; fourier = FALSE; diff --git a/internal/test_unit/xml/unit_gen_vx_mask.xml b/internal/test_unit/xml/unit_gen_vx_mask.xml index ca66cea64c..5128485162 100644 --- a/internal/test_unit/xml/unit_gen_vx_mask.xml +++ b/internal/test_unit/xml/unit_gen_vx_mask.xml @@ -438,6 +438,27 @@ + + + + + + &MET_BIN;/gen_vx_mask + \ + 'G004' \ + '20050808_12' \ + &OUTPUT_DIR;/gen_vx_mask/SOLAR_MIDNIGHT_NH.nc \ + -type solar_time,solar_alt,lat \ + -thresh 'ge21||le3,le0,ge0' \ + -intersection \ + -name SOLAR_MIDNIGHT_NH \ + -v 3 + + + &OUTPUT_DIR;/gen_vx_mask/SOLAR_MIDNIGHT_NH.nc + + + @@ -472,6 +493,27 @@ + + + + + + &MET_BIN;/gen_vx_mask + \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \ + &DATA_DIR_MODEL;/grib2/gfs/gfs_2012040900_F012.grib2 \ + &OUTPUT_DIR;/gen_vx_mask/DATA_DATA_LAT_LON_mask.nc \ + -type data,data,lat,lon \ + -mask_field 'name="LAND"; level="L0";' \ + -mask_field 'name="TMP"; level="L0";' \ + -thresh eq1,lt273,gt0,lt0 \ + -intersection -v 5 + + + &OUTPUT_DIR;/gen_vx_mask/DATA_DATA_LAT_LON_mask.nc + + + diff --git a/src/libcode/vx_solar/solar.cc b/src/libcode/vx_solar/solar.cc index 2d6f6a5280..3ca09df709 100644 --- a/src/libcode/vx_solar/solar.cc +++ b/src/libcode/vx_solar/solar.cc @@ -60,6 +60,49 @@ return; //////////////////////////////////////////////////////////////////////// +double solar_time(unixtime gmt, double lon) + +{ + + // + // right ascension and declination + // + +double Ra; +double Dec; + +solar_radec(gmt, Ra, Dec); + + // + // local hour angle + // + +double lha = 
gmt_to_gmst(gmt) - lon - Ra; + + // + // rescale angle to -180 to 180 + // + +lha -= 360.0*floor((lha + 180.0)/360.0); + + // + // rescale local hour angle to decimal hours of the solar day + // + +double solar_hr = (lha + 180.0)/360.0 * 24; + + // + // done + // + +return solar_hr; + +} + + +//////////////////////////////////////////////////////////////////////// + + void dh_to_aa(double lat, double Dec, double lha, double & alt, double & azi) { diff --git a/src/libcode/vx_solar/solar.h b/src/libcode/vx_solar/solar.h index a78876e7ff..f4c717d879 100644 --- a/src/libcode/vx_solar/solar.h +++ b/src/libcode/vx_solar/solar.h @@ -46,6 +46,25 @@ extern void solar_altaz(unixtime gmt, double lat, double lon, double & alt, doub //////////////////////////////////////////////////////////////////////// + +extern double solar_time(unixtime gmt, double lon); + + // + // calculates the solar time for the given longitude. + // + // + // Input: gmt, greenwich mean time expressed as unix time + // + // lon, longitude (degrees) of given location (+ west, - east) + // + // + // Output: decimal hours of the solar day in range [0, 24), + // where 12 is solar noon + // + + +//////////////////////////////////////////////////////////////////////// + + extern void solar_radec(unixtime gmt, double & Ra, double & Dec); // diff --git a/src/tools/core/grid_stat/grid_stat.cc b/src/tools/core/grid_stat/grid_stat.cc index 45afc74615..a1c43f0824 100644 --- a/src/tools/core/grid_stat/grid_stat.cc +++ b/src/tools/core/grid_stat/grid_stat.cc @@ -114,6 +114,7 @@ // 056 01/29/24 Halley Gotway MET #2801 Configure time difference warnings. // 057 07/05/24 Halley Gotway MET #2924 Support forecast climatology. // 058 10/03/24 Halley Gotway MET #2887 Compute weighted contingency tables. +// 059 11/15/24 Halley Gotway MET #3020 SEEPS NetCDF output. 
// //////////////////////////////////////////////////////////////////////// @@ -1267,23 +1268,36 @@ void process_scores() { SeepsAggScore seeps_agg; int month, day, year, hour, minute, second; + // Compute SEEPS statistics unix_to_mdyhms(fcst_dp.valid(), month, day, year, hour, minute, second); compute_aggregated_seeps_grid(fcst_dp_smooth, obs_dp_smooth, seeps_dp, seeps_dp_fcat, seeps_dp_ocat, &seeps_agg, month, hour, conf_info.seeps_p1_thresh, conf_info.seeps_climo_name); - write_nc("SEEPS_MPR_SCORE", seeps_dp, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - write_nc("SEEPS_MPR_FCAT", seeps_dp_fcat, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - write_nc("SEEPS_MPR_OCAT", seeps_dp_ocat, - i, mthd, pnts, - conf_info.vx_opt[i].interp_info.field); - write_seeps_row(shc, &seeps_agg, conf_info.output_flag[i_seeps], - stat_at, i_stat_row, txt_at[i_seeps], i_txt_row[i_seeps]); + // Write out SEEPS + if(conf_info.vx_opt[i].output_flag[i_seeps] != STATOutputType::None && + seeps_agg.n_obs > 0) { + + write_seeps_row(shc, &seeps_agg, + conf_info.output_flag[i_seeps], + stat_at, i_stat_row, + txt_at[i_seeps], i_txt_row[i_seeps]); + } + + // MET #3020 + // Write out the SEEPS data if requested in the config file + if(conf_info.vx_opt[i].nc_info.do_seeps) { + write_nc("SEEPS_MPR_SCORE", seeps_dp, + i, mthd, pnts, + conf_info.vx_opt[i].interp_info.field); + write_nc("SEEPS_MPR_FCAT", seeps_dp_fcat, + i, mthd, pnts, + conf_info.vx_opt[i].interp_info.field); + write_nc("SEEPS_MPR_OCAT", seeps_dp_ocat, + i, mthd, pnts, + conf_info.vx_opt[i].interp_info.field); + } } // Compute gradient statistics if requested in the config file diff --git a/src/tools/core/grid_stat/grid_stat_conf_info.cc b/src/tools/core/grid_stat/grid_stat_conf_info.cc index d334804850..f9b704e455 100644 --- a/src/tools/core/grid_stat/grid_stat_conf_info.cc +++ b/src/tools/core/grid_stat/grid_stat_conf_info.cc @@ -359,6 +359,7 @@ void GridStatConfInfo::process_flags() { 
if(vx_opt[i].nc_info.do_diff) nc_info.do_diff = true; if(vx_opt[i].nc_info.do_climo) nc_info.do_climo = true; if(vx_opt[i].nc_info.do_climo_cdp) nc_info.do_climo_cdp = true; + if(vx_opt[i].nc_info.do_seeps) nc_info.do_seeps = true; if(vx_opt[i].nc_info.do_weight) nc_info.do_weight = true; if(vx_opt[i].nc_info.do_nbrhd) nc_info.do_nbrhd = true; if(vx_opt[i].nc_info.do_fourier) nc_info.do_fourier = true; @@ -987,6 +988,7 @@ void GridStatVxOpt::parse_nc_info(Dictionary &odict) { nc_info.do_diff = d->lookup_bool(conf_key_diff_flag); nc_info.do_climo = d->lookup_bool(conf_key_climo_flag); nc_info.do_climo_cdp = d->lookup_bool(conf_key_climo_cdp_flag); + nc_info.do_seeps = d->lookup_bool(conf_key_seeps_flag); nc_info.do_weight = d->lookup_bool(conf_key_weight); nc_info.do_nbrhd = d->lookup_bool(conf_key_nbrhd); nc_info.do_fourier = d->lookup_bool(conf_key_fourier); @@ -1310,10 +1312,10 @@ void GridStatNcOutInfo::clear() { bool GridStatNcOutInfo::all_false() const { - bool status = do_latlon || do_raw || do_diff || - do_climo || do_climo_cdp || do_weight || - do_nbrhd || do_fourier || do_gradient || - do_distance_map || do_apply_mask; + bool status = do_latlon || do_raw || do_diff || + do_climo || do_climo_cdp || do_seeps || + do_weight || do_nbrhd || do_fourier || + do_gradient || do_distance_map || do_apply_mask; return !status; } @@ -1327,6 +1329,7 @@ void GridStatNcOutInfo::set_all_false() { do_diff = false; do_climo = false; do_climo_cdp = false; + do_seeps = false; do_weight = false; do_nbrhd = false; do_fourier = false; @@ -1346,6 +1349,7 @@ void GridStatNcOutInfo::set_all_true() { do_diff = true; do_climo = true; do_climo_cdp = true; + do_seeps = true; do_weight = true; do_nbrhd = true; do_fourier = true; diff --git a/src/tools/core/grid_stat/grid_stat_conf_info.h b/src/tools/core/grid_stat/grid_stat_conf_info.h index c3e72ee3a4..e24326c7cd 100644 --- a/src/tools/core/grid_stat/grid_stat_conf_info.h +++ b/src/tools/core/grid_stat/grid_stat_conf_info.h @@ -106,6 
+106,7 @@ struct GridStatNcOutInfo { bool do_diff; bool do_climo; bool do_climo_cdp; + bool do_seeps; bool do_weight; bool do_nbrhd; bool do_fourier; diff --git a/src/tools/other/gen_vx_mask/gen_vx_mask.cc b/src/tools/other/gen_vx_mask/gen_vx_mask.cc index 83fe7cc568..e2d2580436 100644 --- a/src/tools/other/gen_vx_mask/gen_vx_mask.cc +++ b/src/tools/other/gen_vx_mask/gen_vx_mask.cc @@ -31,6 +31,7 @@ // 013 07/06/22 Howard Soh METplus-Internal #19 Rename main to met_main // 014 09/28/22 Prestopnik MET #2227 Remove namespace std and netCDF from header files // 015 05/03/23 Halley Gotway MET #1060 Support multiple shapes +// 016 11/04/24 Halley Gotway MET #2966 Add solar time option. // //////////////////////////////////////////////////////////////////////// @@ -71,25 +72,77 @@ using namespace netCDF; //////////////////////////////////////////////////////////////////////// int met_main(int argc, char *argv[]) { - static DataPlane dp_data, dp_mask, dp_out; // Process the command line arguments process_command_line(argc, argv); // Process the input grid - process_input_grid(dp_data); + static DataPlane dp_input; + process_input_grid(dp_input); - // Process the mask file - process_mask_file(dp_mask); + static DataPlane dp_mask; - // Apply combination logic if the current mask is binary + // Process each -type setting + for(int i=0; i i) { + mask_field_str = mask_field_opts[i]; + } + else { + mask_field_str.clear(); + } + + // Set the current threshold + if(thresh_opts.n() == 1) { + thresh = thresh_opts[0]; + } + else if(thresh_opts.n() > i) { + thresh = thresh_opts[i]; + } + else { + thresh.clear(); + } + + // Process the mask file + static DataPlane dp_cur; + process_mask_file(dp_cur); + + // Build mask type description string + if(i>0) mask_type_desc_cs << " " << setlogic_to_abbr(set_logic) << " "; + mask_type_desc_cs << masktype_to_string(mask_type); + if(mask_type == MaskType::Data) mask_type_desc_cs << "(" << data_desc_cs << ")"; + if(thresh.get_type() != thresh_na) 
mask_type_desc_cs << thresh.get_str(); + + // Combine with prior masks + if(dp_mask.nxy() == 0) dp_mask = dp_cur; + else dp_mask = combine(dp_mask, dp_cur, set_logic); + + } // end for i + + // Combine the input data with the current binary mask + static DataPlane dp_out; if(mask_type == MaskType::Poly || mask_type == MaskType::Poly_XY || mask_type == MaskType::Shape || mask_type == MaskType::Box || mask_type == MaskType::Grid || thresh.get_type() != thresh_na) { - dp_out = combine(dp_data, dp_mask, set_logic); + + // Combination logic based on presence of input data + SetLogic logic = (have_input_data ? + set_logic : SetLogic::None); + + dp_out = combine(dp_input, dp_mask, logic); } // Otherwise, pass through the distance or raw values else { @@ -104,13 +157,13 @@ int met_main(int argc, char *argv[]) { //////////////////////////////////////////////////////////////////////// -const string get_tool_name() { +string get_tool_name() { return "gen_vx_mask"; } //////////////////////////////////////////////////////////////////////// -void process_command_line(int argc, char **argv) { +static void process_command_line(int argc, char **argv) { CommandLine cline; // Check for zero arguments @@ -155,14 +208,13 @@ void process_command_line(int argc, char **argv) { mask_filename = cline[1]; out_filename = cline[2]; - // Check for the mask type (from -type string) - if(mask_type == MaskType::None) { - mlog << Error << "\n" << program_name << " -> " - << "the -type command line requirement must be set to a specific masking type!\n" - << "\t\t \"poly\", \"box\", \"circle\", \"track\", \"grid\", " - << "\"data\", \"solar_alt\", \"solar_azi\", \"lat\", \"lon\" " - << "or \"shape\"" << "\n\n"; - exit(1); + // Check for at least one mask type + if(mask_type_opts.empty()) { + mlog << Error << "\n" << program_name << " -> " + << "the -type command line option must be used at least once!\n" + << "\t\t poly, box, circle, track, grid, data, solar_alt, " + << "solar_azi, solar_time, lat, 
lon, or shape\n\n"; + exit(1); } // List the input files @@ -175,9 +227,11 @@ void process_command_line(int argc, char **argv) { //////////////////////////////////////////////////////////////////////// -void process_input_grid(DataPlane &dp) { +static void process_input_grid(DataPlane &dp) { + + // Read grid string + if(!build_grid_by_grid_string(input_gridname, grid, "process_input_grid", false)) { - if (!build_grid_by_grid_string(input_gridname, grid, "process_input_grid", false)) { // Extract the grid from a gridded data file mlog << Debug(3) << "Use input grid defined by file \"" << input_gridname @@ -189,6 +243,7 @@ void process_input_grid(DataPlane &dp) { // If not yet set, fill the input data plane with zeros if(dp.is_empty()) { + have_input_data = false; dp.set_size(grid.nx(), grid.ny()); dp.set_constant(0.0); } @@ -202,7 +257,7 @@ void process_input_grid(DataPlane &dp) { //////////////////////////////////////////////////////////////////////// -void process_mask_file(DataPlane &dp) { +static void process_mask_file(DataPlane &dp) { // Initialize solar_ut = (unixtime) 0; @@ -226,7 +281,7 @@ void process_mask_file(DataPlane &dp) { else if(mask_type == MaskType::Shape) { // If -shape_str was specified, find the matching records - if(shape_str_map.size() > 0) get_shapefile_strings(); + if(!shape_str_map.empty()) get_shapefile_strings(); // Get the records specified by -shapeno and -shape_str get_shapefile_records(); @@ -256,9 +311,10 @@ void process_mask_file(DataPlane &dp) { << unix_to_yyyymmdd_hhmmss(solar_ut) << "\n"; } - // Nothing to do for Lat/Lon masking types + // For Lat/Lon masking types else if(mask_type == MaskType::Lat || mask_type == MaskType::Lon) { + // Nothing to do for Lat/Lon masking types } // Otherwise, process the mask file as a named grid, grid specification @@ -306,8 +362,8 @@ void process_mask_file(DataPlane &dp) { if(mask_field_str.empty()) { mlog << Error << "\nprocess_mask_file() -> " << "use \"-mask_field\" to specify the data whose 
valid " - << "time should be used for \"solar_alt\" and " - << "\"solar_azi\" masking.\n\n"; + << "time should be used for \"solar_alt\", \"solar_azi\", " + << "and \"solar_time\" masking.\n\n"; exit(1); } solar_ut = dp.valid(); @@ -325,6 +381,13 @@ void process_mask_file(DataPlane &dp) { exit(1); } + // Report the threshold + if(is_thresh_masktype(mask_type)) { + mlog << Debug(2) + << masktype_to_description(mask_type) + << " Threshold:\t" << thresh.get_str() << "\n"; + } + // Initialize the masking field, if needed if(dp.is_empty()) dp.set_size(grid.nx(), grid.ny()); @@ -361,6 +424,7 @@ void process_mask_file(DataPlane &dp) { case MaskType::Solar_Alt: case MaskType::Solar_Azi: + case MaskType::Solar_Time: apply_solar_mask(dp); break; @@ -384,10 +448,10 @@ void process_mask_file(DataPlane &dp) { //////////////////////////////////////////////////////////////////////// -void get_data_plane(const ConcatString &file_name, - const ConcatString &config_str, - bool read_gen_vx_mask_output, - DataPlane &dp, Grid &dp_grid) { +static void get_data_plane(const ConcatString &file_name, + const ConcatString &config_str, + bool read_gen_vx_mask_output, + DataPlane &dp, Grid &dp_grid) { ConcatString local_cs = config_str; GrdFileType ftype = FileType_None; @@ -395,7 +459,7 @@ void get_data_plane(const ConcatString &file_name, MetConfig local_config = global_config; // Parse non-empty config strings - if(local_cs.length() > 0) { + if(!local_cs.empty()) { local_config.read_string(local_cs.c_str()); ftype = parse_conf_file_type(&local_config); } @@ -411,15 +475,14 @@ void get_data_plane(const ConcatString &file_name, // Read gen_vx_mask output from a previous run if(read_gen_vx_mask_output && - local_cs.length() == 0 && - mtddf_ptr->file_type() == FileType_NcMet) { - if(get_gen_vx_mask_config_str((MetNcMetDataFile *) mtddf_ptr, local_cs)) { - local_config.read_string(local_cs.c_str()); - } + local_cs.empty() && + mtddf_ptr->file_type() == FileType_NcMet && + 
get_gen_vx_mask_config_str((MetNcMetDataFile *) mtddf_ptr, local_cs)) { + local_config.read_string(local_cs.c_str()); } // Read data plane, if requested - if(local_cs.length() > 0) { + if(!local_cs.empty()) { // Allocate new VarInfo object VarInfo *vi_ptr = VarInfoFactory::new_var_info(mtddf_ptr->file_type()); @@ -449,6 +512,12 @@ << mtddf_ptr->filename() << "\" with data ranging from " << dmin << " to " << dmax << ".\n"; + // Store the units string if no threshold was specified + if(thresh.get_type() == thresh_na) units_cs = vi_ptr->units(); + + // Store the description of the data + data_desc_cs = vi_ptr->magic_str(); + // Clean up if(vi_ptr) { delete vi_ptr; vi_ptr = (VarInfo *) nullptr; } @@ -465,8 +534,8 @@ //////////////////////////////////////////////////////////////////////// -bool get_gen_vx_mask_config_str(MetNcMetDataFile *mnmdf_ptr, - ConcatString &config_str) { +static bool get_gen_vx_mask_config_str(const MetNcMetDataFile *mnmdf_ptr, + ConcatString &config_str) { bool status = false; ConcatString tool; @@ -488,8 +557,9 @@ // Read the first non-lat/lon variable config_str << cs_erase - << "'name=\"" << mnmdf_ptr->MetNc->Var[i].name - << "\"; level=\"(*,*)\";'"; + << R"('name=")" + << mnmdf_ptr->MetNc->Var[i].name + << R"_("; level="(*,*)";')_"; status = true; break; } @@ -499,7 +569,7 @@ //////////////////////////////////////////////////////////////////////// -void get_shapefile_strings() { +static void get_shapefile_strings() { DbfFile f; StringArray rec_names; StringArray rec_values; @@ -530,13 +600,10 @@ << ").\n"; // Check that the attributes requested actually exist - map::const_iterator it; - for(it = shape_str_map.begin(); - it != shape_str_map.end(); it++) { - - if(!rec_names.has(it->first)) { 
+ for(const auto& pair: shape_str_map) { + if(!rec_names.has(pair.first)) { mlog << Warning << "\nget_shapefile_strings() -> " - << "the \"-shape_str\" name \"" << it->first + << R"(the "-shape_str" name ")" << pair.first << "\" is not in the list of " << rec_names.n() << " shapefile attributes and will be ignored:\n" << write_css(rec_names) << "\n\n"; @@ -663,7 +730,7 @@ bool is_shape_str_match(const int i_shape, const StringArray &names, const Strin //////////////////////////////////////////////////////////////////////// -void apply_poly_mask(DataPlane & dp) { +static void apply_poly_mask(DataPlane & dp) { int n_in = 0; bool inside; double lat; @@ -693,23 +760,26 @@ void apply_poly_mask(DataPlane & dp) { if(complement) { mlog << Debug(3) - << "Applying complement of polyline mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } // List number of points inside the mask mlog << Debug(3) - << "Polyline Masking:\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; return; } //////////////////////////////////////////////////////////////////////// -void apply_poly_xy_mask(DataPlane & dp) { +static void apply_poly_xy_mask(DataPlane & dp) { int n_in = 0; bool inside; - double x_dbl, y_dbl; + double x_dbl; + double y_dbl; GridClosedPoly poly_xy; // Convert MaskPoly Lat/Lon coordinates to Grid X/Y @@ -739,28 +809,30 @@ void apply_poly_xy_mask(DataPlane & dp) { if(complement) { mlog << Debug(3) - << "Applying complement of polyline XY mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } // List number of points inside the mask mlog << Debug(3) - << "Polyline XY Masking:\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; 
return; } //////////////////////////////////////////////////////////////////////// -void apply_box_mask(DataPlane &dp) { - int i, x_ll, y_ll, x, y, n_in; - double cen_x, cen_y; - bool inside; +static void apply_box_mask(DataPlane &dp) { + int n_in = 0; + double cen_x; + double cen_y; // Process the height and width if(is_bad_data(height) && is_bad_data(width)) { mlog << Error << "\napply_box_mask() -> " - << "the \"-height\" and/or \"-width\" options must be " + << R"(the "-height" and/or "-width" options must be )" << "specified in grid units for box masking.\n\n"; exit(1); } @@ -772,19 +844,21 @@ void apply_box_mask(DataPlane &dp) { } // Process each lat/lon point - for(i=0; i= dp.nx()) continue; - for(y=y_ll; y= dp.ny()) continue; // Set the mask @@ -795,16 +869,16 @@ void apply_box_mask(DataPlane &dp) { } // end for i // Loop through the field, handle the complement, and count up points - for(x=0,n_in=0; x since \"-thresh\" was not used " - << "to specify a threshold in kilometers for circle masking, " - << "the minimum distance to the points will be written.\n\n"; + mlog << Debug(3) + << "Write the minimum distance in kilometers to the " + << "nearest point for " << masktype_to_description(mask_type) + << R"( masking since no "-thresh" specified.)" + << "\n"; + units_cs = "km"; } // For each grid point, compute mimumum distance to polyline points @@ -859,13 +936,14 @@ void apply_circle_mask(DataPlane &dp) { // Apply threshold, if specified if(thresh.get_type() != thresh_na) { - check = thresh.check(dist); + + bool check = thresh.check(dist); // Check the complement if(complement) check = !check; // Increment count - n_in += check; + if(check) n_in++; v = (check ? 
1.0 : 0.0); } @@ -881,14 +959,16 @@ void apply_circle_mask(DataPlane &dp) { if(thresh.get_type() != thresh_na && complement) { mlog << Debug(3) - << "Applying complement of circle mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } // List the number of points inside the mask if(thresh.get_type() != thresh_na) { mlog << Debug(3) - << "Circle Masking:\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; } // Otherwise, list the min/max distances computed else { @@ -896,7 +976,8 @@ void apply_circle_mask(DataPlane &dp) { double dmax; dp.data_range(dmin, dmax); mlog << Debug(3) - << "Circle Masking:\tDistances ranging from " + << masktype_to_description(mask_type) + << " Masking:\tDistances ranging from " << dmin << " km to " << dmax << " km\n"; } @@ -905,20 +986,21 @@ void apply_circle_mask(DataPlane &dp) { //////////////////////////////////////////////////////////////////////// -void apply_track_mask(DataPlane &dp) { +static void apply_track_mask(DataPlane &dp) { int n_in = 0; double lat; double lon; double dist; double v; - bool check; // Check for no threshold if(thresh.get_type() == thresh_na) { - mlog << Warning - << "\napply_track_mask() -> since \"-thresh\" was not used " - << "to specify a threshold for track masking, the minimum " - << "distance to the track will be written.\n\n"; + mlog << Debug(3) + << "Write the minimum distance in kilometers to the " + << "nearest point for " << masktype_to_description(mask_type) + << R"( masking since no "-thresh" specified.)" + << "\n"; + units_cs = "km"; } // For each grid point, compute mimumum distance to track @@ -940,13 +1022,14 @@ void apply_track_mask(DataPlane &dp) { // Apply threshold, if specified if(thresh.get_type() != thresh_na) { - check = thresh.check(dist); + + bool check = thresh.check(dist); // Check the complement 
if(complement) check = !check; // Increment count - n_in += check; + if(check) n_in++; v = (check ? 1.0 : 0.0); } @@ -962,21 +1045,25 @@ void apply_track_mask(DataPlane &dp) { if(thresh.get_type() != thresh_na && complement) { mlog << Debug(3) - << "Applying complement of track mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } // List the number of points inside the mask if(thresh.get_type() != thresh_na) { mlog << Debug(3) - << "Track Masking:\t\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; } // Otherwise, list the min/max distances computed else { - double dmin, dmax; + double dmin; + double dmax; dp.data_range(dmin, dmax); mlog << Debug(3) - << "Track Masking:\t\tDistances ranging from " + << masktype_to_description(mask_type) + << " Masking:\tDistances ranging from " << dmin << " km to " << dmax << " km\n"; } @@ -985,34 +1072,33 @@ void apply_track_mask(DataPlane &dp) { //////////////////////////////////////////////////////////////////////// -void apply_grid_mask(DataPlane &dp) { +static void apply_grid_mask(DataPlane &dp) { int n_in = 0; - bool inside; - double lat; - double lon; - double mask_x; - double mask_y; // Check each grid point being inside the masking grid for(int x=0; x= 0 && mask_x < grid_mask.nx() && - mask_y >= 0 && mask_y < grid_mask.ny()); + bool inside = (mask_x >= 0 && mask_x < grid_mask.nx() && + mask_y >= 0 && mask_y < grid_mask.ny()); // Apply the complement if(complement) inside = !inside; // Increment count - n_in += inside; + if(inside) n_in++; // Store the current mask value dp.set(inside, x, y); @@ -1022,33 +1108,38 @@ void apply_grid_mask(DataPlane &dp) { if(complement) { mlog << Debug(3) - << "Applying complement of grid mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } + // List number of points 
inside the mask mlog << Debug(3) - << "Grid Masking:\t\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; return; } //////////////////////////////////////////////////////////////////////// -void apply_data_mask(DataPlane &dp) { +static void apply_data_mask(DataPlane &dp) { int n_in = 0; - bool check; // Nothing to do without a threshold if(thresh.get_type() == thresh_na) { - double dmin, dmax; + mlog << Debug(3) + << "Write the raw inputs values for " + << masktype_to_description(mask_type) + << R"( masking since no "-thresh" specified.)" + << "\n"; + double dmin; + double dmax; dp.data_range(dmin, dmax); mlog << Debug(3) - << "Data Masking:\t\tValues ranging from " - << dmin << " km to " << dmax << " km\n"; - mlog << Warning - << "\napply_data_mask() -> since \"-thresh\" was not used " - << "to specify a threshold for data masking, the raw data " - << "values will be written.\n\n"; + << masktype_to_description(mask_type) + << " Masking:\t\tValues ranging from " + << dmin << " to " << dmax << "\n"; return; } @@ -1069,13 +1160,13 @@ void apply_data_mask(DataPlane &dp) { for(int y=0; y since \"-thresh\" was not used " - << "the raw " << masktype_to_string(mask_type) - << " values will be written.\n\n"; + mlog << Debug(3) + << "Write the raw " + << masktype_to_description(mask_type) + << R"( values since no "-thresh" specified.)" + << "\n"; + units_cs = (mask_type == MaskType::Solar_Time ? 
+ "hr" : "deg"); } // Compute solar value for each grid point Lat/Lon @@ -1111,22 +1205,33 @@ void apply_solar_mask(DataPlane &dp) { for(int y=0; y since \"-thresh\" was not used " - << "the raw " << masktype_to_string(mask_type) - << " values will be written.\n\n"; + mlog << Debug(3) + << "Write the raw " + << masktype_to_description(mask_type) + << R"( values since no "-thresh" specified.)" + << "\n"; + units_cs = "deg"; } // Compute Lat/Lon value for each grid point @@ -1185,19 +1290,22 @@ void apply_lat_lon_mask(DataPlane &dp) { for(int y=0; y poly_list; - vector::const_iterator rec_it; - for(rec_it = shape_recs.begin(); - rec_it != shape_recs.end(); ++rec_it) { - poly.set(*rec_it, grid); + for(const auto& cur_rec: shape_recs) { + poly.set(cur_rec, grid); poly_list.push_back(poly); } @@ -1255,12 +1361,10 @@ void apply_shape_mask(DataPlane & dp) { for(int x=0; x<(grid.nx()); x++) { for(int y=0; y<(grid.ny()); y++) { - vector::const_iterator poly_it; - for(poly_it = poly_list.begin(); - poly_it != poly_list.end(); ++poly_it) { + for(const auto& cur_poly: poly_list) { // Check if point is inside - status = poly_it->is_inside(x, y); + status = cur_poly.is_inside(x, y); // Break after the first match if(status) break; @@ -1280,23 +1384,25 @@ void apply_shape_mask(DataPlane & dp) { if(complement) { mlog << Debug(3) - << "Applying complement of the shapefile mask.\n"; + << "Applying complement of the " + << masktype_to_string(mask_type) << " mask.\n"; } // List number of points inside the mask mlog << Debug(3) - << "Shape Masking:\t\t" << n_in << " of " << grid.nx() * grid.ny() - << " points inside\n"; + << masktype_to_description(mask_type) + << " Masking:\t" << n_in << " of " + << grid.nxy() << " points inside\n"; return; } //////////////////////////////////////////////////////////////////////// -DataPlane combine(const DataPlane &dp_data, const DataPlane &dp_mask, - SetLogic logic) { +static DataPlane combine(const DataPlane &dp_data, + const DataPlane &dp_mask, + 
SetLogic logic) { int n_in = 0; - bool v_data, v_mask; double v; DataPlane dp; @@ -1323,8 +1429,8 @@ DataPlane combine(const DataPlane &dp_data, const DataPlane &dp_mask, for(int y=0; y0) mask_name << "_"; + mask_name << masktype_to_string(mask_type_opts[i]); + } + mask_name << "_mask"; } } @@ -1424,10 +1534,9 @@ void write_netcdf(const DataPlane &dp) { mask_var = add_var(f_out, string(mask_name), ncFloat, lat_dim, lon_dim, deflate_level); cs << cs_erase << mask_name << " masking region"; add_att(&mask_var, "long_name", string(cs)); + add_att(&mask_var, "units", string(units_cs)); add_att(&mask_var, "_FillValue", bad_data_float); - cs << cs_erase << masktype_to_string(mask_type); - if(thresh.get_type() != thresh_na) cs << thresh.get_str(); - add_att(&mask_var, "mask_type", string(cs)); + add_att(&mask_var, "mask_type", mask_type_desc_cs); // Write the solar time if(is_solar_masktype(mask_type)) { @@ -1448,7 +1557,7 @@ void write_netcdf(const DataPlane &dp) { for(int x=0; x " << "unsupported masking type \"" << s << "\"\n\n"; @@ -1502,23 +1627,24 @@ MaskType string_to_masktype(const char *s) { //////////////////////////////////////////////////////////////////////// const char * masktype_to_string(const MaskType t) { - const char *s = (const char *) nullptr; + const char *s = nullptr; switch(t) { - case MaskType::Poly: s = "poly"; break; - case MaskType::Poly_XY: s = "poly_xy"; break; - case MaskType::Box: s = "box"; break; - case MaskType::Circle: s = "circle"; break; - case MaskType::Track: s = "track"; break; - case MaskType::Grid: s = "grid"; break; - case MaskType::Data: s = "data"; break; - case MaskType::Solar_Alt: s = "solar_alt"; break; - case MaskType::Solar_Azi: s = "solar_azi"; break; - case MaskType::Lat: s = "lat"; break; - case MaskType::Lon: s = "lon"; break; - case MaskType::Shape: s = "shape"; break; - case MaskType::None: s = na_str; break; - default: s = (const char *) nullptr; break; + case MaskType::Poly: s = "poly"; break; + case 
MaskType::Poly_XY: s = "poly_xy"; break; + case MaskType::Box: s = "box"; break; + case MaskType::Circle: s = "circle"; break; + case MaskType::Track: s = "track"; break; + case MaskType::Grid: s = "grid"; break; + case MaskType::Data: s = "data"; break; + case MaskType::Solar_Alt: s = "solar_alt"; break; + case MaskType::Solar_Azi: s = "solar_azi"; break; + case MaskType::Solar_Time: s = "solar_time"; break; + case MaskType::Lat: s = "lat"; break; + case MaskType::Lon: s = "lon"; break; + case MaskType::Shape: s = "shape"; break; + case MaskType::None: s = na_str; break; + default: s = (const char *) nullptr; break; } return s; @@ -1526,7 +1652,33 @@ const char * masktype_to_string(const MaskType t) { //////////////////////////////////////////////////////////////////////// -void usage() { +const char * masktype_to_description(const MaskType t) { + const char *s = nullptr; + + switch(t) { + case MaskType::Poly: s = "Polyline"; break; + case MaskType::Poly_XY: s = "Polyline XY"; break; + case MaskType::Box: s = "Box"; break; + case MaskType::Circle: s = "Circle"; break; + case MaskType::Track: s = "Track"; break; + case MaskType::Grid: s = "Grid"; break; + case MaskType::Data: s = "Data"; break; + case MaskType::Solar_Alt: s = "Solar Alt"; break; + case MaskType::Solar_Azi: s = "Solar Azi"; break; + case MaskType::Solar_Time: s = "Solar Time"; break; + case MaskType::Lat: s = "Latitude"; break; + case MaskType::Lon: s = "Longitude"; break; + case MaskType::Shape: s = "Shapefile"; break; + case MaskType::None: s = na_str; break; + default: s = (const char *) nullptr; break; + } + + return s; +} + +//////////////////////////////////////////////////////////////////////// + +__attribute__((noreturn)) static void usage() { cout << "\n*** Model Evaluation Tools (MET" << met_version << ") ***\n\n" @@ -1564,11 +1716,11 @@ void usage() { << "\t\t For \"grid\" masking, specify a named grid, the " << "path to a gridded data file, or an explicit grid " << "specification.\n" - 
<< "\t\t For \"data\" masking specify a gridded data file.\n" - << "\t\t For \"solar_alt\" and \"solar_azi\" masking, " - << "specify a gridded data file or a timestring in " - << "YYYYMMDD[_HH[MMSS]] format.\n" - << "\t\t For \"lat\" and \"lon\" masking, no \"mask_file\" " + << "\t\t For \"data\" masking, specify a gridded data file.\n" + << "\t\t For \"solar_alt\", \"solar_azi\", and \"solar_time\" " + << "masking, specify a gridded data file or a timestring in " + << "YYYYMMDD[_HH[MMSS]] UTC format.\n" + << "\t\t For \"lat\" and \"lon\" masking, no \"mask_file\" is " << "needed, simply repeat \"input_grid\".\n" << "\t\t For \"shape\" masking, specify a shapefile " << "(suffix \".shp\").\n" @@ -1576,49 +1728,56 @@ void usage() { << "\t\t\"out_file\" is the output NetCDF mask file to be " << "written (required).\n" - << "\t\t\"-type string\" specify the masking type " + << "\t\t\"-type string\" is a comma-separated list of masking types " << "(required).\n" << "\t\t \"poly\", \"poly_xy\", \"box\", \"circle\", \"track\", " - << "\"grid\", \"data\", \"solar_alt\", \"solar_azi\", \"lat\", " - << "\"lon\" or \"shape\"\n" + << "\"grid\", \"data\", \"solar_alt\", \"solar_azi\", \"solar_time\", " + << "\"lat\", \"lon\" or \"shape\"\n" + << "\t\t Use multiple times for multiple mask types.\n" - << "\t\t\"-input_field string\" reads existing mask data from " - << "the \"input_grid\" gridded data file (optional).\n" + << "\t\t\"-input_field string\" initializes the \"input_grid\" with " + << "values from this field (optional).\n" << "\t\t\"-mask_field string\" (optional).\n" << "\t\t For \"data\" masking, define the field from " << "\"mask_file\" to be used.\n" + << "\t\t Use multiple times for multiple mask types.\n" << "\t\t\"-complement\" computes the complement of the current " << "mask (optional).\n" - << "\t\t\"-union | -intersection | -symdiff\" specify how " - << "to combine the \"input_field\" data with the current mask " - << "(optional).\n" + << "\t\t\"-union | 
-intersection | -symdiff\" specify how to combine " + << "multiple binary masks (optional).\n" + << "\t\t Applies to masks read from the \"input_field\" and those " + << "generated during the current run.\n" - << "\t\t\"-thresh string\" defines the threshold to be applied " - << "(optional).\n" + << "\t\t\"-thresh string\" is a comma-separated list of thresholds " + << "to be applied (optional).\n" << "\t\t For \"circle\" and \"track\" masking, threshold the " << "distance (km).\n" << "\t\t For \"data\" masking, threshold the values of " << "\"mask_field\".\n" << "\t\t For \"solar_alt\" and \"solar_azi\" masking, " - << "threshold the computed solar values.\n" + << "threshold the solar values (deg).\n" + << "\t\t For \"solar_time\" masking, " + << "threshold the solar time (hr).\n" << "\t\t For \"lat\" and \"lon\" masking, threshold the " - << "latitude and longitude values.\n" + << "latitude and longitude values (deg).\n" + << "\t\t Use multiple times for multiple mask types.\n" << "\t\t\"-height n\" and \"-width n\" (optional).\n" - << "\t\t For \"box\" masking, specify these dimensions in grid " - << "units.\n" + << "\t\t For \"box\" masking, specify the dimensions (grid " + << "units).\n" << "\t\t\"-shapeno n\" (optional).\n" - << "\t\t For \"shape\" masking, specify the integer shape " - << "number(s) (0-based) to be used as a comma-separated list.\n" + << "\t\t For \"shape\" masking, specify a comma-separated list " + << "of 0-based integer shape number(s).\n" << "\t\t\"-shape_str name string\" (optional).\n" << "\t\t For \"shape\" masking, specify the shape(s) to be used " << "as a named attribute followed by a comma-separated list of " - << "matching strings. 
If used multiple times, only shapes matching " + << "matching strings.\n" + << "\t\t If used multiple times, only shapes matching " << "all named attributes will be used.\n" << "\t\t\"-value n\" overrides the default output mask data " @@ -1627,7 +1786,7 @@ void usage() { << "\t\t\"-name string\" specifies the output variable name " << "for the mask (optional).\n" - << "\t\t\"-log file\" outputs log messages to the specified " + << "\t\t\"-log file\" writes log messages to the specified " << "file (optional).\n" << "\t\t\"-v level\" overrides the default level of logging (" @@ -1644,94 +1803,95 @@ void usage() { //////////////////////////////////////////////////////////////////////// -void set_type(const StringArray & a) { - if(type_is_set) { - mlog << Error << "\n" << program_name << " -> " - << "the -type command line requirement can only be used once!\n" - << "To apply multiple masks, run this tool multiple times " - << "using the output of one run as the input to the next." - << "\n\n"; - exit(1); +static void set_type(const StringArray & a) { + StringArray sa; + sa.parse_css(a[0]); + for(int i=0; i mask_type_opts; +static MaskType mask_type; +static ConcatString mask_type_desc_cs; // Optional arguments -static ConcatString input_field_str, mask_field_str; +static bool have_input_data = true; +static ConcatString input_field_str; +static StringArray mask_field_opts; +static ConcatString mask_field_str; +static ConcatString data_desc_cs; static SetLogic set_logic = SetLogic::None; static bool complement = false; +static ThreshArray thresh_opts; static SingleThresh thresh; static int height = bad_data_double; static int width = bad_data_double; static double mask_val = default_mask_val; static ConcatString mask_name; static unixtime solar_ut = (unixtime) 0; +static ConcatString units_cs("flag"); static std::map shape_str_map; static NumArray shape_numbers; @@ -114,7 +123,8 @@ static std::vector shape_recs; static MaskPoly poly_mask; // Grid on which the data 
field resides -static Grid grid, grid_mask; +static Grid grid; +static Grid grid_mask; // Configuration object for reading config strings static MetConfig global_config; @@ -128,7 +138,7 @@ static void process_mask_file(DataPlane &dp); static void get_data_plane(const ConcatString &file_name, const ConcatString &config_str, bool, DataPlane &dp, Grid &dp_grid); -static bool get_gen_vx_mask_config_str(MetNcMetDataFile *, +static bool get_gen_vx_mask_config_str(const MetNcMetDataFile *, ConcatString &); static void get_shapefile_strings(); static void get_shapefile_records();