From f072a6457e08a4ab89ac5c31b8c39fe93bd834d7 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 12 Oct 2021 14:38:28 -0600 Subject: [PATCH 01/33] Move all non-ConfigCAM classes to new, separate 'cam_config_classes.py' file (Issue #152). --- cime_config/cam_config.py | 882 +---------------------------- cime_config/cam_config_classes.py | 903 ++++++++++++++++++++++++++++++ test/pylint_test.sh | 1 + test/run_tests.sh | 2 + 4 files changed, 912 insertions(+), 876 deletions(-) create mode 100644 cime_config/cam_config_classes.py diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 3411e29e..e8047843 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -15,12 +15,14 @@ import argparse import os.path -from collections import OrderedDict - #----------------------------------- # Import CAM-specific python modules #----------------------------------- +# Import internal CAM configuration classes: +from cam_config_classes import ConfigInteger, ConfigString, ConfigList +from cam_config_classes import CamConfigValError, CamConfigTypeError + # Import build cache object: from cam_build_cache import BuildCacheCAM # Re-build consistency cache @@ -28,878 +30,6 @@ from cam_autogen import generate_registry, generate_physics_suites from cam_autogen import generate_init_routines - -# Determine regular rexpression type (for later usage in check_string_val) -REGEX_TYPE = type(re.compile(r" ")) - -############################################################################### -# Error-handling classes -############################################################################### - -class CamConfigValError(ValueError): - """Class used to handle CAM config value errors - (e.g., log user errors without backtrace)""" - # pylint: disable=useless-super-delegation - def __init__(self, message): - super(CamConfigValError, self).__init__(message) - # pylint: enable=useless-super-delegation - -############################################################################### - -class CamConfigTypeError(TypeError): - """Class used to handle CAM config type errors - (e.g., log user errors without backtrace)""" - # pylint: disable=useless-super-delegation - def __init_(self, message): - super(CamConfigTypeError, self).__init__(message) - # pylint: enable=useless-super-delegation - -############################################################################### -# Valid value-checking functions -############################################################################### - -def _check_integer_val(name, val, valid_vals=None): - - """ - Checks if a provided integer value is "valid" - as defined by the provided "valid_vals" entry - for the given config variable (name). - - If value is not valid, then an error message is returned, - otherwise the function returns None. - - Possible valid_val types are: - - list -> If a list, then just check that provided value is in the list. - - tuple -> If a tuple, then there must be only two values, which define - a possible range of values, e.g. (min, max). If only one value - is provided, then only a minimum (or maximum) value will be - enforced, depending on if the tuple is (x, None) or (None ,x). - - Doctests: - - Please note that "successful" validation tests are done in the ConfigInteger doctests. - - 1. Check that using a non-integer value throws an error: - >>> _check_integer_val("test", 5.0, valid_vals=None) - Traceback (most recent call last): - ... 
- CamConfigTypeError: ERROR: Value being checked in 'check_integer_val' must be an integer type, not ''. - - 2. Check that using a valid_vals option that is not a list or tuple throws an error: - >>> _check_integer_val("test", 5, valid_vals="test_vals") - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Valid values for integers must by provided as either a tuple or a list, not ''. - - 3. Check that using non-integer values inside the valid_vals list or tuple throws an error: - >>> _check_integer_val("test", 5, valid_vals=[1,2,5,"test_val"]) - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Valid value, 'test_val', for variable 'test', must be an integer. Currently it is ''. - - - 4. Check that using a tuple with only one entry throws an error: - >>> _check_integer_val("test", 5, valid_vals=(1,)) - Traceback (most recent call last): - ... - CamConfigValError: ERROR: Valid values tuple for variable, 'test', must have two elements, not '1' elements. - - 5. Check that using a tuple with more than two entries throws an error: - >>> _check_integer_val("test", 5, valid_vals=(1,2,5)) - Traceback (most recent call last): - ... - CamConfigValError: ERROR: Valid values tuple for variable, 'test', must have two elements, not '3' elements. - - 6. Check that using a tuple with only Nones throws an error: - >>> _check_integer_val("test", 5, valid_vals=(None,None)) - Traceback (most recent call last): - ... - CamConfigValError: ERROR: Valid values tuple for variable, 'test', must contain at least one integer. - - 7. Check that an integer less than the tuple min is "invalid": - >>> _check_integer_val("test", 5, valid_vals=(6,None)) - "ERROR: Value, '5', provided for variable, 'test', is less than minimum valid value, '6'" - - 8. Check that an integer greater than the tuple max is "invalid": - >>> _check_integer_val("test", 5, valid_vals=(None,4)) - "ERROR: Value, '5', provided for variable, 'test', is greater than max valid value, '4'" - - 9. Check that an integer outside min/max tuple range is "invalid": - >>> _check_integer_val("test", 5, valid_vals=(10,13)) - "ERROR: Value, '5', provided for variable, 'test', is outside valid value range, '(10, 13)'" - - 10. Check that an integer not included in the list is "invalid": - >>> _check_integer_val("test", 5, valid_vals=[1,2,3,4]) - "ERROR: Value, '5', provided for variable, 'test', does not match any of the valid values: '[1, 2, 3, 4]'" - - """ - - # Make sure that provided value is an integer: - if not isinstance(val, int): - emsg = "ERROR: Value being checked in 'check_integer_val' " - emsg += "must be an integer type, not '{}'." - raise CamConfigTypeError(emsg.format(type(val))) - # End if - - # Only check the given value if valid_vals is not "None" - if valid_vals is not None: - - # Check if valid values is a tuple - if isinstance(valid_vals, tuple): - - # Check that all tuple elements are either None or integers - emsg = "" - for valid_val in valid_vals: - if valid_val is not None and not isinstance(valid_val, int): - emsg += "ERROR: Valid value, '{}', for variable '{}', must be " - emsg += "either None or an integer. 
Currently it is '{}'.\n" - emsg = emsg.format(valid_val, name, type(valid_val)) - # End if - # End for - if emsg: - raise CamConfigTypeError(emsg) - # end if - - # Check that length of valid values tuple is 2 - if len(valid_vals) != 2: - emsg = ("ERROR: Valid values tuple for variable, " - "'{}', must have two elements, not '{}' elements.") - raise CamConfigValError(emsg.format(name, - len(valid_vals))) - # End if - - if valid_vals[0] is None: - # If first valid value is "None", then just check that - # given value is less than second valid value, and - # that second value is an integer - if valid_vals[1] is None: - emsg = "ERROR: Valid values tuple for variable, '{}', " - emsg += "must contain at least one integer." - raise CamConfigValError(emsg.format(name)) - # End if - if val > valid_vals[1]: - emsg = "ERROR: Value, '{}', provided for variable, " - emsg += "'{}', is greater than max valid value, '{}'" - return emsg.format(val, name, valid_vals[1]) - # End if - elif valid_vals[1] is None: - # Check if second value is "None". - # If so, then just check that given value is greater - # than first valid value - if val < valid_vals[0]: - emsg = "ERROR: Value, '{}', provided for variable, " - emsg += "'{}', is less than minimum valid value, '{}'" - return emsg.format(val, name, valid_vals[0]) - # End if - else: - # If both valid values are integers, then check that - # given value is between both valid values - if (val < valid_vals[0]) or (val > valid_vals[1]): - emsg = "ERROR: Value, '{}', provided for variable, " - emsg += "'{}', is outside valid value range, '{}'" - return emsg.format(val, name, valid_vals) - # End if - # End if - - elif isinstance(valid_vals, list): - - # Check that all list elements are integers - emsg = "" - for valid_val in valid_vals: - if not isinstance(valid_val, int): - emsg += "ERROR: Valid value, '{}', for variable '{}', " - emsg += "must be an integer. Currently it is '{}'.\n" - emsg = emsg.format(valid_val, name, type(valid_val)) - # End if - # End for - if emsg: - raise CamConfigTypeError(emsg) - # end if - - - # If valid_vals is a list, then just check that the given value - # matches one of the valid values in the list - if not val in valid_vals: - emsg = "ERROR: Value, '{}', provided for variable, '{}', " - emsg += "does not match any of the valid values: '{}'" - return emsg.format(val, name, valid_vals) - # End if - - else: - # valid_vals is neither a list nor a tuple, so throw an error: - emsg = "ERROR: Valid values for integers must by provided as " - emsg += "either a tuple or a list, not '{}'." - raise CamConfigTypeError(emsg.format(type(valid_vals))) - - # End if - # End if - - # Return nothing if value is valid - return None - -############################################################################### - -def _check_string_val(name, val, valid_vals=None): - - """ - Checks if a provided string value is "valid" - as defined by the provided "valid_vals" entry. - - Possible valid_val types are: - - list -> If a list, then just check that provided value is in the list. - - regex -> If a compiled regular expression, then check that the provided - value is matched by the regular expression. - - Doctests: - - Please note that "successful" validation tests are done in the ConfigString doctests. - - 1. Check that using a non-string value throws an error: - >>> _check_string_val("test", [5], valid_vals=None) - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Value being checked in 'check_string_val' must be a string type, not ''. 
- - 2. Check that using a valid_vals option that is not None, a list, or a regex throws an error: - >>> _check_string_val("test", "test_val", valid_vals=5) - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Valid values for strings must by provided as either a regular expression or a list, not ''. - - 3. Check that using non-string values inside the valid_vals list throws an error: - >>> _check_string_val("test", "1", valid_vals=["1","2","5",6]) - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Valid value, '6', for variable 'test', must be a string. Currently it is ''. - - 9. Check that a string that doesn't match the provided regex is "invalid": - >>> _check_string_val("test", "test_val", valid_vals=re.compile(r"foo")) - "ERROR: Value, 'test_val', provided for variable, 'test', does not match the valid regular expression." - - 10. Check that a string not included in the list is "invalid": - >>> _check_string_val("test", "test_val", valid_vals=["1","2","3","4"]) - "ERROR: Value, 'test_val', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3', '4']'" - """ - - # Make sure that provided value is a string: - if not isinstance(val, str): - emsg = "ERROR: Value being checked in 'check_string_val' " - emsg += "must be a string type, not '{}'." - raise CamConfigTypeError(emsg.format(type(val))) - # End if - - # Only check the given value if valid_vals is not "None" - if valid_vals is not None: - - # If a list, then check that the given value - # matches one of the valid values in the list - if isinstance(valid_vals, list): - - # Check that all list elements are strings: - for valid_val in valid_vals: - if not isinstance(valid_val, str): - emsg = ("ERROR: Valid value, '{}', for variable '{}', must be " - "a string. Currently it is '{}'.") - raise CamConfigTypeError(emsg.format(valid_val, name, type(valid_val))) - # End if - # End for - - if not val in valid_vals: - emsg = "ERROR: Value, '{}', provided for variable, '{}', " - emsg += "does not match any of the valid values: '{}'" - return emsg.format(val, name, valid_vals) - # End if - elif isinstance(valid_vals, REGEX_TYPE): - # If a regular expression object, then check that - # value is matched by the expression - if valid_vals.match(val) is None: - emsg = "ERROR: Value, '{}', provided for variable, '{}', " - emsg += "does not match the valid regular expression." - return emsg.format(val, name) - # End if - # End if - else: - # valid_vals is neither a list nor a regex, so throw an error: - emsg = "ERROR: Valid values for strings must by provided as " - emsg += "either a regular expression or a list, not '{}'." - raise CamConfigTypeError(emsg.format(type(valid_vals))) - - # End if - # End if - - # Return nothing if value is valid - return None - -# Helper function to better generalize config value checking: -_TYPE_CHECK_FUNCTIONS = {"int" : _check_integer_val, "str" : _check_string_val} - -############################################################################### -# CAM configure option classes -############################################################################### - -class ConfigGen(object): - - """ - Generic configuration class used to - store CAM configuration names and - descriptions. - - Inputs to initalize class are: - name -> Name of new CAM configure option - desc -> Text description of CAM configure option - is_nml_attr (optional) -> Logical that determines if this option - is also a namelist attribute (defaut is False) - - Doctests: - - 1. 
Check that ConfigGen works properly: - - >>> ConfigGen("test", "test object description").name - 'test' - - >>> ConfigGen("test", "test object description").desc - '# test object description' - - >>> print(ConfigGen("test", ["test", "object", "description"]).desc) - # test - # object - # description - - >>> ConfigGen("test", "test object description", is_nml_attr=True).is_nml_attr - True - - 2. Check that non-optional inputs must be strings: - - >>> ConfigGen(5, "test_object_description").name - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Configuration variable name, '5', must be a string, not - - >>> ConfigGen("test", (5,)).desc - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Configuration variable, 'test', must have a string-type description, or a list of string-type descriptions, not ((5,)) - - >>> ConfigGen("test", ["test", ("object", "description")]).desc - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: Configuration variable, 'test', must have a string-type description, or a list of string-type descriptions, not [, ] - - """ - - def __init__(self, name, desc, is_nml_attr=False): - - # Check that "name" is a string - if not isinstance(name, str): - emsg = "ERROR: Configuration variable name, '{}', must be a string, not {}" - raise CamConfigTypeError(emsg.format(name, type(name))) - # end if - - # Check that "desc" is a string or a list of strings - desc_ok = isinstance(desc, str) - if (not desc_ok) and isinstance(desc, list): - desc_ok = all(isinstance(x, str) for x in desc) - # end if - if not desc_ok: - emsg = ("ERROR: Configuration variable, '{}', " - "must have a string-type description, or a list of " - "string-type descriptions, not {}") - if isinstance(desc, str): - derr = type(desc) - elif isinstance(desc, list): - derr = [type(x) for x in desc] - else: - derr = "{} ({})".format(type(desc), desc) - # end if - raise CamConfigTypeError(emsg.format(name, derr)) - # end if - - # Add name, description, and namelist attribute logical to object - self.__name = name - if isinstance(desc, str): - self.__desc = "# {}".format(desc) - elif isinstance(desc, list): - self.__desc = "# " + "\n# ".join(desc) - # end if - self.__is_nml_attr = is_nml_attr - - #++++++++++++++++++++++++ - - # Create properties needed to return name and description properties - @property - def name(self): - """Return the name of this config object""" - return self.__name - - @property - def desc(self): - """Return the description of this config object""" - return self.__desc - - @property - def is_nml_attr(self): - """Return the namelist attribute logical of this config object""" - return self.__is_nml_attr - -############################################################################### - -class ConfigInteger(ConfigGen): - - """ - Configuration class used to store - integer-based CAM configuration - options. - - Inputs to initalize class are: - name -> Name of new CAM configure option - desc -> Text description of CAM configure option - val -> Integer value for CAM configure option - valid_vals (optional) -> Range or list of valid CAM configure option values (default is None) - is_nml_attr (optional) -> Logical that determines if option is also a namelist attribute (defaut is False) - - Doctests: - - 1. 
Check that ConfigInteger works properly: - - With no valid values: - >>> ConfigInteger("test", "test object description", 5).value - 5 - - With valid values tuple: - >>> ConfigInteger("test", "test object description", 5, (1, 10)).valid_vals - (1, 10) - - With valid values list: - >>> ConfigInteger("test", "test object description", 5, [4, 5, 6]).valid_vals - [4, 5, 6] - - With namelist attribute set to "True": - >>> ConfigInteger("test", "test object description", 5, [4, 5, 6], is_nml_attr=True).is_nml_attr - True - - """ - - def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): - - # Add generic attributes - super().__init__(name, desc, is_nml_attr=is_nml_attr) - - # Add valid_vals to object - self.__valid_vals = valid_vals - - # Check that provided value is "valid" based on the - # valid values list or tuple. Note that this function - # also checks valid_vals itself to ensure that it is - # of the correct type and format. - self.__check_value(val) - - # If everything is ok, then add provided value to object - self.__value = val - - #++++++++++++++++++++++++ - - # Create properties needed to return given value and valid values - @property - def value(self): - """Return the value of this config object""" - return self.__value - - @property - def valid_vals(self): - """Return the valid values of this config object""" - return self.__valid_vals - - #++++++++++++++++++++++++ - - def __check_value(self, val): - - """ - Checks input/given value to make sure - it is valid according to the - object's valid values list or tuple. - - If a tuple, then assume the two - valid values in the tuple represent a range, - and check that the given value is - in-between that range. - - If a list, then assume the given value - must match at least one of the valid values - included in that list. - """ - - # Check if integer value is valid - bad_val_msg = _check_integer_val(self.name, val, - valid_vals=self.valid_vals) - - # Raise an error if a bad value is found: - if bad_val_msg: - raise CamConfigValError(bad_val_msg) - - #++++++++++++++++++++++++ - - def set_value(self, val): - - """ - Set configure object's value to the one provided. - """ - - # First, check that the provided value is valid - self.__check_value(val) - - # If ok, then set object's value to one provided - self.__value = val - -############################################################################### - -class ConfigString(ConfigGen): - - """ - Configuration class used to store - string-based CAM configuration - options. - - Inputs to initalize class are: - name -> Name of new CAM configure option - desc -> Text description of CAM configure option - val -> Integer value for CAM configure option - valid_vals (optional) -> List or regex of valid CAM configure option values (default is None) - is_nml_attr (optional) -> Logical that determines if option is also a namelist attribute (defaut is False) - - Doctests: - - 1. 
Check that ConfigString works properly: - - With no valid values: - >>> ConfigString("test", "test object description", "test_val").value - 'test_val' - - With valid values list: - >>> ConfigString("test", "test object description", "test_val", ["test_val", "test_val_II"]).valid_vals - ['test_val', 'test_val_II'] - - With valid values regular expression: - >>> ConfigString("test", "test_object description", "test_val", re.compile(r"test_val")).value - 'test_val' - - With namelist attribute set to "True": - >>> ConfigString("test", "test_object description", "test_val", re.compile(r"test_val"), is_nml_attr=True).is_nml_attr - True - - """ - - def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): - - # Add generic attributes - super().__init__(name, desc, is_nml_attr=is_nml_attr) - - # If ok, then add valid_vals to object - self.__valid_vals = valid_vals - - # Next, check that provided value is "valid" based on the - # valid values list or regular expression. Note that this - # function also checks valid_vals itself to ensure that it - # is of the correct type and format. - - self.__check_value(val) - - # If everything is ok, then add provided value to object - self.__value = val - - #++++++++++++++++++++++++ - - # Create properties needed to return given value and valid values - # without underscores - @property - def value(self): - """Return the value of this config object""" - return self.__value - - @property - def valid_vals(self): - """Return the valid values of this config object""" - return self.__valid_vals - - #++++++++++++++++++++++++ - - def __check_value(self, val): - - """ - Checks input/given value to make sure - it is valid according to the - object's valid values list or - regular expression. - - If a list, then assume the given value - must match at least one of the valid values - included in that list. - - If a compiled regular expression, then - assume the value must match the regular - expression. - """ - - # Check if string value is valid - bad_val_msg = _check_string_val(self.name, val, - valid_vals=self.valid_vals) - - # Raise an error if a bad value is found: - if bad_val_msg: - raise CamConfigValError(bad_val_msg) - - #++++++++++++++++++++++++ - - def set_value(self, val): - - """ - Set configure object's value to the one provided. - """ - - # First, check that the provided value is valid - self.__check_value(val) - - # If ok, then set object's value to one provided - self.__value = val - -############################################################################### - -class ConfigList(ConfigGen): - - """ - Configuration class used to store list-based - CAM configuration options. - - Inputs to initalize class are: - name -> Name of new CAM configure option - desc -> Text description of CAM configure option - list_vals -> List values for CAM configure option - valid_type (optional) -> Specify valid type for CAM configure option list values. - Currently accepts "int" for integer and "str" for string. - valid_vals (optional) -> Valid CAM configure option values (default is None), - valid_type must be included in order to use valid_vals. - - Doctests: - - 1. Check that ConfigList works properly with no valid_type: - - >>> ConfigList("test", "test object description", [1,2,3]).value - [1, 2, 3] - - 2. Check that ConfigList works with a correct valid type provided: - >>> ConfigList("test", "test object description", ["x", "y", "z"], valid_type="str").value - ['x', 'y', 'z'] - - - 3. 
Check that ConfigList With a non-string passed to "valid_type" fails with the correct error: - >>> ConfigList("test", "test object description", [1, 2, 3], valid_type=5).value - Traceback (most recent call last): - ... - CamConfigTypeError: ERROR: valid_type entry for variable 'test' must be a string, not type ''. - - 4. Check that ConfigList with a non-recognized "valid_type" option fails with the correct error: - >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="foo").value - Traceback (most recent call last): - ... - CamConfigValError: ERROR: 'foo' is not a recognized option for 'valid_type'. Please use either 'int' or 'str'. - - 5. Check that ConfigList with list entries that don't match the valid_type entry fails with the correct error: - >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="str").value - Traceback (most recent call last): - ... - CamConfigValError: ERROR: The following list entries, provided for variable, 'test', are not strings, but instead are: - '1': type='' - '2': type='' - '3': type='' - - - 6. Check that ConfigList with "valid_vals" but no "valid_type" fails with the correct error: - >>> ConfigList("test", "test object description", [1, 2, 3], valid_vals=[1,2,3,4,5]).value - Traceback (most recent call last): - ... - CamConfigValError: ERROR: valid values can only be used if valid_type is 'int' or 'str', not 'None'. - - 7. check that ConfigList with a list that matches the "valid_vals" entry works as expected: - >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="int", valid_vals=(0,5)).value - [1, 2, 3] - - 8. check that ConfigList with a list that does not mach the "valid_vals" entry fails with the correct error: - >>> ConfigList("test", "test object description", ["1", "b", "c"], valid_type="str", valid_vals=["1","2","3"]).value - Traceback (most recent call last): - ... - CamConfigValError: The following errors were found for a list-type config variable: - ERROR: Value, 'b', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' - - ERROR: Value, 'c', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' - """ - - def __init__(self, name, desc, val, valid_type=None, valid_vals=None): - - # Add generic attributes - super().__init__(name, desc, is_nml_attr=False) - - # Check if valid_type is not None - if valid_type is not None: - # If not None, make sure valid_type is a string: - if not isinstance(valid_type, str): - emsg = "ERROR: valid_type entry for variable '{}' must be a string, " - emsg += " not type '{}'." - raise CamConfigTypeError(emsg.format(name, type(valid_type))) - # End if - # End if - - # Check that the valid values option is only being used with a valid type: - if valid_vals is not None and valid_type not in ["int", "str"]: - # Currently valid values can only be used with strings or integers, - # so throw an error: - emsg = "ERROR: valid values can only be used if valid_type is 'int' or 'str', not '{}'." 
- raise CamConfigValError(emsg.format(valid_type)) - - # If ok, then add valid_type and valid_vals to object - self.__valid_type = valid_type - self.__valid_vals = valid_vals - - # Next, check that provided list entry types and values are "valid" - # based on the valid type and valid values provided: - if self.__valid_type is not None: - self.__check_type(val) - - #If valid values are provided, then check them as well: - if self.__valid_vals is not None: - self.__check_values(val) - - # If everything is ok, then add provided value to object - self.__value = val - - #++++++++++++++++++++++++ - - # Create properties needed to return given value and valid values - # without underscores - @property - def value(self): - """Return the value of this config object""" - return self.__value - - @property - def valid_type(self): - """Return the valid type of this config object""" - return self.__valid_type - - @property - def valid_vals(self): - """Return the valid values of this config object""" - return self.__valid_vals - - #++++++++++++++++++++++++ - - def __check_type(self, val): - - """ - Check if the entries in the provided - list (val) are of the correct type as - specified by the "valid_type" entry. - """ - - # Extract valid type (valid_type) from object - valid_type = self.valid_type - - # Create empty dictionary to store errors: - bad_val_types = OrderedDict() - - good_type = "??" - if valid_type == "str": - #All list entries should be strings: - good_type = "string" - for list_entry in val: - if not isinstance(list_entry, str): - bad_val_types[str(list_entry)] = str(type(list_entry)) - # end if - # end for - elif valid_type == "int": - #All list entries should be integers: - good_type = "int" - for list_entry in val: - if not isinstance(list_entry, int): - bad_val_types[str(list_entry)] = str(type(list_entry)) - # end if - # end for - else: - #Invalid option given for "valid_type", so raise error: - emsg = "ERROR: '{}' is not a recognized option for 'valid_type'." - emsg += " Please use either 'int' or 'str'." - raise CamConfigValError(emsg.format(valid_type)) - # End if - #If bad values dictionary is non-empty, then raise error: - if bad_val_types: - if len(bad_val_types) > 1: - emsg = "ERROR: The following list entries, provided for variable," - emsg += " '{}', are not {}s, but instead are:\n".format(self.name, good_type) - else: - emsg = "ERROR: The following list entry, provided for variable," - emsg += " '{}', is not a {}, but instead is: ".format(self.name, good_type) - # end if - for key_str, type_str in bad_val_types.items(): - emsg += "'{}': type='{}'\n".format(key_str, type_str) - # end for - raise CamConfigValError(emsg) - # End if - - #++++++++++++++++++++++++ - - def __check_values(self, list_vals): - - """ - Check if the entries in the provided - list (val) are valid as specified by - specified by the "valid_vals" entry. 
- """ - - # Create empty list: - bad_val_msgs = [] - - # Check if valid type is string or integer - if self.valid_type in _TYPE_CHECK_FUNCTIONS: - for val in list_vals: - #Check if integer or string value in list is valid - bad_val_msg = _TYPE_CHECK_FUNCTIONS[self.valid_type](self.name, val, - valid_vals=self.valid_vals) - # If return value is not None, then add - # to bad value list - if bad_val_msg: - bad_val_msgs.append(bad_val_msg) - # End if - # end for - else: - emsg = "Internal Error: Bad valid_type, '{}'" - raise CamConfigTypeError(emsg.format(self.valid_type)) - # end if - - # If bad values are present, then raise an error - if bad_val_msgs: - emsg = "The following errors were found for a list-type config variable:\n" - emsg += "\n\n".join(bad_val_msgs) - raise CamConfigValError(emsg) - # End if - - #++++++++++++++++++++++++ - - def set_value(self, list_vals): - - """ - Set configure object's value to the one provided. - """ - - # First, check that the provided value is valid - if self.__valid_type is not None: - self.__check_type(list_vals) - - # If ok, then set object's value to one provided - self.__value = list_vals - - ############################################################################### # MAIN CAM CONFIGURE OBJECT ############################################################################### @@ -1522,14 +652,14 @@ def add_cppdef(self, cppname, value=None): >>> FCONFIG.add_cppdef("TEST_CPPDEF") # doctest: +ELLIPSIS Traceback (most recent call last): ... - CamConfigValError: ERROR: CPP definition 'TEST_CPPDEF' has already been set + cam_config_classes.CamConfigValError: ERROR: CPP definition 'TEST_CPPDEF' has already been set Check that a duplicate cppdef creates an error even if an equals sign is present in the stored copy but not the passed variable: >>> FCONFIG.add_cppdef("NEW_TEST") # doctest: +ELLIPSIS Traceback (most recent call last): ... - CamConfigValError: ERROR: CPP definition 'NEW_TEST' has already been set + cam_config_classes.CamConfigValError: ERROR: CPP definition 'NEW_TEST' has already been set """ #Create string to check if CPP definition is already present: diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py new file mode 100644 index 00000000..6a77df92 --- /dev/null +++ b/cime_config/cam_config_classes.py @@ -0,0 +1,903 @@ +""" +Location of interal python classes used by the +"ConfigCAM" class to generate, store, and pass-on +any CAM configuration variables to other components +of the build system. 
+""" + +#---------------------------------------- +# Import generic python libraries/modules +#---------------------------------------- + +import re +from collections import OrderedDict + +# Determine regular rexpression type (for later usage in check_string_val) +REGEX_TYPE = type(re.compile(r" ")) + +############################################################################### +# Error-handling classes +############################################################################### + +class CamConfigValError(ValueError): + """Class used to handle CAM config value errors + (e.g., log user errors without backtrace)""" + # pylint: disable=useless-super-delegation + def __init__(self, message): + super(CamConfigValError, self).__init__(message) + # pylint: enable=useless-super-delegation + +############################################################################### + +class CamConfigTypeError(TypeError): + """Class used to handle CAM config type errors + (e.g., log user errors without backtrace)""" + # pylint: disable=useless-super-delegation + def __init_(self, message): + super(CamConfigTypeError, self).__init__(message) + # pylint: enable=useless-super-delegation + +############################################################################### +# Valid value-checking functions +############################################################################### + +def _check_integer_val(name, val, valid_vals=None): + + """ + Checks if a provided integer value is "valid" + as defined by the provided "valid_vals" entry + for the given config variable (name). + + If value is not valid, then an error message is returned, + otherwise the function returns None. + + Possible valid_val types are: + + list -> If a list, then just check that provided value is in the list. + + tuple -> If a tuple, then there must be only two values, which define + a possible range of values, e.g. (min, max). If only one value + is provided, then only a minimum (or maximum) value will be + enforced, depending on if the tuple is (x, None) or (None ,x). + + Doctests: + + Please note that "successful" validation tests are done in the ConfigInteger doctests. + + 1. Check that using a non-integer value throws an error: + >>> _check_integer_val("test", 5.0, valid_vals=None) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Value being checked in 'check_integer_val' must be an integer type, not ''. + + 2. Check that using a valid_vals option that is not a list or tuple throws an error: + >>> _check_integer_val("test", 5, valid_vals="test_vals") #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Valid values for integers must by provided as either a tuple or a list, not ''. + + 3. Check that using non-integer values inside the valid_vals list or tuple throws an error: + >>> _check_integer_val("test", 5, valid_vals=[1,2,5,"test_val"]) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Valid value, 'test_val', for variable 'test', must be an integer. Currently it is ''. + + + 4. Check that using a tuple with only one entry throws an error: + >>> _check_integer_val("test", 5, valid_vals=(1,)) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: Valid values tuple for variable, 'test', must have two elements, not '1' elements. + + 5. 
Check that using a tuple with more than two entries throws an error: + >>> _check_integer_val("test", 5, valid_vals=(1,2,5)) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: Valid values tuple for variable, 'test', must have two elements, not '3' elements. + + 6. Check that using a tuple with only Nones throws an error: + >>> _check_integer_val("test", 5, valid_vals=(None,None)) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: Valid values tuple for variable, 'test', must contain at least one integer. + + 7. Check that an integer less than the tuple min is "invalid": + >>> _check_integer_val("test", 5, valid_vals=(6,None)) + "ERROR: Value, '5', provided for variable, 'test', is less than minimum valid value, '6'" + + 8. Check that an integer greater than the tuple max is "invalid": + >>> _check_integer_val("test", 5, valid_vals=(None,4)) + "ERROR: Value, '5', provided for variable, 'test', is greater than max valid value, '4'" + + 9. Check that an integer outside min/max tuple range is "invalid": + >>> _check_integer_val("test", 5, valid_vals=(10,13)) + "ERROR: Value, '5', provided for variable, 'test', is outside valid value range, '(10, 13)'" + + 10. Check that an integer not included in the list is "invalid": + >>> _check_integer_val("test", 5, valid_vals=[1,2,3,4]) + "ERROR: Value, '5', provided for variable, 'test', does not match any of the valid values: '[1, 2, 3, 4]'" + + """ + + # Make sure that provided value is an integer: + if not isinstance(val, int): + emsg = "ERROR: Value being checked in 'check_integer_val' " + emsg += "must be an integer type, not '{}'." + raise CamConfigTypeError(emsg.format(type(val))) + # End if + + # Only check the given value if valid_vals is not "None" + if valid_vals is not None: + + # Check if valid values is a tuple + if isinstance(valid_vals, tuple): + + # Check that all tuple elements are either None or integers + emsg = "" + for valid_val in valid_vals: + if valid_val is not None and not isinstance(valid_val, int): + emsg += "ERROR: Valid value, '{}', for variable '{}', must be " + emsg += "either None or an integer. Currently it is '{}'.\n" + emsg = emsg.format(valid_val, name, type(valid_val)) + # End if + # End for + if emsg: + raise CamConfigTypeError(emsg) + # end if + + # Check that length of valid values tuple is 2 + if len(valid_vals) != 2: + emsg = ("ERROR: Valid values tuple for variable, " + "'{}', must have two elements, not '{}' elements.") + raise CamConfigValError(emsg.format(name, + len(valid_vals))) + # End if + + if valid_vals[0] is None: + # If first valid value is "None", then just check that + # given value is less than second valid value, and + # that second value is an integer + if valid_vals[1] is None: + emsg = "ERROR: Valid values tuple for variable, '{}', " + emsg += "must contain at least one integer." + raise CamConfigValError(emsg.format(name)) + # End if + if val > valid_vals[1]: + emsg = "ERROR: Value, '{}', provided for variable, " + emsg += "'{}', is greater than max valid value, '{}'" + return emsg.format(val, name, valid_vals[1]) + # End if + elif valid_vals[1] is None: + # Check if second value is "None". 
+ # If so, then just check that given value is greater + # than first valid value + if val < valid_vals[0]: + emsg = "ERROR: Value, '{}', provided for variable, " + emsg += "'{}', is less than minimum valid value, '{}'" + return emsg.format(val, name, valid_vals[0]) + # End if + else: + # If both valid values are integers, then check that + # given value is between both valid values + if (val < valid_vals[0]) or (val > valid_vals[1]): + emsg = "ERROR: Value, '{}', provided for variable, " + emsg += "'{}', is outside valid value range, '{}'" + return emsg.format(val, name, valid_vals) + # End if + # End if + + elif isinstance(valid_vals, list): + + # Check that all list elements are integers + emsg = "" + for valid_val in valid_vals: + if not isinstance(valid_val, int): + emsg += "ERROR: Valid value, '{}', for variable '{}', " + emsg += "must be an integer. Currently it is '{}'.\n" + emsg = emsg.format(valid_val, name, type(valid_val)) + # End if + # End for + if emsg: + raise CamConfigTypeError(emsg) + # end if + + + # If valid_vals is a list, then just check that the given value + # matches one of the valid values in the list + if not val in valid_vals: + emsg = "ERROR: Value, '{}', provided for variable, '{}', " + emsg += "does not match any of the valid values: '{}'" + return emsg.format(val, name, valid_vals) + # End if + + else: + # valid_vals is neither a list nor a tuple, so throw an error: + emsg = "ERROR: Valid values for integers must by provided as " + emsg += "either a tuple or a list, not '{}'." + raise CamConfigTypeError(emsg.format(type(valid_vals))) + + # End if + # End if + + # Return nothing if value is valid + return None + +############################################################################### + +def _check_string_val(name, val, valid_vals=None): + + """ + Checks if a provided string value is "valid" + as defined by the provided "valid_vals" entry. + + Possible valid_val types are: + + list -> If a list, then just check that provided value is in the list. + + regex -> If a compiled regular expression, then check that the provided + value is matched by the regular expression. + + Doctests: + + Please note that "successful" validation tests are done in the ConfigString doctests. + + 1. Check that using a non-string value throws an error: + >>> _check_string_val("test", [5], valid_vals=None) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Value being checked in 'check_string_val' must be a string type, not ''. + + 2. Check that using a valid_vals option that is not None, a list, or a regex throws an error: + >>> _check_string_val("test", "test_val", valid_vals=5) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Valid values for strings must by provided as either a regular expression or a list, not ''. + + 3. Check that using non-string values inside the valid_vals list throws an error: + >>> _check_string_val("test", "1", valid_vals=["1","2","5",6]) #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: Valid value, '6', for variable 'test', must be a string. Currently it is ''. + + 9. Check that a string that doesn't match the provided regex is "invalid": + >>> _check_string_val("test", "test_val", valid_vals=re.compile(r"foo")) + "ERROR: Value, 'test_val', provided for variable, 'test', does not match the valid regular expression." + + 10. 
Check that a string not included in the list is "invalid": + >>> _check_string_val("test", "test_val", valid_vals=["1","2","3","4"]) + "ERROR: Value, 'test_val', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3', '4']'" + """ + + # Make sure that provided value is a string: + if not isinstance(val, str): + emsg = "ERROR: Value being checked in 'check_string_val' " + emsg += "must be a string type, not '{}'." + raise CamConfigTypeError(emsg.format(type(val))) + # End if + + # Only check the given value if valid_vals is not "None" + if valid_vals is not None: + + # If a list, then check that the given value + # matches one of the valid values in the list + if isinstance(valid_vals, list): + + # Check that all list elements are strings: + for valid_val in valid_vals: + if not isinstance(valid_val, str): + emsg = ("ERROR: Valid value, '{}', for variable '{}', must be " + "a string. Currently it is '{}'.") + raise CamConfigTypeError(emsg.format(valid_val, name, type(valid_val))) + # End if + # End for + + if not val in valid_vals: + emsg = "ERROR: Value, '{}', provided for variable, '{}', " + emsg += "does not match any of the valid values: '{}'" + return emsg.format(val, name, valid_vals) + # End if + elif isinstance(valid_vals, REGEX_TYPE): + # If a regular expression object, then check that + # value is matched by the expression + if valid_vals.match(val) is None: + emsg = "ERROR: Value, '{}', provided for variable, '{}', " + emsg += "does not match the valid regular expression." + return emsg.format(val, name) + # End if + # End if + else: + # valid_vals is neither a list nor a regex, so throw an error: + emsg = "ERROR: Valid values for strings must by provided as " + emsg += "either a regular expression or a list, not '{}'." + raise CamConfigTypeError(emsg.format(type(valid_vals))) + + # End if + # End if + + # Return nothing if value is valid + return None + +# Helper function to better generalize config value checking: +_TYPE_CHECK_FUNCTIONS = {"int" : _check_integer_val, "str" : _check_string_val} + +############################################################################### +# Internal generic CAM configure class +############################################################################### + +class _ConfigGen(object): + + """ + Generic configuration class used to + store CAM configuration names and + descriptions. + + Inputs to initalize class are: + name -> Name of new CAM configure option + desc -> Text description of CAM configure option + is_nml_attr (optional) -> Logical that determines if this option + is also a namelist attribute (defaut is False) + + Doctests: + + 1. Check that ConfigGen works properly: + + >>> _ConfigGen("test", "test object description").name + 'test' + + >>> _ConfigGen("test", "test object description").desc + '# test object description' + + >>> print(_ConfigGen("test", ["test", "object", "description"]).desc) + # test + # object + # description + + >>> _ConfigGen("test", "test object description", is_nml_attr=True).is_nml_attr + True + + 2. 
Check that non-optional inputs must be strings: + + >>> _ConfigGen(5, "test_object_description").name #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CamConfigTypeError: ERROR: Configuration variable name '5' must be a string, not + + >>> _ConfigGen("test", (5,)).desc #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CamConfigTypeError: ERROR: + + >>> _ConfigGen("test", ["test", ("object", "description")]).desc #doctest: +IGNORE_EXCEPTION_DETAIL + Traceback (most recent call last): + CamConfigTypeError: ERROR: Configuration variable, 'test', must have a string-type description, or a list of string-type descriptions, not [, ] + + """ + + def __init__(self, name, desc, is_nml_attr=False): + + # Check that "name" is a string + if not isinstance(name, str): + emsg = "ERROR: Configuration variable name '{}' must be a string, not {}" + raise CamConfigTypeError(emsg.format(name, type(name))) + # end if + + # Check that "desc" is a string or a list of strings + desc_ok = isinstance(desc, str) + if (not desc_ok) and isinstance(desc, list): + desc_ok = all(isinstance(x, str) for x in desc) + # end if + if not desc_ok: + emsg = ("ERROR: Configuration variable, '{}', " + "must have a string-type description, or a list of " + "string-type descriptions, not {}") + if isinstance(desc, str): + derr = type(desc) + elif isinstance(desc, list): + derr = [type(x) for x in desc] + else: + derr = "{} ({})".format(type(desc), desc) + # end if + raise CamConfigTypeError(emsg.format(name, derr)) + # end if + + # Add name, description, and namelist attribute logical to object + self.__name = name + if isinstance(desc, str): + self.__desc = "# {}".format(desc) + elif isinstance(desc, list): + self.__desc = "# " + "\n# ".join(desc) + # end if + self.__is_nml_attr = is_nml_attr + + #++++++++++++++++++++++++ + + # Create properties needed to return name and description properties + @property + def name(self): + """Return the name of this config object""" + return self.__name + + @property + def desc(self): + """Return the description of this config object""" + return self.__desc + + @property + def is_nml_attr(self): + """Return the namelist attribute logical of this config object""" + return self.__is_nml_attr + +############################################################################### +# CAM configure option classes +############################################################################### + +class ConfigInteger(_ConfigGen): + + """ + Configuration class used to store + integer-based CAM configuration + options. + + Inputs to initalize class are: + name -> Name of new CAM configure option + desc -> Text description of CAM configure option + val -> Integer value for CAM configure option + valid_vals (optional) -> Range or list of valid CAM configure option values (default is None) + is_nml_attr (optional) -> Logical that determines if option is also a namelist attribute (defaut is False) + + Doctests: + + 1. 
Check that ConfigInteger works properly: + + With no valid values: + >>> ConfigInteger("test", "test object description", 5).value + 5 + + With valid values tuple: + >>> ConfigInteger("test", "test object description", 5, (1, 10)).valid_vals + (1, 10) + + With valid values list: + >>> ConfigInteger("test", "test object description", 5, [4, 5, 6]).valid_vals + [4, 5, 6] + + With namelist attribute set to "True": + >>> ConfigInteger("test", "test object description", 5, [4, 5, 6], is_nml_attr=True).is_nml_attr + True + + """ + + def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): + + # Add generic attributes + super(ConfigInteger, self).__init__(name, desc, is_nml_attr=is_nml_attr) + + # Add valid_vals to object + self.__valid_vals = valid_vals + + # Check that provided value is "valid" based on the + # valid values list or tuple. Note that this function + # also checks valid_vals itself to ensure that it is + # of the correct type and format. + self.__check_value(val) + + # If everything is ok, then add provided value to object + self.__value = val + + #++++++++++++++++++++++++ + + # Create properties needed to return given value and valid values + @property + def value(self): + """Return the value of this config object""" + return self.__value + + @property + def valid_vals(self): + """Return the valid values of this config object""" + return self.__valid_vals + + #++++++++++++++++++++++++ + + def __check_value(self, val): + + """ + Checks input/given value to make sure + it is valid according to the + object's valid values list or tuple. + + If a tuple, then assume the two + valid values in the tuple represent a range, + and check that the given value is + in-between that range. + + If a list, then assume the given value + must match at least one of the valid values + included in that list. + """ + + # Check if integer value is valid + bad_val_msg = _check_integer_val(self.name, val, + valid_vals=self.valid_vals) + + # Raise an error if a bad value is found: + if bad_val_msg: + raise CamConfigValError(bad_val_msg) + + #++++++++++++++++++++++++ + + def set_value(self, val): + + """ + Set configure object's value to the one provided. + """ + + # First, check that the provided value is valid + self.__check_value(val) + + # If ok, then set object's value to one provided + self.__value = val + +############################################################################### + +class ConfigString(_ConfigGen): + + """ + Configuration class used to store + string-based CAM configuration + options. + + Inputs to initalize class are: + name -> Name of new CAM configure option + desc -> Text description of CAM configure option + val -> Integer value for CAM configure option + valid_vals (optional) -> List or regex of valid CAM configure option values (default is None) + is_nml_attr (optional) -> Logical that determines if option is also a namelist attribute (defaut is False) + + Doctests: + + 1. 
Check that ConfigString works properly: + + With no valid values: + >>> ConfigString("test", "test object description", "test_val").value + 'test_val' + + With valid values list: + >>> ConfigString("test", "test object description", "test_val", ["test_val", "test_val_II"]).valid_vals + ['test_val', 'test_val_II'] + + With valid values regular expression: + >>> ConfigString("test", "test_object description", "test_val", re.compile(r"test_val")).value + 'test_val' + + With namelist attribute set to "True": + >>> ConfigString("test", "test_object description", "test_val", re.compile(r"test_val"), is_nml_attr=True).is_nml_attr + True + + """ + + def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): + + # Add generic attributes + super(ConfigString, self).__init__(name, desc, is_nml_attr=is_nml_attr) + + # If ok, then add valid_vals to object + self.__valid_vals = valid_vals + + # Next, check that provided value is "valid" based on the + # valid values list or regular expression. Note that this + # function also checks valid_vals itself to ensure that it + # is of the correct type and format. + + self.__check_value(val) + + # If everything is ok, then add provided value to object + self.__value = val + + #++++++++++++++++++++++++ + + # Create properties needed to return given value and valid values + # without underscores + @property + def value(self): + """Return the value of this config object""" + return self.__value + + @property + def valid_vals(self): + """Return the valid values of this config object""" + return self.__valid_vals + + #++++++++++++++++++++++++ + + def __check_value(self, val): + + """ + Checks input/given value to make sure + it is valid according to the + object's valid values list or + regular expression. + + If a list, then assume the given value + must match at least one of the valid values + included in that list. + + If a compiled regular expression, then + assume the value must match the regular + expression. + """ + + # Check if string value is valid + bad_val_msg = _check_string_val(self.name, val, + valid_vals=self.valid_vals) + + # Raise an error if a bad value is found: + if bad_val_msg: + raise CamConfigValError(bad_val_msg) + + #++++++++++++++++++++++++ + + def set_value(self, val): + + """ + Set configure object's value to the one provided. + """ + + # First, check that the provided value is valid + self.__check_value(val) + + # If ok, then set object's value to one provided + self.__value = val + +############################################################################### + +class ConfigList(_ConfigGen): + + """ + Configuration class used to store list-based + CAM configuration options. + + Inputs to initalize class are: + name -> Name of new CAM configure option + desc -> Text description of CAM configure option + list_vals -> List values for CAM configure option + valid_type (optional) -> Specify valid type for CAM configure option list values. + Currently accepts "int" for integer and "str" for string. + valid_vals (optional) -> Valid CAM configure option values (default is None), + valid_type must be included in order to use valid_vals. + + Doctests: + + 1. Check that ConfigList works properly with no valid_type: + + >>> ConfigList("test", "test object description", [1,2,3]).value + [1, 2, 3] + + 2. Check that ConfigList works with a correct valid type provided: + >>> ConfigList("test", "test object description", ["x", "y", "z"], valid_type="str").value + ['x', 'y', 'z'] + + + 3. 
Check that ConfigList With a non-string passed to "valid_type" fails with the correct error: + >>> ConfigList("test", "test object description", [1, 2, 3], valid_type=5).value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigTypeError: ERROR: valid_type entry for variable 'test' must be a string, not type ''. + + 4. Check that ConfigList with a non-recognized "valid_type" option fails with the correct error: + >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="foo").value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: 'foo' is not a recognized option for 'valid_type'. Please use either 'int' or 'str'. + + 5. Check that ConfigList with list entries that don't match the valid_type entry fails with the correct error: + >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="str").value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: The following list entries, provided for variable, 'test', are not strings, but instead are: + '1': type='' + '2': type='' + '3': type='' + + + 6. Check that ConfigList with "valid_vals" but no "valid_type" fails with the correct error: + >>> ConfigList("test", "test object description", [1, 2, 3], valid_vals=[1,2,3,4,5]).value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: ERROR: valid values can only be used if valid_type is 'int' or 'str', not 'None'. + + 7. check that ConfigList with a list that matches the "valid_vals" entry works as expected: + >>> ConfigList("test", "test object description", [1, 2, 3], valid_type="int", valid_vals=(0,5)).value + [1, 2, 3] + + 8. check that ConfigList with a list that does not mach the "valid_vals" entry fails with the correct error: + >>> ConfigList("test", "test object description", ["1", "b", "c"], valid_type="str", valid_vals=["1","2","3"]).value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... + CamConfigValError: The following errors were found for a list-type config variable: + ERROR: Value, 'b', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' + + ERROR: Value, 'c', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' + """ + + def __init__(self, name, desc, val, valid_type=None, valid_vals=None): + + # Add generic attributes + super(ConfigList, self).__init__(name, desc, is_nml_attr=False) + + # Check if valid_type is not None + if valid_type is not None: + # If not None, make sure valid_type is a string: + if not isinstance(valid_type, str): + emsg = "ERROR: valid_type entry for variable '{}' must be a string, " + emsg += " not type '{}'." + raise CamConfigTypeError(emsg.format(name, type(valid_type))) + # End if + # End if + + # Check that the valid values option is only being used with a valid type: + if valid_vals is not None and valid_type not in ["int", "str"]: + # Currently valid values can only be used with strings or integers, + # so throw an error: + emsg = "ERROR: valid values can only be used if valid_type is 'int' or 'str', not '{}'." 
+ raise CamConfigValError(emsg.format(valid_type)) + + # If ok, then add valid_type and valid_vals to object + self.__valid_type = valid_type + self.__valid_vals = valid_vals + + # Next, check that provided list entry types and values are "valid" + # based on the valid type and valid values provided: + if self.__valid_type is not None: + self.__check_type(val) + + #If valid values are provided, then check them as well: + if self.__valid_vals is not None: + self.__check_values(val) + + # If everything is ok, then add provided value to object + self.__value = val + + #++++++++++++++++++++++++ + + # Create properties needed to return given value and valid values + # without underscores + @property + def value(self): + """Return the value of this config object""" + return self.__value + + @property + def valid_type(self): + """Return the valid type of this config object""" + return self.__valid_type + + @property + def valid_vals(self): + """Return the valid values of this config object""" + return self.__valid_vals + + #++++++++++++++++++++++++ + + def __check_type(self, val): + + """ + Check if the entries in the provided + list (val) are of the correct type as + specified by the "valid_type" entry. + """ + + # Extract valid type (valid_type) from object + valid_type = self.valid_type + + # Create empty dictionary to store errors: + bad_val_types = OrderedDict() + + good_type = "??" + if valid_type == "str": + #All list entries should be strings: + good_type = "string" + for list_entry in val: + if not isinstance(list_entry, str): + bad_val_types[str(list_entry)] = str(type(list_entry)) + # end if + # end for + elif valid_type == "int": + #All list entries should be integers: + good_type = "int" + for list_entry in val: + if not isinstance(list_entry, int): + bad_val_types[str(list_entry)] = str(type(list_entry)) + # end if + # end for + else: + #Invalid option given for "valid_type", so raise error: + emsg = "ERROR: '{}' is not a recognized option for 'valid_type'." + emsg += " Please use either 'int' or 'str'." + raise CamConfigValError(emsg.format(valid_type)) + # End if + #If bad values dictionary is non-empty, then raise error: + if bad_val_types: + if len(bad_val_types) > 1: + emsg = "ERROR: The following list entries, provided for variable," + emsg += " '{}', are not {}s, but instead are:\n".format(self.name, good_type) + else: + emsg = "ERROR: The following list entry, provided for variable," + emsg += " '{}', is not a {}, but instead is: ".format(self.name, good_type) + # end if + for key_str, type_str in bad_val_types.items(): + emsg += "'{}': type='{}'\n".format(key_str, type_str) + # end for + raise CamConfigValError(emsg) + # End if + + #++++++++++++++++++++++++ + + def __check_values(self, list_vals): + + """ + Check if the entries in the provided + list (val) are valid as specified by + specified by the "valid_vals" entry. 
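# For illustration: "__check_values" below relies on a module-level dispatch table,
# _TYPE_CHECK_FUNCTIONS, which is defined elsewhere in cam_config_classes.py. A
# minimal sketch, assuming it simply maps the allowed "valid_type" strings to the
# existing checker routines:
#
#     _TYPE_CHECK_FUNCTIONS = {"int": _check_integer_val,
#                              "str": _check_string_val}
#
# Each checker returns None for a valid entry, or an error-message string that
# __check_values collects and reports.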
+ """ + + # Create empty list: + bad_val_msgs = [] + + # Check if valid type is string or integer + if self.valid_type in _TYPE_CHECK_FUNCTIONS: + for val in list_vals: + #Check if integer or string value in list is valid + bad_val_msg = _TYPE_CHECK_FUNCTIONS[self.valid_type](self.name, val, + valid_vals=self.valid_vals) + # If return value is not None, then add + # to bad value list + if bad_val_msg: + bad_val_msgs.append(bad_val_msg) + # End if + # end for + else: + emsg = "Internal Error: Bad valid_type, '{}'" + raise CamConfigTypeError(emsg.format(self.valid_type)) + # end if + + # If bad values are present, then raise an error + if bad_val_msgs: + emsg = "The following errors were found for a list-type config variable:\n" + emsg += "\n\n".join(bad_val_msgs) + raise CamConfigValError(emsg) + # End if + + #++++++++++++++++++++++++ + + def set_value(self, list_vals): + + """ + Set configure object's value to the one provided. + """ + + # First, check that the provided value is valid + if self.__valid_type is not None: + self.__check_type(list_vals) + + # If ok, then set object's value to one provided + self.__value = list_vals + +############################################################################### +#IGNORE EVERYTHING BELOW HERE UNLESS RUNNING TESTS ON CAM_CONFIG! +############################################################################### + +#Call testing routine, if script is run directly +if __name__ == "__main__": + + # Import modules needed for testing + import doctest + import sys + + # Run doctests on this file's python objects + TEST_SUCCESS = doctest.testmod()[0] + + # Exit script with error code matching number of failed tests: + sys.exit(TEST_SUCCESS) + +############# +# End of file +############# diff --git a/test/pylint_test.sh b/test/pylint_test.sh index 8f0265d1..7721bc28 100755 --- a/test/pylint_test.sh +++ b/test/pylint_test.sh @@ -17,6 +17,7 @@ export PYTHONPATH="${CIMEDIR}:${CCPPDIR}:$PYTHONPATH" pylintcmd="pylint --rcfile=${CURRDIR}/.pylintrc" +${pylintcmd} ${CAMROOT}/cime_config/cam_config_classes.py ${pylintcmd} ${CAMROOT}/cime_config/cam_config.py ${pylintcmd} ${CAMROOT}/cime_config/cam_autogen.py ${pylintcmd} ${CAMROOT}/cime_config/cam_build_cache.py diff --git a/test/run_tests.sh b/test/run_tests.sh index f7e8da55..33472859 100755 --- a/test/run_tests.sh +++ b/test/run_tests.sh @@ -52,6 +52,8 @@ if [ ! -d "cime_config" ]; then exit 1 fi +#CAM config classes doctests: +${PYTHON} cime_config/cam_config_classes.py # CAM config doctests: run_doctest cime_config/cam_config.py # CAM autogen doctests: From 59c291c8d579350d3d2fc5f0b1fd4382d9724d7f Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 13 Oct 2021 08:40:04 -0600 Subject: [PATCH 02/33] Update CIME to use ParamGen branch. --- Externals.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index 27e88b64..ec3386d9 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -59,7 +59,7 @@ required = True [cime] branch = CAMDEN_ParamGen protocol = git -repo_url = https://github.com/gold2718/cime +repo_url = https://github.com/nusbaume/cime local_path = cime required = True From b183035fade86c7e42341f128e14289d248cfd56 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 13 Oct 2021 13:33:47 -0600 Subject: [PATCH 03/33] Ignore new share code directories. 
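For quick reference, a minimal usage sketch of the relocated ConfigList class (assuming cime_config/ is on the Python path; the values mirror the doctests above):

    from cam_config_classes import ConfigList, CamConfigValError

    # An integer list restricted to the range (0, 5):
    blocks = ConfigList("test_list", "example list config", [1, 2, 3],
                        valid_type="int", valid_vals=(0, 5))
    print(blocks.value)   # -> [1, 2, 3]

    # Values outside the valid range are rejected at construction time:
    try:
        ConfigList("test_list", "example list config", [1, 2, 30],
                   valid_type="int", valid_vals=(0, 5))
    except CamConfigValError as cerr:
        print(cerr)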
--- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index e3bd2173..943cd484 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,8 @@ ccpp_framework chem_proc cime +share +libraries components manage_externals.log src/physics/carma/base From 926286321e1741c40d113f9b43b353b30cf9a36c Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 5 Nov 2021 20:54:16 -0600 Subject: [PATCH 04/33] Initial implementation of ParamGen as the model namelist generator. (Please note that this commit won't run unless one of the suite_cam6*.xml SDFs are modified in order to avoid a suite name collision.) --- cime_config/atm_in_paramgen.py | 339 + cime_config/buildnml | 87 +- cime_config/cam_autogen.py | 79 +- cime_config/cam_config.py | 127 +- cime_config/config_component.xml | 4 +- cime_config/namelist_definition_cam.xml | 11901 +--------------------- src/physics/utils/phys_comp.F90 | 44 +- 7 files changed, 874 insertions(+), 11707 deletions(-) create mode 100644 cime_config/atm_in_paramgen.py diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py new file mode 100644 index 00000000..b9d3e7a7 --- /dev/null +++ b/cime_config/atm_in_paramgen.py @@ -0,0 +1,339 @@ +""" +Wrapper-class for the ParamGen +CIME tool, and associated methods +needed to generated the "atm_in" +Fortran namelist file. +""" + +#---------------------------------------- +# Import generic python libraries/modules +#---------------------------------------- + +import os +import sys +import re +from collections import OrderedDict +#CAM specific config error: +from cam_config_classes import CamConfigValError + +#---------------- +# Import ParamGen +#---------------- + +_CIMEROOT = os.environ.get("CIMEROOT") +if _CIMEROOT is None: + raise SystemExit("ERROR: must set CIMEROOT environment variable") +sys.path.append(os.path.join(_CIMEROOT, "scripts", "lib", "CIME", "ParamGen")) +from paramgen import ParamGen + +############################## +# MAIN "atm_in" ParamGen class +############################## + +class AtmInParamGen(ParamGen): + """ + Encapsulates data and read/write methods for + the atm_in Fortran namelist file and ParamGen + object. + """ + + def __init__(self, pg_data_dict): + + """ + Initialize a ParamGen object directly + using a ParamGen data dictionary, and + create a new dictionary to match namelist + variables to their associated groups + when reading in "user_nl_cam". 
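# For illustration: a minimal sketch of the nested dictionary layout expected here,
# assuming the shape the methods below rely on (self._data[group][var]["values"]);
# ParamGen itself may carry additional per-variable metadata.
#
#     pg_data_dict = {
#         "vert_coord_nl": {"pver": {"values": "30"}},
#         "physics_nl": {"physics_suite": {"values": "UNSET"}},
#     }
#     pg_atm = AtmInParamGen(pg_data_dict)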
+ ---------- + pg_data_dict -> python dictionary with ParamGen keys/values + + """ + + #Initialize ParamGen directly: + super().__init__(pg_data_dict) + + #Create namelist var/group dictionary: + self.__var_group_dict = {} + + for nml_group in self._data: + for var in self._data[nml_group]: + + #Check if variable already exists in dictionary: + if var in self.__var_group_dict: + #If so, then append group to list: + self.__var_group_dict[var].append(nml_group) + else: + #If not, then add variable and group to dictionary: + self.__var_group_dict[var] = [nml_group] + + #### + + @classmethod + def from_namelist_xml(cls, nml_xml_file): + + """ + Initialize atm_in ParamGen object with XML file, + ---------- + nml_xml_file -> path (str) to namelist definition XML file + + """ + + #Create ParamGen object using base class: + _pg_xml = ParamGen.from_xml_nml(nml_xml_file) + + #Initialize new "atm_in" object: + atm_in_pg = AtmInParamGen(_pg_xml.data) + + #Return object: + return atm_in_pg + + #### + + @classmethod + def from_user_nl_file(cls, user_nl_file): + """ + Reads in a given "user_nl_cam" file or equivalent and initializes + a AtmInParamGen object. This method is an alternative to the xml, + yaml, and json methods already available from the base ParamGen class. + ---------- + user_nl_file -> path (str) to namelist definition XML file + + """ + + #Parse user_nl_cam file: + _data = AtmInParamGen._read_user_input(user_nl_file) + + #Create new ParamGen object: + atm_in_paramgen_obj = AtmInParamGen(_data) + + return atm_in_paramgen_obj + + #### + + def append_user_nl_file(self, user_nl_file): + """ + Reads in user_nl_cam files and converts + them to the proper ParamGen syntax. + """ + + _data = OrderedDict() + with open(user_nl_file,'r') as user_file: + within_comment_block = False + curr_group = "UNSET" + for line in user_file: + if len(line)>1: + line_s = line.split() + + # check if within comment block. + if (not within_comment_block) and line.strip()[0:2] == "/*": + within_comment_block = True + + if within_comment_block and line.strip()[-2:] == "*/": + within_comment_block = False + continue + + if not within_comment_block and line_s[0][0] != "!": # not a single comment line either + # check format: + if curr_group == "UNSET" and line.strip()[0] == "&": + curr_group = line.strip()[1:] + elif curr_group != "UNSET" and line.strip()[0] == "/": + curr_group = "UNSET" + else: + + #Join string elements back together: + line_j = ' '.join(line_s) + + # now parse the line: + if ("=" in line_j): + line_ss = line_j.split("=") + var_str = (line_ss[0]).strip() # the first element is the parameter name + val_str = ' '.join(line_ss[1:]) # the rest is tha value string + if '!' in val_str: + val_str = val_str.split("!")[0] # discard the comment in val str, if one exists + + #Check if variable already exists in group dictionary: + if var_str in self.__var_group_dict: + #Extract namelist group list for variable: + var_group_list = self.__var_group_dict[var_str] + + #Check if no group has been assigned (i.e. 
it is "Global"): + if curr_group == "UNSET": + #If only one group option exists, then assign that group: + if len(var_group_list) == 1: + data_group = var_group_list[0] + else: + #Raise an error stating that the user must + #specify the associated group: + group_list_str = ', '.join(var_group_list) + emsg = "Namelist variable '{}' is associated" + emsg += " with the following namelist groups:\n" + emsg += "{}\nPlease specify which group using" + emsg += " '&groupname' in '{}'" + raise CamConfigValError(emsg.format(var_str, group_list_str, + user_nl_file)) + + else: + #Check that the specified group matches one of + #the defined groups for that variable: + if curr_group in var_group_list: + #If so, then use specified group: + data_group = curr_group + else: + #If not, then raise an error: + emsg = "There is no variable '{}', associated with namelist group '{}'." + emsg += " Please double-check '{}'." + raise CamConfigValError(emsg.format(var_str, curr_group, + user_nl_file)) + + else: + #Raise error that namelist variable isn't listed in + #anywhere in a definition file: + emsg = "Variable '{}' not found in any namelist definition files." + emsg += " Please double-check '{}'." + raise CamConfigValError(emsg.format(var_str, user_nl_file)) + + #Add the namelist group if not already in data dict: + if not data_group in _data: + _data[data_group] = dict() + + #Check if variable already exists in data dictionary: + if var_str in _data[data_group]: + emsg = "Namelist variable '{}' listed more than once in '{}'" + emsg += "\nPlease either list the variable only once, or specify separate namelist groups" + emsg += "for each listed instance, if relevant." + raise CamConfigValError(emsg.format(var_str, user_nl_file)) + + #Enter the parameter in the dictionary: + _data[data_group][var_str] = {'values':val_str} + else: + emsg = "Cannot parse the following line in '{}' :\n'{}'" + raise CamConfigValError(emsg.format(user_nl_file, line)) + + #Check if there is unclosed block: + if within_comment_block: + raise CamConfigValError("Un-closed comment block! Please check '{}'".format(user_nl_file)) + if curr_group!="UNSET": + raise CamConfigValError("Un-closed namelist group block! Please check `{}`".format(user_nl_file)) + + #Create new ParamGen object: + pg_user = ParamGen(_data) + + #Append new user_nl_cam object to main atm_in namelist object: + self.append(pg_user) + + #### + + def write(self, output_path): + + """ + Write data to Fortran namelist file. + ---------- + output_path -> path (str) to Fortran namelist (atm_in) file + + """ + + #Compile regular expression to determine if variable value + #is a number or Fortran logical. + + #All "values" stored in ParamGen are strings. However, booleans and numbers + #(either integers or reals) shouldn't have wrapping quotes when written to the + #fortran namelist. Thus the value needs to be evaluated to see if it is actually + #a fortran boolean, integer, or real. 
This done using the following regular expressions: + #-------------------------------------------------------------------------------------- + + #Integer and real (including scientific notation) regular expression string: + regex_num_string = r"(^[+-]?\.\d+)($|[de][+-]?\d+$)|^[+-]?\d+\.?(\d+|($|[de][+-]?\d+$))($|[de][+-]?\d+$)" + + #Compile regular expression for numbers, ignoring case for the "d" and "e" notations: + re_num_check = re.compile(regex_num_string, flags=re.I) + + #compile regular expressiong for booleans, ignoring case: + re_bool_check = re.compile(r"^(\.true\.|\.false\.)$", flags=re.I) + + # Make sure ParamGen object has been reduced: + if not self.reduced: + emsg = "ParamGen object for atm_in must be reduced before being " + emsg += "written to file. Please check CAM's buildnml script." + raise SystemError(emsg) + + # Write Fortran namelist file: + with open(os.path.join(output_path), 'w') as atm_in_fil: + for nml_group in self._data: + # Write namelist group: + atm_in_fil.write("&"+nml_group+"\n") + + # Write all variables within that group: + for var in self._data[nml_group]: + val = self._data[nml_group][var]["values"].strip() + if val==None: + continue + + #Check if value matches fortran number or boolean: + is_num = re_num_check.match(val) + is_bool = re_bool_check.match(val) + + #Check if variable value is a number or boolean: + if is_num or is_bool: + #If so, then write value as-is: + atm_in_fil.write(" {} = {}\n".format(var, val)) + else: + #Value is a string, so check if there is a starting quote: + if val[0] == "'" or val[0] == '"': + #If so, then assume value is already wrapped in quotes, + #so write value as-is: + atm_in_fil.write(" {} = {}\n".format(var, val)) + else: + #If not, then write string with added quotes: + atm_in_fil.write(" {} = '{}'\n".format(var, val)) + + # Add space for next namelist group: + atm_in_fil.write('/\n\n') + + #### + + def reduce_atm_in(self, case, atm_attr_dict): + + """ + Reduce XML namelist attributes + (i.e. replace attribute/guard dictionary with value) + ---------- + case -> CIME case object + atm_attr_dict -> dictionary containing attribute values + + """ + + # Set internal variables for use by "expand_func": + self.__case = case + self.__atm_attr_dict = atm_attr_dict + + # Reduce Param Data: + self.reduce(self.__expand_func) + + #### + + def __expand_func(self, varname): + + """ + Function used to convert $XXX + variables and XML attributes to + their associated values. 
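# For illustration: how the value classification in write() above behaves for some
# typical namelist values (a sketch; the two patterns are copied from the method,
# and anything matching neither pattern is written to atm_in with added quotes):
#
#     import re
#     num_re = re.compile(r"(^[+-]?\.\d+)($|[de][+-]?\d+$)|"
#                         r"^[+-]?\d+\.?(\d+|($|[de][+-]?\d+$))($|[de][+-]?\d+$)",
#                         flags=re.I)
#     bool_re = re.compile(r"^(\.true\.|\.false\.)$", flags=re.I)
#
#     for val in ["30", "-2.5d0", "1.e-5", ".true.", "FV", "'/path/to/file.nc'"]:
#         if num_re.match(val) or bool_re.match(val):
#             print("{} -> written as-is".format(val))
#         elif val[0] in ("'", '"'):
#             print("{} -> already quoted, written as-is".format(val))
#         else:
#             print("{} -> wrapped in quotes".format(val))
#
# Here "30", "-2.5d0", "1.e-5", and ".true." are written unquoted, the quoted file
# path is left alone, and "FV" is wrapped in quotes.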
+ """ + + #Check if varname matches a CIME case variable: + val = self.__case.get_value(varname) + + #If not, then attempt to extract variable from + #attribute dictionary: + if val is None: + if varname in self.__atm_attr_dict: + val = self.__atm_attr_dict[varname] + else: + #Assume the XML attribute/guard is an empty string: + val = "" + + #Return value if found: + return val + +############ +#End of file diff --git a/cime_config/buildnml b/cime_config/buildnml index 462c2af9..f889ddaf 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -21,7 +21,6 @@ from CIME.XML.standard_module_setup import * from CIME.buildnml import create_namelist_infile, parse_input from CIME.case import Case from CIME.utils import expect -from CIME.nmlgen import NamelistGenerator # Save local (cime_config) directory path: _CIME_CONFIG_PATH = os.path.dirname(os.path.abspath(__file__)) @@ -32,6 +31,9 @@ sys.path.append(_CIME_CONFIG_PATH) # Import CAM's configure structure: from cam_config import ConfigCAM +#Import CAM's ParamGen class: +from atm_in_paramgen import AtmInParamGen + # Open CIME case log: _LOGGER = logging.getLogger(__name__) @@ -154,9 +156,9 @@ def buildnml(case, caseroot, compname): cam_nml_dict = nml_attr_set(config) - #--------------------------------- - # Construct the namelist generator: - #--------------------------------- + #-------------------------- + # Construct ParamGen object: + #-------------------------- # Determine directory for user modified namelist_definition_cam.xml: user_xml_dir = os.path.join(caseroot, "SourceMods", "src.cam") @@ -171,19 +173,18 @@ def buildnml(case, caseroot, compname): user_xml_dir) # Set definition file: - definition_file = [user_definition] + definition_file = user_definition else: # If no user-defined file exists, then use standard file: - definition_file = [os.path.join(_CIME_CONFIG_PATH, - "namelist_definition_cam.xml")] + definition_file = os.path.join(_CIME_CONFIG_PATH, + "namelist_definition_cam.xml") # Check that the standard namelist file actually exists: - for file_ in definition_file: - expect(os.path.isfile(file_), - "Namelist XML file %s not found!" 
% file_) + expect(os.path.isfile(definition_file), + "Namelist XML file '{}' not found!".format(definition_file)) - # Create the namelist generator object - independent of instance: - nmlgen = NamelistGenerator(case, definition_file) + # Create the ParamGen object: + pg_atm = AtmInParamGen.from_namelist_xml(definition_file) #------------------------ # Loop over all instances: @@ -279,14 +280,6 @@ def buildnml(case, caseroot, compname): # Determine location and name of namelist input file: namelist_infile = os.path.join(confdir, "namelist_infile") - #----------------------------------------------------------- - # Determine CCPP physics suite chosen by user, and set suite - # name to be a namelist attribute: - #----------------------------------------------------------- - - # Find user-chosen CCPP physics suite, and set as an attribute: - config.ccpp_phys_set(cam_nml_dict, user_nl_file) - #-------------------------------- # Create CIME namelist input file: #-------------------------------- @@ -294,38 +287,47 @@ def buildnml(case, caseroot, compname): "\n".join(infile_lines)) # Convert namelist input file to list: - namelist_infile_list = [namelist_infile] + #namelist_infile_list = [namelist_infile] + + #------------------------------------------- + # Add user_nl_cam entries to ParamGen object: + #------------------------------------------- + + pg_atm.append_user_nl_file(user_nl_file) + + #----------------------------------------------------------- + # Determine CCPP physics suite chosen by user, and set suite + # name to be a namelist attribute: + #----------------------------------------------------------- + + # Find user-chosen CCPP physics suite, and set as an attribute: + config.ccpp_phys_set(cam_nml_dict, pg_atm.data["physics_nl"]) #------------------------------------------------- - # Initialize only "vert_coord_nl" namelist entries + # Set "nlev" attribute using "pver" namelist entry: #------------------------------------------------- - # Initalize vert_coord_nl defaults: - skip_groups = _get_namelist_groups(nmlgen, - skip_groups=config.nml_groups) - skip_def_groups = filter(lambda group: group !='vert_coord_nl', - config.nml_groups) - nmlgen.init_defaults(namelist_infile_list, cam_nml_dict, - skip_groups=skip_groups, - skip_default_for_groups=skip_def_groups) + #Extract pver dictionary: + pver_dict = pg_atm.data["vert_coord_nl"]["pver"] + + #Create new ParamGen object using pver data dictionary: + pg_pver = AtmInParamGen(pver_dict) - #-------------------------------------------- - # Set "nlev" namelist attribute to equal pver - #-------------------------------------------- + #Reduce pg_pver dictionary: + pg_pver.reduce_atm_in(case, cam_nml_dict) - cam_nml_dict["nlev"] = nmlgen.get_value("pver") + #Extract pver value and add to namelist attributes dict: + cam_nml_dict["nlev"] = pg_pver.data["values"] #--------------------------------- - # Initialize all namelist defaults: + # Set all ParamGen namelist values: #--------------------------------- - # Initalize namelist defaults in used namelist groups: - nmlgen.init_defaults(namelist_infile_list, cam_nml_dict, - skip_groups=skip_groups) + pg_atm.reduce_atm_in(case, cam_nml_dict) - #-------------------------- - # Write out namelist groups: - #-------------------------- + #--------------------------- + # Write out Fortran namelist: + #--------------------------- # Create resolved CAM namelist file name: namelist_file = os.path.join(rundir, "atm_in") @@ -334,8 +336,7 @@ def buildnml(case, caseroot, compname): namelist_file += inst_string # 
Create CAM namelist using CIME's nmlgen routine:
- nmlgen.write_output_file(namelist_file, input_data_list,
- groups=config.nml_groups, sorted_groups=False)
+ pg_atm.write(namelist_file)

###############################################################################
def _main_func():

diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py
index 74321ef4..5d1097a5 100644
--- a/cime_config/cam_autogen.py
+++ b/cime_config/cam_autogen.py
@@ -318,6 +318,54 @@ def _find_metadata_files(source_dirs, scheme_finder):
# Return meta_files dictionary:
return meta_files

+###############################################################################
+def _find_CCPP_SDF_paths(source_dirs):
+###############################################################################
+ """
+ Find all CCPP Suite Definition File (SDF) paths given a list
+ of directories, assuming every SDF has the form "suite_*.xml".
+
+ Currently this function assumes that suite files are only in the
+ provided directories, and will do no sub-directory searching. It
+ also assumes that if an SDF has already been found then all other
+ SDFs with the same name can be ignored. This means that user-modifiable
+ directories (e.g. "SourceMods/src.cam") should always be listed first
+ in the provided "source_dirs" list.
+ """
+
+ # Create empty dictionary to store SDF filenames and paths
+ SDF_files = {}
+
+ for direc in source_dirs:
+
+ # Search for SDFs in directory:
+ SDF_list = glob.glob(os.path.join(direc, "suite_*.xml"))
+
+ # Move on to next directory if empty:
+ if not SDF_list:
+ continue
+
+ # Loop over SDF paths:
+ for SDF in SDF_list:
+
+ # Check if filename is already in dictionary:
+ if not os.path.basename(SDF) in SDF_files:
+ # If not, then add to dictionary:
+ SDF_files[os.path.basename(SDF)] = SDF
+ # End if
+ # End for
+ # End for
+
+ # Raise error if no SDF files are found:
+ if not SDF_files:
+ emsg = "ERROR: No CCPP Suite Definition Files (SDFs) of the "
+ emsg += "type 'suite_*.xml' were found in the provided directories:\n"
+ emsg += "\n".join(source_dirs)
+ raise CamAutoGenError(emsg)
+
+ # Return SDF file paths:
+ return SDF_files.values()
+
###############################################################################
def _update_genccpp_dir(utility_files, genccpp_dir):
###############################################################################
@@ -477,14 +525,37 @@ def generate_physics_suites(build_cache, preproc_defs, host_name,
# Collect all source directories
source_search = [source_mods_dir,
os.path.join(atm_root, "src", "physics", "ncar_ccpp")]
+ # Find all possible CCPP suite names and SDF paths:
+ SDF_paths = _find_CCPP_SDF_paths(source_search)

# Find all metadata files, organize by scheme name
all_scheme_files = _find_metadata_files(source_search, find_scheme_names)

- # Find the SDFs
- sdfs = list()
- scheme_files = list()
+
+ # Create dictionary of SDF suite names->paths
+ suite_name_path_dict = {}
+ for SDF_path in SDF_paths:
+ # Open SDF file
+ _, suite = read_xml_file(SDF_path)
+
+ # Extract suite name
+ suite_name = suite.get('name')
+
+ # Check if suite is already present in dictionary
+ if suite_name in suite_name_path_dict:
+ #If so, then raise an error, because we are unsure which SDF to use
+ emsg = "Two SDFs have the same suite name: '{}'. The two SDFs are:\n"
+ emsg += "{}\n{}"
+ raise CamAutoGenError(emsg.format(suite_name, SDF_path,
+ suite_name_path_dict[suite_name]))
+
+ # Add suite to dictionary
+ suite_name_path_dict[suite_name] = SDF_path
+
+ # Find the SDFs specified for this model build
+ sdfs = []
+ scheme_files = []
xml_files = {}  # key is scheme, value is xml file path
for sdf in phys_suites_str.split(';'):
- sdf_path = _find_file(f"suite_{sdf}.xml", source_search)
+ sdf_path = suite_name_path_dict[sdf]
if not sdf_path:
emsg = "ERROR: Unable to find SDF for suite '{}'"
raise CamAutoGenError(emsg.format(sdf))
diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py
index e8047843..305688f8 100644
--- a/cime_config/cam_config.py
+++ b/cime_config/cam_config.py
@@ -783,7 +783,7 @@ def generate_cam_src(self, gen_fort_indent):

#++++++++++++++++++++++++

- def ccpp_phys_set(self, cam_nml_attr_dict, user_nl_file):
+ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict):

"""
Find the physics suite to run.
@@ -797,83 +797,68 @@ def ccpp_phys_set(self, cam_nml_attr_dict, user_nl_file):
specify a suite or that they did specify a suite and that
it matches the available suite.

+ Inputs:
+
+ cam_nml_attr_dict -> Dictionary of ParamGen (XML)
+ attribute values.
+
+ phys_nl_pg_dict -> ParamGen data dictionary for
+ the "physics_nl" namelist group
"""

- #Extract physics suite list:
+ #Extract physics suites list:
phys_suites = self.get_value('physics_suites').split(';')

- #Check the "user_nl_cam" file to see if user
- #specified a particular suite to use for this
- #simulation:
- with open(user_nl_file, 'r') as nl_file:
- #Read lines in file:
- nl_user_lines = nl_file.readlines()
- #End with
-
- #Break out "physics_suite" lines:
- phys_suite_lines = []
- for line in nl_user_lines:
- #Must check if line.lstrip is non-empty first,
- #Otherwise blank spaces in user_nl_cam will
- #cause problems:
- if line.lstrip():
- if line.lstrip()[0] != '!' and 'physics_suite' in line:
- phys_suite_lines.append([x.strip() for x in line.split('=')])
- #End if
- #End if
- #End for
-
- if not phys_suite_lines:
- #If there is no "physics_suite" line,
- #then check if there is only one physics suite option:
- if len(phys_suites) == 1:
- #If so, then just use the only possible suite option:
- phys_suite_val = phys_suites[0]
+ #Determine current value of "physics_suite" namelist variable:
+ phys_nl_val = phys_nl_pg_dict['physics_suite']['values'].strip()
+
+ #Check if only one physics suite is listed:
+ if len(phys_suites) == 1:
+ #Check if "physics_suite" has been set by the user:
+ if phys_nl_val != 'UNSET':
+ #If so, then check that user-provided suite matches
+ #suite in physics_suites config list:
+ if phys_nl_val == phys_suites[0].strip():
+ #If so, then set attribute to phys_suites value:
+ cam_nml_attr_dict["phys_suite"] = phys_suites[0].strip()
+ else:
+ #If not, then throw an error:
+ emsg = "physics_suite specified in user_nl_cam, '{}', does not\n"
+ emsg += "match the suite listed in CAM_CONFIG_OPTS: '{}'"
+ raise CamConfigValError(emsg.format(phys_nl_val,
+ phys_suites[0]))

else:
- #If more than one option, then raise an error:
- emsg = "No 'physics_suite' variable is present in user_nl_cam.\n"
- emsg += "This is required if more than one suite is listed\n"
- emsg += "in CAM_CONFIG_OPTS."
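# For illustration: the buildnml changes above reduce to the following ParamGen
# sequence (a sketch; the file paths and the CIME "case" object stand in for what
# buildnml actually derives at runtime):
#
#     pg_atm = AtmInParamGen.from_namelist_xml("namelist_definition_cam.xml")
#     pg_atm.append_user_nl_file("user_nl_cam")
#
#     # Resolve the physics suite and record it as a namelist attribute:
#     config.ccpp_phys_set(cam_nml_dict, pg_atm.data["physics_nl"])
#
#     # Replace attribute/guard values and CIME case variables, then write atm_in:
#     pg_atm.reduce_atm_in(case, cam_nml_dict)
#     pg_atm.write("atm_in")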
- raise CamConfigValError(emsg) - #End if + #If not, then just set the attribute and nl value to phys_suites value: + phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0].strip() + cam_nml_attr_dict["phys_suite"] = phys_suites[0].strip() + else: + #Check if "physics_suite" has been set by the user: + if phys_nl_val != 'UNSET': + #If so, then check if user-provided value is present in the + #physics_suites config list: + match_found = False + for phys_suite in phys_suites: + if phys_nl_val == phys_suite.strip(): + #If a match is found, then set attribute and leave loop: + cam_nml_attr_dict["phys_suite"] = phys_suite.strip() + match_found = True + break + + #Check that a match was found, if not, then throw an error: + if not match_found: + emsg = "physics_suite specified in user_nl_cam, '{}', doesn't match any suites\n" + emsg += "listed in CAM_CONFIG_OPTS: '{}'" + raise CamConfigValError(emsg.format(phys_nl_val, + self.get_value('physics_suites'))) - #If there is more than one "physics_suite" entry, then throw an error: - if len(phys_suite_lines) > 1: - emsg = "More than one 'physics_suite' variable is present in user_nl_cam.\n" - emsg += "Only one 'physics_suite' line is allowed." - raise CamConfigValError(emsg) - #End if - - #The split string list exists inside another, otherwise empty list, so extract - #from empty list: - phys_suite_list = phys_suite_lines[0] - - if len(phys_suite_list) == 1: - #If there is only one string entry, then it means the equals (=) sign was never found: - emsg = "No equals (=) sign was found with the 'physics_suite' variable." - raise CamConfigValError(emsg) - #End if - - if len(phys_suite_list) > 2: - #If there is more than two entries, it means there were two or more equals signs: - emsg = "There must only be one equals (=) sign in the 'physics_suite' namelist line." - raise CamConfigValError(emsg) - #End if - - #Remove quotation marks around physics_suite entry, if any: - phys_suite_val = phys_suite_list[1].strip(''' "' ''') - - #Check that physics suite specified is actually in config list: - if phys_suite_val not in phys_suites: - emsg = "physics_suite specified in user_nl_cam, '{}', doesn't match any suites\n" - emsg += "listed in CAM_CONFIG_OPTS" - raise CamConfigValError(emsg.format(phys_suite_val)) - #End if - #End if (phys_suite_lines check). 
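# For illustration: with more than one suite in CAM_CONFIG_OPTS (for example
# "--physics-suites kessler_cam;held_suarez"), the user must now pick one suite in
# user_nl_cam, e.g.:
#
#     physics_suite = kessler_cam
#
# The write() method in atm_in_paramgen.py adds the surrounding quotes when the
# value is emitted to atm_in; with only a single configured suite the variable can
# be left 'UNSET' and is filled in here automatically.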
- - #Add new namelist attribute to dictionary: - cam_nml_attr_dict["phys_suite"] = phys_suite_val + else: + #If not, then throw an error, because one needs to be specified: + emsg = "No 'physics_suite' variable is present in user_nl_cam.\n" + emsg += "This is required because more than one suite is listed\n" + emsg += "in CAM_CONFIG_OPTS: '{}'" + raise CamConfigValError(emsg.format(self.get_value('physics_suites'))) ############################################################################### diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 9a5831f3..6f9fd5fd 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -164,8 +164,8 @@ -phys tj2016 -analytic_ic -phys held_suarez -phys kessler -chem terminator -analytic_ic --> - --physics-suites kessler --analytic_ic - --physics-suites held_suarez_1994 --analytic_ic + --physics-suites kessler_cam --analytic_ic + --physics-suites held_suarez --analytic_ic --dyn none --physics-suites adiabatic diff --git a/cime_config/namelist_definition_cam.xml b/cime_config/namelist_definition_cam.xml index 2b5704e6..d5a86467 100644 --- a/cime_config/namelist_definition_cam.xml +++ b/cime_config/namelist_definition_cam.xml @@ -1,11679 +1,434 @@ - + - - - - - - - - logical - nudging - nudging_nl - - Toggle Model Nudging ON/OFF. - - FORCING: - -------- - Nudging tendencies are applied as a relaxation force between the current - model state values and target state values derived from the avalilable - analyses. The form of the target values is selected by the 'Nudge_Force_Opt' - option, the timescale of the forcing is determined from the given - 'Nudge_TimeScale_Opt', and the nudging strength Alpha=[0.,1.] for each - variable is specified by the 'Nudge_Xcoef' values. Where X={U,V,T,Q,PS} - - F_nudge = Alpha*((Target-Model(t_curr))/TimeScale - - WINDOWING: - ---------- - The region of applied nudging can be limited using Horizontal/Vertical - window functions that are constructed using a parameterization of the - Heaviside step function. - - The Heaviside window function is the product of separate horizonal and vertical - windows that are controled via 12 parameters: - - Nudge_Hwin_lat0: Specify the horizontal center of the window in degrees. - Nudge_Hwin_lon0: The longitude must be in the range [0,360] and the - latitude should be [-90,+90]. - Nudge_Hwin_latWidth: Specify the lat and lon widths of the window as positive - Nudge_Hwin_lonWidth: values in degrees.Setting a width to a large value (e.g. 999) - renders the window a constant in that direction. - Nudge_Hwin_latDelta: Controls the sharpness of the window transition with a - Nudge_Hwin_lonDelta: length in degrees. Small non-zero values yeild a step - function while a large value yeilds a smoother transition. - Nudge_Hwin_Invert : A logical flag used to invert the horizontal window function - to get its compliment.(e.g. to nudge outside a given window). - - Nudge_Vwin_Lindex: In the vertical, the window is specified in terms of model - Nudge_Vwin_Ldelta: level indcies. The High and Low transition levels should - Nudge_Vwin_Hindex: range from [0,(NLEV+1)]. The transition lengths are also - Nudge_Vwin_Hdelta: specified in terms of model indices. For a window function - constant in the vertical, the Low index should be set to 0, - the High index should be set to (NLEV+1), and the transition - lengths should be set to 0.001 - Nudge_Vwin_Invert : A logical flag used to invert the vertical window function - to get its compliment. - - - .false. 
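As a minimal sketch of the relaxation forcing described above (names mirror the namelist description; illustrative only, not the model code):

    def nudging_forcing(model_val, target_val, alpha, timescale):
        """F_nudge = Alpha * (Target - Model(t_curr)) / TimeScale, with Alpha in [0., 1.]."""
        return alpha * (target_val - model_val) / timescale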
- - - - char*256 - nudging - nudging_nl - - Full pathname of analyses data to use for nudging. - (e.g. '$DIN_LOC_ROOT/atm/cam/nudging/') - Default: none - - - - char*80 - nudging - nudging_nl - - Template for Nudging analyses file names. - (e.g. '%y/ERAI_ne30np4_L30.cam2.i.%y-%m-%d-%s.nc') - Default: none - - - - integer - nudging - nudging_nl - - Number of analyses files per day. - (e.g. 4 --> 6 hourly analyses) - Default: none - - - - integer - nudging - nudging_nl - - Number of time to update model data per day. - (e.g. 48 --> 1800 Second timestep) - Default: none - - - - integer - nudging - nudging_nl - - Year at which Nudging Begins. - Default: none - - - - integer - nudging - nudging_nl - - Month at which Nudging Begins. - Default: none - - - - integer - nudging - nudging_nl - - Day at which Nudging Begins. - Default: none - - - - integer - nudging - nudging_nl - - Year at which Nudging Ends. - Default: none - - - - integer - nudging - nudging_nl - - Month at which Nudging Ends. - Default: none - - - - integer - nudging - nudging_nl - - Day at which Nudging Ends. - Default: none - - - - integer - nudging - nudging_nl - - Select the form of nudging forcing, where (t'==Analysis times ; t==Model Times) - 0 -> NEXT-OBS: Target=Anal(t'_next) - 1 -> LINEAR: Target=(F*Anal(t'_curr) +(1-F)*Anal(t'_next)) - F =(t'_next - t_curr )/Tdlt_Anal - - - 0 - - - - integer - nudging - nudging_nl - - Select the timescale of nudging force, where (t'==Analysis times ; t==Model Times) - 0 --> TimeScale = 1/Tdlt_Anal - 1 --> TimeScale = 1/(t'_next - t_curr ) - - - 0 - - - - integer - nudging - nudging_nl - - Profile index for U nudging. - 0 == OFF (No Nudging of this variable) - 1 == CONSTANT (Spatially Uniform Nudging) - 2 == HEAVISIDE WINDOW FUNCTION - - - 0 - - - - real - nudging - nudging_nl - - Normalized Coeffcient for U nudging. - [0.,1.] fraction of nudging tendency applied. - - - 0.0 - - - - integer - nudging - nudging_nl - - Profile index for V nudging. - 0 == OFF (No Nudging of this variable) - 1 == CONSTANT (Spatially Uniform Nudging) - 2 == HEAVISIDE WINDOW FUNCTION - - - 0 - - - - real - nudging - nudging_nl - - Normalized Coeffcient for V nudging. - [0.,1.] fraction of nudging tendency applied. - - - 0.0 - - - - integer - nudging - nudging_nl - - Profile index for T nudging. - 0 == OFF (No Nudging of this variable) - 1 == CONSTANT (Spatially Uniform Nudging) - 2 == HEAVISIDE WINDOW FUNCTION - - - 0 - - - - real - nudging - nudging_nl - - Normalized Coeffcient for T nudging. - [0.,1.] fraction of nudging tendency applied. - - - 0.0 - - - - integer - nudging - nudging_nl - - Profile index for Q nudging. - 0 == OFF (No Nudging of this variable) - 1 == CONSTANT (Spatially Uniform Nudging) - 2 == HEAVISIDE WINDOW FUNCTION - - - 0 - - - - real - nudging - nudging_nl - - Normalized Coeffcient for Q nudging. - [0.,1.] fraction of nudging tendency applied. - - - 0.0 - - - - integer - nudging - nudging_nl - - Profile index for PS nudging. - 0 == OFF (No Nudging of this variable) - 1 == CONSTANT (Spatially Uniform Nudging) - 2 == HEAVISIDE WINDOW FUNCTION - - - 0 - - - - real - nudging - nudging_nl - - Normalized Coeffcient for PS nudging. - [0.,1.] fraction of nudging tendency applied. - - - 0.0 - - - - real - nudging - nudging_nl - - LAT0 center of Horizontal Window in degrees [-90.,90.]. - Default: none - - - - real - nudging - nudging_nl - - Width of LAT Window in degrees. 
- Default: none - - - - real - nudging - nudging_nl - - Width of transition which controls the steepness of window transition in latitude. - 0. --> Step function - Default: none - - - - real - nudging - nudging_nl - - LON0 center of Horizontal Window in degrees [0.,360.]. - Default: none - - - - real - nudging - nudging_nl - - Width of LON Window in degrees. - Default: none - - - - real - nudging - nudging_nl - - Width of transition which controls the steepness of window transition in longitude. - 0. --> Step function - Default: none - - - - logical - nudging - nudging_nl - - Invert Horizontal Window Function to its Compliment. - TRUE = value=0 inside the specified window, 1 outside - FALSE = value=1 inside the specified window, 0 outside - - - .false. - - - - real - nudging - nudging_nl - - HIGH Level Index for Verical Window specified in terms of model level indices. - (e.g. For a 30 level model, Nudge_Vwin_Hindex ~ 30 ) - Default: none - - - - real - nudging - nudging_nl - - Width of transition for HIGH end of Vertical Window. - Default: none - - - - real - nudging - nudging_nl - - LOW Level Index for Verical Window specified in terms of model level indices. - (e.g. Nudge_Vwin_Lindex ~ 0 ) - Default: none - - - - real - nudging - nudging_nl - - Width of transition for LOW end of Vertical Window. - Default: none - - - - logical - nudging - nudging_nl - - Invert Vertical Window Function to its Compliment. - TRUE = value=0 inside the specified window, 1 outside - FALSE = value=1 inside the specified window, 0 outside - - - .false. - - - - + - + char*256 - aero_data_cam - cam3_aero_data_nl - - Full pathname of time-variant boundary dataset for aerosol masses. - Default: UNKNOWN. - - - - logical - aero_data_cam - cam3_aero_data_nl - - Add CAM3 prescribed aerosols to the physics buffer. - - - .false. - - - - - - - integer - perf_dp_coup - phys_grid_nl - -1,0,1,2,11,12,13 - - Dynamics/physics transpose method for nonlocal load-balance. - 0: use mpi_alltoallv. - 1: use point-to-point MPI-1 two-sided implementation. - 2: use point-to-point MPI-2 one-sided implementation if supported, otherwise use - MPI-1 implementation. - 3: use Co-Array Fortran implementation if supported, otherwise use MPI-1 implementation. - 11-13: use mod_comm, choosing any of several methods internal to mod_comm. The method - within mod_comm (denoted mod_method) has possible values 0,1,2 and is set according - to mod_method = phys_alltoall - modmin_alltoall, where modmin_alltoall is 11. - -1: use option 1 when each process communicates with less than half of the other - processes, otherwise use option 0 (if max_nproc_smpx and nproc_busy_d are both > npes/2). - - - -1 - - - - integer - perf_dp_coup - phys_grid_nl - - Select target number of chunks per thread. Must be positive. - - - 1 - - - - integer - perf_dp_coup - phys_grid_nl - - Physics grid decomposition options. - -1: each chunk is a dynamics block. - 0: chunk definitions and assignments do not require interprocess comm. - 1: chunk definitions and assignments do not require internode comm. - 2: optimal diurnal, seasonal, and latitude load-balanced chunk definition and assignments. - 3: chunk definitions and assignments only require communication with one other process. - 4: concatenated blocks, no load balancing, no interprocess communication. - - - 3 - 2 - - - - integer - perf_dp_coup - phys_grid_nl - - Physics grid decomposition options. 
- 0: assign columns to chunks as single columns, wrap mapped across chunks - 1: use (day/night; north/south) twin algorithm to determine load-balanced pairs of - columns and assign columns to chunks in pairs, wrap mapped - Default: 0 for unstructured grid dycores, 1 for lat/lon grid dycores - - - - - - - char*8 - diagnostics - cam_diag_opts - none,q_only,all - - Output constituent tendencies due to convection. Set to - 'none', 'q_only' or 'all'. - - - q_only - - - - logical - diagnostics - circ_diag_nl - - Turns on TEM circulation diagnostics history output. Only valid for FV dycore. - - Default: .false., unless it is overridden (WACCM with interactive chemistry and a few other specific - configurations do this) - - - - logical - diagnostics - check_energy_nl + initial_conditions + cam_initfiles_nl - Turn on verbose output identifying columns that fail energy/water - conservation checks. + Full pathname of initial atmospheric state dataset (NetCDF format). + Default: UNSET_PATH - .false. - - - - real - diagnostics - physics_nl - - Controls processing of comparison between physics outputs and data from `{{ hilight }}ncdata_check{{ closehilight }}`. - Threshold over which differences are counted in the physics check process. - Default: 0 - - - 0.0 - - - - real - diagnostics - physics_nl - - Controls processing of comparison between physics outputs and data from `{{ hilight }}ncdata_check{{ closehilight }}`. - Threshold under which the absolute difference is calculated rather than the relative difference. - Default: 1e-5 - - - 1.e-5 - - - - char*8 - diagnostics - qneg_nl - summary,timestep,off - - Control the writing of qneg3 and qneg4 warning messages. - 'summary' causes a summary of QNEG3 and QNEG4 errors to be - printed at the end of the run - 'timestep' causes a summary of QNEG3 and QNEG4 errors to be printed at the - end of each timestep. The total is reset at the end of each timestep. - 'off' causes the qneg3 and qneg4 warnings to be supressed. - Note that these settings do not affect the availability of qneg - history variables. - Default: summary - - - summary - - - - char*32 - diagnostics - physics_nl - - Name of parameterization to take snapshot before running - Default: none - - - none - - - - char*32 - diagnostics - physics_nl - - Name of parameterization to take snapshot after running - Default: none - - - none - - - - - - - integer - dry_conv_adj - dadadj_nl - - Number of layers from the top of the model over which to do dry convective - adjustment. Must be less than plev (the number of vertical levels). - - - 3 - - - - integer - dry_conv_adj - dadadj_nl - - The maximum number of iterations to achieve convergence in dry adiabatic adjustment. - For WACCM-X it can be advantageous to use a number which is much higher than the CAM - default. - - - 15 - 100 - - - - - - - integer - dyn_fv - dyn_fv_inparm - - Number of dynamics timesteps per physics timestep. If zero, a best-estimate - will be automatically calculated. - - - 8 - 16 - 32 - 0 - - - - integer - dyn_fv - dyn_fv_inparm - - Number of tracer advection timesteps per physics timestep. - Nsplit is partitioned into nspltrac and nsplit/nspltrac, - with the latter being the number of dynamics timesteps per - tracer timestep, possibly rounded upward; after initialization, - the code quantity nsplit is redefined to be the number of - dynamics timesteps per tracer timestep. - - - 2 - 4 - 16 - 0 - - - - integer - dyn_fv - dyn_fv_inparm - - Number of vertical re-mapping timesteps per physics timestep. 
- Nspltrac is partitioned into nspltvrm and nspltrac/nspltvrm, - with the latter being the number of tracer timesteps per - re-mapping timestep, possibly rounded upward; after initialization, - the code quantity nspltrac is redefined to be the number of - tracer timesteps per re-mapping timestep. - - - 1 - 2 - 2 - 2 - 2 - 4 - 16 - - - - integer - dyn_fv - dyn_fv_inparm - - Order (mode) of X interpolation (1,..,6). - East-West transport scheme. - = 1: first order upwind - = 2: 2nd order van Leer (Lin et al 1994) - = 3: standard PPM - = 4: enhanced PPM (default) - - - 4 - - - - integer - dyn_fv - dyn_fv_inparm - - Order (mode) of Y interpolation (1,..,6). - North-South transport scheme. - = 1: first order upwind - = 2: 2nd order van Leer (Lin et al 1994) - = 3: standard PPM - = 4: enhanced PPM (default) - - - 4 - - - - integer - dyn_fv - dyn_fv_inparm - - Scheme to be used for vertical mapping. - = 1: first order upwind - = 2: 2nd order van Leer (Lin et al 1994) - = 3: standard PPM - = 4: enhanced PPM (default) - - - 4 - - - - logical - dyn_fv - dyn_fv_inparm - - Flag indicating whether the dynamics uses internal algorithm for energy - conservation. - - - .false. - - - - integer - dyn_fv - dyn_fv_inparm - 0,1 - - Enables optional filter for intermediate c-grid winds, (courtesy of Bill Putman). - - - 0 - 1 - - - - integer - dyn_fv - dyn_fv_inparm - 0,1 - - 1 for FFT filter always, 0 for combined algebraic/FFT filter. The value 0 - is used for CAM3, otherwise it is using the value 1. - - - 0 - 1 - - - - integer - dyn_fv - dyn_fv_inparm - 2,4,42 - - Chooses type of divergence damping and velocity diffusion. - div24del2flag = 2 for ldiv2 (default), - = 4 for ldiv4, - = 42 for ldiv4 + ldel2 - where - ldiv2: 2nd-order divergence damping everywhere and increasing in top layers - ldiv4: 4th-order divergence damping - ldel2: 2nd-order velocity-component damping targetted to top layers, - with coefficient del2coef - - - - 2 - 4 - 4 - 4 - 42 - 42 - 42 - 42 - 42 - 42 - 42 - 42 - 42 - 42 - - - - real - dyn_fv - dyn_fv_inparm - - Chooses level of velocity diffusion. - - - 3.e+5 - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to extend standard 4th-order PPM scheme to model top. - - - .false. - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to turn on corrections in FV geometry and/or pressure terms. - - - .false. - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to turn on corrections that improve angular momentum conservation. - - - .false. - .true. - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to apply an arbitrary fix based on solid-body rotation to the zonal - velocity fields to improve conservation of angular momentum. - - - .false. - .true. - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to apply the fixer turned on by fv_am_fixer level by level. The - intent is to not contaminate the stratospheric circulation with - tropospheric AM loss, where it is most likely greatest (due to the larger - divergence fields). This option is experimental. - - - .false. - .true. - - - - logical - dyn_fv - dyn_fv_inparm - - Flag to turn on a diagnostic calculation of angular momentum which is - written to the log file at each time step. Also enables calculation of - fields written to history file which are used in conjuction with those - enabled by do_circulation_diags for detailed analysis. - - - .false. - .true. 
- - - - logical - dyn_fv - dyn_fv_inparm - - Switch to apply variable physics appropriate for the thermosphere and ionosphere - Default: UNKNOWN - - - - char*8 - dyn_fv - dyn_fv_inparm - off,full - - Flag to determine how to handle dpcoup warning messages - Default: off - - - off - - - - - - - integer - dyn_fv - spmd_fv_inparm - - Set to 1 to force the 2D transpose computation when a 1D decomposition is - used. This is intended for debugging purposes only. - - - 0 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1,2 - - Geopotential method (routines geopk, geopk16, or geopk_d). - =0 for transpose method; - =1 for method using semi-global z communication with optional 16-byte arithmetic; - =2 for method using local z communication; - method 0, method 1 with 16-byte arithmetic and method 2 are all bit-for-bit across decompositions; - method 0 scales better than method 1 with npr_z, and method 1 is superior to method 0 for small npr_z; - The optimum speed is attained using either method 1 with 8-byte - arithmetic (standard for geopk16) or method 2 when utilizing the - optimal value for the associated parameter geopkblocks; for the last - two subcycles of a timestep, method 0 is automatically used; see - geopk.F90 and cd_core.F90. - - - - 0 - - - - integer - dyn_fv - spmd_fv_inparm - - Geopotential method 2 pipeline parameter (routine geopk_d). - geopk_d implements a pipeline algorithm by dividing the - information that must be moved between processes into blocks. geopkblocks - specifies the number of blocks to use. The larger the number of blocks, - the greater the opportunity for overlapping communication with computation - and for decreasing instantaneous bandwidth requirements. The smaller the - number of blocks, the fewer MPI messages sent, decreasing MPI total latency. - See geopk_d within geopk.F90. - - - 1 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1,2 - - Mod_comm irregular underlying communication method for dyn_run/misc. - 0 for original mp_sendirr/mp_recvirr - 1 for mp_swapirr and a point-to-point implementation of communication pattern - 2 for mp_swapirr and a collective (MPI_Alltoallv) implementation of communication pattern - - - 0 - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication handshaking for dyn_run/misc - - - .true. - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication blocking send for dyn_run/misc, - false for nonblocking send - - - .true. - - - - integer - dyn_fv - spmd_fv_inparm - - Maximum number of outstanding nonblocking MPI requests to allow when - using mp_swapirr and point-to-point communications for dyn_run/misc. - Setting this less than the maximum can improve robustness for large process - count runs. If set to less than zero, then do not limit the number of - outstanding send/receive requests. - Default: -1 (so no limit) - - - - integer - dyn_fv - spmd_fv_inparm - 0,1,2 - - Mod_comm irregular underlying communication method for cd_core/geopk - 0 for original mp_sendirr/mp_recvirr - 1 for mp_swapirr and a point-to-point implementation of communication pattern - 2 for mp_swapirr and a collective (MPI_Alltoallv) implementation of communication pattern - - - 0 - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication handshaking for cd_core/geopk - - - .true. - - - - logical - dyn_fv - spmd_fv_inparm - - True for geopk_d and mod_comm irregular communication blocking send for - cd_core/geopk; false for nonblocking send. - - - .true. 
- - - - integer - dyn_fv - spmd_fv_inparm - - Maximum number of outstanding nonblocking MPI requests to allow when - using mp_swapirr and point-to-point communications for cd_core/geopk. - Setting this less than the maximum can improve robustness for large process - count runs. If set to less than zero, then do not limit the number of - outstanding send/receive requests. - Default: -1 (so no limit) - - - - integer - dyn_fv - spmd_fv_inparm - 0,1,2 - - Mod_comm irregular underlying communication method for gather - 0 for original mp_sendirr/mp_recvirr - 1 for mp_swapirr and a point-to-point implementation of communication pattern - 2 for mp_swapirr and a collective (MPI_Alltoallv) implementation of communication pattern - - - 1 - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication handshaking for gather - - - .true. - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication blocking send for gather, - false for nonblocking send - - - .true. - - - - integer - dyn_fv - spmd_fv_inparm - - Maximum number of outstanding nonblocking MPI requests to allow when - using mp_swapirr and point-to-point communications for gather. - Setting this less than the maximum can improve robustness for large process - count runs. If set to less than zero, then do not limit the number of - outstanding send/receive requests. - - - 64 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1,2 - - Mod_comm irregular underlying communication method for scatter - 0 for original mp_sendirr/mp_recvirr - 1 for mp_swapirr and a point-to-point implementation of communication pattern - 2 for mp_swapirr and a collective (MPI_Alltoallv) implementation of communication pattern - - - 0 - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication handshaking for scatter - - - .false. - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication blocking send for scatter, - false for nonblocking send - - - .true. - - - - integer - dyn_fv - spmd_fv_inparm - - Maximum number of outstanding nonblocking MPI requests to allow when - using mp_swapirr and point-to-point communications for scatter. - Setting this less than the maximum can improve robustness for large process - count runs. If set to less than zero, then do not limit the number of - outstanding send/receive requests. - Default: -1 (so no limit) - - - - integer - dyn_fv - spmd_fv_inparm - - Mod_comm irregular underlying communication method for multiple tracers - 0 for original mp_sendtrirr/mp_recvtrirr - 1 for mp_swaptrirr and point-to-point communications - 2 for mp_swaptrirr and all-to-all communications - - - 0 - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication handshaking for multiple tracers - - - .true. - - - - logical - dyn_fv - spmd_fv_inparm - - True for mod_comm irregular communication blocking send for multiple - tracers, false for nonblocking send - - - .true. - - - - integer - dyn_fv - spmd_fv_inparm - - Maximum number of outstanding nonblocking MPI requests to allow when - using mp_swaptrirr and point-to-point communications for multiple tracers. - Setting this less than the maximum can improve robustness for large process - count runs. If set to less than zero, then do not limit the number of - outstanding send/receive requests. - Default: -1 (so no limit) - - - - integer - dyn_fv - spmd_fv_inparm - 1,2 - - One or two simultaneous mod_comm irregular communications (excl. 
tracers) - - - 2 - - - - integer - dyn_fv - spmd_fv_inparm - - Max number of tracers for simultaneous mod_comm irregular communications - - - 3 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1 - - For mod_comm gather/scatters, 0 for temporary contiguous buffers; 1 for mpi derived - types. - - - 0 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1 - - For geopk (geopktrans=1) messages, 0 for temporary contiguous buffers; 1 for mpi derived - types. - - - 0 - - - - integer - dyn_fv - spmd_fv_inparm - 0,1 - - For mod_comm transposes, 0 for temporary contiguous buffers; 1 for mpi derived - types. - - - 0 - - - - integer(4) - dyn_fv - spmd_fv_inparm - - A four element integer array which specifies the YZ and XY decompositions. - The first two elements are the number of Y subdomains and number of Z - subdomains in the YZ decomposition. The second two elements are the number - of X subdomains and the number of Y subdomains in the XY decomposition. - Note that both the X and Y subdomains must contain at least 3 grid points. - For example, a grid with 96 latitudes can contain no more than 32 Y - subdomains. There is no restriction on the number of grid points (levels) - in a Z subdomain, but note that the threading parallelism in the FV dycore - is over levels, so for parallel efficiency it is best to have at least the - number of levels in each Z subdomain as there are threads available. - - There are a couple of rough rules of thumb to follow when setting the 2D - decompositions. The first is that the number of Y subdomains in the YZ - decomposition should be the same as the number of Y subdomains in the XY - decomposition (npr_yz(1) == npr_yz(4)). The second is that the total - number of YZ subdomains (npr_yz(1)*npr_yz(2)) should equal the total number - of XY subdomains (npr_yz(3)*npr_yz(4)). - - Default: ntask,1,1,ntask where ntask is the number of MPI tasks. This is a - 1D decomposition in latitude. - - - - integer - dyn_fv - spmd_fv_inparm - - Overlapping of trac2d and cd_core subcycles. - - - 0 - - - - integer - dyn_fv - spmd_fv_inparm - - Size of tracer domain decomposition for trac2d. - - - 1 - - - - char*8 - dyn_fv - fill_nl - off,full - - Control the writing of filew warning messages. - - - off - - - - - - - logical - dyn_fv_off - metdata_nl - - TRUE => the offline meteorology winds are defined on the model grid cell walls. - - - .false. - - - - char*256 - dyn_fv_off - metdata_nl - - Name of file that contains the offline meteorology data. - Default: none - - - - char*256 - dyn_fv_off - metdata_nl - - Name of directory that contains the offline meteorology data. - Default: none - - - - char*256 - dyn_fv_off - metdata_nl - - Name of file that contains names of the offline meteorology data files. - Default: none - - - - logical - dyn_fv_off - metdata_nl - - TRUE => the offline meteorology file will be removed from local disk when no longer needed. - - - .false. - - - - real - dyn_fv_off - metdata_nl - - (km) top of relaxation region of winds for offline waccm - - - 60.0 - - - - real - dyn_fv_off - metdata_nl - - (km) bottom of relaxation region of winds for offline waccm - - - 50.0 - - - - real - dyn_fv_off - metdata_nl - - (km) top of ramping relaxation region for metdata at model bottom - - - 0.0 - - - - real - dyn_fv_off - metdata_nl - - (km) bottom of ramping relaxation region for metdata at model bottom - - - 0.0 - - - - real - dyn_fv_off - metdata_nl - - Relaxation time (hours) applied to specified meteorology. 
- - positive values less then time step size gives 100% nudging - - negative values gives 0.0% nudging (infinite relaxation time) - - - 0.0 - - - - logical - dyn_fv_off - metdata_nl - - switch to turn on/off mass fixer for offline driver - - - .true. - - - - char*16 - dyn_fv_off - metdata_nl - - srf heat flux field name in met data file - - - SHFLX - - - - char*16 - dyn_fv_off - metdata_nl - - water vapor flux field name in met data file - - - QFLX - - - - real - dyn_fv_off - metdata_nl - - multiplication factor for srf heat flux - - - 1.0 - - - - real - dyn_fv_off - metdata_nl - - multiplication factor for water vapor flux - - - 1.0 - - - - real - dyn_fv_off - metdata_nl - - multiplication factor for snow hieght - - - 1.0 - - - - logical - dyn_fv_off - metdata_nl - - If false then do not allow surface models feedbacks influence climate - - - .true. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge meteorology surface fields TAUX, TAUY, SHFLX, QFLX rather than force - - - .true. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge meteorology surface fields over the land. If false, then fields are - still nudged over the ocean. - - - .true. - - - - logical - dyn_fv_off - metdata_nl - - If met_srf_land is false, then determines whether to nudge proportional to the non-land - fraction (i.e. 1 - land fraction) (true), or to nudge everywhere except where land - fraction is 1 (false). - - - .false. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge meteorology surface fields from radiation. These include ASDIR, ASDIF, - ALDIR, ALDIF, and LWUP. - - - .false. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge meteorology reference surface fields. These include TSREF, QREF, and U10. - - - .false. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge meteorology reference for sea surface temperature and sea ice fraction - (SST and SEAICEFRAC). - - - .false. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge surface sheer stress (TAUX, TAUY) from the meteorology. - - - .true. - - - - logical - dyn_fv_off - metdata_nl - - If true nudge atmospheric temperature (T) from the meteorology. - - - .true. - - - - - - - real - dyn_eul - dyn_eul_inparm - - del^2 horizontal diffusion coefficient. This is used above the Nth order - diffusion. - - - 2.5D5 - 2.5D7 - - - - integer - dyn_eul - dyn_eul_inparm - - Order (N) of horizontal diffusion operator used below the sponge layers. - N must be a positive multiple of 2. - - - 4 - - - - integer - dyn_eul - dyn_eul_inparm - - The order N horizontal diffusion operator will be used in and below the - layer specified by this variable. - - - 4 - - - - real - dyn_eul - dyn_eul_inparm - - Nth order horizontal diffusion coefficient. - - - 1.0D18 - 2.0D16 - 2.0D16 - 1.17D16 - 7.14D14 - 1.5D14 - 1.5D13 - - - - real - dyn_eul - dyn_eul_inparm - - Number of days (from timestep 0) to run divergence damper. Use only if spectral - model becomes dynamicallly unstable during initialization. Suggested value: - 2. (Value must be >= 0.) Default: 0. - - - 0.0D0 - - - - real - dyn_eul - dyn_eul_inparm - - Time filter coefficient. Default: 0.06 - - - 0.06D0 - - - - integer - dyn_eul - dyn_eul_inparm - - Number of levels over which to apply Courant limiter, starting at top of - model. - - - 5 - - - - integer - dyn_eul - dyn_eul_inparm - - Number of dynamics timesteps per physics timestep. If zero, a best-estimate - will be automatically calculated. 
- - - 1 - 12 - - - - - - - integer - dyn_spectral_spmd - spmd_dyn_inparm - - Spectral dynamics gather option. - 0: use mpi_allgatherv - 1: use point-to-point MPI-1 two-sided implementation - 2: use point-to-point MPI-2 one-sided implementation if supported, - otherwise use MPI-1 implementation - 3: use Co-Array Fortran implementation if supported, - otherwise use MPI-1 implementation - - - 0 - - - - integer - dyn_spectral_spmd - spmd_dyn_inparm - - Spectral dynamics transpose option. - 0: use mpi_alltoallv - 1: use point-to-point MPI-1 two-sided implementation - 2: use point-to-point MPI-2 one-sided implementation if supported, - otherwise use MPI-1 implementation - 3: use Co-Array Fortran implementation if supported, - otherwise use MPI-1 implementation - - - 0 - - - - logical - dyn_spectral_spmd - spmd_dyn_inparm - - Flag indicating whether to assign latitudes to equidistribute columns or - latitudes. This only matters when using a reduced grid. - - - .true. - - - - integer - dyn_spectral_spmd - spmd_dyn_inparm - - Number of processes assigned to dynamics (SE, EUL and SLD dycores). - Default: Total number of processes assigned to job. - - - - integer - dyn_spectral_spmd - spmd_dyn_inparm - - Stride for dynamics processes (EUL and SLD dycores). - E.g., if stride=2, assign every second process to the dynamics. - - - 1 - - - - - - - integer - gw_drag - gw_drag_nl - - Gravity wave spectrum dimension (wave numbers are from -pgwv to pgwv). - - - 32 - 18 - - - - real - gw_drag - gw_drag_nl - - Width of speed bins (delta c) for gravity wave spectrum (reference wave - speeds are from -pgwv*dc to pgwv*dc). - Default: UNKNOWN. - - - - integer - gw_drag - gw_drag_nl - - Dimension for long wavelength gravity wave spectrum (wave numbers are from - -pgwv_long to pgwv_long). - Default: UNKNOWN. - - - - real - gw_drag - gw_drag_nl - - Width of speed bins (delta c) for long wavelength gravity wave spectrum - (reference wave speeds are from -pgwv_long*dc_long to pgwv_long*dc_long). - Default: UNKNOWN. - - - - logical - gw_drag - gw_drag_nl - - Force the stress due to gravity waves to be zero at the top of the model. - In the low-top model, this helps to conserve momentum and produce a QBO. - Default: UNKNOWN. - - - - logical - gw_drag - gw_drag_nl - - Apply limiters to tau before applying the efficiency factor, rather than - afterward. - - - .false. - .true. - .true. - .true. - - - - logical - gw_drag - gw_drag_nl - - Apply limiter on maximum wind tendency from stress divergence in gravity wave drag scheme. - - - .true. - .false. - .false. - .false. - - - - real - gw_drag - gw_drag_nl - - Efficiency associated with convective gravity waves from the Beres - scheme (deep convection). - - - 0.1D0 - 0.4D0 - 0.55D0 - 0.5D0 - 0.5D0 - 0.5D0 - - - - real - gw_drag - gw_drag_nl - - Efficiency associated with convective gravity waves from the Beres - scheme (shallow convection). - - - 0.03D0 - - - - real - gw_drag - gw_drag_nl - - Efficiency associated with gravity waves from frontogenesis. - - - 1.D0 - - - - real - gw_drag - gw_drag_nl - - Efficiency associated with inertial gravity waves from frontogenesis. - - - 0.1D0 - - - - real - gw_drag - gw_drag_nl - - Efficiency associated with orographic gravity waves. - - - 0.125D0 - 0.0625D0 - - - - logical - gw_drag - gw_drag_nl - - Whether or not to enable gravity waves produced by meso-Beta Ridges. - Default: UNKNOWN - - - - integer - gw_drag - gw_drag_nl - - Number of meso-Beta ridges (per gridbox) to invoke. 
- Default: 10 (UNKNOWN) - - - - real - gw_drag - gw_drag_nl - - Efficiency scaling factor associated with anisotropic OGW. - Default: UNKNOWN. - - - - real - gw_drag - gw_drag_nl - - Max efficiency associated with anisotropic OGW. - - - 1.0 - - - - real - gw_drag - gw_drag_nl - - Drag coefficient for obstacles in low-level flow. - - - 1.0 - - - - logical - gw_drag - gw_drag_nl - - Whether or not to allow trapping for meso-Beta Ridges. - Default: FALSE (UNKNOWN) - - - - logical - gw_drag - gw_drag_nl - - Whether or not to enable gravity waves produced by meso-gamma Ridges. - Default: FALSE (UNKNOWN) - - - - integer - gw_drag - gw_drag_nl - - Number of meso-gamma ridges (per gridbox) to invoke. - Default: -1 (UNKNOWN) - - - - real - gw_drag - gw_drag_nl - - Efficiency scaling factor associated with anisotropic OGW. - Default: UNKNOWN. - - - - real - gw_drag - gw_drag_nl - - Max efficiency associated with anisotropic OGW. - - - 1.0 - - - - real - gw_drag - gw_drag_nl - - Drag coefficient for obstacles in low-level flow. - - - 1.0 - - - - logical - gw_drag - gw_drag_nl - - Whether or not to allow trapping for meso-gamma Ridges. - Default: UNKNOWN - - - - char*256 - gw_drag - gw_drag_nl - - Full pathname of boundary dataset for meso-gamma ridges. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/topo/fv_0.9x1.25_nc3000_Nsw006_Nrs002_Co008_Fi001_ZR_c160505.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_1.9x2.5_nc3000_Nsw084_Nrs016_Co120_Fi001_ZR_061116.nc - - - - real - gw_drag - gw_drag_nl - - Critical Froude number squared (used only for orographic waves). - Default: UNKNOWN. - - - - real - gw_drag - gw_drag_nl - - Factor to multiply tau by, for orographic waves in the southern hemisphere. - - - 1.d0 - 2.d0 - 2.d0 - 2.d0 - - - - real - gw_drag - gw_drag_nl - - Inverse Prandtl number used in gravity wave diffusion - - - 0.25d0 - 0.5d0 - 0.5d0 - 0.5d0 - - - - real - gw_drag - gw_drag_nl - - Scaling factor for heating depth in gravity waves from convection. If less than 1.0 - this acts as an effective reduction of the gravity wave phase speeds needed to drive - the QBO. - Default: UNKNOWN - - - - logical - gw_drag - gw_drag_nl - - Scale SGH by land fraction in gravity wave drag - - - .true. - .false. - .false. - .false. - - - - real - gw_drag - gw_drag_nl - - Frontogenesis function critical threshold. - - - 1.25D-15 - 7.5D-16 - 3.0D-15 - - - - char*256 - gw_drag - gw_drag_nl - - Full pathname of Beres lookup table data file for gravity waves sourced - from deep convection. - - - $DIN_LOC_ROOT/atm/waccm/gw/newmfspectra40_dc25.nc - - - - char*256 - gw_drag - gw_drag_nl - - Full pathname of Beres lookup table data file for gravity waves sourced - from shallow convection. - - - $DIN_LOC_ROOT/atm/waccm/gw/mfspectra_shallow_c140530.nc - - - - real - gw_drag - gw_drag_nl - - Background source strength (used for waves from frontogenesis). - - - 1.5D-3 - 1.0D-3 - 2.5D-3 - - - - real - gw_drag - gw_drag_nl - - Background source strength (used for inertial waves from frontogenesis). - - - 12.0D-3 - - - - logical - gw_drag - gw_drag_nl - - Whether or not to use tapering at the poles to reduce the effects of - mid-scale gravity waves from frontogenesis. - - - .false. - .true. - - - - logical - gw_drag - gw_drag_nl - - Whether or not to apply tapering at the top of the model (above 0.6E-02 Pa) - to reduce undesired effects of gravity waves in the thermosphere/ionosphere. - - - .false. - .true. - - - - - - - logical - gw_rdg - gw_rdg_nl - - If .true. 
use separate dividing streamlines for downslope wind and flow - splitting regimes ("DS" configuration). - If .false. use single dividing streamline as in Scinocca & McFarlane - 2000 ("SM" configuration). - - - .true. - - - - logical - gw_rdg - gw_rdg_nl - - If true, then use smooth regimes - - - .false. - - - - logical - gw_rdg - gw_rdg_nl - - If true, then adujust tauoro - - - .true. - - - - logical - gw_rdg - gw_rdg_nl - - If true, then adjust for bit-for-bit answers with the ("N5") configuration - Default: UNKNOWN. - - - - real - gw_rdg - gw_rdg_nl - - Enhancement factor for downslope wind stress in DS configuration. - - - 0.0d0 - - - - real - gw_rdg - gw_rdg_nl - - Enhancement factor for depth of downslope wind regime in DS configuration - - - 2.0d0 - - - - real - gw_rdg - gw_rdg_nl - - Lower inverse Froude number limits on linear ramp terminating downslope wind regime for high mountains in DS configuration - - - 2.0d0 - - - - real - gw_rdg - gw_rdg_nl - - Upper inverse Froude number limits on linear ramp terminating downslope wind regime for high mountains in DS configuration - - - 3.0d0 - - - - real - gw_rdg - gw_rdg_nl - - Enhancement factor for downslope wind stress in SM configuration. - - - 2.0d0 - - - - real - gw_rdg - gw_rdg_nl - - Critical inverse4 Froude number - Default: UNKNOWN. - - - - real - gw_rdg - gw_rdg_nl - - minimum surface displacement height for orographic waves (m) - - - 0.01d0 - - - - real - gw_rdg - gw_rdg_nl - - Minimum wind speed for orographic waves - - - 1.0d-3 - - - - real - gw_rdg - gw_rdg_nl - - Minimum stratification allowing wave behavior - - - 0.002d0 - - - - real - gw_rdg - gw_rdg_nl - - Minimum stratification allowing wave behavior - - - 0.1d0 - - - - logical - gw_rdg - gw_rdg_nl - - If TRUE gravity wave ridge scheme will contribute to vertical diffusion tendencies. - - - .true. - - - - - - - char*256 - ghg_cam - chem_surfvals_nl - - Full pathname of time-variant boundary dataset for greenhouse gas surface - values. - - - $DIN_LOC_ROOT/atm/cam/ggas/ghg_hist_1765-2005_c091218.nc - - - - real - ghg_cam - chem_surfvals_nl - - CH4 volume mixing ratio. This is used as the time invariant surface value - of CH4 if no time varying values are specified. - - - 1760.0e-9 - - - - real - ghg_cam - chem_surfvals_nl - - CO2 volume mixing ratio. This is used as the time invariant surface value - of CO2 if no time varying values are specified. - - - 367.0e-6 - - - - real - ghg_cam - chem_surfvals_nl - - User override for the prescribed CO2 volume mixing ratio used by the radiation - calculation. Note however that the prescribed value of CO2 which is sent - to the surface models is still the one that is set using either the - {{ hilight }}co2vmr{{ closehilight }} or the {{ hilight }}scenario_ghg{{ closehilight }} variables. - Default: not used - - - - real - ghg_cam - chem_surfvals_nl - - CFC11 volume mixing ratio adjusted to reflect contributions from many GHG - species. This is used as the time invariant surface value of F11 if no - time varying values are specified. - - - 653.45e-12 - - - - real - ghg_cam - chem_surfvals_nl - - CFC12 volume mixing ratio. This is used as the time invariant surface value - of CFC12 if no time varying values are specified. - - - 535.0e-12 - - - - real - ghg_cam - chem_surfvals_nl - - N2O volume mixing ratio. This is used as the time invariant surface value - of N2O if no time varying values are specified. - - - 316.0e-9 - - - - integer - ghg_cam - chem_surfvals_nl - - Data start year. 
Use in conjunction - with {{ hilight }}ghg_yearstart_model{{ closehilight }}. - - - 0 - - - - integer - ghg_cam - chem_surfvals_nl - - Model start year. Use in conjunction - with {{ hilight }}ghg_yearstart_data{{ closehilight }}. - - - 0 - - - - real - ghg_cam - chem_surfvals_nl - - Amount of co2 ramping per year (percent). Only used - if {{ hilight }}scenario_ghg{{ closehilight }} = 'RAMP_CO2_ONLY' - - - 1.0 - - - - real - ghg_cam - chem_surfvals_nl - - CO2 cap if > 0, floor otherwise. Specified as multiple or fraction of - inital value; e.g., setting to 4.0 will cap at 4x initial CO2 setting. - Only used if {{ hilight }}scenario_ghg{{ closehilight }} = 'RAMP_CO2_ONLY' - Default: boundless if {{ hilight }}ramp_co2_annual_rate{{ closehilight }} > 0, zero otherwise. - - - - integer - ghg_cam - chem_surfvals_nl - - Date on which ramping of co2 begins. The date is encoded as an integer in - the form YYYYMMDD. Only used if {{ hilight }}scenario_ghg{{ closehilight }} = 'RAMP_CO2_ONLY' - - - 0 - - - - integer - ghg_cam - chem_surfvals_nl - - If {{ hilight }}scenario_ghg{{ closehilight }} is set to "RAMPED" then the greenhouse - gas surface values are interpolated between the annual average values - read from the file specified by {{ hilight }}bndtvghg{{ closehilight }}. - In that case, the value of this variable (> 0) fixes the year of the - lower bounding value (i.e., the value for calendar day 1.0) used in the - interpolation. For example, if rampyear_ghg = 1950, then the GHG surface - values will be the result of interpolating between the values for 1950 and - 1951 from the dataset. - - - 0 - - - - char*16 - ghg_cam - chem_surfvals_nl - FIXED,RAMPED,RAMP_CO2_ONLY,CHEM_LBC_FILE - - Controls treatment of prescribed co2, ch4, n2o, cfc11, cfc12 volume mixing - ratios. May be set to 'FIXED', 'RAMPED', 'RAMP_CO2_ONLY', or 'CHEM_LBC_FILE'. - FIXED => volume mixing ratios are fixed and have either default or namelist - input values. - RAMPED => volume mixing ratios are time interpolated from the dataset - specified by {{ hilight }}bndtvghg{{ closehilight }}. - RAMP_CO2_ONLY => only co2 mixing ratios are ramped at a rate determined by - the variables {{ hilight }}ramp_co2_annual_rate{{ closehilight }}, {{ hilight }}ramp_co2_cap{{ closehilight }}, - and {{ hilight }}ramp_co2_start_ymd{{ closehilight }}. - CHEM_LBC_FILE => volume mixing ratios are set from the chemistry lower boundary - conditions dataset specified by {{ hilight }}flbc_file{{ closehilight }}. - Default: FIXED - - - FIXED - - - - - - - char*256 - ghg_chem - chem_inparm - - Full pathname of time-variant boundary dataset for greenhouse gas production/loss - rates. Only used by the simple prognostic GHG chemistry scheme that is - enabled via the argument "-prog_species GHG" to configure. - - - $DIN_LOC_ROOT/atm/cam/ggas/noaamisc.r8.nc - - - - logical - ghg_chem - chem_inparm - - This variable should not be set by the user. It is set by build-namelist - when the user specifies the argument "-prog_species GHG" to configure which - turns on a simple prognostic chemistry scheme for CH4, N2O, CFC11 and - CFC12. - Default: UNKNOWN - - - - logical - co2_cycle - co2_cycle_nl - - If TRUE turn on CO2 code. - Default: UNKNOWN - - - - logical - co2_cycle - co2_cycle_nl - - If TRUE read co2 fuel flux from file. - Default: UNKNOWN - - - - logical - co2_cycle - co2_cycle_nl - - If TRUE read co2 ocn flux from file. - - - .false. - - - - logical - co2_cycle - co2_cycle_nl - - If TRUE read co2 aircraft flux from file. 
- Default: UNKNOWN - - - - char*256 - co2_cycle - co2_cycle_nl - - Filepath for dataset containing CO2 flux from ocn. - Default: none - - - - char*256 - co2_cycle - co2_cycle_nl - - Filepath for dataset containing CO2 flux from fossil fuel. - Default: none - - - $DIN_LOC_ROOT/atm/cam/ggas/emissions-cmip6_CO2_anthro_surface_175001-201512_fv_0.9x1.25_c20181011.nc - $DIN_LOC_ROOT/atm/cam/ggas/emissions-cmip6_CO2_anthro_surface_175001-201512_fv_1.9x2.5_c20181011.nc - - - - - - - char*1(10) - history - cam_history_nl - A,B,I,X,M,L,S - - Sets the averaging flag for all variables on a particular history file - series. Valid values are: - - A ==> Average - B ==> GMT 00:00:00 average - I ==> Instantaneous - M ==> Minimum - X ==> Maximum - L ==> Local-time - S ==> Standard deviation - - The default is to use the averaging flags for each variable that are set in - the code via calls to subroutine addfld. - - Defaults: set in code via the addfld and add_default subroutine calls. - - - - logical - history - cam_history_nl - - If true don't put any of the variables on the history tapes by - default. Only output the variables that the user explicitly lists in - the {{ hilight }}fincl#{{ closehilight }} namelist items. - - - .false. - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 1st history file (by default the name - of this file contains the string "h0"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 2nd history file (by default the name - of this file contains the string "h1"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 3rd history file (by default the name - of this file contains the string "h2"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 4th history file (by default the name - of this file contains the string "h3"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 5th history file (by default the name - of this file contains the string "h4"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 6th history file (by default the name - of this file contains the string "h5"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 7th history file (by default the name - of this file contains the string "h6"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 8th history file (by default the name - of this file contains the string "h7"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 9th history file (by default the name - of this file contains the string "h8"). - Default: none - - - - char*24(1000) - history - cam_history_nl - - List of fields to exclude from the 10th history file (by default the name - of this file contains the string "h9"). - Default: none - - - - char*26(1000) - history - cam_history_nl - - List of fields to include on the first history file (by default the name of - this file contains the string "h0"). The added fields must be in Master - Field List. The averaging flag for the output field can be specified by - appending a ":" and a valid averaging flag to the field name. 
Valid flags - are: - - A ==> Average - B ==> GMT 00:00:00 average - I ==> Instantaneous - M ==> Minimum - X ==> Maximum - L ==> Local-time - S ==> Standard deviation - - Default: set in code via the addfld and add_default subroutine calls. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 2nd history file (by default - the name of this file contains the string "h1"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 3rd history file (by default - the name of this file contains the string "h2"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 4th history file (by default - the name of this file contains the string "h3"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 5th history file (by default - the name of this file contains the string "h4"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 6th history file (by default - the name of this file contains the string "h5"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 7th history file (by default - the name of this file contains the string "h6"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 8th history file (by default - the name of this file contains the string "h7"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 9th history file (by default - the name of this file contains the string "h8"). - Default: none. - - - - char*26(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for the 10th history file (by default - the name of this file contains the string "h9"). - Default: none. - - - - logical - history - clubb_his_nl - - if .true. then output CLUBBs history statistics - - - .false. - - - - logical - history - clubb_his_nl - - if .true. then output CLUBBs radiative history statistics - - - .false. - - - - char*16(10000) - history - clubb_stats_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for CLUBB statistics on zt grid. - Default: none. - - - - char*16(10000) - history - clubb_stats_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for CLUBB statistics on zm grid. - Default: none. - - - - char*16(10000) - history - clubb_stats_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for CLUBB statistics on radiation zt grid. - Default: none. - - - - char*16(10000) - history - clubb_stats_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for CLUBB statistics on radiation zm grid. - Default: none. - - - - char*16(10000) - history - clubb_stats_nl - - Same as {{ hilight }}fincl1{{ closehilight }}, but for CLUBB statistics on surface. - Default: none. - - - - logical(10) - history - cam_history_nl - - Collect all column data into a single field and output in ncol format, - much faster than default when you have a lot of columns. - - - .false. - - - - char*128(1000) - history - cam_history_nl - - List of columns or contiguous columns at which the fincl1 fields will be - output. 
Individual columns are specified as a string using a longitude - degree (greater or equal to 0.) followed by a single character - (e)ast/(w)est identifer, an underscore '_' , and a latitude degree followed - by a single character (n)orth/(s)outh identifier. For example, '10e_20n' - would pick the model column closest to 10 degrees east longitude by 20 - degrees north latitude. A group of contiguous columns can be specified - using bounding latitudes and longitudes separated by a colon. For example, - '10e:20e_15n:20n' would select the model columns which fall with in the - longitude range from 10 east to 20 east and the latitude range from 15 - north to 20 north. - Default: none - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 2nd history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 3rd history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 4th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 5th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 6th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 7th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 8th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 9th history file. - - - - char*128(1000) - history - cam_history_nl - - Same as {{ hilight }}fincl1lonlat{{ closehilight }}, but for 10th history file. - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 1st history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 2nd history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 3rd history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 4th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 5th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 6th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 7th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 8th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 9th history file. - Default: none - - - - char*26(1000) - history - cam_history_nl - - Specific fields which will be written using the non-default precision on - the 10th history file. 
- Default: none - - - - char*256(10) - history - cam_history_nl - - - Array of history filename specifiers. The filenames of up to six history - output files can be controlled via this variable. Filename specifiers give - generic formats for the filenames with specific date and time components, - file series number (0-5), and caseid, filled in when the files are - created. The following strings are expanded when the filename is created: - %c=caseid; %t=file series number (0-5); %y=year (normally 4 digits, more - digits if needed); %m=month; %d=day; %s=seconds into current day; %%=% - symbol. Note that the caseid may be set using the namelist - variable {{ hilight }}case_name{{ closehilight }}. - - For example, for a simulation with caseid="test" and current date and time - of 0000-12-31 0:00UT, a filename specifier of "%c.cam2.h%t.%y-%m.nc" would - expand into "test.cam2.h0.0000-12.nc" for the first history file. The - filename specifier "%c.cam2.h%t.%y-%m-%d-%s.nc" would expand to - "test.cam2.h1.0000-12-31-00000.nc" for the second history file. Spaces are - not allowed in filename specifiers. Although the character "/" is allowed - in the specifier, it will be interpreted as a directory name and the - corresponding directories will have to be created in the model execution - directory (directory given to configure with -cam_exedir option) before - model execution. The first element is for the primary history file which - is output by default as a monthly history file. Entries 2 through 6 are - user specified auxilliary output files. - - Defaults: "%c.cam2.h0.%y-%m.nc", "%c.cam2.h1.%y-%m-%d-%s.nc", ..., - "%c.cam2.h5.%y-%m-%d-%s.nc" - - - - char*256 - history - satellite_options_nl - - Full pathname of the satellite track data used by the satellite track history - output feature. - Default: none - - - - char*256 - history - satellite_options_nl - - Satellite track history filename specifier. See {{ hilight }}hfilename_spec{{ closehilight }} - - - %c.cam2.sat.%y-%m-%d-%s.nc - - - - char*26(1000) - history - satellite_options_nl - - List of history fields to output along the satellite track specified by {{ hilight }}sathist_track_infile{{ closehilight }} - Default: none - - - - integer - history - satellite_options_nl - - Sets the maximum number of observation columns written to the satellite track history file - series. - - - 100000 - - - - integer - history - satellite_options_nl - - Sets the number of columns closest to the observation that should be output. Setting - this to a number greater than 1 allows for spatial interpolation in the post processing. - - - 1 - - - - integer - history - satellite_options_nl - - Sets the number of timesteps closest to the observation that should be output. Setting - this to a number greater than 1 allows for temporal interpolation in the post processing. - - - 1 - - - - char*8 - history - cam_history_nl - NONE,6-HOURLY,DAILY,MONTHLY,YEARLY,CAMIOP,ENDOFRUN - - Frequency that initial files will be output: 6-hourly, daily, monthly, - yearly, or never. Valid values: 'NONE', '6-HOURLY', 'DAILY', 'MONTHLY', - 'YEARLY', 'CAMIOP', 'ENDOFRUN'. - - - YEARLY - - - - logical - history - cam_history_nl - - If false then include only REQUIRED fields on IC file. If true then - include required AND optional fields on IC file. - - - .false. - - - - integer(10) - history - cam_history_nl - - Array containing the maximum number of time samples written to a history - file. 
The first value applies to the primary history file, the second - through tenth to the auxillary history files. - Default: 1,30,30,30,30,30,30,30,30,30 - - - 1500 - - - - integer(10) - history - cam_history_nl - - Array containing the starting time of day for local time history averaging. - Used in conjuction with lcltod_stop. If lcltod_stop is less than lcltod_start, - then the time range wraps around 24 hours. The start time is included in the - interval. Time is in seconds and defaults to 39600 (11:00 AM). The first value - applies to the primary hist. file, the second to the first aux. hist. file, etc. - Default: none - - - - integer(10) - history - cam_history_nl - - Array containing the stopping time of day for local time history averaging. - Used in conjuction with lcltod_start. If lcltod_stop is less than lcltod_start, - then the time range wraps around 24 hours. The stop time is not included in the - interval. Time is in seconds and defaults to 0 (midnight). The first value - applies to the primary hist. file, the second to the first aux. hist. file, etc. - Default: none - - - - integer(10) - history - cam_history_nl - 1,2 - - - Array specifying the precision of real data written to each history file - series. Valid values are 1 or 2. '1' implies output real values are 8-byte - and '2' implies output real values are 4-byte. - - Default: 2,2,2,2,2,2,2,2,2,2 - - - - integer(10) - history - cam_history_nl - - - Array of write frequencies for each history file series. - If {{ hilight }}nhtfrq(1){{ closehilight }} = 0, the file will be a monthly average. - Only the first file series may be a monthly average. If - {{ hilight }}nhtfrq(i){{ closehilight }} > 0, frequency is specified as number of - timesteps. If {{ hilight }}nhtfrq(i){{ closehilight }} < 0, frequency is specified - as number of hours. - - Default: 0,-24,-24,-24,-24,-24,-24,-24,-24,-24 - - - 9 - - - - logical(10) - history - cam_history_nl - - If interpolate_output(k) = .true., then the k'th history file will be - interpolated to a lat/lon grid before output. - - - .false. - - - - integer(10) - history - cam_history_nl - - Size of latitude dimension of grid for interpolated output. - If interpolate_nlat and interpolate_nlon are zero, reasonable values - will be chosen by the dycore based on the run resolution. - - - 0 - - - - integer(10) - history - cam_history_nl - - Size of longitude dimension of grid for interpolated output. - If interpolate_nlat and interpolate_nlon are zero, reasonable values - will be chosen by the dycore based on the run resolution. - - - 0 - - - - integer(10) - history - cam_history_nl - 0,1 - - Selects interpolation method for output on lat/lon grid. - 0: Use SE's native high-order method. - 1: Use a bilinear method. - Default: 1 (bilinear) - - - - integer(10) - history - cam_history_nl - 1,2,3 - - Selects output grid type for lat/lon interpolated output. - 1: Equally spaced, including poles (FV scalars output grid). - 2: Gauss grid (CAM Eulerian). - 3: Equally spaced, no poles (FV staggered velocity). - - - 1 - - - - - - - char*256 - initial_conditions - cam_initfiles_nl - - Full pathname of initial atmospheric state dataset (NetCDF format). 
- Default: UNSET_PATH - - - UNSET_PATH - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L26_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L30_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L32_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.23x0.31_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.47x0.63_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-10-01_0.5x0.625_L26_c031204.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1987-01-01_0.9x1.25_L26_c060703.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.9x1.25_L26_c051205.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_1.9x2.5_L26_c070408.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L26_c040809.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L26_c110309.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L26_c091007.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0001-01-01_4x5_L26_c060608.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.23x0.31_L30_c110527.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.47x0.63_L30_c100929.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L30_c100618.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L30_c090306.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L30_c070109.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L30_c110309.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L30_c100831.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_4x5_L30_c090108.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L30_c081013.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L32_c141031.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L32_c150407.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam4_0000-01-01_10x15_L32_c170914.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.47x0.63_L26_APE_c080227.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L26_c161230.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L30_c170103.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L32_c170103.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_0.9x1.25_L30_c080724.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L26_c080114.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L30_c080215.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_10x15_L30_c121015.nc - 
$DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_4x5_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_1.9x2.5_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_4x5_L26_c060217.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_10x15_L26_c060216.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-02-01_0.9x1.25_L66_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-07-01_1.9x2.5_L66_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/FWT2000_f09_spinup01.cam.i.0001-01-02-00000_c160315.nc - $DIN_LOC_ROOT/atm/waccm/ic/f2000.waccm-mam3_1.9x2.5_L70.cam2.i.0017-01-01.c120410.nc - $DIN_LOC_ROOT/atm/waccm/ic/aqua.cam6.waccmsc_1.9x2.5_L70.2000-01-01.c170123.nc - $DIN_LOC_ROOT/atm/waccm/ic/aqua.waccm_tsmlt_1.9x2.5_L70_c170814.nc - $DIN_LOC_ROOT/atm/waccm/ic/f2000.waccm-mam3_4x5_L70.cam2.i.0017-01-01.c121113.nc - $DIN_LOC_ROOT/atm/waccm/ic/f2000.waccm-mam3_10x15_L70.cam2.i.0017-01-01.c141016.nc - $DIN_LOC_ROOT/atm/waccm/ic/b1850.waccm-mam3_1.9x2.5_L70.cam2.i.0156-01-01.c120523.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-05-01_1.9x2.5_L103_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/wa3_4x5_1950_spinup.cam2.i.1960-01-01-00000.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-01-01_10x15_L66_c041121.nc - $DIN_LOC_ROOT/atm/waccm/ic/f40.2000.4deg.wcm.carma.sulf.004.cam2.i.0008-01-01-00000.nc - $DIN_LOC_ROOT/atm/waccm/ic/f40.2deg.wcm.carma.sulf.L66.cam2.i.2010-01-01.nc - $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_L81_c110906.nc - $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_4x5_L81_c160630.nc - $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_10x15_L81_c141027.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_aqua_4x5_L126_c170705.nc - $DIN_LOC_ROOT/atm/waccm/ic/fx2000_0.9x1.25_126lev_0002-01-01-00000_c181221.nc - $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_1.9x2.5_130lev_2000_c181115.nc - $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_0.9x1.25_130lev_2000_c190122.nc - $DIN_LOC_ROOT/atm/waccm/ic/FC6X2000_f05_spinup01.cam.i.0002-01-01-00000_c190711.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_4x5_L130_c180803.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_1.9x2.5_L130_c180803.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/T341clim01.cam2.i.0024-01-01-00000.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_256x512_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_128x256_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_128x256_L26_c040422.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_T42_L26_c031110.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L30_c090102.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L32_c170510.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L26_c091218.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_48x96_L26_c040420.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L30_c100426.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_32x64_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_32x64_L30_c090107.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L26_c030228.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_8x16_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L30_c090102.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01_ne5np4_L30.140707.nc - 
$DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L32_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam6_ne30np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L32_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L30_c171024.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_1850-01-01_ne240np4_L26_c110314.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_0000-09-01_ne240np4_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01-ne240np4_L30_c111004.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne5np4_L26_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne16np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne30np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne60np4_L26_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne120np4_L26_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne240np4_L26_c170613.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne5np4_L30_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne16np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne30np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne120np4_L30_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne5np4_L32_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne16np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne30np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne120np4_L32_c170908.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne240np4_L32_c170908.nc - $DIN_LOC_ROOT/atm/cam/inic/se/f2000_conus_ne30x8_L32_c190712.nc - $DIN_LOC_ROOT/atm/waccm/ic/wa3_ne5np4_1950_spinup.cam2.i.1960-01-01-00000_c150810.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccm5_1850_ne30np4_L70_0001-01-11-00000_c151217.nc - $DIN_LOC_ROOT/atm/waccm/ic/fw2000_ne30np4_L70_c181221.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc - - - - char*256 - initial_conditions - physics_nl - - Full pathname of atmospheric state dataset for comparison/check (NetCDF format). - If not UNSET_PATH, the physics check tool will run and print out stats every timestep. - Default: UNSET_PATH - - - UNSET_PATH - - - - real - initial_conditions - cam_initfiles_nl - - Perturb the initial conditions for temperature randomly by up to the given - amount. Only applied for initial simulations. - Default: 0.0 - - - 0.0 - - - - char*256 - initial_conditions - cam_initfiles_nl - - Full pathname of master restart file from which to branch. Setting is - Required for branch run. - Default: none - - - none - - - - char*80 - initial_conditions - cam_initfiles_nl - - - - UNSET_PATH - - - - logical - initial_conditions - constituents_nl - - If TRUE, try to initialize data for all consituents by reading from the - initial conditions dataset. If variable not found then data will be - initialized using internally-specified default values. If FALSE then don't - try reading constituent data from the IC file; just use the - internally-specified defaults. - - - .true. 
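As a concrete illustration of the cam_history_nl controls described earlier in this group, the fragment below is a minimal, hypothetical user_nl_cam sketch. Only fincl1, nhtfrq, and hfilename_spec are named explicitly in the descriptions above; fincl2lonlat follows the numbered pattern the text describes for the auxiliary files, and the field names (PS, T) are placeholders, not taken from this document.

 &cam_history_nl
  ! First series stays a monthly average (0); second series is a daily mean (-24 hours).
  nhtfrq = 0, -24
  ! Add two placeholder fields to the first file, forcing instantaneous output
  ! with the ":I" averaging-flag suffix described above.
  fincl1 = 'PS:I', 'T:I'
  ! Restrict the second series to the model column nearest 10 degrees east, 20 degrees north,
  ! using the lonlat syntax described above.
  fincl2lonlat = '10e_20n'
  ! Filename specifier for the second series, expanding caseid, series number, and date.
  hfilename_spec(2) = '%c.cam2.h%t.%y-%m-%d-%s.nc'
 /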
- - - - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulator is run. - Setting this namelist variable happens automatically if you compile with COSP. - COSP will not run unless this is set to .true. in the namelist! - Turn on the desired simulators using lXXX_sim namelist vars - If no specific simulators are specified, all of the simulators - are run on all columns and all output is saved. (useful for testing). - COSP is available with CAM4, CAM5 and CAM6 physics. - This default logical is set in cospsimulator_intr.F90. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, COSP cloud simulators are run to produce - all output required for the COSP plots in the AMWG diagnostics package. - sets cosp_ncolumns=10 and cosp_nradsteps=3 - (appropriate for COSP statistics derived from seasonal averages), - and runs MISR, ISCCP, MODIS, CloudSat radar and CALIPSO lidar simulators - (cosp_lmisr_sim=.true.,cosp_lisccp_sim=.true., - cosp_lmodis_sim=.true.,cosp_lradar_sim=.true.,cosp_llidar_sim=.true.). - This default logical is set in cospsimulator_intr.F90. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulators are run to produce - select output for the AMWG diagnostics package. - sets cosp_ncolumns=10 and cosp_nradsteps=3 - (appropriate for COSP statistics derived from seasonal averages), - and runs MISR, ISCCP, MODIS, and CALIPSO lidar simulators - (cosp_lmisr_sim=.true.,cosp_lisccp_sim=.true., - cosp_lmodis_sim=.true.,cosp_llidar_sim=.true.). - This default logical is set in cospsimulator_intr.F90. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the passive COSP cloud simulators are run to produce - select output for the AMWG diagnostics package. - sets cosp_ncolumns=10 and cosp_nradsteps=3 - (appropriate for COSP statistics derived from seasonal averages), - and runs MISR, ISCCP, and MODIS simulators - (cosp_lmisr_sim=.true.,cosp_lisccp_sim=.true.,cosp_lmodis_sim=.true.). - This default logical is set in cospsimulator_intr.F90. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the active COSP cloud simulators are run to produce - select output for the AMWG diagnostics package. - sets cosp_ncolumns=10 and cosp_nradsteps=3 - (appropriate for COSP statistics derived from seasonal averages), - and runs CloudSat radar and CALIPSO lidar simulators - (cosp_lradar_sim=.true.,cosp_llidar_sim=.true.). - This default logical is set in cospsimulator_intr.F90. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the ISCCP cloud simulator is run to produce - select output for the AMWG diagnostics package. - sets cosp_ncolumns=10 and cosp_nradsteps=3 - (appropriate for COSP statistics derived from seasonal averages), - and runs ISCCP simulator - (cosp_lmisr_sim=.false.,cosp_lisccp_sim=.true., - cosp_lmodis_sim=.false.,cosp_lradar_sim=.false.,cosp_llidar_sim=.false.). - This default logical is set in cospsimulator_intr.F90. - 1236: Default: FALSE - - - - logical - cosp - cospsimulator_nl - - If true, run all simulators using the default values cosp_ncolumns=50 and - cosp_nradsteps=1. This option is mainly intended for testing, but it also - must be used in order to output the input fields needed to run the - simulator in an offline mode (via setting cosp_histfile_aux=.true.). - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, COSP radar simulator will be run and all non-subcolumn output - will be saved. - - - .false. 
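To make the COSP switches just described concrete, here is a minimal, hypothetical cospsimulator_nl sketch that enables only the active (radar and lidar) simulators, using the sub-column and radiation-step values the text associates with statistics derived from seasonal averages. Only variables named in the descriptions above are used; the top-level switch that turns COSP on is described above but its name is not shown in this listing, so it is not set here.

 &cospsimulator_nl
  ! Run the CloudSat radar and CALIPSO lidar simulators only.
  cosp_lradar_sim = .true.
  cosp_llidar_sim = .true.
  ! Values the descriptions above associate with seasonal-average statistics.
  cosp_ncolumns  = 10
  cosp_nradsteps = 3
 /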
- - - - logical - cosp - cospsimulator_nl - - If true, COSP lidar simulator will be run and all non-subcolumn output - will be saved - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, COSP ISCCP simulator will be run and all non-subcolumn output - will be saved. ISCCP simulator is run on only daylight - columns. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, MISR simulator will be run and all non-subcolumn output - will be saved. MISR simulator is run on only daylight - columns. - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, MODIS simulator will be run and all non-subcolumn output - will be saved. - - - - .false. - - - - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulator is run for CFMIP 3-hourly - experiments. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulator is run for CFMIP daily - experiments. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulator is run for CFMIP off-line - monthly experiments. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - logical - cosp - cospsimulator_nl - - If true, the COSP cloud simulator is run for CFMIP monthly - experiments. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - - - - integer - cosp - cospsimulator_nl - - Number of subcolumns in SCOPS - This default logical is set in cospsimulator_intr.F90 - - - 50 - - - - - - - logical - cosp - cospsimulator_nl - - Turns on sampling along a-train orbit for radar and lidar simulators. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - char*256 - cosp - cospsimulator_nl - - Full pathname for the Atrain orbit data file. - cosp_atrainorbitdata is requiref if cosp_sample_atrain is TRUE. - Default: NONE - - - - - - - integer - cosp - cospsimulator_nl - - This specifies the CAM history tape where COSP diagnostics will be written. - Ignored/not used if any of the cosp_cfmip_* namelist variables are invoked. - - This default is set in cospsimulator_intr.F90 - - - 1 - - - - logical - cosp - cospsimulator_nl - - If true, additional output is added to make it possible to - run COSP off-line. - - This default is set in cospsimulator_intr.F90 - - - .false. - - - - integer - cosp - cospsimulator_nl - - This specifies the CAM history tape where extra COSP diagnostics will be written. - - This default is set in cospsimulator_intr.F90 - - - -1 - - - - integer - cosp - cospsimulator_nl - - This specifies the frequency at which is COSP is called, - every cosp_nradsteps radiation timestep. - - This default is set in cospsimulator_intr.F90 - - - 1 - - - - logical - cosp - cospsimulator_nl - - Turns on sub-column output from COSP. - If both the isccp/misr simulators and the lidar/radar simulators - are run, lfrac_out is from the isccp/misr simulators columns. - This default logical is set in cospsimulator_intr.F90 - - - .false. - - - - - - real - conv - rk_stratiform_nl - - Threshold for autoconversion of cold ice in RK microphysics scheme. - - - 5.0e-6 - 9.5e-6 - 45.0e-6 - 45.0e-6 - 45.0e-6 - 18.0e-6 - 18.0e-6 - 9.5e-6 - 9.5e-6 - 9.5e-6 - 9.5e-6 - 30.0e-6 - 20.0e-6 - 16.0e-6 - 1.0e-6 - 18.0e-6 - - - - real - conv - rk_stratiform_nl - - Threshold for autoconversion of warm ice in RK microphysics scheme. 
- - - 4.0e-4 - 2.0e-4 - 2.0e-6 - 2.0e-4 - - - - real - conv - rk_stratiform_nl - - Tunable constant for evaporation of precip in RK microphysics scheme. - - - 10.0e-6 - 5.0e-6 - 5.0e-6 - 5.0e-6 - 5.0e-6 - 5.0e-6 - - - - real - conv - rk_stratiform_nl - - Critical radius at which autoconversion become efficient in RK microphysics - scheme. - - - 10.0e-6 - 1.0e-6 - - - - real - conv - rk_stratiform_nl - - Relative humidity threshold for stratospheric cloud water condensation in RK microphysics - poleward of 50 degrees. - Default: none - - - - - - logical - conv - macro_park_nl - - Switch to control whether Park macrophysics should prognose - cloud ice (cldice). - Default: .true., except for carma=cirrus and carma=carma_dust - - - - logical - conv - macro_park_nl - - Switch to control whether Park macrophysics should prognose - cloud liquid (cldliq). - - - .true. - - - - logical - conv - macro_park_nl - - Switch to control whether Park macrophysics should perform - detrainment into the stratiform cloud scheme. - Default: .true., except for carma=cirrus and carma=carma_dust - - - - - - - integer - microphys - micro_mg_nl - - Version number for MG microphysics. This value is set automatically based - on settings in configure and passed to build-namelist - Default: 1 for CAM5 and 2 for CAM6 - - - 1 - 2 - 3 - - - - integer - microphys - micro_mg_nl - - Sub-version number for MG microphysics - - - 0 - 0 - 0 - 0 - - - - real - microphys - micro_mg_nl - - Autoconversion size threshold - Default: UNKNOWN - - - 400.D-6 - 500.D-6 - 390.D-6 - 200.D-6 - 500.D-6 - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics should prognose - cloud ice (cldice). - Default: .true., except for carma=cirrus and carma=carma_dust - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics should prognose - cloud liquid (cldliq). - - - .true. - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics should prognose - dense precipitating ice as graupel. mg3 only. - Default: .true. for mg3 - - - .false. - .true. - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics should prognose - dense precipitating ice as hail. mg3 only. - - - .false. - - - - integer - microphys - micro_mg_nl - - Number of substeps over MG microphysics. - - - 1 - 1 - 1 - 1 - - - - char*16 - microphys - micro_mg_nl - max_overlap,in_cloud - - Type of precipitation fraction. - Default: for CLUBB runs => in_cloud; - all others => max_overlap - - - max_overlap - in_cloud - - - - real - microphys - micro_mg_nl - - Efficiency factor for berg - - - 1.0D0 - 1.0D0 - - - - logical - microphys - micro_mg_nl - - Do Seifert and Behang (2001) autoconversion and accretion physics when set to true. - - - .false. - - - - logical - microphys - micro_mg_nl - - Do destruction of massless droplets - - - .false. - .true. - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics performs a uniform calculation or not - (useful for sub-columns) - Default: .false. unless use_subcol_microp is true - - - - logical - microphys - micro_mg_nl - - Switch to control whether MG microphysics should adjust the temperature - at the level containing the cold point tropopause by using the value - obtain by extrapolating between levels. - - - .false. - - - - logical - microphys - micro_mg_nl - - Set .true. to hold cloud droplet number constant. - - - .false. - - - - logical - microphys - micro_mg_nl - - Set .true. 
to hold cloud ice number constant. - - - .false. - - - - logical - microphys - micro_mg_nl - - Set .true. to hold cloud graupel number constant. - - - .false. - - - - real - microphys - micro_mg_nl - - In-cloud droplet number concentration when micro_mg_nccons=.true. - Default: 100.e6 m-3 - - - - real - microphys - micro_mg_nl - - In-cloud ice number concentration when micro_mg_nicons=.true. - Default: 0.1e6 m-3 - - - - real - microphys - micro_mg_nl - - In-cloud graupel number concentration when micro_mg_nicons=.true. - Default: 0.1e6 m-3 - - - - - - real - microphys - microp_aero_nl - - prescribed aerosol bulk sulfur scale factor - - - 2.0 - - - - logical - microphys - hetfrz_classnuc_nl - - Add diagnostic output for heterogeneous freezing code. - - - .false. - - - - logical - microphys - nucleate_ice_nl - - Switch to turn on treatment of pre-existing ice in the ice nucleation code. - Default: .false., except .true. for CAM6 - - - .false. - .true. - - - - logical - microphys - nucleate_ice_nl - - Add diagnostics for pre-existing ice option in ice nucleation code to history output. - - - .false. - - - - real - microphys - nucleate_ice_nl - - Subgrid scaling factor for relative humidity in ice nucleation code. If it has - a value of -1, then indicates that the subgrid scaling factor will be - calculated on the fly as 1 / qsatfac (i.e. the saturation scaling factor). - - - 1.0D0 - 1.2D0 - 1.2D0 - 1.2D0 - 1.0D0 - - - - real - microphys - nucleate_ice_nl - - Subgrid scaling factor for relative humidity in ice nucleation code in the - stratosphere. If it has a value of -1, then indicates that the subgrid - scaling factor will be calculated on the fly as 1 / qsatfac (i.e. the - saturation scaling factor). - - - 1.0D0 - 1.2D0 - 1.2D0 - 1.2D0 - - - - logical - microphys - nucleate_ice_nl - - Switch to determine whether ice nucleation happens using the incloud (true) or - the gridbox average (false) relative humidity. When true, it is assumed that - the incloud relative humidity for nucleation is 1. - Default: .true., except .false. for CAM6 - - - .true. - .false. - - - - real - microphys - nucleate_ice_nl - - Fraction of Aitken mode sulfate particles assumed to nucleate ice in the polar - stratospheric. Provides an increase in homogeneous freezing over the Liu&Penner method. - Temporary solution to adjust ice surface area density and dehydration in the - polar stratosphere where there doesn't seem to be enough nucleation. A value of - zero means Liu&Penner is used. - - - 1.0D0 - - - - logical - microphys - nucleate_ice_nl - - Indicates whether to use the tropopause level to determine where to adjust - nucleation for the stratosphere (true) or whether to use a hard coded transition - level from 100 to 125 hPa applied only in the polar regions (false). - - - .true. - - - - - - real - conv - hkconv_nl - - Characteristic adjustment time scale for Hack shallow scheme. - - - 1800.0D0 - - - - real - conv - hkconv_nl - - Rain water autoconversion coefficient for Hack shallow scheme. - - - 1.0e-4 - 5.0e-5 - 5.0e-5 - 5.0e-5 - 2.0e-4 - 2.0e-4 - 1.0e-5 - 1.0e-5 - 1.0e-4 - 1.0e-4 - 1.0e-4 - - - - - - real - conv - uwshcu_nl - - Penetrative entrainment efficiency in UW shallow scheme. - - - 10.0 - 5.0 - 5.0 - - - - - - - logical - cldfrc - cldfrc_nl - - Switch for Vavrus "freeze dry" adjustment in cloud fraction. Set to FALSE to - turn the adjustment off. - - - .true. - - - - logical - cldfrc - cldfrc_nl - - Switch for ice cloud fraction calculation. - Default: .true. for CAM5 and CAM6, otherwise .false. 
- - - .false. - .true. - .true. - - - - real - cldfrc - cldfrc_nl - - Minimum rh for low stable clouds. - - - 0.900D0 - 0.910D0 - 0.850D0 - 0.850D0 - 0.950D0 - 0.950D0 - 0.8975D0 - 0.8875D0 - 0.9125D0 - 0.910D0 - 0.950D0 - 0.950D0 - 0.8975D0 - 0.8875D0 - 0.9125D0 - 0.910D0 - 0.920D0 - 0.920D0 - 0.913D0 - 0.903D0 - 0.905D0 - 0.880D0 - 0.910D0 - - - - real - conv - cldfrc_nl - - Adjustment to rhminl for land without snow cover. - Default: 0.0 for CAM6; - all others => 0.10 - - - 0.100D0 - 0.000D0 - 0.000D0 - - - - real - cldfrc - cldfrc_nl - - Minimum rh for high stable clouds. - - - 0.800D0 - 0.770D0 - 0.700D0 - 0.770D0 - 0.500D0 - 0.900D0 - 0.900D0 - 0.680D0 - 0.680D0 - 0.650D0 - - - - real - cldfrc - cldfrc_nl - - parameter for shallow convection cloud fraction. - - - 0.07D0 - 0.04D0 - 0.10D0 - 0.04D0 - - - - real - cldfrc - cldfrc_nl - - parameter for shallow convection cloud fraction. - - - 500.0D0 - - - - real - cldfrc - cldfrc_nl - - parameter for deep convection cloud fraction. - - - 0.14D0 - 0.10D0 - 0.10D0 - 0.10D0 - 0.10D0 - - - - real - cldfrc - cldfrc_nl - - parameter for deep convection cloud fraction. - - - 500.0D0 - - - - real - cldfrc - cldfrc_nl - - top pressure bound for mid level cloud. - - - 75000.0D0 - 25000.0D0 - 25000.0D0 - 25000.0D0 - 25000.0D0 - 25000.0D0 - 25000.0D0 - 25000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - 40000.0D0 - - - - real - conv - cldfrc_nl - - Bottom height (Pa) for mid-level liquid stratus fraction. - Default: 700.e2 for CAM5 and CAM6; all others=> 750.e2 - - - 750.0D2 - 700.0D2 - 700.0D2 - - - - integer - conv - cldfrc_nl - - Scheme for ice cloud fraction: 1=wang & sassen, 2=schiller (iciwc), - 3=wood & field, 4=Wilson (based on smith), 5=modified slingo (ssat & empyt cloud) - - - 1 - 5 - 5 - 4 - 4 - 4 - - - - real - conv - cldfrc_nl - - Critical RH for ice clouds (Wilson & Ballard scheme). - - - 0.95D0 - 0.93D0 - 0.93D0 - 0.70D0 - 0.70D0 - 0.70D0 - - - - real - conv - cldfrc2m_nl - - Minimum rh for ice cloud fraction > 0. - - - 0.80D0 - 0.85D0 - 0.80D0 - - - - real - conv - cldfrc2m_nl - - rhi at which ice cloud fraction = 1. - - - 1.1D0 - 1.0D0 - 1.05D0 - 1.0D0 - - - - real - conv - cldfrc2m_nl - - Minimum rh for ice cloud fraction > 0 in the stratosphere. - - - 1.0D0 - 0.85D0 - - - - real - conv - cldfrc2m_nl - - rhi at which ice cloud fraction = 1 in the stratosphere. - - - 1.1D0 - 1.0D0 - 1.0D0 - - - - logical - conv - cldfrc2m_nl - - Use cloud fraction to determine whether to do growth of ice clouds below - RHice of 1 down to RHice = rhmini. - Default: .true. for CAM6; all others => .false. - - - .false. - .true. - - - - real - conv - zmconv_nl - - Convective momentum transport parameter (upward) - - - 0.4000D0 - 0.7000D0 - 0.7000D0 - - - - real - conv - zmconv_nl - - Convective momentum transport parameter (downward) - - - 0.4000D0 - 0.7000D0 - 0.7000D0 - - - - - - real - conv - zmconv_nl - - Autoconversion coefficient over land in ZM deep convection scheme. - - - 0.0030D0 - 0.0059D0 - 0.0035D0 - 0.0075D0 - 0.0075D0 - 0.0059D0 - 0.0035D0 - 0.0075D0 - 0.0075D0 - 0.0035D0 - 0.0035D0 - 0.0020D0 - 0.0040D0 - 0.0040D0 - 0.0040D0 - - - - real - conv - zmconv_nl - - Autoconversion coefficient over ocean in ZM deep convection scheme. - - - 0.0030D0 - 0.0450D0 - 0.0035D0 - 0.0450D0 - 0.0035D0 - 0.0300D0 - 0.0300D0 - 0.0035D0 - 0.0035D0 - 0.0020D0 - 0.0040D0 - 0.0040D0 - 0.0040D0 - - - - real - conv - zmconv_nl - - Tunable evaporation efficiency for land in ZM deep convection scheme. 
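The cldfrc_nl relative-humidity thresholds listed above are the usual starting point for cloud-fraction tuning. A sketch only: the identifiers below are assumptions inferred from the descriptions ("Minimum rh for low/high stable clouds", "Scheme for ice cloud fraction"), while the values are taken from the default lists above.

 &cldfrc_nl
  cldfrc_rhminl = 0.8975D0   ! assumed id: minimum RH for low stable clouds
  cldfrc_rhminh = 0.800D0    ! assumed id: minimum RH for high stable clouds
  cldfrc_iceopt = 5          ! assumed id: ice cloud fraction scheme (1=Wang & Sassen ... 5=modified Slingo)
 /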
- - - 3.0E-6 - 1.0E-5 - 1.0E-5 - - - - real - conv - zmconv_nl - - Tunable evaporation efficiency in ZM deep convection scheme. - - - 3.0E-6 - 5.0E-6 - 5.0E-6 - 5.0E-6 - 5.0E-6 - 5.0E-6 - - - - logical - conv - zmconv_nl - - Include organization parameterization in ZM. This value is set to true automatically - if -zmconv_org is set in configure. - Default: .false., unless -zmconv_org set in configure - - - .false. - .true. - - - - logical - conv - zmconv_nl - - Turn on convective microphysics - - - .false. - - - - integer - conv - zmconv_nl - - The number of negative buoyancy regions that are allowed before the convection top and CAPE calculations are completed. - Default: => 1 for CAM6; - => 5 for all other - - - 5 - 1 - - - - - - - real - cldsed - cldsed_nl - - Factor applied to the ice fall velocity computed from - Stokes terminal velocity. - - - 1.0D0 - 0.5D0 - 0.5D0 - - - - - - - char*16 - wv_sat - wv_sat_nl - GoffGratch,MurphyKoop - - Type of water vapor saturation vapor pressure scheme employed. 'GoffGratch' for - Goff and Gratch (1946); 'MurphyKoop' for Murphy & Koop (2005) - Default: GoffGratch; except MurphyKoop for carma=cirrus or carma=cirrus_dust - - - GoffGratch - MurphyKoop - MurphyKoop - - - - - - - char*16 - conv - subcol_nl - SILHS,CloudObj,tstcp,vamp,off - - Type of sub-column generator scheme employed. - 'SIHLS' Sub-columns generated with Latin Hypercube sampling of the CLUBB PDF; - 'CloudObj' Create sub-columns where most water is assigned to cloud sub-columns; - 'tstcp' testing; - 'vamp' Variation Across Microphysics Profiles simple deterministic scheme; - 'off' None - - - off - SILHS - - - - logical - conv - subcol_tstcp_nl - - Turns off averaging and assigns first subcolumn back to grid. Needed for BFB comparisons - 'true' for no averaging. - - - .false. - - - - logical - conv - subcol_tstcp_nl - - Turns on/off filtering during averaing in tstcp - 'true' to use filtering. - - - .false. - - - - logical - conv - subcol_tstcp_nl - - Turns on/off use of weights during averaging in tstcp - 'true' to use weights. - - - .false. - - - - logical - conv - subcol_tstcp_nl - - Perturbs the temperatures in state after copying for testing purposes - 'true' to perturb temperatures. - - - .false. - - - - logical - conv - subcol_tstcp_nl - - Tests the restart capabilities of weights with a more adequate test - 'true' to set the weights to a slightly more complicated pattern for restart testing - - - .false. - - - - logical - conv - subcol_silhs_nl - - Turns on/off use of weights during averaging in tstcp - 'true' to use weights. - - - .true. - - - - integer - conv - subcol_silhs_nl - - Number of subcolumns/samples to use in this simulation. Must be less than psubcols. - - - 4 - - - - char*256 - conv - subcol_silhs_nl - - Location of SILHS correlation input files - usually the path to the SILHS code directory. - - - . - - - - char*16 - conv - subcol_silhs_nl - - Correlation input file run name (as in 'rico' or 'arm97' or 'default') - - - default - - - - logical - conv - subcol_silhs_nl - - A special configuration that sends mean ice mixing ratios and number concentrations to microphysics but allows cloud liquid water and number concentrations to vary. Should reduce the impact of negative condensate. Default False. - - - .false. - - - - logical - conv - subcol_silhs_nl - - Whether to send SILHS liq and ice mixing ratios to microphysics (if false, uses grid mean values). - - - .true. 
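Two of the scheme-selection entries above take a short list of string values: the saturation vapor pressure formulation ('GoffGratch' or 'MurphyKoop') and the sub-column generator ('off', 'SILHS', 'CloudObj', 'tstcp', 'vamp'). A sketch; the group names and valid strings come from the text, while the identifiers themselves are assumptions.

 &wv_sat_nl
  wv_sat_scheme = 'MurphyKoop'   ! assumed id: 'GoffGratch' (1946) or 'MurphyKoop' (2005)
 /
 &subcol_nl
  subcol_scheme = 'SILHS'        ! assumed id: 'off', 'SILHS', 'CloudObj', 'tstcp', or 'vamp'
 /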
- - - - logical - conv - subcol_silhs_nl - - Whether to send SILHS liq and ice num concentrations to microphysics (if false, uses grid mean values). - - - .true. - - - - logical - conv - subcol_silhs_nl - - Whether to constrain samples of ice, liquid and vapor to the same subcolumn mean as the grid mean value. - - - .false. - - - - logical - conv - subcol_silhs_nl - - Whether to use a clear-air only column to ensure consistant subcolumn cloud fraction - - - .false. - - - - logical - conv - subcol_silhs_nl - - Flag to use SILHS to calculate the effect of microphysics on CLUBB's predictive - (co)variances rtp2 (total water variance), thlp2 (theta-l variance), and rtpthlp - (covariance of total water and theta-l), as well as on CLUBB's predictive fluxes - wprtp (total water flux) and wpthlp (flux of theta-l). - - - .true. - - - - real - conv - subcol_silhs_nl - - Prescribed in-cloud ratio [N_cn'^2] / [N_cn]^2 [no units] - - - 0.05 - - - - - - - - - - - logical - pbl - blj_nl - - Logical switch to turn on the beljaars scheme - - - .false. - .true. - - - - logical - pbl - tms_nl - - Logical switch to turn on turbulent mountain stress calculation in - vertical diffusion routine. - - - .false. - .true. - .true. - .false. - .true. - - - - real - pbl - tms_nl - - Turbulent mountain stress parameter used when turbulent mountain stress calculation - is turned on. See {{ hilight }}do_tms{{ closehilight }}. - - - 1.0D0 - - - - real - pbl - tms_nl - - Factor determining z_0 from orographic standard deviation [ no unit ] - Used when turbulent mountain stress calc is turned on. See {{ hilight }}do_tms{{ closehilight }}. - - - 0.075D0 - 0.100D0 - 0.100D0 - - - - real - pbl - eddy_diff_nl - - Maximum master length scale designed to address issues in diag_TKE outside the - boundary layer. - In order not to disturb turbulence characteristics in the lower troposphere, - this should be set at least larger than a few km. However, this does not - significantly improve the values outside of the boundary layer. Smaller values - make some improvement, but it is also noisy. Better results are seen using - eddy_leng_max or kv_freetrop_scale. - Default: 40.e3 (m) - - - 40.D3 - - - - real - pbl - eddy_diff_nl - - Maximum dissipation length scale designed to address issues with diag_TKE outside - the boundary layer, where the default value generates large diffusivities. A value - of 30 m is consistent with the length scales used in the HB scheme; however, this - will also reduce value in the boundary layer. - Default: 40.e3 (m) - - - 40.D3 - 30.D0 - 100.D0 - 100.D0 - - - - real - pbl - eddy_diff_nl - - Bottom pressure level at which namelist values for eddy_leng_max and - eddy_lbulk_max are applied. Default values are used at lower levels (i.e. the - boundary layer). - Default: 100.e3 (hPa) - - - 100.D3 - 100.D0 - 100.D0 - - - - real - pbl - eddy_diff_nl - - Moist entrainment enhancement parameter. - - - 30.D0 - 40.D0 - - - - real - pbl - eddy_diff_nl - - Pressure (Pa) that defined the upper atmosphere for adjustment of - eddy diffusivities from diag_TKE using kv_top_scale. - - - 0.D0 - - - - real - pbl - eddy_diff_nl - - Scaling factor that is applied (multiplied) to the eddy diffusivities - in the upper atmosphere (see kv_top_pressure). 
- - - 1.D0 - 0.D0 - 0.D0 - - - - real - pbl - eddy_diff_nl - - Scaling factor that is applied (multiplied) to the eddy diffusivities - in the free troposphere (boundary layer to kv_top_pressure) - - - 1.D0 - - - - logical - pbl - vert_diff_nl - - Perform mass conservation check on eddy diffusion operation. - - - .false. - - - - logical - pbl - vert_diff_nl - - Logical switch to turn on implicit turbulent surface stress calculation in - diffusion solver routine. - - - .false. - .true. - .true. - .true. - - - - logical - diagnostics - unicon_nl - - Produce output for the offline unicon driver. - - - .false. - - - - integer - diagnostics - unicon_nl - - History file number for offline unicon driver output. - Default: 2 (i.e., h1 history file) - - - - logical - pblrad - clubbpbl_diff_nl - - Flag to perform a saturation adjustment for ice which will add ice mass if the - air is supersaturated with respect to ice. - - - .false. - .true. - - - - logical - pblrad - clubbpbl_diff_nl - - Apply cloud top radiative cooling parameterization - - - .false. - - - - logical - pblrad - clubbpbl_diff_nl - - Include effects of precip evaporation on turbulent moments - - - .false. - - - - logical - pblrad - clubbpbl_diff_nl - - Explicit diffusion on temperature and moisture when CLUBB is on - - - .false. - .true. - .false. - - - - - - real - pblrad - clubbpbl_diff_nl - - CLUBB timestep. - - - 300.0D0 - 150.0D0 - - - - real - pblrad - clubbpbl_diff_nl - - Rain evaporation efficiency factor. - - - 1.0D0 - - - - logical - pblrad - clubbpbl_diff_nl - - Switch for CLUBB_ADV - - - .false. - - - - - - real - pblrad - clubb_params_nl - - Plume widths for theta_l and rt - - - 2.4 - - - - real - pblrad - clubb_params_nl - - Limiting value of C1 when skewness of w (vertical velocity) is small in - magnitude in the C1 skewness function. Increasing the value of C1 increases the - damping of CLUBB's wp2 (variance of vertical velocity). - - - 1.0 - - - - real - pblrad - clubb_params_nl - - Limiting value of C1 when skewness of w (vertical velocity) is large in - magnitude in the C1 skewness function. Increasing the value of C1 increases the - damping of CLUBB's wp2 (variance of vertical velocity). - - - 1.0 - - - - real - pblrad - clubb_params_nl - - Low Skewness in C11 Skw. Function - Default: 0.7D0 - - - 0.7D0 - - - - real - pblrad - clubb_params_nl - - High Skewness in C11 Skw. Function - Default: 0.35D0 - - - 0.35D0 - - - - real - pblrad - clubb_params_nl - - Constant for u'^2 and v'^2 terms - Default: 2.2D0 - - - 2.2D0 - 0.5 - - - - real - pblrad - clubb_params_nl - - C2 coef. for the rtp2_dp1 term - - - 1.0 - 0.2 - - - - real - pblrad - clubb_params_nl - - C2 coef. for the thlp2_dp1 term - - - 1.0 - 0.2 - - - - real - pblrad - clubb_params_nl - - C2 coef. for the rtpthlp_dp1 term - - - 1.3 - 0.2 - - - - real - pblrad - clubb_params_nl - - Low Skewness in C7 Skw. Function - - - 0.5 - - - - real - pblrad - clubb_params_nl - - High Skewness in C7 Skw. Function - - - 0.5 - 0.8 - - - - real - pblrad - clubb_params_nl - - Coef. #1 in C8 Skewness Equation - - - 4.2 - 2.5 - - - - real - pblrad - clubb_params_nl - - Coefficient in the C8 skewness equation. Increasing the value of C8b increases - the damping of CLUBB's wp3 when skewness of w (vertical velocity) is large in - magnitude. 
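Several of the diag_TKE entries above refer to each other by identifier (eddy_leng_max, eddy_lbulk_max, kv_top_pressure, kv_top_scale, kv_freetrop_scale), so a combined fragment may read more clearly than the individual descriptions. The identifiers come from those descriptions and the values from the listed defaults; this is a sketch, not a recommended tuning.

 &eddy_diff_nl
  eddy_leng_max     = 40.D3   ! maximum master length scale [m] above the boundary layer
  eddy_lbulk_max    = 30.D0   ! maximum dissipation length scale [m]
  kv_top_pressure   = 0.D0    ! pressure [Pa] defining the upper-atmosphere region
  kv_top_scale      = 1.D0    ! multiplier on eddy diffusivities above kv_top_pressure
  kv_freetrop_scale = 1.D0    ! multiplier on eddy diffusivities in the free troposphere
 /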
- - - - 0.0 - 0.02 - - - - real - pblrad - clubb_params_nl - - Momentum coefficient of Kh_zm - - - 0.5 - 2.0 - - - - real - pblrad - clubb_params_nl - - Thermo of Kh_zm - - - 0.3 - 1.0 - - - - logical - conv - clubb_params_nl - - Apply liquid supersaturation adjustment code - - - .false. - - - - real - pblrad - clubb_params_nl - - Low Skw.: gamma coef. Skw. Fnct. - - - 0.308 - 0.280 - 0.24 - - - - real - pblrad - clubb_params_nl - - Limiting value of gamma when skewness of w (vertical velocity) is large in - magnitude in the gamma skewness function. Increasing the value of gamma - increases the standard deviation of w in both Gaussians in CLUBB's double - Gaussian PDF, and also decreases the difference between the means of w from - each Gaussian. - - - 0.32 - 0.37 - - - - real - pblrad - clubb_params_nl - - Intensity of stability correction applied to C1 and C6 - Default 0.04 - - - 0.04 - 0.03 - - - - real - pblrad - clubb_params_nl - - Coefficient used to determine the minimum allowable value of CLUBB's length - scale (Lscale) in a grid-spacing dependent formula. Increasing the value of - clubb_lmin_coef increases the minimum allowable value for length scale. - - - 0.1 - 0.5 - - - - real - pblrad - clubb_params_nl - - Coef. applied to log(avg dz/thresh) - - - 1.0D0 - 1.5 - - - - real - pblrad - clubb_params_nl - - Factor to decrease sensitivity in the denominator of Skw calculation - - - 0.0 - 4.0 - - - - real - pblrad - clubb_params_nl - - Maximum magnitude of skewness allowed. - - - 4.5 - 10.0 - - - - real - pblrad - clubb_params_nl - - Factor used in calculating the surface values of up2 (variance of the u wind - component) and vp2 (variance of the v wind component). Increasing - clubb_up2_vp2_factor increases the values of up2 and vp2 at the surface. - - - 2.0 - 4.0 - - - - - - logical - pblrad - clubb_params_nl - - Flag to uses an alternate equation to calculate the Brunt-Vaisala frequency. - This equation calculates an in-cloud Brunt-Vaisala frequency. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to call CLUBB's PDF closure at both thermodynamic and momentum vertical - grid levels. When this flag is turned off, CLUBB's PDF closure is only called - on thermodynamic grid levels. - - - .true. - .false. - - - - logical - pblrad - clubb_params_nl - - Flag to damp CLUBB's wp3 by the square of skewness of w (Skw). When this flag - is turned off, wp3 is damped according to Skw to the 4th power. This damping - equation also involves the coefficient clubb_C8b (see description of clubb_C8b). - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Uses PDF to compute perturbed values for l_avg_Lscale code - - - .false. - - - - logical - pblrad - clubb_params_nl - - Flag to set an additional minimum threshold on wp2 (in addition to w_tol_sqd) - based on maintaining a realizable correlation (value between -1 and 1) of w and - another variable, x. When this flag is enabled, the value of wp2 will be - increased when the correlation is not realizable until the correlation becomes - realizable. When this flag is turned off, the magnitude of wpxp will be - decreased when the correlation is not realizable until the correlation becomes - realizable. This correction is applied at the point in the code where wp2 is - advanced one timestep. - - - .false. - .true. 
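Two of the CLUBB tuning knobs above are referred to by their full identifiers in the descriptions (clubb_lmin_coef and clubb_up2_vp2_factor); the gamma coefficient identifier below is an assumption. Values are defaults quoted above. A sketch:

 &clubb_params_nl
  clubb_lmin_coef      = 0.5     ! larger value => larger minimum allowable length scale (Lscale)
  clubb_up2_vp2_factor = 2.0     ! larger value => larger surface up2 and vp2 variances
  clubb_gamma_coef     = 0.308   ! assumed id: low-skewness gamma coefficient
 /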
- - - - logical - pblrad - clubb_params_nl - - Flag to set an additional minimum threshold on xp2 (in addition to x_tol^2), - where xp2 is a variance such as rtp2 (variance of total water) or thlp2 - (variance of liquid water potential temperature). This threshold is based on - maintaining a realizable correlation (value between -1 and 1) of w and x. When - this flag is enabled, the value of xp2 will be increased when the correlation is - not realizable until the correlation becomes realizable. When this flag is - turned off, the magnitude of wpxp will be decreased when the correlation is not - realizable until the correlation becomes realizable. This correction is applied - at the point in the code where xp2 is advanced one timestep. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to predict horizontal momentum fluxes upwp and vpwp along with mean - horizontal winds um and vm. When this flag is turned off, upwp and vpwp are - calculated by down-gradient diffusion. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to take any remaining supersaturation after CLUBB PDF call and add it to - rcm (mean cloud water mixing ratio). Supersaturation may be found after the - PDF call due to issues when the PDF is called on both the thermodynamic grid - levels and the momentum grid levels and variables are interpolated between the - two grid level types. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to use a stability corrected version of CLUBB's time scale (tau_zm). This - creates a time scale that provides stronger damping at altitudes where - Brunt-Vaisala frequency is large. - - - .true. - .false. - - - - logical - pblrad - clubb_params_nl - - Flag that uses the trapezoidal rule to adjust fields calculated by CLUBB's PDF - (e.g. cloud fraction) by taking into account the values of these fields from an - adjacent vertical grid level. The clubb_l_trapezoidal_rule_zm flag applies this - adjustment to PDF fields calculated on momentum vertical grid levels. - - - .true. - .false. - - - - logical - pblrad - clubb_params_nl - - Flag that uses the trapezoidal rule to adjust fields calculated by CLUBB's PDF - (e.g. cloud fraction) by taking into account the values of these fields from an - adjacent vertical grid level. The clubb_l_trapezoidal_rule_zt flag applies this - adjustment to PDF fields calculated on thermodynamic vertical grid levels. - - - .true. - .false. - - - - logical - pblrad - clubb_params_nl - - Flag to use "upwind" discretization in the turbulent advection term in the - xpyp predictive equation, where xpyp is a field such as rtp2 (variance of - vertical velocity) or rtpthlp (covariance of total water and liquid water - potential temperature). When this flag is turned off, centered discretization - is used. - - - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to calculate the value of CLUBB's C7 based on Richardson number, where - C7 is a coefficient in a wpxp pressure term and is used to balance the effects - of pressure and buoyancy in the wpxp predictive equation. The variable wpxp is - a flux such as total water flux or flux of liquid water potential temperature. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag to calculate the value of CLUBB's C11 based on Richardson number, where - C11 is a coefficient in a wp3 pressure term and is used to balance the effects - of pressure and buoyancy in the wp3 predictive equation. - - - .false. - .true. 
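Among the CLUBB logical flags above, two identifiers appear verbatim in the descriptions (clubb_l_trapezoidal_rule_zm and clubb_l_trapezoidal_rule_zt); the Richardson-number C7 flag below is an assumed identifier for the corresponding entry. A sketch:

 &clubb_params_nl
  clubb_l_trapezoidal_rule_zm = .true.    ! trapezoidal adjustment of PDF fields on momentum levels
  clubb_l_trapezoidal_rule_zt = .true.    ! trapezoidal adjustment of PDF fields on thermodynamic levels
  clubb_l_use_C7_Richardson   = .false.   ! assumed id: compute C7 from the Richardson number
 /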
- - - - logical - pblrad - clubb_params_nl - - Flag to allow cloud fraction and mean cloud water at adjacent vertical grid - levels influence the amount of cloudiness and amount of cloud water in a - grid box. - - - .true. - .false. - - - - logical - pblrad - clubb_params_nl - - Include the effects of ice latent heating in turbulence terms - - - .false. - - - - logical - pblrad - clubb_params_nl - - Flag to use mean theta-v in the calculation of Brunt-Vaisala frequency. - - - .false. - .true. - - - - logical - pblrad - clubb_params_nl - - Flag that, when it is enabled, automatically enables CLUBB's - l_trapezoidal_rule_zt, l_trapezoidal_rule_zm, and l_call_pdf_closure_twice. - - - .true. - .false. - - - - logical - conv - clubb_params_nl - - Apply adjustments to dry static energy so that CLUBB conserves - energy. - - - .true. - - - - - - - char*32 - carma - carma_nl - - The name of the active CARMA microphysics model or none when CARMA - is not active. - Default: none - - - - real - carma - carma_nl - - A fraction that scales how tight the convergence criteria are to - determine that the substepping has resulted in a valid solution. - Smaller values will force more substepping. - CARMA particles. - - - 0.1 - - - - real - carma - carma_nl - - When non-zero, the largest change in temperature (K) - allowed per substep. - - - 0.0 - - - - logical - carma - carma_nl - - Flag indicating that the CARMA model is an aerosol model, and - should be called in tphysac. - - - .true. - - - - logical - carma - carma_nl - - Flag indicating that CARMA is a cloud ice model and should - be called in tphysbc. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA is a cloud liquid model and should - be called in tphysbc. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA should do clear sky calculations for - particles that are not part of a cloud in addition to doing a - separate calculation for incloud particles. Only valid when - carma_do_incloud is true. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating whether the coagulation process is enabled for - CARMA particles. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA is responsible for detrain condensate - from convection into the model. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the dry deposition process is enabled for - CARMA particles. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the emission of particles is enabled for - CARMA. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that sedimentation should be calculated using an - explicit technique where the substepping is used to keep the CFL - condition from being violated rather than the default PPM scheme. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating CARMA coefficients should only be initialized once from - a fixed temperature profile rather than recomputed for each column. This - improves performance, but reduces accuracy. By default the temperature - profile used is calculated as the average of the initial condition file, - but a predefined profile can be provided. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating used in cunjunction with carma_do_fixedinit to indicate - that only the coagulation coefficients should only be initialized from - a fixed temperature profile and all other coeeficients will be recalculated. 
- Coagulation is the slowest initialization, so this improves performance while - still retaining accuracy for most processes. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the condensational growth process is enabled for - CARMA particles. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA sulfate mass mixing ratio will be used - in radiation calculation. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA sulfate surface area density will be used - in heterogeneous chemistry rate calculation. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that CARMA should treat cloud particles as incloud - rather than gridbox average calculations. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that carma should generate optical properties files - for the CAM radiation code. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that particle heating will be used for the condensational - growth process. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that particle heating will affect the atmospheric - temperature. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that substepping will be used for the condensational - growth process. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that changes in heating will be calculated as a result - CARMA processes and will affect the CAM heating tendency. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the wet deposition process is enabled for - CARMA particles. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the effect of Brownian diffusion will be calculated for - CARMA particles. NOTE: This needs to be used in conjunction with CARMA - sedimentation. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating that the sedimentation process is enabled for - CARMA particles. - - - .false. - - - - logical - carma - carma_nl - - Flag indicating whether CARMA is enabled. If CARMA has been included - in the build (configure -carma with something other than none), then - this will cause all of the CARMA constituents and field names to be - registered, but no other CARMA process will be preformed. This overrides - the individual CARMA process flags. - - - .false. - - - - integer - carma - carma_nl - - Specifies the maximum number of retry attempts to be used when - condensational growth requires substepping, but the original estimate - for the amount of substepping was insufficient. - - - 8 - - - - integer - carma - carma_nl - - Specifies the maximum number of substeps that could be used for the - first guess when condensational growth requires substepping. - - - 1 - - - - char*256 - carma - carma_nl - - Specifies the name of the reference temperature file that will be - used (and created if necessary) for initialization of CARMA to a - fixed temperature profile. - Default: carma_reft.nc - - - - real - carma - carma_nl - - Accommodation coefficient for coagulation. - - - 1.0 - - - - real - carma - carma_nl - - Accommodation coefficient for growth with ice. - - - 0.93 - - - - real - carma - carma_nl - - Accommodation coefficient for growth with liquid. - - - 1.0 - - - - real - carma - carma_nl - - Accommodation coefficient for temperature. - - - 1.0 - - - - real - carma - carma_nl - - Critical relative humidity for liquid cloud formation, used - for sub-grid scale in-cloud saturation. 
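Pulling a few of the CARMA switches above into one place: carma_do_incloud and carma_do_fixedinit are named in the descriptions, while the other identifiers are assumptions inferred from the flag wording. A sketch:

 &carma_nl
  carma_model        = 'none'    ! assumed id: active CARMA microphysics model ('none' disables CARMA)
  carma_flag         = .false.   ! assumed id: register CARMA constituents without running any CARMA process
  carma_do_incloud   = .false.   ! treat cloud particles as in-cloud rather than gridbox average
  carma_do_fixedinit = .false.   ! initialize coefficients once from a fixed temperature profile
 /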
- - - 1.0 - - - - - - - real - carma_model - carma_model_nl - - Global mass of dust emission for the event. - Default: 0. (kg) - - - - real - carma_model - carma_model_nl - - Global mass of dust emission for the event. - Default: 0. (kg) - - - - integer - carma_model - carma_model_nl - - Starting date for emissions in the form of (yyyyddd) where yyyy is a year and - ddd is a day of year. - Default: 1 (yyyyddd) - - - - integer - carma_model - carma_model_nl - - Starting time for the emission event in GMT. - Default: 0. (s Z) - - - - integer - carma_model - carma_model_nl - - Stopping date for emissions in the form of (yyyyddd) where yyyy is a year and - ddd is a day of year. - Default: 1 (yyyyddd) - - - - integer - carma_model - carma_model_nl - - Stoping time for the emission event in GMT. - Default: 0. (s) - - - - real - carma_model - carma_model_nl - - Minimum latitude of the area for emssions from the event. - Default: -90. (degrees north) - - - - real - carma_model - carma_model_nl - - Maximum latitude of the area for emssions from the event. - Default: 90. (degrees north) - - - - real - carma_model - carma_model_nl - - Minimum longitude of the area for emssions from the event. - Default: 0. (degrees east) - - - - real - carma_model - carma_model_nl - - Maximum longitude of the area for emssions from the event. - Default: 360. (degrees east) - - - - logical - carma_model - carma_model_nl - - Are the soot particles treated as fractals? - - - .false. - - - - - - - logical - carma_model - carma_model_nl - - Flag indicating that meteor smoke emission will be scaled by a - global relative flux based upon the carma_escale_file. - - - .false. - - - - real - carma_model - carma_model_nl - - The total meteor smoke emission rate in kt/year. The flux will be - scaled to total that value. - - - 16.0 - - - - char*256 - carma_model - carma_model_nl - - Specifies the name of the file containing the meteor smoke emission - (ablation) profile. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/waccm/emis/meteor_smoke_kalashnikova.nc - $DIN_LOC_ROOT/atm/waccm/emis/meteor_smoke_kalashnikova.nc - $DIN_LOC_ROOT/atm/waccm/emis/meteor_smoke_kalashnikova.nc - $DIN_LOC_ROOT/atm/waccm/emis/meteor_smoke_kalashnikova.nc - $DIN_LOC_ROOT/atm/waccm/emis/early_earth_haze.nc - - - - char*256 - carma_model - carma_model_nl - - Specifies the name of the file containing the global realtive flux - specification. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/waccm/emis/smoke_grf_frentzke.nc - $DIN_LOC_ROOT/atm/waccm/emis/smoke_grf_frentzke.nc - $DIN_LOC_ROOT/atm/waccm/emis/smoke_grf_frentzke.nc - $DIN_LOC_ROOT/atm/waccm/emis/smoke_grf_frentzke.nc - - - - integer - carma_model - carma_model_nl - - Specifies the day of year when tracers will start being emitted for the tracer test. - - - 1 - - - - real - carma_model - carma_model_nl - - The emission rate of inert tracers used in the test. A positive value indicates that - the rate is a column mass (kg/m2/s) and a negative value indicate that it is a mass - mixing ratio (kg/kg/s). - - - 1e-09 - - - - logical - carma_model - carma_model_nl - - Flag indicating that h2so4 vapor pressures should be calculated as if they were - over sulfates that have been totally neutralized. - - - .false. - - - - - - - char*32 - carma_model - carma_model_nl - bulk,carma,fixed,modal - - Specifies the method to use to get the prescribed sulfate aerosols for use with nucleation - of cirrus clouds. This can be different than the sulfate aerosols that are used with the - climate. 
- Default: fixed - - - fixed - - - - - - - char*256 - carma_model - carma_model_nl - - Specifies the name of the file containing ice refrative indicies as a function of wavelength - used for the particle heating calculation. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/physprops/mice_warren2008.nc - $DIN_LOC_ROOT/atm/cam/physprops/mice_warren2008.nc - - - - - - - char*256 - carma_model - carma_model_nl - - Specifies the name of the file containing soil erosion factors. This is used by - the dust model. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/dst/soil_erosion_factor_1x1_c120907.nc - $DIN_LOC_ROOT/atm/cam/dst/soil_erosion_factor_1x1_c120907.nc - - - - - - - logical - carma_model - carma_model_nl - - Flag indicating that a calculated Weibull K should be used. - - - .false. - - - - char*32 - carma_model - carma_model_nl - - Specifies the name of the sea salt emission parameterization. - Default: Gong - - - - - - - char*256 - o3_data_cam - cam3_ozone_data_nl - - Full pathname of time-variant ozone mixing ratio boundary dataset. - Default: UNKNOWN. - - - - logical - o3_data_cam - cam3_ozone_data_nl - - Add CAM3 prescribed ozone to the physics buffer. - - - .false. - - - - logical - o3_data_cam - cam3_ozone_data_nl - - Flag for yearly cycling of ozone data. If set to FALSE, a multi-year - dataset is assumed, otherwise a single-year dataset is assumed, and ozone - will be cycled over the 12 monthly averages in the file. - - - .true. - - - - - - - char*256 - build - physics_nl - ESMF mesh file for CAM physics for use in regridding operations. - - - UNSET - - - - real - waccmx - ionosphere_nl - - Limiter for ambipolar diffusion coefficient used in O+ transport in the - ionosphere. - - - 150000000.0 - - - - real - waccmx - ionosphere_nl - - Shapiro constant for spatial smoother used in O+ transport in the - ionosphere. - - - 0.03 - - - - logical - waccmx - ionosphere_nl - - Switch to apply floor to O+ concentrations within in ionosphere O+ transport. - - - .true. - - - - logical - waccmx - ionosphere_nl - - Switch to apply ring polar filter within in ionosphere O+ transport. - - - .false. - .true. - .true. - - - - logical - waccmx - ionosphere_nl - - Switch to to turn on/off O+ transport in ionosphere. - Default: UNKNOWN - - - - integer - waccmx - ionosphere_nl - - Number of ion transport steps per physics timestep. - - - 5 - 30 - 90 - 5 - - - - logical - waccmx - ionosphere_nl - - Switch to invoke electro-dynamo to compute ion drift velocities used in - O+ transport in ionosphere. If false, ExB empirical model is used to - provide the ion drift velocities for O+ transport. - Default: UNKNOWN - - - - char*16 - waccmx - ionosphere_nl - heelis,weimer - - Electric potential model used in the waccmx ionosphere. - - - heelis - - - - logical - waccmx - ionosphere_nl - - Give the user the ability to input prescribed high-latitude electric potential. - - - .false. - - - - real(2) - waccmx - ionosphere_nl - - Co-latitudes (degrees) of the critical angles where the ionosphere - high-latitude electric potential is merged with the low and middle - latitude electric potential computed by the electro-dynamo of WACCM-X. - Default: none - - - - char*256 - waccmx - ionosphere_nl - - Full pathname of dataset for coefficient data used in Weimer05 - high latitude electric potential model. - - - $DIN_LOC_ROOT/atm/waccm/efld/wei05sc_c080415.nc - - - - char*256 - waccmx - ionosphere_nl - - Full pathname of AMIE inputs for northern hemisphere. - Default: NONE. 
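A sketch of the WACCM-X ionosphere_nl group described above. None of the identifiers below appear in the stripped descriptions, so all of them are assumptions; only the group name, the heelis/weimer choice, and the default values are taken from the text.

 &ionosphere_nl
  oplus_adiff_limiter    = 150000000.0   ! assumed id: limiter on the O+ ambipolar diffusion coefficient
  oplus_shapiro_const    = 0.03          ! assumed id: Shapiro constant for the spatial smoother
  ionos_xport_nsplit     = 5             ! assumed id: ion transport steps per physics timestep
  ionos_epotential_model = 'heelis'      ! assumed id: 'heelis' or 'weimer' electric potential model
 /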
- - - - char*256 - waccmx - ionosphere_nl - - Full pathname of AMIE inputs for southern hemisphere. - Default: NONE. - - - - - - - integer - vert_coord - vert_coord_nl - - Number of model vertical levels. - Default: 30 - - Please note that changing this variable - will likely change other namelist variables - via the "nlev" attribute. - - - 30 - 26 - 30 - 32 - - - - - - - real - press_lim - ref_pres_nl - - Troposphere cloud physics will be done only below the top defined - by this pressure (Pa). - - - 1.D2 - - - - real - press_lim - ref_pres_nl - - MAM affects climate only below the top defined by this pressure (Pa). - - - 1.D-4 - - - - real - press_lim - ref_pres_nl - - Molecular diffusion will be done only if the lowest pressure is - below this limit (Pa). - - - 0.1D0 - - - - real - press_lim - ref_pres_nl - - The level closest to this pressure (Pa) is the bottom of the region - where molecular diffusion is done. - - - 50.D0 - - - - - - - real - phys_debug - phys_debug_nl - - Use this variable to specify the latitude (in degrees) of a column to - debug. The closest column in the physics grid will be used. - Default: none - - - - real - phys_debug - phys_debug_nl - - Use this variable to specify the longitude (in degrees) of a column to - debug. The closest column in the physics grid will be used. - Default: none - - - - - - integer - radiation - conv_water_nl - 0,1,2 - - Convective water used in radiation? - 0 ==> No - 1 ==> Yes - Arithmetic average. - 2 ==> Yes - Average in emissivity. - - - 0 - 1 - 1 - 1 - - - - real - radiation - conv_water_nl - - Lower limit of cumulus cloud fraction. - - - 0.01d0 - 0.001d0 - - - - logical - radiation - radiation_nl - - If true, use a uniform angle for the calculation of coszrs within radiation - Default: UNKNOWN. - - - - real - radiation - radiation_nl - - The angle (in radians) to use in the calculation of coszrs within radiation - Default: UNKNOWN. - - - - char*256 - radiation - radiation_nl - - Full pathname of absorption/emission dataset. Used only by camrt scheme. - It consists of terms used for determining the absorptivity and - emissivity of water vapor in the longwave parameterization of radiation. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/rad/abs_ems_factors_fastvx.c030508.nc - - - - integer - radiation - radiation_nl - - Frequency of absorptivity/emissivity calculations in time steps (if - positive) or model hours (if negative). To avoid having the abs/ems values - saved on the restart output, make sure that the interval of the abs/ems - calculation evenly divides the restart interval. - - - -12 - - - - integer - radiation - radiation_nl - - Frequency of long-wave radiation calculation in timesteps (if positive) or - model hours (if negative). - - - -1 - - - - integer - radiation - radiation_nl - - Frequency of short-wave radiation calculation in timesteps (if positive) or - model hours (if negative). - - - -1 - - - - integer - radiation - radiation_nl - - Specifies length of time in timesteps (positive) or hours (negative) SW/LW - radiation will be run for every timestep from the start of an initial run. - - - 0 - - - - logical - radiation - radiation_nl - - Return fluxes per band in addition to the total fluxes. - - - .false. - - - - logical - radiation - radiation_nl - - Calculate radiative effect of graupel/hail (using snow optics) - - - .false. - - - - logical - radiation - radiation_nl - - If true, then average the zenith angle over the radiation timestep rather - than using instantaneous values. - - - .false. 
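The radiation frequency entries above share a sign convention: a positive value counts timesteps, a negative value counts model hours. The identifiers below are assumptions for those entries; the values are the defaults given. A sketch:

 &radiation_nl
  iradsw      = -1    ! assumed id: shortwave every model hour (negative => hours, positive => timesteps)
  iradlw      = -1    ! assumed id: longwave every model hour
  irad_always =  0    ! assumed id: length of the startup window with SW/LW forced every step
 /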
- - - - char*256(120) - radiation - rad_cnst_nl - - Definitions for the aerosol modes that may be used in the rad_climate and - rad_diag_* variables. - Default: UNKNOWN - - - - - - - - logical - radiation - rad_data_nl - - output data needed for off-line radiation calculations - - - .false. - - - - integer - radiation - rad_data_nl - - History tape number radiation driver output data is written to. - - - 0 - - - - char*1 - radiation - rad_data_nl - - Averaging flag for radiation driver output data. - - - A - - - - logical - offline_unit_driver - rad_data_nl - - Switch to turn on Fixed Dynamical Heating in the offline radiation tool (PORT). - - - .false. - - - - - - - char*256 - radiation - modal_aer_opt_nl - - Full pathname of dataset for water refractive indices used in modal aerosol optics - Default: none - - - $DIN_LOC_ROOT/atm/cam/physprops/water_refindex_rrtmg_c080910.nc - - - - char*256 - cam_chem - chem_inparm - - Dry deposition surface values interpolated to model grid, required for unstructured atmospheric grids - with modal chemistry. - Default: none - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne5np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne16np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne30np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne30pg3_180522.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne60np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne120np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne240np4_110920.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/atmsrf_ne0np4conus30x8_161116.nc - - - - logical - radiation - rad_cnst_nl - - filepath and name for ice optics data for rrtmg - Default: none - - - - char*32 - radiation - rad_cnst_nl - none,slingo,gammadist - - filepath and name for ice optics data for rrtmg - Default: none - - - none - slingo - gammadist - - - - char*32 - radiation - rad_cnst_nl - none,ebertcurry,mitchell - - filepath and name for ice optics data for rrtmg - Default: none - - - none - ebertcurry - mitchell - - - - char*256 - radiation - rad_cnst_nl - - filepath and name for ice optics data for rrtmg - Default: none - - - $DIN_LOC_ROOT/atm/cam/physprops/iceoptics_c080917.nc - - - - char*256 - radiation - rad_cnst_nl - - filepath and name for liquid cloud (gamma distributed) optics data for rrtmg - Default: none - - - $DIN_LOC_ROOT/atm/cam/physprops/F_nwvl200_mu20_lam50_res64_t298_c080428.nc - - - - - - - integer - rayleigh_friction - rayleigh_friction_nl - - Variable to specify the vertical index at which the - Rayleigh friction term is centered (the peak value). - - - 2 - - - - real - rayleigh_friction - rayleigh_friction_nl - - Rayleigh friction parameter to determine the width of the profile. If set - to 0 then a width is chosen by the algorithm (see rayleigh_friction.F90). - - - 0.0 - - - - real - rayleigh_friction - rayleigh_friction_nl - - Rayleigh friction parameter to determine the approximate value of the decay - time (days) at model top. If 0.0 then no Rayleigh friction is applied. - - - 0.0 - - - - - - - char*128 - scam - scam_nl - - Full pathname of IOP dataset. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/scam/iop/ARM97_4scam.nc - $DIN_LOC_ROOT/atm/cam/scam/iop/ARM95_4scam.nc - - - - logical - scam - scam_nl - - Column bfb match with cam generated IOP. - - - .false. - - - - logical - scam - scam_nl - - Column radiation mode. - - - .false. - - - - logical - scam - scam_nl - - Use the specified surface properties. - - - .false. 
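For the RRTMG cloud optics entries above, the scheme choices ('slingo'/'gammadist' for liquid, 'ebertcurry'/'mitchell' for ice) and the physprops file paths come from the text; the identifiers are assumptions, and $DIN_LOC_ROOT is left as the placeholder used throughout this file. A sketch:

 &rad_cnst_nl
  liqcldoptics  = 'gammadist'   ! assumed id: 'none', 'slingo', or 'gammadist'
  icecldoptics  = 'mitchell'    ! assumed id: 'none', 'ebertcurry', or 'mitchell'
  iceopticsfile = '$DIN_LOC_ROOT/atm/cam/physprops/iceoptics_c080917.nc'
  liqopticsfile = '$DIN_LOC_ROOT/atm/cam/physprops/F_nwvl200_mu20_lam50_res64_t298_c080428.nc'
 /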
- - - - logical - scam - scam_nl - - Use relaxation. - - - .false. - - - - char*24(1000) - scam - scam_nl - - List of fields that will be relaxed to obs - Default: none - - - - logical - scam - scam_nl - - Use relaxation. Linearly interpolate the timescale within specified - pressure range. (bpm) - - - .false. - - - - real - scam - scam_nl - - Upper most pressure that will be relaxed. - - - 1e+36 - - - - real - scam - scam_nl - - Lower most pressure that will be relaxed. - - - -1e+36 - - - - real - scam - scam_nl - - SCAM relaxation time constant in seconds - - - 10800.0 - - - - real - scam - scam_nl - - SCAM relaxation time constant in seconds that will be used at - top of pressure range (i.e., the smaller pressure value). Will - be used from top of pressure range to model top. - - - 10800.0 - - - - real - scam - scam_nl - - SCAM relaxation time constant in seconds that will be used at - bottom of pressure range (i.e., the larger pressure value). - - - 10800.0 - - - - logical - scam - scam_nl - - Use the SCAM-IOP specified T instead of using forecasted T at each time step. - - - .false. - - - - logical - scam - scam_nl - - Use the SCAM-IOP specified u,v instead of using forecasted u,v at each time step. - - - .true. - .true. - - - - char*16 - scam - scam_nl - iop,eulc,off - - Use specific type of vertical advection for T. Possible choices are 'iop', 'eulc' and 'off' - - - eulc - - - - char*16 - scam - scam_nl - iop,eulc,off - - Use specific type of vertical advection for uv. Possible choices are 'iop', 'eulc' and 'off' - - - eulc - - - - char*16 - scam - scam_nl - iop,eulc,slt,off - - Use specific type of vertical advection for q. Possible choices are 'iop', 'eulc', 'slt' and 'off' - - - slt - slt - - - - logical - scam - scam_nl - - Use the SCAM-IOP specified surface LHFLX/SHFLX/ustar/Tg instead of using internally-computed values - - - .false. - - - - logical - scam - scam_nl - - Use the SCAM-IOP specified observed water vapor at each time step instead of forecast value - - - .false. - - - - logical - scam - scam_nl - - Force scam to use the lat lon fields specified in the scam namelist not what is closest to IOP avail lat lon - - - .false. - - - - - - - real - solar - solar_data_opts - - Total solar irradiance (W/m2). - - - 1361.27 - - - - char*256 - solar - solar_data_opts - - Full pathname of dataset for file that contains the solar photon energy spectra or TSI data - as a time series - - - $DIN_LOC_ROOT/atm/cam/solar/solar_ave_sc19-sc23.c090810.nc - $DIN_LOC_ROOT/atm/cam/solar/solar_ave_sc19-sc23.c090810.nc - $DIN_LOC_ROOT/atm/cam/solar/spectral_irradiance_Lean_1610-2009_ann_c100405.nc - $DIN_LOC_ROOT/atm/cam/solar/spectral_irradiance_Lean_1610-2009_ann_c100405.nc - $DIN_LOC_ROOT/atm/cam/solar/SolarForcing1995-2005avg_c160929.nc - $DIN_LOC_ROOT/atm/cam/solar/SolarForcing1995-2005avg_c160929.nc - - - - char*256 - solar - solar_data_opts - - Full pathname of dataset for file that contains the solar EUV data - as a time series - Default: none - - - - char*256 - waccm - solar_data_opts - - Full pathname of time-variant dataset for the time-dependent proxies for - solar and geomagnetic activity( F10.7, F10.7a, Kp, Ap ). 
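A sketch of the SCAM relaxation setup described above. The descriptions do not name the identifiers, so every name below is an assumption and the pressure bounds are hypothetical values; only the behavior (relax the listed fields toward the IOP data between two pressure bounds with a 10800 s time constant) comes from the text.

 &scam_nl
  scm_relaxation    = .true.       ! assumed id: turn relaxation on
  scm_relax_fincl   = 'T', 'Q'     ! assumed id: fields relaxed toward the IOP observations
  scm_relax_top_p   = 10000.0      ! assumed id: upper-most pressure relaxed (hypothetical value)
  scm_relax_bot_p   = 100000.0     ! assumed id: lower-most pressure relaxed (hypothetical value)
  scm_relax_tau_sec = 10800.0      ! assumed id: relaxation time constant [s]
 /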
- - - $DIN_LOC_ROOT/atm/waccm/phot/wasolar_ave.nc - $DIN_LOC_ROOT/atm/waccm/solar/wasolar_c140408.nc - $DIN_LOC_ROOT/atm/waccm/solar/wasolar_c140408.nc - $DIN_LOC_ROOT/atm/cam/solar/SolarForcing1995-2005avg_c160929.nc - - - - char*256 - waccm - solar_data_opts - - Full pathname of time-variant dataset for the time-dependent solar wind parameters - (solar wind velocity and density; IMF By and Bz components). - - - $DIN_LOC_ROOT/atm/waccm/solar/solar_wind_imf_OMNI_WACCMX_2000001-2017365_c180731.nc - - - - char*8 - solar - solar_data_opts - FIXED,SERIAL - - Type of time interpolation for data in {{ hilight }}solar_irrad_data_file{{ closehilight }}. - Can be set to "FIXED" or "SERIAL". - Default: SERIAL - - - SERIAL - - - - integer - solar - solar_data_opts - - If {{ hilight }}solar_data_type{{ closehilight }} is "FIXED" then solar_data_ymd - is the date the solar data is fixed to. If {{ hilight }}solar_data_type{{ closehilight }} - is "SERIAL" the solar_data_ymd is the start date of the time series - of solar data. - Format: YYYYMMDD - Default: none - - - - integer - solar - solar_data_opts - - Seconds of the day corresponding to {{ hilight }}solar_data_ymd{{ closehilight }} - Default: current model time of day - - - - logical - solar - solar_data_opts - - Use spectral scaling in the radiation heating - Default: UNKNOWN - - - - - - - char*16(30) - test_tracers - test_tracers_nl - - User can specify names for test tracers to be read from the initial file. - The number of names specified should be given as the value of the -nadv_tt - option to configure. - - - - - - - logical - test_tracers - test_tracers_nl - - This variable should not be set by the user. If configure has been invoked - with the '-nadv_tt N' option then build-namelist will set this variable to - the value N. If {{ hilight }}test_tracer_names{{ closehilight }} have been specified - then N should be the number of names supplied. - If {{ hilight }}test_tracer_names{{ closehilight }} have not been specified, then the - tracer_suite module generates the tracer names and supplies the initial - values. - Default: set by configuration - - - - logical - test_tracers - aoa_tracers_nl - - If true age of air tracers are included. This variable should not be set - by the user. It will be set by build-namelist to be consistent with the - '-age_of_air_trcs' argument specified to configure. - Default: set by configuration - - - - logical - test_tracers - aoa_tracers_nl - - If true age of air tracers are read from the initial conditions file. - If this is not specified then they are not read from IC file. - - - .true. - - - - - - - char*256 - topo - cam_initfiles_nl - - Full pathname of time-invariant boundary dataset for topography fields. 
- Default: UNSET_PATH - - - UNSET_PATH - UNSET_PATH - UNSET_PATH - UNSET_PATH - UNSET_PATH - $DIN_LOC_ROOT/atm/cam/topo/topo-from-cami_0000-01-01_256x512_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_128x256_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/T42_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20180111.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_48x96_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_32x64_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_8x16_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS_gtopo30_0.23x0.31_remap_c061107.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS_gtopo30_0.47x0.63_remap_c061106.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_0.47x0.63_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171023.nc - $DIN_LOC_ROOT/atm/cam/topo/topo-from-cami_0000-10-01_0.5x0.625_L26_c031204.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_0.9x1.25_nc3000_Nsw042_Nrs008_Co060_Fi001_ZR_sgh30_24km_GRNL_c170103.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_1.9x2.5_nc3000_Nsw084_Nrs016_Co120_Fi001_ZR_GRNL_c190405.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_2.5x3.33_remap_c100204.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_4x5_remap_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_10x15_nc0540_Nsw042_Nrs008_Co060_Fi001_20171220.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5np4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170515.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne16np4_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30np4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171020.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60np4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120np4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171011.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240np4_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg2_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170706.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg2_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg2_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg2_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240pg2_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg3_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170516.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne16pg3_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg3_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg3_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg3_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240pg3_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171015.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170707.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171018.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30x8_conus_nc3000_Co060_Fi001_MulG_PF_nullRR_Nsw042_20190710.nc - - - - - - - char*256 - tropo - tropopause_nl - - Full pathname of boundary dataset for tropopause climatology. - Default: UNSET_PATH. - - - UNSET_PATH - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ub/clim_p_trop.nc - - - - logical - cam_chem - chem_inparm - - Flag to set rad_climate variable so that the chemical tracers are - radiatively passive. - - - .false. 
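The topography entry above (group cam_initfiles_nl) simply points at one of the resolution-specific boundary files listed; the identifier below is an assumption, and the path is one of the values quoted above. A sketch:

 &cam_initfiles_nl
  bnd_topo = '$DIN_LOC_ROOT/atm/cam/topo/fv_0.9x1.25_nc3000_Nsw042_Nrs008_Co060_Fi001_ZR_sgh30_24km_GRNL_c170103.nc'   ! assumed id
 /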
- - - - char*3 - cam_chem - wetdep_inparm - MOZ,NEU,OFF - - Wet deposition method used - MOZ --> mozart scheme is used - NEU --> J Neu's scheme is used - OFF --> wet deposition is turned off - Default: NEU except for SPCAM runs - - - NEU - MOZ - OFF - - - - char*16(1000) - cam_chem - wetdep_inparm - - List of gas-phase species that undergo wet deposition via the wet deposition scheme. - Default: NONE - - - - logical - cam_chem - aerosol_nl - - Turns on accumulation to coarse mode exchange appropriate for the stratosphere. - This also changes the default mode definitions (widths and edges) via default - aerosol property input files. - - - .false. - .true. - - - - logical - cam_chem - aerosol_nl - - Turns on prognostic modal sulfate aerosols in the stratosphere. - - - .false. - - - - char*16(1000) - cam_chem - aerosol_nl - - List of aerosol species that undergo wet deposition. - Default: UNKNOWN. - - - - char*16(1000) - cam_chem - aerosol_nl - - List of aerosol species that undergo sediment (dry deposition). - Default: UNKNOWN. - - - - real - cam_chem - aerosol_nl - - Tuning for below cloud scavenging of interstitial modal aerosols. - - - 0.1D0 - 1.0D0 - - - - real - cam_chem - aerosol_nl - - Tuning for in-cloud scavenging of interstitial modal aerosols. - - - 0.4D0 - 1.0D0 - - - - real - cam_chem - aerosol_nl - - Tuning for in-cloud scavenging of cloud-borne modal aerosols. - - - 1.0D0 - 0.6D0 - 1.0D0 - 1.0D0 - - - - real - cam_chem - aerosol_nl - - Tuning for seasalt_emis - - - 1.35D0 - 1.62D0 - 0.90D0 - 1.00D0 - 1.10D0 - 1.2D0 - 0.60D0 - 1.00D0 - 1.00D0 - - - - real(1000) - cam_chem - aerosol_nl - - In-cloud solubility factor used in BULK aerosol wet removal - Default: UNKNOWN. - - - - real(1000) - cam_chem - aerosol_nl - - Below-cloud solubility factor used in BULK aerosol wet removal - Default: UNKNOWN. - - - - real(1000) - cam_chem - aerosol_nl - - Scavenging coefficient used in BULK aerosol wet removal - Default: UNKNOWN. - - - - char*256 - cam_chem - chem_inparm - - Full pathname of boundary dataset for airplane emissions. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/emis/emissions.aircraft.T42LR.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset containing soil moisture fraction information used in 'xactive_atm' - method of calculating dry deposition of chemical tracers. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/dvel/clim_soilw.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset which contains the prescribed deposition velocities used - in the 'table' method of calculating dry deposition of chemical tracers. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/dvel/depvel_monthly.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset which contains land vegitation information used in 'xactive_atm' - method of calculating dry deposition of chemical tracers. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/dvel/regrid_vegetation.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/dvel/regrid_vegetation_all_zero_aquaplanet_1deg_regularGrid_c20170421.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset which contains season information used in 'xactive_atm' - method of calculating dry deposition of chemical tracers. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/dvel/season_wes.nc - - - - real - cam_chem - dust_nl - - Tuning parameter for dust emissions. 
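The wet deposition entry above selects among three methods ('MOZ', 'NEU', 'OFF'); the identifier below is an assumed placeholder for that entry, and the species-list identifier and species are likewise assumed. A sketch:

 &wetdep_inparm
  wetdep_method = 'NEU'            ! assumed id: 'MOZ' (mozart), 'NEU' (J. Neu), or 'OFF'
  wetdep_list   = 'H2O2', 'SO2'    ! assumed id, hypothetical species: gas-phase species that are wet deposited
 /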
- - - 0.37D0 - 0.35D0 - 0.35D0 - 0.45D0 - 0.45D0 - 0.45D0 - 0.35D0 - 0.30D0 - 0.45D0 - 0.45D0 - 0.45D0 - 0.55D0 - 0.22D0 - 0.55D0 - 0.70D0 - 0.13D0 - 0.26D0 - 0.7D0 - 0.24D0 - 0.9D0 - - - - logical - waccmx - ion_electron_temp_nl - - If TRUE a steady state solution is used to calculate electron and - ion temperature. - - - .true. - - - - char*256 - waccm_phys - iondrag_nl - - Full pathname of dataset for coefficient data used in WACCM to calculate ion drag - for high solar fluxes from the Scherliess low latitude electric potential model. - - - $DIN_LOC_ROOT/atm/waccm/efld/coeff_hflux.dat - - - - char*256 - waccm_phys - iondrag_nl - - Full pathname of dataset for coefficient data used in WACCM to calculate ion drag - for low solar fluxes from the Scherliess low latitude electric potential model. - - - $DIN_LOC_ROOT/atm/waccm/efld/coeff_lflux.dat - - - - real - waccm_phys - iondrag_nl - - Maximum cross cap electric potential used in Heelis high-latitude potential - empirical model for WACCM ion drag and joule heating. - Default: UNKNOWN (120 kV). - - - - logical - waccm_phys - iondrag_nl - - Switch to turn on empirical ExB ion drift velocities model for use in ion drag - parameterizations. If this is false then it is assumed ion drift velocities are - supplied by an active ionosphere model. - Default: UNKNOWN. - - - - char*256 - waccm - chem_inparm - - Full pathname of dataset for the neutral species absorption cross sections for EUV - photo reactions producing electrons. - - - $DIN_LOC_ROOT/atm/waccm/phot/electron_121129.dat - - - - char*32 - cam_chem - chem_inparm - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation of emission datasets specified. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - by {{ hilight }}srf_emis_specifier{{ closehilight }}. - - - CYCLICAL - - - - integer - cam_chem - chem_inparm - - The cycle year of the surface emissions data - if {{ hilight }}srf_emis_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - chem_inparm - - The date at which the surface emissions are fixed - if {{ hilight }}srf_emis_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}srf_emis_fixed_ymd{{ closehilight }} - at which the surface emissions are fixed - if {{ hilight }}srf_emis_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - waccm - chem_inparm - - Full pathname of dataset for EUVAC solar EUV model (0.05-121nm). - - - $DIN_LOC_ROOT/atm/waccm/phot/EUVAC_reference_c170222.nc - - - - integer - cam_chem - chem_inparm - - The cycle year of the external forcings (3D emissions) data - if {{ hilight }}ext_frc_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - chem_inparm - - Default: current model date - The date at which the external forcings are fixed - if {{ hilight }}ext_frc_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}ext_frc_fixed_ymd{{ closehilight }} - at which the external forcings are fixed - if {{ hilight }}ext_frc_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256(1000) - cam_chem - chem_inparm - - List of full pathnames of elevated emission (or external chemical forcings) datasets. 
- - The chemistry package reads in elevated emission data from a set of netcdf files in - units of "molecules/cm3/s". Each tracer species emissions is read from its - own file as directed by the namelist variable {{ hilight }}ext_frc_specifier{{ closehilight }}. The - {{ hilight }}ext_frc_specifier{{ closehilight }} variable tells the model which species have elevated - emissions and the file path for the corresponding species. That is, the - {{ hilight }}ext_frc_specifier{{ closehilight }} variable is set something like: - - ext_frc_specifier = 'SO2 -> /path/vrt.emis.so2.nc', - 'SO4 -> /path/vrt.emis.so4.nc', etc... - - Each emission file can have more than one source. When the emission are - read in the sources are summed to give a total emission field for the - corresponding species. The emission can be read in as time series of data, - cycle over a given year, or be fixed to a given date. - - The vertical coordinate in these emissions files should be 'altitude' (km) so that the - vertical redistribution to the model layers is done using a mass conserving method. - If the vertical coordinate is altitude then data needs to be ordered from the - surface to the top (increasing altitude). - - Default: UNKNOWN. - - - - char*32 - cam_chem - chem_inparm - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for fixed lower boundary data. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - - - - integer - cam_chem - chem_surfvals_nl - - The cycle year of the fixed lower boundary data - if {{ hilight }}flbc_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - chem_surfvals_nl - - The date at which the fixed lower boundary data is fixed - if {{ hilight }}flbc_type{{ closehilight }} is 'FIXED'.. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_surfvals_nl - - The time of day (seconds) corresponding to {{ hilight }}flbc_fixed_ymd{{ closehilight }} - at which the fixed lower boundary data is fixed - if {{ hilight }}flbc_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - chem_surfvals_nl - - Full pathname of dataset for fixed lower boundary conditions. - - - $DIN_LOC_ROOT/atm/waccm/lb/LBC_1765-2100_1.9x2.5_CCMI_RCP60_za_RNOCStrend_c141002.nc - - - - char*16(1000) - cam_chem - chem_surfvals_nl - - List of species that are fixed at the lower boundary. - Default: UNKNOWN. - - - - char*8 - cam_chem - chem_surfvals_nl - CYCLICAL,SERIAL,FIXED - - Type of time interpolation for fixed lower boundary data. - - - CYCLICAL - - - - char*256 - cam_chem - noy_ubc_nl - - File name of dataset for NOy upper boundary conditions. - Default: UNKNOWN. - - - - char*256 - cam_chem - noy_ubc_nl - - Full pathname of the directory that contains the NOy upper boundary conditions files specified in - {{ hilight }}noy_ubc_filelist{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - noy_ubc_nl - - Filename of file that contains a sequence of filenames for prescribed NOy upper boundary conditions. - The filenames in this file are relative to the directory specified by {{ hilight }}noy_ubc_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*32 - cam_chem - noy_ubc_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for NOy upper boundary conditions. - - - SERIAL - - - - integer - cam_chem - noy_ubc_nl - - The cycle year of the NOy upper boundary data - if {{ hilight }}flbc_type{{ closehilight }} is 'CYCLICAL'. 
- Format: YYYY - - - 0 - - - - integer - cam_chem - noy_ubc_nl - - The date at which the NOy upper boundary data is fixed - if {{ hilight }}flbc_type{{ closehilight }} is 'FIXED'.. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - noy_ubc_nl - - The time of day (seconds) corresponding to {{ hilight }}noy_ubc_fixed_ymd{{ closehilight }} - at which the NOy upper boundary data is fixed - if {{ hilight }}flbc_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset for chemical tracers constrained in the stratosphere - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ub/ubvals_b40.20th.track1_1996-2005_c110315.nc - - - - char*16(1000) - cam_chem - chem_inparm - - List of species that are constrained in the stratosphere. - Default: UNKNOWN. - - - - real - cam_chem - chem_inparm - - Multiplication factor applied to the lighting NOx production - Default: 1.0. - - - 1.00D0 - 2.00D0 - 1.50D0 - 1.30D0 - 1.60D0 - 0.32D0 - - - - real - waccm - chem_inparm - - Multiplication factor applied to the upper boundary NO mass mixing ratio. - - - 1.0 - - - - char*256 - waccm - chem_inparm - - Full pathname of dataset for the neutral species absorption cross sections. - - - $DIN_LOC_ROOT/atm/waccm/phot/photon_c130710.dat - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset for fast-tuv photolysis cross sections - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/phot/tuv_xsect.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset of O2 cross sections for fast-tuv photolysis - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/phot/o2src.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset of O2 and 03 column densities above the model for look-up-table photolysis - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/phot/exo_coldens.nc - - - - char*256(100) - cam_chem - aircraft_emit_nl - - Filename of file that contains aircraft input file lists. The filenames in the files are relative - to the directory specified by {{ hilight }}aircraft_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - aircraft_emit_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}aircraft_specifier{{ closehilight }}. - - - $DIN_LOC_ROOT/atm/cam/ggas - - - - char*32 - cam_chem - aircraft_emit_nl - CYCLICAL_LIST,SERIAL - - Type of time interpolation for data in aircraft aerosol files. - - - SERIAL - - - - char*256 - cam_chem - gcr_ionization_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}gcr_ionization_filelist{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - gcr_ionization_nl - - Filename of dataset for ionization rates by galactic cosmic rays. - Default: UNKNOWN. - - - - char*256 - cam_chem - gcr_ionization_nl - - Filename of file that contains a sequence of filenames for ionization - rates by galactic cosmic rays. The filenames in this file are relative - to the directory specified by {{ hilight }}gcr_ionization_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*16 - cam_chem - gcr_ionization_nl - - Names of variables containing ionization rates (/cm3/sec) in the cosmic rays datasets. - Default: none - - - - char*32 - cam_chem - gcr_ionization_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in gcr_ionization files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. 
- - - SERIAL - - - - integer - cam_chem - gcr_ionization_nl - - The cycle year of the prescribed green house gas data - if {{ hilight }}gcr_ionization_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - gcr_ionization_nl - - The date at which the prescribed green house gas data is fixed - if {{ hilight }}gcr_ionization_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - gcr_ionization_nl - - The time of day (seconds) corresponding to {{ hilight }}gcr_ionization_fixed_ymd{{ closehilight }} - at which the prescribed green house gas data is fixed - if {{ hilight }}gcr_ionization_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - prescribed_aero_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}prescribed_aero_filelist{{ closehilight }}. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart_aero/aero - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/aero - - - - char*256 - cam_chem - prescribed_aero_nl - - Filename of dataset for prescribed aerosols. - Default: UNKNOWN. - - - aero_1.9x2.5_L26_2000clim_c091112.nc - mam3_1.9x2.5_L30_2000clim_c130319.nc - - - - char*256 - cam_chem - prescribed_aero_nl - - Filename of file that contains a sequence of filenames for prescribed - aerosols. The filenames in this file are relative to the directory specified - by {{ hilight }}prescribed_aero_datapath{{ closehilight }}. - Default: UNKNOWN. - - - aero_1.9x2.5_L26_list_c070514.txt - aero_1.9x2.5_L26_list_c070514.txt - - - - logical - cam_chem - prescribed_aero_nl - - Remove the file containing prescribed aerosol concentrations from local disk when no longer needed. - - - .false. - - - - char*32(50) - cam_chem - prescribed_aero_nl - - A list of variable names of the concentration fields in the prescribed aerosol datasets - and corresponding names used in the physics buffer seperated by colons. For example: - - prescribed_aero_specifier = 'pbuf_name1:ncdf_fld_name1','pbuf_name2:ncdf_fld_name2', ... - - If there is no colon seperater then the specified name is used as both the pbuf_name and ncdf_fld_name, - - Default: none - - - - char*32 - cam_chem - prescribed_aero_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed_aero files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - CYCLICAL - SERIAL - - - - integer - cam_chem - prescribed_aero_nl - - The cycle year of the prescribed aerosol data - if {{ hilight }}prescribed_aero_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 2000 - 2000 - 0 - - - - integer - cam_chem - prescribed_aero_nl - - The date at which the prescribed aerosol data is fixed - if {{ hilight }}prescribed_aero_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - prescribed_aero_nl - - The time of day (seconds) corresponding to {{ hilight }}prescribed_aero_fixed_ymd{{ closehilight }} - at which the prescribed aerosol data is fixed - if {{ hilight }}prescribed_aero_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - aerodep_flx_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}aerodep_flx_filelist{{ closehilight }}. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart_aero/aero - $DIN_LOC_ROOT/atm/cam/chem/trop_mam/aero - - - - char*256 - cam_chem - aerodep_flx_nl - - Filename of dataset for prescribed aerosols. 
- Default: UNKNOWN. - - - aerosoldep_monthly_1849-2006_1.9x2.5_c090803.nc - mam3_1.9x2.5_L30_2000clim_c130319.nc - - - - char*256 - cam_chem - aerodep_flx_nl - - Filename of file that contains a sequence of filenames for prescribed - aerosols. The filenames in this file are relative to the directory specified - by {{ hilight }}aerodep_flx_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - logical - cam_chem - aerodep_flx_nl - - Remove the file containing prescribed aerosol deposition fluxes from local disk when no longer needed. - - - .false. - - - - char*32(22) - cam_chem - aerodep_flx_nl - - Names of variables containing aerosol data in the prescribed aerosol datasets. - Default: none - - - - char*32 - cam_chem - aerodep_flx_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in aerodep_flx files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - CYCLICAL - SERIAL - - - - integer - cam_chem - aerodep_flx_nl - - The cycle year of the prescribed aerosol flux data - if {{ hilight }}aerodep_flx_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 2000 - 2000 - 0 - - - - integer - cam_chem - aerodep_flx_nl - - The date at which the prescribed aerosol flux data is fixed - if {{ hilight }}aerodep_flx_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - aerodep_flx_nl - - The time of day (seconds) corresponding to {{ hilight }}ssaerodep_flx_fixed_ymd{{ closehilight }} - at which the prescribed aerosol flux data is fixed - if {{ hilight }}saerodep_flx_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - prescribed_ghg_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}prescribed_ghg_filelist{{ closehilight }}. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/chem/methane - - - - char*256 - cam_chem - prescribed_ghg_nl - - Filename of dataset for prescribed GHGs. - Default: UNKNOWN. - - - CH4_1990-1999_clim_c090605.nc - - - - char*256 - cam_chem - prescribed_ghg_nl - - Filename of file that contains a sequence of filenames for prescribed - GHGs. The filenames in this file are relative to the directory specified - by {{ hilight }}prescribed_ghg_datapath{{ closehilight }}. - Default: UNKNOWN. - - - filelist_c090605.txt - - - - logical - cam_chem - prescribed_ghg_nl - - Remove the file containing prescribed green house gas concentrations from local disk when no longer needed. - - - .false. - - - - char*16(100) - cam_chem - prescribed_ghg_nl - - Names of variables containing GHG data in the prescribed GHG datasets. - Default: none - - - - char*32 - cam_chem - prescribed_ghg_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed_ghg files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - SERIAL - - - - integer - cam_chem - prescribed_ghg_nl - - The cycle year of the prescribed green house gas data - if {{ hilight }}prescribed_ghg_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 1995 - 0 - - - - integer - cam_chem - prescribed_ghg_nl - - The date at which the prescribed green house gas data is fixed - if {{ hilight }}prescribed_ghg_type{{ closehilight }} is 'FIXED'. 
- Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - prescribed_ghg_nl - - The time of day (seconds) corresponding to {{ hilight }}prescribed_ghg_fixed_ymd{{ closehilight }} - at which the prescribed green house gas data is fixed - if {{ hilight }}prescribed_ghg_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - prescribed_ozone_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}prescribed_ozone_filelist{{ closehilight }}. - - - $DIN_LOC_ROOT/atm/cam/ozone - $DIN_LOC_ROOT/atm/cam/ozone_strataero - - - - char*256 - cam_chem - prescribed_ozone_nl - - Filename of dataset for prescribed ozone. - - - ozone_1.9x2.5_L26_2000clim_c091112.nc - ozone_strataero_CAM6chem_2000climo_zm_5day_c171004.nc - ozone_strataero_CAM6chem_2000climo_zm_5day_c171004.nc - waccm_ozone_c121126.nc - ozone_strataero_WACCM6_L70_zm5day_19750101-20141229_c180216.nc - - - - char*256 - cam_chem - prescribed_ozone_nl - - Filename of file that contains a sequence of filenames for prescribed - ozone. The filenames in this file are relative to the directory specified - by {{ hilight }}prescribed_ozone_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*16 - cam_chem - prescribed_ozone_nl - - Name of variable containing ozone data in the prescribed ozone datasets. - - - O3 - - - - logical - cam_chem - prescribed_ozone_nl - - Remove the file containing prescribed ozone concentrations from local disk when no longer needed. - - - .false. - - - - char*32 - cam_chem - prescribed_ozone_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed_ozone files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - - - - integer - cam_chem - prescribed_ozone_nl - - The cycle year of the prescribed ozone data - if {{ hilight }}prescribed_ozone_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 2000 - 0 - - - - integer - cam_chem - prescribed_ozone_nl - - The date at which the prescribed ozone data is fixed - if {{ hilight }}prescribed_ozone_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - prescribed_ozone_nl - - The time of day (seconds) corresponding to {{ hilight }}prescribed_ozone_fixed_ymd{{ closehilight }} - at which the prescribed ozone data is fixed - if {{ hilight }}prescribed_ozone_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - prescribed_volcaero_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}prescribed_volcaero_filelist{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - prescribed_volcaero_nl - - Filename of dataset for prescribed volcaero. - Default: UNKNOWN. - - - - char*256 - cam_chem - prescribed_volcaero_nl - - Filename of file that contains a sequence of filenames for prescribed - volcanic aerosols. The filenames in this file are relative to the directory specified - by {{ hilight }}prescribed_volcaero_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*16 - cam_chem - prescribed_volcaero_nl - - Name of variable containing volcaero data in the prescribed volcaero datasets. - - - MMRVOLC - - - - logical - cam_chem - prescribed_volcaero_nl - - Remove the file containing prescribed volcanic aerosol concentrations from local disk when no longer needed. - - - .false. 
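The prescribed_ozone_* entries above follow the same pattern used by all of the prescribed-data streams in this file: a datapath, either a single file or a filelist of filenames relative to that path, a field name, a time-interpolation type, and either a cycle year (for 'CYCLICAL') or a fixed date and time of day (for 'FIXED'). As an illustration only, using the default datapath, filename, and field name listed above, and assuming the single-file, field-name, and cycle-year ids follow the prescribed_ozone_datapath/prescribed_ozone_type naming (prescribed_ozone_file, prescribed_ozone_name, prescribed_ozone_cycle_yr; those ids are not visible in this excerpt), a cyclical climatological ozone stream would be requested in the CAM namelist (e.g. via user_nl_cam) roughly as:

  prescribed_ozone_datapath = '$DIN_LOC_ROOT/atm/cam/ozone'
  prescribed_ozone_file     = 'ozone_1.9x2.5_L26_2000clim_c091112.nc'
  prescribed_ozone_name     = 'O3'
  prescribed_ozone_type     = 'CYCLICAL'
  prescribed_ozone_cycle_yr = 2000

With prescribed_ozone_type = 'FIXED', the YYYYMMDD date and time-of-day entries described above are used in place of the cycle year.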
- - - - char*32 - cam_chem - prescribed_volcaero_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed_volcaero files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - SERIAL - - - - integer - cam_chem - prescribed_volcaero_nl - - The cycle year of the prescribed volcanic aerosol data - if {{ hilight }}prescribed_volcaero_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - prescribed_volcaero_nl - - The date at which the prescribed volcanic aerosol data is fixed - if {{ hilight }}prescribed_volcaero_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - prescribed_volcaero_nl - - The time of day (seconds) corresponding to {{ hilight }}prescribed_volcaero_fixed_ymd{{ closehilight }} - at which the prescribed volcanic aerosol data is fixed - if {{ hilight }}prescribed_volcaero_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - prescribed_strataero_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}prescribed_strataero_filelist{{ closehilight }}. - - - $DIN_LOC_ROOT/atm/waccm/sulf - $DIN_LOC_ROOT/atm/cam/volc - $DIN_LOC_ROOT/atm/cam/ozone - $DIN_LOC_ROOT/atm/cam/ozone_strataero - - - - char*256 - cam_chem - prescribed_strataero_nl - - Filename of dataset for prescribed volcaero. - - - CESM_1949_2100_sad_V2_c130627.nc - CESM_1849_2100_sad_V3_c160211.nc - ozone_strataero_CAM6chem_1849-2014_zm_5day_c170924.nc - ozone_strataero_WACCM6_L70_zm5day_19750101-20141229_c180216.nc - - - - char*256 - cam_chem - prescribed_strataero_nl - - Filename of file that contains a sequence of filenames for prescribed - stratospheric aerosols. The filenames in this file are relative to the directory specified - by {{ hilight }}prescribed_strataero_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*16 - cam_chem - prescribed_strataero_nl - - Name of variable containing prescribed stratospheric aerosol specifiers - Default: set by the CAM program - - - - logical - cam_chem - prescribed_strataero_nl - - Remove the file containing prescribed volcanic aerosol concentrations from local disk when no longer needed. - - - .false. - - - - char*32 - cam_chem - prescribed_strataero_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed_strataero files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - SERIAL - - - - integer - cam_chem - prescribed_strataero_nl - - The cycle year of the prescribed volcanic aerosol data - if {{ hilight }}prescribed_strataero_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - prescribed_strataero_nl - - The date at which the prescribed volcanic aerosol data is fixed - if {{ hilight }}prescribed_strataero_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - prescribed_strataero_nl - - The time of day (seconds) corresponding to {{ hilight }}prescribed_strataero_fixed_ymd{{ closehilight }} - at which the prescribed volcanic aerosol data is fixed - if {{ hilight }}prescribed_strataero_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - logical - aerosol - prescribed_strataero_nl - - Indicates whether to use the unified chemistry tropopause method to set prescribed - stratospheric aerosols below the tropopause to zero. 
This has a maximum altitude - level corresponding to 300 hPa for latitudes poleward of 50 degrees. - - - .false. - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset for radiative source function used in look up table photloysis - - - $DIN_LOC_ROOT/atm/waccm/phot/RSF_GT200nm_v3.0_c080811.nc - - - - char*256 - waccm - chem_inparm - - Full pathname of dataset for the coefficients of the NOEM nitric oxide model used - to calculate its upper boundary concentration. - - - $DIN_LOC_ROOT/atm/waccm/ub/snoe_eof.nc - - - - char*256 - cam_chem - dust_nl - - Full pathname of boundary dataset for soil erodibility factors. - - - $DIN_LOC_ROOT/atm/cam/dst/dst_source2x2tunedcam6-2x2-04062017.nc - $DIN_LOC_ROOT/atm/cam/dst/dst_source2x2_cam5.4_c150327.nc - $DIN_LOC_ROOT/atm/cam/dst/dst_source2x2tuned-cam4-06132012.nc - $DIN_LOC_ROOT/atm/cam/dst/dst_source1x1tuned-cam4-06202012.nc - - - - char*256(1000) - cam_chem - chem_inparm - - List of full pathnames of surface emission datasets. - - The chemistry package reads in emission data from a set of netcdf files in - units of "molecules/cm2/s". Each tracer species emissions is read from its - own file as directed by the namelist variable {{ hilight }}srf_emis_specifier{{ closehilight }}. The - {{ hilight }}srf_emis_specifier{{ closehilight }} variable tells the model which species have emissions - and the file path for the corresponding species. That is, the - {{ hilight }}srf_emis_specifier{{ closehilight }} variable is set something like: - - srf_emis_specifier = 'CH4 -> /path/emis.ch4.nc', - 'CO -> /path/emis.co.nc', etc... - - Each emission file can have more than one source. When the emission are - read in the sources are summed to give a total emission field for the - corresponding species. The emission can be read in as time series of data, - cycle over a given year, or be fixed to a given date. - - Default: UNKNOWN. - - - - char*256 - cam_chem - sulf_nl - - Full pathname of dataset containing tropopheric sulfate aerosols - - - $DIN_LOC_ROOT/atm/waccm/sulf/sulfate.ar5_camchem_c130304.nc - - - - char*256 - cam_chem - sulf_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}sulf_filelist{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - sulf_nl - - Filename of file that contains a sequence of filenames for prescribed - sulfate. The filenames in this file are relative to the directory specified - by {{ hilight }}sulf_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*16 - cam_chem - sulf_nl - - Name of variable containing sulfate data in the prescribed sulfate datasets. - - - SULFATE - - - - logical - cam_chem - sulf_nl - - Remove the file containing prescribed sulfate concentrations from local disk when no longer needed. - - - .false. - - - - char*32 - cam_chem - sulf_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in prescribed sulfate files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - - - - integer - cam_chem - sulf_nl - - The cycle year of the prescribed sulfate data - if {{ hilight }}sulf_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - sulf_nl - - The date at which the prescribed sulfate data is fixed - if {{ hilight }}sulf_type{{ closehilight }} is 'FIXED'. 
- Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - sulf_nl - - The time of day (seconds) corresponding to {{ hilight }}sulf_fixed_ymd{{ closehilight }} - at which the prescribed sulfate data is fixed - if {{ hilight }}sulf_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - waccm - chem_inparm - - Full pathname of dataset for TGCM upper boundary - - - $DIN_LOC_ROOT/atm/waccm/ub/tgcm_ubc_1993_c100204.nc - - - - char*32 - waccm - chem_inparm - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in TGCM upper boundary file. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - SERIAL - - - - integer - waccm - chem_inparm - - The cycle year of the TGCM upper boundary data - if {{ hilight }}tgcm_ubc_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - waccm - chem_inparm - - The date at which the TGCM upper boundary data is fixed - if {{ hilight }}tgcm_ubc_type{{ closehilight }} is 'FIXED'. - Format: YYYY - - - 0 - - - - integer - waccm - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}tgcm_ubc_fixed_ymd{{ closehilight }} - at which the TGCM upper boundary data is fixed - if {{ hilight }}tgcm_ubc_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - real - waccm - chem_inparm - - Perturbation applied to the upper boundary temperature. - - - 0.0 - - - - integer - cam_chem - chem_inparm - - Frequency in time steps at which the chemical equations are solved. - - - 1 - - - - char*256 - cam_chem - chem_inparm - - Filename of dataset for linoz cholirine loading. - Default: none. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ub/EESC_1850-2100_c090603.nc - - - - char*8 - cam_chem - chem_inparm - SERIAL,FIXED - - Type of time interpolation type for data in {{ hilight }}chlorine_loading_file{{ closehilight }} - - - SERIAL - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}chlorine_loading_fixed_ymd{{ closehilight }} - at which the chlorine loading data is fixed - if {{ hilight }}chlorine_loading_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - integer - cam_chem - chem_inparm - - The date at which the chlorine loading data is fixed - if {{ hilight }}chlorine_loading_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - char*256 - cam_chem - chem_inparm - - Full pathname of the directory that contains the files specified in - {{ hilight }}linoz_data_filelist{{ closehilight }}. - Default: none. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ub - - - - char*256 - cam_chem - chem_inparm - - Filename of dataset for LINOZ data. - Default: none. - - - linoz2004_2006jpl_c081216.nc - - - - char*256 - cam_chem - chem_inparm - - Filename of file that contains a sequence of filenames of the linoz data. - The filenames in this file are relative to the directory specified - by {{ hilight }}linoz_data_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - char*24 - cam_chem - chem_inparm - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in linoz_data files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - SERIAL - - - - logical - cam_chem - chem_inparm - - Remove the file containing LINOZ data from local disk when no longer needed. - - - .false. - - - - integer - cam_chem - chem_inparm - - The cycle year of the LINOZ data - if {{ hilight }}linoz_data_type{{ closehilight }} is 'CYCLICAL'. 
- Format: YYYY - - - 0 - - - - integer - cam_chem - chem_inparm - - The date at which the LINOZ data is fixed - if {{ hilight }}linoz_data_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}linoz_data_fixed_ymd{{ closehilight }} - at which the LINOZ data is fixed - if {{ hilight }}linoz_data_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - chem_inparm - - Full pathname of the directory that contains the files specified in - {{ hilight }}tracer_cnst_filelist{{ closehilight }}. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart_aero/oxid - $DIN_LOC_ROOT/atm/waccm/halons - $DIN_LOC_ROOT/atm/cam/chem/methane - $DIN_LOC_ROOT/atm/waccm/halons - - - - char*256 - cam_chem - chem_inparm - - Filename of dataset for the prescribed chemical constituents. - Default: UNKNOWN. - - - oxid_1.9x2.5_L26_1850-2005_c091123.nc - oxid_1.9x2.5_L26_1850clim_c091123.nc - oxid_1.9x2.5_L26_1850clim_c091123.nc - halons_oxid_1.9x2.5zm_L66_1849-2099_c160714.nc - CH4_1990-1999_clim_c090605.nc - halons_oxid_1.9x2.5zm_L66_1849-2099_c160714.nc - - - - char*256 - cam_chem - chem_inparm - - Filename of file that contains a sequence of filenames for the prescribed chemical constituents. - The filenames in this file are relative to the directory specified - by {{ hilight }}tracer_cnst_datapath{{ closehilight }}. - Default: UNKNOWN. - - - oxid_1.9x2.5_L26_clim_list.c090805.txt - filelist_c090605.txt - - - - logical - cam_chem - chem_inparm - - Remove the file containing prescribed chemical constituents from local disk when no longer needed. - - - .false. - - - - char*256(100) - cam_chem - chem_inparm - - List of prescribed chemical constituents. - Default: UNKNOWN. - - - - char*24 - cam_chem - chem_inparm - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in tracer_cnst files. - - - CYCLICAL - CYCLICAL - CYCLICAL - CYCLICAL - - - - integer - cam_chem - chem_inparm - - The cycle year of the prescribed chemical constituents data - if {{ hilight }}tracer_cnst_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 1850 - 1850 - 2000 - 2000 - 1995 - 2000 - - - - integer - cam_chem - chem_inparm - - The date at which the chemical constituents data is fixed - if {{ hilight }}tracer_cnst_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}tracer_cnst_fixed_ymd{{ closehilight }} - at which the chemical constituents data is fixed - if {{ hilight }}tracer_cnst_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - cam_chem - chem_inparm - - Full pathname of the directory that contains the files specified in - {{ hilight }}tracer_srcs_filelist{{ closehilight }}. - Default: UNKNOWN. - - - - char*256 - cam_chem - chem_inparm - - Filename of dataset for the prescribed chemical sources. - Default: UNKNOWN. - - - - char*256 - cam_chem - chem_inparm - - Filename of file that contains a sequence of datasets for the prescribed chemical sources. - The filenames in this file are relative to the directory specified - by {{ hilight }}tracer_srcs_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - logical - cam_chem - chem_inparm - - Remove the file containing prescribed chemical sources from local disk when no longer needed. - - - .false. 
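The tracer_cnst_* entries above (prescribed chemical constituents, e.g. oxidants) and the tracer_srcs_* entries work the same way as the other input streams in this file. As a sketch only, taking the default datapath, filename, and cycle year listed above, and assuming the single-file and cycle-year ids follow the tracer_cnst_datapath/tracer_cnst_filelist/tracer_cnst_type naming used in the descriptions (tracer_cnst_file, tracer_cnst_cycle_yr; those ids are not visible in this excerpt), a cyclical oxidant climatology could be selected with:

  tracer_cnst_datapath = '$DIN_LOC_ROOT/atm/cam/chem/trop_mozart_aero/oxid'
  tracer_cnst_file     = 'oxid_1.9x2.5_L26_1850clim_c091123.nc'
  tracer_cnst_type     = 'CYCLICAL'
  tracer_cnst_cycle_yr = 1850

The names of the constituents to read from that dataset are supplied through the "List of prescribed chemical constituents" entry above.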
- - - - char*256(100) - cam_chem - chem_inparm - - List of prescribed chemical sources - Default: UNKNOWN. - - - - char*24 - cam_chem - chem_inparm - - Type of time interpolation for data in tracer_srcs files. - - - SERIAL - - - - integer - cam_chem - chem_inparm - - The cycle year of the prescribed chemical sources data - if {{ hilight }}tracer_srcs_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - - - - integer - cam_chem - chem_inparm - - The date at which the chemical sources data is fixed - if {{ hilight }}tracer_srcs_type{{ closehilight }} is 'FIXED'. - Format: YYYYMMDD - - - 0 - - - - integer - cam_chem - chem_inparm - - The time of day (seconds) corresponding to {{ hilight }}tracer_srcs_fixed_ymd{{ closehilight }} - at which the chemical sources data is fixed - if {{ hilight }}tracer_srcs_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - logical - cam_chem - chem_inparm - - If TRUE then use the FTUV method to calculate the photolysis reactions rates, - otherwise use the look up table method. - - - .false. - - - - char*256 - cam_chem - chem_inparm - - Full pathname of dataset for Chebyshev polynomial Coeff data used for photolysis - cross sections. - - - $DIN_LOC_ROOT/atm/waccm/phot/effxstex.txt - - - - char*256 - cam_chem - chem_inparm - - Full pathname of cross section dataset for long wavelengh photolysis - - - $DIN_LOC_ROOT/atm/waccm/phot/temp_prs_GT200nm_JPL10_c140624.nc - - - - char*256 - cam_chem - chem_inparm - - Full pathname of cross section dataset for short wavelengh photolysis - - - $DIN_LOC_ROOT/atm/waccm/phot/xs_short_jpl10_c140303.nc - - - - char*256(200) - cam_chem - rxn_rate_diags_nl - - Give the user the ability to specify rate families (or groupings) diagnostics based - on reaction tag names. These group names can be added to history fincl variables. - A "+" character at the end of a string indicates that the summation will continue with the next string. - Example: - rate_sums = - 'OX_P = NO_HO2 + CH3O2_NO + 2*jo2_b ... ', - 'OX_L = NO2_O_M + HO2_O3 + CLO_O ...', - 'RO2_RO2_sum = CH3O2_CH3O2a + CH3O2_CH3O2b + C2H5O2_CH3O2 + C2H5O2_C2H5O2 + CH3CO3_CH3O2 +', - 'CH3CO3_CH3CO3 + CH3H7O2_CH3O2 + RO2_CH3O2 + MACRO2_CH3O2 ...', - fincl1 = 'OX_P','OX_L', 'RO2_RO2_sum', ... 
- Default: none - - - 'O3_Prod = NO_HO2 + CH3O2_NO + HOCH2OO_NO + C2H5O2_NO + CH3CO3_NO + EO2_NO + C3H7O2_NO + PO2_NO + RO2_NO + ENEO2_NO + MACRO2_NOa + jhonitr + ', - 'MCO3_NO + MEKO2_NO + ALKO2_NO + jalknit + .92*ISOPAO2_NO + .92*ISOPBO2_NO + ISOPNO3_NO + XO2_NO + ACBZO2_NO + BENZO2_NO + BZOO_NO + ', - 'C6H5O2_NO + DICARBO2_NO + MALO2_NO + MDIALO2_NO + PHENO2_NO + TOLO2_NO + XYLENO2_NO + XYLOLO2_NO + NTERPO2_NO + .9*TERP2O2_NO + .8*TERPO2_NO + jterpnit', - 'O3_Loss = O1D_H2O + OH_O3 + HO2_O3 + C2H4_O3 + C3H6_O3 + ISOP_O3 + MVK_O3 + MACR_O3 + MTERP_O3 + BCARY_O3', - 'O3S_Loss = 2.0*O_O3 + O1D_H2O + HO2_O3 + OH_O3 + H_O3 + 2.0*NO2_O + 2.0*jno3_b + 2.0*CLO_O + 2.0*jcl2o2 + 2.0*CLO_CLOa + 2.0*CLO_CLOb + ', - '2.0*BRO_CLOb + 2.0*BRO_CLOc + 2.0*BRO_BRO + 2.0*BRO_O + CLO_HO2 + BRO_HO2 + S_O3 + SO_O3 + ', - 'C2H4_O3 + C3H6_O3 + ISOP_O3 + MVK_O3 + MACR_O3 + MTERP_O3 + BCARY_O3', - 'Ox_Prod = 2.0*jo2_a + 2.0*jo2_b + NO_HO2 + CH3O2_NO + HOCH2OO_NO + C2H5O2_NO + CH3CO3_NO + EO2_NO + C3H7O2_NO + PO2_NO + RO2_NO + ENEO2_NO + MACRO2_NOa + ', - 'jhonitr + MCO3_NO + MEKO2_NO + ALKO2_NO + jalknit + .92*ISOPAO2_NO + .92*ISOPBO2_NO + ISOPNO3_NO + XO2_NO + ACBZO2_NO + BENZO2_NO + BZOO_NO + ', - 'C6H5O2_NO + DICARBO2_NO + MALO2_NO + MDIALO2_NO + PHENO2_NO + TOLO2_NO + XYLENO2_NO + XYLOLO2_NO + NTERPO2_NO + .9*TERP2O2_NO + .8*TERPO2_NO + jterpnit', - 'Ox_Loss = 2.0*O_O3 + O1D_H2O + HO2_O + HO2_O3 + OH_O + OH_O3 + H_O3 + 2.0*NO2_O + 2.0*jno3_b + 2.0*CLO_O + 2.0*jcl2o2 + 2.0*CLO_CLOa + 2.0*CLO_CLOb + ', - '2.0*BRO_CLOb + 2.0*BRO_CLOc + 2.0*BRO_BRO + 2.0*BRO_O + CLO_HO2 + BRO_HO2 + C2H4_O3 + C3H6_O3 + ISOP_O3 + MVK_O3 + MACR_O3 + MTERP_O3 + BCARY_O3', - 'RO2_NO_sum = NO_HO2 + HOCH2OO_NO + C2H5O2_NO + CH3CO3_NO + EO2_NO + C3H7O2_NO + PO2_NO + RO2_NO + ENEO2_NO + MACRO2_NOa + jhonitr + ', - 'MCO3_NO + MEKO2_NO + ALKO2_NO + jalknit + .92*ISOPAO2_NO + .92*ISOPBO2_NO + ISOPNO3_NO + XO2_NO + ACBZO2_NO + BENZO2_NO + BZOO_NO + ', - 'C6H5O2_NO + DICARBO2_NO + MALO2_NO + MDIALO2_NO + PHENO2_NO + TOLO2_NO + XYLENO2_NO + XYLOLO2_NO + NTERPO2_NO + .9*TERP2O2_NO + .8*TERPO2_NO + jterpnit', - 'RO2_NO3_sum = NO3_HO2 + MACRO2_NO3 + MCO3_NO3 + ISOPAO2_NO3 + ISOPBO2_NO3 + XO2_NO3', - 'RO2_HO2_sum = CH3O2_HO2 + HOCH2OO_HO2 + EO2_HO2 + C2H5O2_HO2 + CH3CO3_HO2 + C3H7O2_HO2 + PO2_HO2 + RO2_HO2 + MEKO2_HO2 + MACRO2_HO2 + ISOPAO2_HO2 + ISOPBO2_HO2 + ALKO2_HO2 +', - 'XO2_HO2 + TOLO2_HO2 + PHENO2_HO2 + C6H5O2_HO2 + BENZO2_HO2 + MALO2_HO2 + BZOO_HO2 + ACBZO2_HO2 + DICARBO2_HO2 + MDIALO2_HO2 + XYLOLO2_HO2 + XYLENO2_HO2 + TERPO2_HO2 +', - 'TERP2O2_HO2 + NTERPO2_HO2', - 'RO2_RO2_sum = CH3O2_CH3O2a + CH3O2_CH3O2b + C2H5O2_CH3O2 + C2H5O2_C2H5O2 + CH3CO3_CH3O2 + CH3CO3_CH3CO3 + C3H7O2_CH3O2 + RO2_CH3O2 + MACRO2_CH3O2 + MACRO2_CH3CO3 + MCO3_CH3O2 +', - ' MCO3_CH3CO3 + MCO3_MCO3 + ISOPAO2_CH3O2 + ISOPBO2_CH3O2 + ISOPAO2_CH3CO3 + ISOPBO2_CH3CO3 + XO2_CH3O2 + XO2_CH3CO3', - 'RCO2_NO2_sum = CH3CO3_NO2 + MCO3_NO2', - 'OddOx_Ox_Loss = 2.0*O_O3 + O1D_H2O', - 'OddOx_HOx_Loss = HO2_O + HO2_O3 + OH_O + OH_O3 + H_O3', - 'OddOx_NOx_Loss = 2.0*NO2_O + 2.0*jno3_b', - 'OddOx_CLOxBROx_Loss = 2.0*CLO_O + 2.0*jcl2o2 + 2.0*CLO_CLOa + 2.0*CLO_CLOb + 2.0*BRO_CLOb + 2.0*BRO_CLOc + 2.0*BRO_BRO + 2.0*BRO_O + CLO_HO2 + BRO_HO2', - 'OddOx_Loss_Tot = 2.0*O_O3 + O1D_H2O + HO2_O + HO2_O3 + OH_O + OH_O3 + H_O3 + 2.0*NO2_O + 2.0*jno3_b + 2.0*CLO_O + 2.0*jcl2o2 + 2.0*CLO_CLOa + 2.0*CLO_CLOb + 2.0*BRO_CLOb +', - ' 2.0*BRO_CLOc + 2.0*BRO_BRO + 2.0*BRO_O + CLO_HO2 + BRO_HO2', - 'OddOx_Prod_Tot = 2.0*jo2_a + 2.0*jo2_b' - - - - char*256(200) - cam_chem - species_sums_nl - - Give the 
user the ability to specify species families (or groupings) diagnostics in volume mixing ratio. - These group names can be added to history fincl variables. The units are mole/mole. - A "+" character at the end of a string indicates that the summation will continue with the next string. - Example: - vmr_sums = - 'SOAG = SOAG0 + SOAG1 + SOAG2 + SOAG3 + SOAG4', - 'NOy = N + NO + NO2 + NO3 + 2*N2O5 + HNO3 + HO2NO2 + CLONO2 +', - 'BRONO2 + PAN + MPAN + ISOPNO3 + ONITR +', - 'HONITR + ALKNIT + ISOPNITA + ISOPNITB + ISOPNOOH + NC4CH2OH +', - 'NC4CHO + NOA + NTERPOOH + PBZNIT + TERPNIT' - fincl1 = 'NOy','SOAG', ... - Default: none - - - - char*256(200) - cam_chem - species_sums_nl - - Give the user the ability to specify species families (or groupings) diagnostics in mass mixing ratio. - These group names can be added to history fincl variables. The units are kg/kg. - A "+" character at the end of a string indicates that the summation will continue with the next string. - Example: - mmr_sums = - 'soa_a1 = soa1_a1 + soa2_a1 + soa3_a1 + soa4_a1 + soa5_a1', - 'soa_a2 = soa1_a2 + soa2_a2 + soa3_a2 + soa4_a2 + soa5_a2' - fincl1 = 'soa_a1','soa_a2', ... - Default: none - - - - logical - chemistry - chem_inparm - - Indicates whether to use the unified chemistry tropopause method to set the - tropopause used in gas phase and aerosol chemical processes. This has a maximum altitude - level corresponding to 300 hPa for latitudes poleward of 50 degrees. - - - .true. - - - - - - - char*256 - waccm_sc - waccm_forcing_nl - - Filename of the prescribed waccm forcing data used with waccm_sc chemistry. - This contains prescribed constituents for non-LTE calculations and heating rates - for wavelengths less than 200 nm. - Default: UNKNOWN. - - - ghg_forcing_2000_c110321.nc - SCWACCM_forcing_WACCM6_zm_5day_L70_1975-2014_c180216.nc - - - - char*256 - waccm_sc - waccm_forcing_nl - - Full pathname of the directory that contains the files specified in - {{ hilight }}waccm_forcing_filelist{{ closehilight }}. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/waccm/ub - $DIN_LOC_ROOT/atm/waccm/waccm_forcing - - - - char*256 - waccm_sc - waccm_forcing_nl - - A file that contains a sequence of filenames for prescribed waccm forcing data. - The filenames in this file are relative to the directory specified - by {{ hilight }}waccm_forcing_datapath{{ closehilight }}. - Default: UNKNOWN. - - - - logical - waccm_sc - waccm_forcing_nl - - Remove the file containing prescribed waccm forcing data from local disk when no longer needed. - - - .false. - - - - char*16(100) - waccm_sc - waccm_forcing_nl - - Names of variables containing concentrations and heating rate in the prescribed waccm forcing datasets. - Default: none - - - - char*32 - waccm_sc - waccm_forcing_nl - CYCLICAL,SERIAL,INTERP_MISSING_MONTHS,FIXED - - Type of time interpolation for data in waccm_forcing files. - Can be set to 'CYCLICAL', 'SERIAL', 'INTERP_MISSING_MONTHS', or 'FIXED'. - - - CYCLICAL - - - - integer - waccm_sc - waccm_forcing_nl - - The cycle year of the prescribed waccm forcing data - if {{ hilight }}waccm_forcing_type{{ closehilight }} is 'CYCLICAL'. - Format: YYYY - - - 0 - 2000 - 0 - - - - integer - waccm_sc - waccm_forcing_nl - - The date at which the prescribed waccm forcing data is fixed - if {{ hilight }}waccm_forcing_type{{ closehilight }} is 'FIXED'. 
- Format: YYYYMMDD - - - 0 - - - - integer - waccm_sc - waccm_forcing_nl - - The time of day (seconds) corresponding to {{ hilight }}waccm_forcing_fixed_ymd{{ closehilight }} - at which the prescribed waccm forcing data is fixed - if {{ hilight }}waccm_forcing_type{{ closehilight }} is 'FIXED'. - Default: 0 seconds - - - - char*256 - waccm_sc - chem_inparm - - Full pathname of time-variant boundary dataset for H2O production/loss rates. - Default: UNKNOWN. - - - $DIN_LOC_ROOT/atm/waccm/phot/xh2o_c080826.nc - $DIN_LOC_ROOT/atm/waccm/phot/xh2o_c080826.nc - - - - - - - logical - waccm_phys - radheat_nl - - Determines which constituents are used from non-LTE calculations. - TRUE implies use prognostic constituents. - FALSE implies use constituents from dataset specified by {{ hilight }}waccm_forcing_file{{ closehilight }}. - Default: TRUE for full chemistry WACCM; FALSE for WACCM_SC. - - - - logical - waccm_phys - radheat_nl - - If TRUE apply upper limit to CO2 concentrations passed to the Formichev non-LTE cooling calculation - (code not intended for values greater than 720 ppmv). Running with flag set to TRUE could lead to - incorrect cooling rates if model CO2 exceeds 720 ppmv. If FALSE simulation will abort if CO2 levels - exceed this value at altitudes above 1 mbar. The 720 ppmv CO2 limiter in the Formichev non-LTE cooling - scheme is applied to all vertical levels regardless of this setting. - - - .false. - - - - logical - waccm_phys - qbo_nl - - TRUE implies assume cyclic qbo data. - - - .false. - - - - char*256 - waccm_phys - qbo_nl - - Filepath for qbo forcing dataset. - - - $DIN_LOC_ROOT/atm/waccm/qbo/qbocyclic28months.nc - - - - logical - waccm_phys - qbo_nl - - TRUE implies qbo package is active. - - - .false. - - - - integer - waccm - geomag_nl - - If set this year is used for setting geomagnetic coordinates through out the - simulation. If not set the model simulation year is used. - Default: none - - - - char*256 - waccm - geomag_nl - - International Geomagnetic Reference Field (IGRF) coefficients. - Default: None. - - - $DIN_LOC_ROOT/atm/waccm/geomag/igrf_ceofs_c160412.nc - - - - char*256 - waccm - epp_ionization_nl - - Filepath input dataset for ionization due to energetic particle precipitation. - Default: None. - - - - char*80 - waccm - epp_ionization_nl - - Variable name in netCDF file {{ hilight }}epp_all_filepath{{ closehilight }} which contains - ion pairs production rates. - Default: None. - - - - char*256 - waccm - epp_ionization_nl - - Filepath input dataset for ionization due to solar proton events. - Default: None. - - - - char*80 - waccm - epp_ionization_nl - - Variable name in netCDF file {{ hilight }}epp_spe_filepath{{ closehilight }} which contains - ion pairs production rates. - Default: None. - - - - char*256 - waccm - epp_ionization_nl - - Filepath input dataset for ionization due to medium energy electrons. - Default: None. - - - - char*80 - waccm - epp_ionization_nl - - Variable name in netCDF file {{ hilight }}epp_mee_filepath{{ closehilight }} which contains - ion pairs production rates. - Default: None. - - - - char*256 - waccm - epp_ionization_nl - - Filepath input dataset for ionization due to galactic cosmic rays. - Default: None. - - - - char*80 - waccm - epp_ionization_nl - - Variable name in netCDF file {{ hilight }}epp_gcr_filepath{{ closehilight }} which contains - ion pairs production rates. - Default: None. - - - - - - - char*80(20) - physconst - air_composition_nl - - List of major species of dry air. 
If not set then the composition of dry - air is considered fixed at tropospheric conditions and the properties of - dry air are constant. If set then the list of major species is assumed to - have 'N2' listed last. This information is currently used only for - computing the variable properties of air in WACCM-X configurations. - Default if WACCM-X: - - ['O_mixing_ratio_wrt_dry_air', 'O2_mixing_ratio_wrt_dry_air', - 'H_mixing_ratio_wrt_dry_air', 'N2_mixing_ratio_wrt_dry_air'] - - Otherwise default is None. - - - "" - - O_mixing_ratio_wrt_dry_air, O2_mixing_ratio_wrt_dry_air, H_mixing_ratio_wrt_dry_air, N2_mixing_ratio_wrt_dry_air - - - - - - char*80(20) - physconst - air_composition_nl - - List of water species that are included in "moist" air. This is currently - used only by the SE dycore to generalize the computation of the moist air - mass and thermodynamic properties. - Default if CAM4, CAM5, or Kessler physics is used: - - ['specific_humidity', - 'cloud_liquid_water_mixing_ratio_wrt_dry_air', - 'rain_mixing_ratio_wrt_dry_air'] - - Default if CAM6 physics is used: - - ['specific_humidity', - 'cloud_liquid_water_mixing_ratio_wrt_dry_air', - 'cloud_ice_mixing_ratio_wrt_dry_air', - 'rain_mixing_ratio_wrt_dry_air', - 'snow_mixing_ratio_wrt_dry_air'] - - Otherwise default is: ['specific_humidity'] - - - - specific_humidity - - - specific_humidity, cloud_liquid_water_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air - - - specific_humidity, cloud_liquid_water_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air - - - specific_humidity, cloud_liquid_water_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air - - - specific_humidity, cloud_liquid_water_mixing_ratio_wrt_dry_air, cloud_ice_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air - - - - - - - - integer - se - dyn_se_nl - - Set for refined exodus meshes (variable viscosity). - Viscosity in namelist specified for regions with a resolution equivilant - to a uniform grid with se_ne = se_fine_ne. - Default: -1 (not used) - - - -1 - 120 - - - - integer - se - dyn_se_nl - 0,1,2 - - CAM physics forcing option: - 0: tendencies - 1: adjustments - 2: hybrid - - - 2 - - - - real - se - dyn_se_nl - - Scalar viscosity with variable coefficient. - Use variable hyperviscosity based on element area limited by - se_max_hypervis_courant. - - - 0 - - - - real - se - dyn_se_nl - - Use tensor hyperviscosity. - Citation: Guba, O., Taylor, M. A., Ullrich, P. A., Overfelt, J. R., and - Levy, M. N.: The spectral element method (SEM) on variable-resolution - grids: evaluating grid sensitivity and resolution-aware numerical - viscosity, Geosci. Model Dev., 7, 2803-2816, - doi:10.5194/gmd-7-2803-2014, 2014. - Default: 0 (i.e., not used) - - - 0 - 3.0D0 - - - - integer - se - dyn_se_nl - - Number of hyperviscosity subcycles per dynamics timestep. - - - 3 - 4 - - - - integer - se - dyn_se_nl - - Number of hyperviscosity subcycles per dynamics timestep in sponge del2 diffusion. - - - 1 - 4 - - - - integer - se - dyn_se_nl - - Variable to specify the vertical index at which the - Rayleigh friction term is centered (the peak value). - Default: 2 - - - 2 - - - - real - se - dyn_se_nl - - Rayleigh friction parameter to determine the width of the profile. If set - to 0 then a width is chosen by the algorithm (see rayleigh_friction.F90). - Default: 0.5. - - - 0.5 - 3 - - - - real - se - dyn_se_nl - - Rayleigh friction parameter to determine the approximate value of the decay - time (days) at model top. 
If 0.0 then no Rayleigh friction is applied. - Default: 0. - - - 0.0 - - - - real - se - dyn_se_nl - - Used by SE dycore to apply sponge layer diffusion to u, v, and T for - stability of WACCM configurations. The diffusion is modeled on 3D molecular - diffusion and thermal conductivity by using actual molecular diffusion and - thermal conductivity coefficients multiplied by the value of - se_molecular_diff. - - If set <= 0.0 then the code is not activated. If set > 0.0 then - the molecular diffusion and thermal conductivity coefficients will be - multiplied by a factor of se_molecular_diff. - - Default: 0. - - - 0.0 - 100.0 - - - - integer - se - dyn_se_nl - - Number of hyperviscosity subcycles done in tracer advection code. - - - 1 - 2 - - - - integer - se - dyn_se_nl - 0,4,8 - - Limiter used for horizontal tracer advection: - 0: None - 4: Sign-preserving limiter. - 8: Monotone limiter. - - - 8 - - - - real - se - dyn_se_nl - - Upper bound for Courant number, used to limit se_hypervis_power. - Default: 1.0e99 (i.e., not used) unless se_refined_mesh=TRUE - - - 1.0e99 - 1.9 - - - - char*256 - se - dyn_se_nl - - Filename of exodus file to read grid from (generated by CUBIT or SQuadGen). - - - none - $DIN_LOC_ROOT/atm/cam/coords/ne0np4CONUS.ne30x8.g - $DIN_LOC_ROOT/atm/cam/coords/ne0np4EQFACE.ne5x4.g - - - - integer - se - dyn_se_nl - - Number of elements along a cube edge. - Must match value of grid. Set this to zero to use a refined mesh. - Default: 0 - - - 0 - 5 - 16 - 30 - 60 - 120 - 240 - - - - integer - se - dyn_se_nl - - Number of PEs to be used by SE dycore. - Default: -1 = Number of PEs used by CAM. - - - -1 - - - - integer - se - dyn_se_nl - - Number of dynamics steps per physics timestep. - - - 2 - 1 - 3 - 3 - 12 - 5 - 5 - 6 - 7 - - - - integer - se - dyn_se_nl - 0,1,2 - - Scaling of temperature increment for different levels of - thermal energy consistency. - 0: no scaling - 1: scale increment for cp consistency between dynamics and physics - 2: do 1 as well as take into account condensate effect on thermal energy - - - 1 - - - - real - se - dyn_se_nl - - Hyperviscosity coefficient for u,v, T [m^4/s]. - If < 0, se_nu is automatically set. - - - -1 - 1.0e13 - - - - real - se - dyn_se_nl - - Hyperviscosity applied to divergence component of winds [m^4/s]. - If < 0, uses se_nu_p. - - - -1 - 1.5625e13 - - - - real - se - dyn_se_nl - - Hyperviscosity coefficient applied to pressure-level thickness [m^4/s]. - If < 0, se_nu_p is automatically set. - - - -1 - 1.5625e13 - - - - real - se - dyn_se_nl - - Second-order viscosity applied only near the model top [m^2/s]. - - - 5.0e5 - 1.0e6 - - 0.0 - 2.0e5 - - - - logical - se - dyn_se_nl - - Hyperscosity for T and dp is applied to (T-Tref) and (dp-dp_ref) where - Xref are reference states where the effect of topography has been removed - (Simmons and Jiabin, 1991, QJRMS, Section 2a). - If TRUE dp_ref is dynamic smoothed reference state derived by Patrick Callaghan - (Lauritzen et al., 2018, JAMES, Appendix A.2) and temperature reference state - based on Simmons and Jiabin (1991) but using smoothed dp_ref. - If FALSE Tref is static reference state (Simmons and Jiabin) and dp_ref state - derived from hydrostatic balance. - - - .false. 
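For variable-resolution (refined exodus mesh) runs, several of the dyn_se_nl entries above work together: se_refined_mesh enables the refined grid, the exodus grid-file entry points at the mesh generated by CUBIT or SQuadGen, se_ne is set to zero, se_fine_ne gives the uniform resolution that the namelist viscosity values correspond to, and se_max_hypervis_courant bounds the Courant number when the variable hyperviscosity (se_hypervis_power) is used. A sketch only, assuming the grid-file id is se_mesh_file (that id is not visible in this excerpt) and using the CONUS defaults shown above:

  se_refined_mesh         = .true.
  se_mesh_file            = '$DIN_LOC_ROOT/atm/cam/coords/ne0np4CONUS.ne30x8.g'
  se_ne                   = 0
  se_fine_ne              = 120
  se_max_hypervis_courant = 1.9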
- - - - logical - se - dyn_se_nl - - If TRUE the continous equations the dynamical core is based on will conserve a - comprehensive moist total energy - If FALSE the continous equations the dynamical core is based on will conserve - a total energy based on cp for dry air and no condensates (same total energy as - CAM physics uses). - For more details see Lauritzen et al., (2018;DOI:10.1029/2017MS001257) - - - .true. - - - - logical - se - dyn_se_nl - - If TRUE the CSLAM algorithm will work for Courant number larger than 1 with - a low-order increment for tracer mass more than one grid cell width away - - - .true. - - - - integer - se - dyn_se_nl - - Number of SE rsplit time-steps CSLAM supercycles - rsplit/se_fvm_supercycling must be an integer - - - -1 - - - - integer - se - dyn_se_nl - - Number of SE rsplit time-steps CSLAM supercycles in the jet region - (which is specified by kmin_jet and kmax_jet) - rsplit/se_fvm_supercycling_jet must be an integer - se_fvm_supercycling_jet/se_fvm_supercyling must be an integer - - - -1 - - - - integer - se - dyn_se_nl - - Min level index where CSLAM runs with se_fvm_supercycling_jet - (if se_fvm_supercycling_jet.ne.se_fvm_supercycling) or - min index where are Courant number increment is active - (se_large_Courant_incr=.true.) - - - -1 - - - - integer - se - dyn_se_nl - - Max level index where CSLAM runs with se_fvm_supercycling_jet - (if se_fvm_supercycling_jet.ne.se_fvm_supercycling) or - max index where are Courant number increment is active - (se_large_Courant_incr=.true.) - - - -1 - - - - integer - se - dyn_se_nl - - Tracer advection is done every qsplit dynamics timesteps. - - - 1 - - - - logical - se - dyn_se_nl - - TRUE specified use of a refined grid (mesh) for this run. - - - .false. - .true. - .true. - - - - integer - se - dyn_se_nl - - Vertically lagrangian code vertically remaps every rsplit tracer timesteps. 
   [The rest of the namelist_definition_cam.xml hunk lost its XML element markup,
    so only a summary of its surviving text is kept here.

    Added or updated entries and values:
      * Long lists of per-grid default dataset paths under $DIN_LOC_ROOT
        (atm/cam/inic, atm/cam/chem, atm/waccm/ic, and atm/cam/topo) for the
        initial-condition and topography file entries.
      * cam_initfiles_nl: a random temperature perturbation applied to initial
        runs (default 0.0), the master restart file for branch runs (default
        none), and the initial-condition and time-invariant topography datasets
        (default UNSET_PATH).
      * physics_nl: the physics data-check dataset (ncdata_check), its comparison
        thresholds (min_difference, min_relative_value), parameterization
        snapshot names (cam_take_snapshot_before/after), the physics-grid ESMF
        mesh file (cam_physics_mesh), and the CCPP physics suite name, which also
        sets the 'phys_suite' namelist XML attribute.
      * vert_coord_nl: the number of model vertical levels (default 30; 26, 30,
        or 32 selected through the "nlev" attribute).
      * ref_pres_nl: reference-pressure limits for tropospheric cloud physics,
        modal aerosols, and molecular diffusion.
      * qneg_nl: control of the qneg3/qneg4 warning messages (summary, timestep,
        or off; default summary).

    Removed entries: the SE dycore settings in dyn_se_nl (statefreq,
    time-stepping method, vertical remap options, the se_fv_nphys physics grid,
    grid and mapping file output, and threading counts), the native SCRIP
    mapping files (native_mapping_nl), the analytic initial conditions
    (analytic_ic_nl), the aqua-planet SST option (aquap_nl), the planetary
    constants (physconst_nl), and the offline unit driver (offline_driver_nl).

    The final hunk header (@@ -11690,4 +445,4 @@) shows the definition file
    shrinking from roughly 11,700 lines to roughly 450.]

diff --git a/src/physics/utils/phys_comp.F90 b/src/physics/utils/phys_comp.F90
index 824a38a7..ce47aab4 100644
--- a/src/physics/utils/phys_comp.F90
+++ b/src/physics/utils/phys_comp.F90
@@ -2,6 +2,7 @@ module phys_comp

    use ccpp_kinds,   only: kind_phys
    use shr_kind_mod, only: SHR_KIND_CS, SHR_KIND_CL
+   use runtime_obj,  only: unset_str

    implicit none
    private
@@ -14,15 +15,15 @@ module phys_comp
    ! Public module data

    ! suite_name: Suite we are running
-   character(len=SHR_KIND_CS), public, protected :: phys_suite_name = ''
+   character(len=SHR_KIND_CS), public, protected :: phys_suite_name = unset_str

    ! Private module data
    character(len=SHR_KIND_CS), allocatable :: suite_names(:)
    character(len=SHR_KIND_CS), allocatable :: suite_parts(:)
-   character(len=SHR_KIND_CL) :: ncdata_check = 'ncdata_check'
-   character(len=SHR_KIND_CL) :: cam_physics_mesh = 'cam_physics_mesh'
-   character(len=SHR_KIND_CS) :: cam_take_snapshot_before ='before'
-   character(len=SHR_KIND_CS) :: cam_take_snapshot_after = 'after'
+   character(len=SHR_KIND_CL) :: ncdata_check = unset_str
+   character(len=SHR_KIND_CL) :: cam_physics_mesh = unset_str
+   character(len=SHR_KIND_CS) :: cam_take_snapshot_before = unset_str
+   character(len=SHR_KIND_CS) :: cam_take_snapshot_after = unset_str
    real(kind_phys) :: min_difference = HUGE(1.0_kind_phys)
    real(kind_phys) :: min_relative_value = HUGE(1.0_kind_phys)
@@ -46,25 +47,23 @@ subroutine phys_readnl(nlfilename)
    character(len=*), intent(in) :: nlfilename

    ! Local variables
-   integer :: unitn, ierr
+   character(len=SHR_KIND_CS) :: physics_suite
+
+   integer :: unitn, ierr, i

    character(len=*), parameter :: subname = 'phys_readnl'

    namelist /physics_nl/ ncdata_check, min_difference, min_relative_value,&
-        cam_take_snapshot_before, cam_take_snapshot_after, cam_physics_mesh
+        cam_take_snapshot_before, cam_take_snapshot_after, cam_physics_mesh,&
+        physics_suite

    ! Initialize namelist variables to invalid values
-   min_relative_value = HUGE(1.0_kind_phys)
    min_difference = HUGE(1.0_kind_phys)
+   min_relative_value = HUGE(1.0_kind_phys)
    cam_take_snapshot_after = unset_path_str
    cam_take_snapshot_before = unset_path_str
    cam_physics_mesh = unset_path_str
    ncdata_check = unset_path_str
-
-   !!XXgoldyXX: To do: Move setting of to namelist
-   !!XXgoldyXX: At that point, we can check that is in
-   !!XXgoldyXX:
-   call ccpp_physics_suite_list(suite_names)
-   phys_suite_name = suite_names(1)
+   physics_suite = unset_str

    ! Read namelist
    if (masterproc) then
@@ -92,6 +91,22 @@
                   mpicom, ierr)
    call mpi_bcast(cam_take_snapshot_after, len(cam_take_snapshot_after),&
                   mpi_character, masterprocid, mpicom, ierr)
+   call mpi_bcast(physics_suite, len(physics_suite),&
+                  mpi_character, masterprocid, mpicom, ierr)
+
+   ! Check that the listed physics suite is actually present
+   ! in the CCPP physics suite list:
+   call ccpp_physics_suite_list(suite_names)
+   do i = 1, size(suite_names)
+      if (trim(physics_suite) == trim(suite_names(i))) then
+         phys_suite_name = trim(physics_suite)
+      end if
+   end do
+
+   ! If no match is found, then end run here:
+   if (phys_suite_name == unset_str) then
+      call endrun(subname//": Physics suite '"//trim(physics_suite)//"' not found.")
+   end if

    ! Print out namelist variables
    if (masterproc) then
@@ -106,6 +121,7 @@
       else
          write(iulog,*) ' Physics data check will not be performed'
       end if
+      write(iulog, *) ' CCPP Physics suite chosen: ', phys_suite_name
    end if

 end subroutine phys_readnl

From b46b544e8c9fa1eef09848f41f66e39272183235 Mon Sep 17 00:00:00 2001
From: Jesse Nusbaumer
Date: Tue, 23 Nov 2021 13:47:15 -0700
Subject: [PATCH 05/33] Update ncar-physics external to avoid need to modify SDF.

---
 Externals_CAM.cfg | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Externals_CAM.cfg b/Externals_CAM.cfg
index db585109..449426ad 100644
--- a/Externals_CAM.cfg
+++ b/Externals_CAM.cfg
@@ -23,7 +23,7 @@ required = False
 local_path = src/physics/ncar_ccpp
 protocol = git
 repo_url = https://github.com/NCAR/atmospheric_physics
-tag = atmos_phys0_00_014
+tag = atmos_phys0_00_015
 required = True

 [silhs]

From 3abeaed07238403bec5d04962769feec5ec248e4 Mon Sep 17 00:00:00 2001
From: Jesse Nusbaumer
Date: Tue, 11 Jan 2022 14:08:50 -0700
Subject: [PATCH 06/33] Make the case 'DEBUG' flag a config variable/namelist attribute.
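This change drops the hard-coded "debug_output = 2" append in buildnml and instead
exposes the case DEBUG flag as a 'debug' config variable and namelist XML attribute.
A minimal, illustrative sketch of that conversion follows; it is not part of the
patch, and the 'case' object and the attribute-guard behavior (values selected when
the entry's debug attribute matches) are assumptions based on the diff below.

    # Hypothetical sketch: turn the boolean CIME DEBUG value into the integer
    # 'debug' attribute used to guard namelist-definition values.
    def debug_attr_value(case):
        debug_case = case.get_value("DEBUG")   # True or False from the CIME case
        return int(bool(debug_case))           # True -> 1, False -> 0

    # With DEBUG=TRUE this yields 1, so a definition value guarded by debug="1"
    # (e.g. debug_output = 2) would be selected instead of the default of 0.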
--- cime_config/buildnml | 6 ------ cime_config/cam_config.py | 15 ++++++++++++++- cime_config/namelist_definition_cam.xml | 1 + 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/cime_config/buildnml b/cime_config/buildnml index f889ddaf..ffdab68b 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -120,7 +120,6 @@ def buildnml(case, caseroot, compname): rundir = case.get_value("RUNDIR") cam_namelist_opts = case.get_value("CAM_NAMELIST_OPTS") cam_nml_use_case = case.get_value("CAM_NML_USE_CASE") - debug = case.get_value("DEBUG") ninst_atm = case.get_value("NINST_ATM") run_type = case.get_value("RUN_TYPE") run_startdate = case.get_value("RUN_STARTDATE") @@ -264,11 +263,6 @@ def buildnml(case, caseroot, compname): cam_branch_file) # End if - # Set "debug_output" namelist variable to 2 if DEBUG is true: - if debug: - infile_lines.append(" debug_output = 2") - # End if - # Determine location and name of "user_nl_cam" files: user_nl_file = os.path.join(caseroot, "user_nl_cam" + inst_string) diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 305688f8..4ca07217 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -94,7 +94,8 @@ def __init__(self, case, case_log): comp_ocn = case.get_value("COMP_OCN") # CESM ocean component exeroot = case.get_value("EXEROOT") # Model executable path nthrds = case.get_value("NTHRDS_ATM") # Number of model OpenMP threads - start_date = case.get_value("RUN_STARTDATE") # Model simulation starte date + start_date = case.get_value("RUN_STARTDATE") # Model simulation start date + debug_case = case.get_value("DEBUG") # Case debug flag # Save case variables needed for code auto-generation: self.__atm_root = case.get_value("COMP_ROOT_DIR_ATM") @@ -168,6 +169,18 @@ def __init__(self, case, case_log): self.create_config("ic_ymd", "Start date of model run.", start_date_cam, is_nml_attr=True) + #---------------------------------------------------- + # Set CAM debug flag (needed for namelist generation) + #---------------------------------------------------- + + #Please note that the boolean debug_case is converted to + #an integer in order to match other namelist XML attribute + #logicals. + + self.create_config("debug", + "Flag to check if debug mode is enabled.", + int(debug_case), is_nml_attr=True) + #------------------------ # Set CAM physics columns #------------------------ diff --git a/cime_config/namelist_definition_cam.xml b/cime_config/namelist_definition_cam.xml index d5a86467..2d3e8fdd 100644 --- a/cime_config/namelist_definition_cam.xml +++ b/cime_config/namelist_definition_cam.xml @@ -442,6 +442,7 @@ 0 + 2 From de01ffa5f958a48a7998c8e9be9422c7ab5591fd Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 11 Jan 2022 16:12:41 -0700 Subject: [PATCH 07/33] Write namelist variable strings based off their specified type. --- cime_config/atm_in_paramgen.py | 36 +++++++++++++++++----------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index b9d3e7a7..706614c6 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -242,21 +242,16 @@ def write(self, output_path): #a fortran boolean, integer, or real. 
This done using the following regular expressions: #-------------------------------------------------------------------------------------- - #Integer and real (including scientific notation) regular expression string: - regex_num_string = r"(^[+-]?\.\d+)($|[de][+-]?\d+$)|^[+-]?\d+\.?(\d+|($|[de][+-]?\d+$))($|[de][+-]?\d+$)" - - #Compile regular expression for numbers, ignoring case for the "d" and "e" notations: - re_num_check = re.compile(regex_num_string, flags=re.I) - - #compile regular expressiong for booleans, ignoring case: - re_bool_check = re.compile(r"^(\.true\.|\.false\.)$", flags=re.I) - # Make sure ParamGen object has been reduced: if not self.reduced: emsg = "ParamGen object for atm_in must be reduced before being " emsg += "written to file. Please check CAM's buildnml script." raise SystemError(emsg) + #Create sets for string evaluation below: + num_bool_set = {"integer", "real", "logical"} #types that don't need quotes + quote_set = {"'", '"'} #single and double quotes + # Write Fortran namelist file: with open(os.path.join(output_path), 'w') as atm_in_fil: for nml_group in self._data: @@ -265,27 +260,32 @@ def write(self, output_path): # Write all variables within that group: for var in self._data[nml_group]: + #Extract variable value(s): val = self._data[nml_group][var]["values"].strip() + + #If no value is set then move to the next variable: if val==None: continue - #Check if value matches fortran number or boolean: - is_num = re_num_check.match(val) - is_bool = re_bool_check.match(val) + #Extract variable type: + var_type = self._data[nml_group][var]["type"].strip() #Check if variable value is a number or boolean: - if is_num or is_bool: + if var_type in num_bool_set: #If so, then write value as-is: atm_in_fil.write(" {} = {}\n".format(var, val)) - else: - #Value is a string, so check if there is a starting quote: - if val[0] == "'" or val[0] == '"': - #If so, then assume value is already wrapped in quotes, - #so write value as-is: + elif "char*" in var_type: + #Value is a string, so check if is already inside quotes: + if val[0] in quote_set and val[-1] == val[0]: + #If so, then write string value as-is: atm_in_fil.write(" {} = {}\n".format(var, val)) else: #If not, then write string with added quotes: atm_in_fil.write(" {} = '{}'\n".format(var, val)) + else: + #This is an un-recognized type option, so raise an error: + emsg = f"Namelist type '{var_type}' for variable '{var}' is un-recognized" + raise CamConfigValError(emsg) # Add space for next namelist group: atm_in_fil.write('/\n\n') From b7f9b1559f05eecdcc2efbdba8b4f521da31ef16 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 12 Jan 2022 10:56:50 -0700 Subject: [PATCH 08/33] Write namelist entries alphabetically, and make sure the type element is present. 
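The diff below sorts both the namelist groups and the variables within each group
before writing, and raises an error when an entry has no 'type' element. The
following self-contained sketch illustrates that ordering and check; it is not the
patch code itself, the group and variable names are made up, and a plain ValueError
stands in for the CamConfigValError used in the patch.

    # Hypothetical sketch of the write-time ordering and required 'type' check.
    data = {"zoo_nl": {"b_var": {"values": "1", "type": "integer"},
                       "a_var": {"values": ".true."}},          # missing 'type'
            "apple_nl": {"c_var": {"values": "'hi'", "type": "char*16"}}}

    for group in sorted(data):               # groups written alphabetically
        for var in sorted(data[group]):      # variables written alphabetically
            entry = data[group][var]
            if "type" not in entry:          # required element must be present
                raise ValueError(f"Namelist entry '{var}' is missing required 'type' element.")
            print(f"&{group}: {var} ({entry['type']}) = {entry['values']}")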
--- cime_config/atm_in_paramgen.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 706614c6..6d7470f9 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -254,12 +254,13 @@ def write(self, output_path): # Write Fortran namelist file: with open(os.path.join(output_path), 'w') as atm_in_fil: - for nml_group in self._data: + #Loop through namelist groups in alphabetical order: + for nml_group in sorted(self._data): # Write namelist group: atm_in_fil.write("&"+nml_group+"\n") - # Write all variables within that group: - for var in self._data[nml_group]: + # Write all variables within that group (sorted alphabetically): + for var in sorted(self._data[nml_group]): #Extract variable value(s): val = self._data[nml_group][var]["values"].strip() @@ -268,7 +269,11 @@ def write(self, output_path): continue #Extract variable type: - var_type = self._data[nml_group][var]["type"].strip() + if "type" in self._data[nml_group][var]: + var_type = self._data[nml_group][var]["type"].strip() + else: + emsg = f"Namelist entry '{var}' is missing required 'type' element." + raise CamConfigValError(emsg) #Check if variable value is a number or boolean: if var_type in num_bool_set: @@ -284,7 +289,8 @@ def write(self, output_path): atm_in_fil.write(" {} = '{}'\n".format(var, val)) else: #This is an un-recognized type option, so raise an error: - emsg = f"Namelist type '{var_type}' for variable '{var}' is un-recognized" + emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" + emsg += "Acceptable namelist types are: logical, integer, real, or char*N." raise CamConfigValError(emsg) # Add space for next namelist group: From 60ada0cb3f723f5913fc3a8ee34f3bfc3f1dbf52 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Mon, 24 Jan 2022 13:14:56 -0700 Subject: [PATCH 09/33] Modify issue-closing workflow to only run on PRs to the NCAR repo (Github Issue #139), and update workflow python version to 3.10 --- .github/workflows/branch_push_workflow.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/branch_push_workflow.yml b/.github/workflows/branch_push_workflow.yml index 193b90e3..aee716d9 100644 --- a/.github/workflows/branch_push_workflow.yml +++ b/.github/workflows/branch_push_workflow.yml @@ -16,25 +16,23 @@ jobs: #This job is designed to close any issues or pull requests specified #in the body of a pull request merged into a non-default branch. 
issue_closer: + if: github.repository == 'NCAR/CAMDEN' # Only run on main repo runs-on: ubuntu-latest steps: # Acquire github action routines - uses: actions/checkout@v2 # Acquire specific version of python - - name: Set up Python 3.6 - if: github.repository == 'NCAR/CAMDEN' # Only run on main repo + - name: Set up Python 3.10 uses: actions/setup-python@v1 with: - python-version: '3.6' # Semantic version range syntax or exact version of a Python version + python-version: '3.10' # Semantic version range syntax or exact version of a Python version # Install required python packages - name: Install dependencies - if: github.repository == 'NCAR/CAMDEN' # Only run on main repo run: | python -m pip install --upgrade pip # Install latest version of PIP pip install PyGithub # Install PyGithub python package # Run CAM issue-closing script - name: Run python Github issue-closing script - if: github.repository == 'NCAR/CAMDEN' # Only run on main repo env: ACCESS_TOKEN: ${{ secrets.WRITE_ACCESS_TOKEN }} run: .github/scripts/branch_pr_issue_closer.py --access_token $ACCESS_TOKEN --trigger_sha $GITHUB_SHA From 113c6f3503e12c1d26f2314a284499e9a6be9dce Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 1 Feb 2022 09:51:48 -0700 Subject: [PATCH 10/33] Add new 'append_atm_in_pg' method to allow for the use of multiple namelist definition files, and add in checks to make sure all namelist groups and variables (entry ids) are unique. Also add in new unit tests for namelist generation via ParamGen, and fix multiple bugs. Please note that this commit does have some failing cam_config unit tests that will need to be fixed later. --- cime_config/atm_in_paramgen.py | 433 ++++++++--- cime_config/cam_config.py | 3 +- test/run_tests.sh | 6 +- test/unit/cam_config_unit_tests.py | 5 +- .../sample_files/atm_in_files/test_attr_in | 13 + .../atm_in_files/test_extra_nml_def.xml | 35 + .../test_extra_nml_same_group.xml | 36 + .../atm_in_files/test_extra_nml_same_var.xml | 48 ++ .../atm_in_files/test_missing_elems.xml | 57 ++ .../atm_in_files/test_multi_attr_in | 13 + .../atm_in_files/test_multi_xml_in | 18 + .../atm_in_files/test_simple_atm_in | 13 + .../atm_in_files/test_simple_nml_def.xml | 60 ++ .../sample_files/atm_in_files/test_user_in | 13 + .../atm_in_files/test_user_nl_bad_comment | 10 + .../atm_in_files/test_user_nl_bad_format | 11 + .../atm_in_files/test_user_nl_double_var | 13 + .../atm_in_files/test_user_nl_simple | 14 + .../atm_in_files/test_user_nl_undefined_var | 13 + test/unit/test_atm_in_paramgen.py | 670 ++++++++++++++++++ 20 files changed, 1371 insertions(+), 113 deletions(-) create mode 100644 test/unit/sample_files/atm_in_files/test_attr_in create mode 100644 test/unit/sample_files/atm_in_files/test_extra_nml_def.xml create mode 100644 test/unit/sample_files/atm_in_files/test_extra_nml_same_group.xml create mode 100644 test/unit/sample_files/atm_in_files/test_extra_nml_same_var.xml create mode 100644 test/unit/sample_files/atm_in_files/test_missing_elems.xml create mode 100644 test/unit/sample_files/atm_in_files/test_multi_attr_in create mode 100644 test/unit/sample_files/atm_in_files/test_multi_xml_in create mode 100644 test/unit/sample_files/atm_in_files/test_simple_atm_in create mode 100644 test/unit/sample_files/atm_in_files/test_simple_nml_def.xml create mode 100644 test/unit/sample_files/atm_in_files/test_user_in create mode 100644 test/unit/sample_files/atm_in_files/test_user_nl_bad_comment create mode 100644 test/unit/sample_files/atm_in_files/test_user_nl_bad_format create mode 100644 
test/unit/sample_files/atm_in_files/test_user_nl_double_var create mode 100644 test/unit/sample_files/atm_in_files/test_user_nl_simple create mode 100644 test/unit/sample_files/atm_in_files/test_user_nl_undefined_var create mode 100644 test/unit/test_atm_in_paramgen.py diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 6d7470f9..652fde8f 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -10,20 +10,24 @@ #---------------------------------------- import os +import os.path import sys import re from collections import OrderedDict #CAM specific config error: from cam_config_classes import CamConfigValError +from cam_config_classes import CamConfigTypeError #---------------- # Import ParamGen #---------------- -_CIMEROOT = os.environ.get("CIMEROOT") -if _CIMEROOT is None: - raise SystemExit("ERROR: must set CIMEROOT environment variable") -sys.path.append(os.path.join(_CIMEROOT, "scripts", "lib", "CIME", "ParamGen")) +_CIME_CONF_DIR = os.path.abspath(os.path.dirname(__file__)) +_CIME_ROOT = os.path.join(_CIME_CONF_DIR, os.pardir, "cime") + +if not os.path.exists(_CIME_ROOT): + raise SystemExit("ERROR: Cannot find 'cime' directory. Did you run checkout_externals?") +sys.path.append(os.path.join(_CIME_ROOT, "scripts", "lib", "CIME", "ParamGen")) from paramgen import ParamGen ############################## @@ -53,19 +57,16 @@ def __init__(self, pg_data_dict): #Initialize ParamGen directly: super().__init__(pg_data_dict) - #Create namelist var/group dictionary: - self.__var_group_dict = {} - - for nml_group in self._data: - for var in self._data[nml_group]: + #Create namelist var/group dictionary, + #which used by the "append_user_nl_file" + #method: + self._var_group_dict = {} - #Check if variable already exists in dictionary: - if var in self.__var_group_dict: - #If so, then append group to list: - self.__var_group_dict[var].append(nml_group) - else: - #If not, then add variable and group to dictionary: - self.__var_group_dict[var] = [nml_group] + #Create empty dictionaries that will contain + #the namelist definition file and the set + #of all namelist groups and variables: + self._nml_def_groups = {} + self._nml_def_vars = {} #### @@ -80,34 +81,173 @@ def from_namelist_xml(cls, nml_xml_file): """ #Create ParamGen object using base class: - _pg_xml = ParamGen.from_xml_nml(nml_xml_file) + _pg_xml = ParamGen.from_xml_nml(nml_xml_file, no_duplicates=True) #Initialize new "atm_in" object: atm_in_pg = AtmInParamGen(_pg_xml.data) + #Check if the new ParamGen object has all of the required + #namelist elements: + #---------------- + missing_elems = atm_in_pg.check_nml_def_elems() + + if missing_elems: + emsg = "The XML namelist definition file:\n" + emsg += f"{nml_xml_file}\n" + emsg += "has namelist entries that are missing required elements.\n" + emsg += "Those entries and missing elements are:\n" + for entry_id, missing_elems in missing_elems.items(): + emsg += f"{entry_id} : {', '.join(missing_elems)}\n" + raise CamConfigValError(emsg) + #---------------- + + #Initialize file->group/var set dictionary: + atm_in_pg._nml_def_groups[nml_xml_file] = set() + atm_in_pg._nml_def_vars[nml_xml_file] = set() + + #Create namelist variable/group dictionary + #and associated sets: + #---------------- + for nml_group in atm_in_pg._data: + for var in atm_in_pg._data[nml_group]: + + #Check if variable already exists in dictionary: + if var in atm_in_pg._var_group_dict: + #No duplicate variables are allowed, even if + #in separate namelist groups, so 
raise an error. + #Please note that this error should always be + #caught earlier than this, so if it gets to this + #point something has gone seriously wrong: + emsg = f"Namelist entry id '{var}' exists" + emsg += f" in namelist group '{nml_group}'" + emsg += f" and '{self.__var_group_dict[var]}'\n" + emsg += "Namelist variables can belong to only one group." + raise SystemError(emsg) + else: + #If not, then add variable and group to dictionary: + atm_in_pg._var_group_dict[var] = nml_group + + #Add namelist groups and variables to their + #respective sets: + atm_in_pg._nml_def_groups[nml_xml_file].add(nml_group) + atm_in_pg._nml_def_vars[nml_xml_file].add(var) + + #---------------- + #Return object: return atm_in_pg #### - @classmethod - def from_user_nl_file(cls, user_nl_file): - """ - Reads in a given "user_nl_cam" file or equivalent and initializes - a AtmInParamGen object. This method is an alternative to the xml, - yaml, and json methods already available from the base ParamGen class. - ---------- - user_nl_file -> path (str) to namelist definition XML file + def check_nml_def_elems(self): """ + Function that checks if certain namelist definition + file elements/tags that are optional for ParamGen + but required by CAM/SIMA are present in the provided + ParamGen atm_in object. + """ - #Parse user_nl_cam file: - _data = AtmInParamGen._read_user_input(user_nl_file) + #Please note that "group" and "values" are automatically + #required by the ParamGen schema. - #Create new ParamGen object: - atm_in_paramgen_obj = AtmInParamGen(_data) + #Required namelist elements: + req_elems = ["type", "desc", "category"] + + #Set missing attributes dictionary: + missing_elems = {} + + #Assume it is a ParamGen object, and loop over namelist groups: + for nml_group in self._data: + #Now loop over variables in group: + for var in self._data[nml_group]: + #Lastly loop over required namelist elements: + for req_elem in req_elems: + #Check if required element is present: + if not req_elem in self._data[nml_group][var]: + #Add missing attribute to dictionary: + if var in missing_elems: + missing_elems[var].append(req_elem) + else: + missing_elems[var] = [req_elem] + #End if + #End if + #End for + #End for + #End for + + #Return missing elements dictionary: + return missing_elems + + #### - return atm_in_paramgen_obj + def append_atm_in_pg(self, atm_pg_obj): + + """ + Append a new AtmInParamGen object + to this one, ensuring that there are + no duplicate namelist groups or variables. + ---------- + atm_pg_obj -> An AtmInParamGen object + + """ + + #Make sure there is only one XML file associated with + #input PG object: + if len(atm_pg_obj._nml_def_groups.keys()) > 1: + emsg = "ParamGen object being appended to another must" + emsg += " be associated with only one namelist definition file." 
+ emsg += "\nInstead it is associated with the following files:\n" + emsg += "\n".join(atm_pg_obj._nml_def_groups.keys()) + raise CamConfigValError(emsg) + else: + #Extract namelist definition file name: + input_file = next(iter(atm_pg_obj._nml_def_groups)) + + #Extract the group and variable sets from input PG object: + input_groups = atm_pg_obj._nml_def_groups[input_file] + input_vars = atm_pg_obj._nml_def_vars[input_file] + + #Check that there are no matching namelist groups: + #------------------------------------------------ + for nml_file, nml_groups in self._nml_def_groups.items(): + + #Determine if any namelist groups are the same + #between the two objects: + same_groups = nml_groups.intersection(input_groups) + + #If so, then raise an error (as all namelist groups must be unique): + if same_groups: + emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" + emsg += " the following conflicting namelist groups:\n" + emsg += ", ".join(same_groups) + raise CamConfigValError(emsg) + + #------------------------------------------------ + + #Check that there are no matching namelist variables: + #------------------------------------------------ + for nml_file, nml_vars in self._nml_def_vars.items(): + + #Determine if any namelist groups are the same + #between the two objects: + same_vars = nml_vars.intersection(input_vars) + + #If so, then raise an error (as all namelist variable ids must be unique): + if same_vars: + emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" + emsg += " the following conflicting namelist variables:\n" + emsg += ", ".join(same_vars) + raise CamConfigValError(emsg) + + #------------------------------------------------ + + #Add input PG object dictionaries to this object's dicts: + self._nml_def_groups.update(atm_pg_obj._nml_def_groups) + self._nml_def_vars.update(atm_pg_obj._nml_def_vars) + + #Append input PG object to this object: + self.append(atm_pg_obj) #### @@ -115,12 +255,14 @@ def append_user_nl_file(self, user_nl_file): """ Reads in user_nl_cam files and converts them to the proper ParamGen syntax. + ---------- + user_nl_file -> path (str) to user_nl_cam file + """ _data = OrderedDict() with open(user_nl_file,'r') as user_file: within_comment_block = False - curr_group = "UNSET" for line in user_file: if len(line)>1: line_s = line.split() @@ -134,87 +276,49 @@ def append_user_nl_file(self, user_nl_file): continue if not within_comment_block and line_s[0][0] != "!": # not a single comment line either - # check format: - if curr_group == "UNSET" and line.strip()[0] == "&": - curr_group = line.strip()[1:] - elif curr_group != "UNSET" and line.strip()[0] == "/": - curr_group = "UNSET" - else: - #Join string elements back together: - line_j = ' '.join(line_s) - - # now parse the line: - if ("=" in line_j): - line_ss = line_j.split("=") - var_str = (line_ss[0]).strip() # the first element is the parameter name - val_str = ' '.join(line_ss[1:]) # the rest is tha value string - if '!' in val_str: - val_str = val_str.split("!")[0] # discard the comment in val str, if one exists - - #Check if variable already exists in group dictionary: - if var_str in self.__var_group_dict: - #Extract namelist group list for variable: - var_group_list = self.__var_group_dict[var_str] - - #Check if no group has been assigned (i.e. 
it is "Global"): - if curr_group == "UNSET": - #If only one group option exists, then assign that group: - if len(var_group_list) == 1: - data_group = var_group_list[0] - else: - #Raise an error stating that the user must - #specify the associated group: - group_list_str = ', '.join(var_group_list) - emsg = "Namelist variable '{}' is associated" - emsg += " with the following namelist groups:\n" - emsg += "{}\nPlease specify which group using" - emsg += " '&groupname' in '{}'" - raise CamConfigValError(emsg.format(var_str, group_list_str, - user_nl_file)) - - else: - #Check that the specified group matches one of - #the defined groups for that variable: - if curr_group in var_group_list: - #If so, then use specified group: - data_group = curr_group - else: - #If not, then raise an error: - emsg = "There is no variable '{}', associated with namelist group '{}'." - emsg += " Please double-check '{}'." - raise CamConfigValError(emsg.format(var_str, curr_group, - user_nl_file)) - - else: - #Raise error that namelist variable isn't listed in - #anywhere in a definition file: - emsg = "Variable '{}' not found in any namelist definition files." - emsg += " Please double-check '{}'." - raise CamConfigValError(emsg.format(var_str, user_nl_file)) - - #Add the namelist group if not already in data dict: - if not data_group in _data: - _data[data_group] = dict() - - #Check if variable already exists in data dictionary: - if var_str in _data[data_group]: - emsg = "Namelist variable '{}' listed more than once in '{}'" - emsg += "\nPlease either list the variable only once, or specify separate namelist groups" - emsg += "for each listed instance, if relevant." - raise CamConfigValError(emsg.format(var_str, user_nl_file)) - - #Enter the parameter in the dictionary: - _data[data_group][var_str] = {'values':val_str} + #Join string elements back together: + line_j = ' '.join(line_s) + + # now parse the line: + if ("=" in line_j): + line_ss = line_j.split("=") + var_str = (line_ss[0]).strip() # the first element is the parameter name + val_str = ' '.join(line_ss[1:]) # the rest is tha value string + if '!' in val_str: + val_str = val_str.split("!")[0] # discard the comment in val str, if one exists + + #Check if variable already exists in group dictionary: + if var_str in self._var_group_dict: + #Extract namelist group list for variable: + data_group = self._var_group_dict[var_str] + else: - emsg = "Cannot parse the following line in '{}' :\n'{}'" - raise CamConfigValError(emsg.format(user_nl_file, line)) + #Raise error that namelist variable isn't listed in + #anywhere in a definition file: + emsg = "Variable '{}' not found in any namelist definition files." + emsg += " Please double-check '{}'." + raise CamConfigValError(emsg.format(var_str, user_nl_file)) + + #Add the namelist group if not already in data dict: + if not data_group in _data: + _data[data_group] = {} + + #Check if variable already exists in data dictionary: + if var_str in _data[data_group]: + emsg = "Namelist variable '{}' set more than once in '{}'" + emsg += "\nPlease set each variable only once." + raise CamConfigValError(emsg.format(var_str, user_nl_file)) + + #Enter the parameter in the dictionary: + _data[data_group][var_str] = {'values':val_str} + else: + emsg = "Cannot parse the following line in '{}' :\n'{}'" + raise CamConfigValError(emsg.format(user_nl_file, line)) #Check if there is unclosed block: if within_comment_block: - raise CamConfigValError("Un-closed comment block! 
Please check '{}'".format(user_nl_file)) - if curr_group!="UNSET": - raise CamConfigValError("Un-closed namelist group block! Please check `{}`".format(user_nl_file)) + raise CamConfigValError(f"Un-closed comment block! Please check '{user_nl_file}'") #Create new ParamGen object: pg_user = ParamGen(_data) @@ -277,8 +381,15 @@ def write(self, output_path): #Check if variable value is a number or boolean: if var_type in num_bool_set: - #If so, then write value as-is: - atm_in_fil.write(" {} = {}\n".format(var, val)) + if var_type == 'logical': + #If logical, then write the associated truth value: + if self._is_nml_logical_true(var, val): + atm_in_fil.write(f" {var} = .true.\n") + else: + atm_in_fil.write(f" {var} = .false.\n") + else: + #If a number, then write value as-is: + atm_in_fil.write(" {} = {}\n".format(var, val)) elif "char*" in var_type: #Value is a string, so check if is already inside quotes: if val[0] in quote_set and val[-1] == val[0]: @@ -341,5 +452,109 @@ def __expand_func(self, varname): #Return value if found: return val + #### + + def _is_nml_logical_true(self, varname, var_val): + + """ + Checks if a "logical" XML namelist value is true or + false. + ---------- + varname -> The name of the variable being checked + var_val -> The value of the variable being checked + + doctests: + + 1. Check that a True value returns true: + >>> AtmInParamGen({})._is_nml_logical_true("test", True) + True + + 2. Check that a "true" value returns true: + >>> AtmInParamGen({})._is_nml_logical_true("test", "true") + True + + 3. Check that a ".true." value returns true: + >>> AtmInParamGen({})._is_nml_logical_true("test", ".true.") + True + + 4. Check that a "1" value returns true: + >>> AtmInParamGen({})._is_nml_logical_true("test", "1") + True + + 5. Check that a 1 (integer) value returns true: + >>> AtmInParamGen({})._is_nml_logical_true("test", 1) + True + + 6. Check that a False value returns false: + >>> AtmInParamGen({})._is_nml_logical_true("test", False) + False + + 7. Check that a "FALSE" value returns false: + >>> AtmInParamGen({})._is_nml_logical_true("test", "FALSE") + False + + 8. Check that a ".False." value returns false: + >>> AtmInParamGen({})._is_nml_logical_true("test", ".False.") + False + + 9. Check that a "0" value returns false: + >>> AtmInParamGen({})._is_nml_logical_true("test", "0") + False + + 10. Check that a 0 (integer) value returns false: + >>> AtmInParamGen({})._is_nml_logical_true("test", 0) + False + + 11. Check that a bad string value returns the correct error: + >>> AtmInParamGen({})._is_nml_logical_true("test", "this_wont_work") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigValError:... + XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 'this_wont_work' + + 12. Check that a bad integer value returns the correct error: + >>> AtmInParamGen({})._is_nml_logical_true("test", 3) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigValError:... + XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 3 + + 13. Check that a non-boolean, string or integer type returns an error: + >>> AtmInParamGen({})._is_nml_logical_true("test", 13.03) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigTypeError:... + XML namelist variable 'test' must have a value that is either a boolean, string, or integer, not float. 
+ + """ + + if isinstance(var_val, bool): + return var_val + elif isinstance(var_val, str): + if var_val.lower() in {"true", ".true.", "1"}: + return True + elif var_val.lower() in {"false", ".false.", "0"}: + return False + else: + emsg = f"\nXML namelist logical variable, '{varname}'" + emsg += ", must have a value of true, false, 1, or 0, not" + emsg += f" '{var_val}'" + raise CamConfigValError(emsg) + elif isinstance(var_val, int): + if var_val == 1: + return True + elif var_val == 0: + return False + else: + emsg = f"\nXML namelist logical variable, '{varname}'" + emsg += ", must have a value of true, false, 1, or 0, not" + emsg += f" {var_val}" + raise CamConfigValError(emsg) + else: + emsg = f"\nXML namelist variable '{varname}' must" + emsg += " have a value that is either a boolean, string, or integer," + emsg += f" not {type(var_val).__name__}." + raise CamConfigTypeError(emsg) + ############ #End of file diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 4ca07217..1b31f035 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -913,7 +913,8 @@ def __init__(self): "COMP_ROOT_DIR_ATM" : "/a/third/made-up/path", "CAM_CPPDEFS" : "-DTEST_CPPDEF -DNEW_TEST=5", "NTHRDS_ATM" : 1, - "RUN_STARTDATE" : "101" + "RUN_STARTDATE" : "101", + "DEBUG" : False } def get_value(self, key): diff --git a/test/run_tests.sh b/test/run_tests.sh index 33472859..9cd05709 100755 --- a/test/run_tests.sh +++ b/test/run_tests.sh @@ -53,7 +53,7 @@ if [ ! -d "cime_config" ]; then fi #CAM config classes doctests: -${PYTHON} cime_config/cam_config_classes.py +run_doctest cime_config/cam_config_classes.py # CAM config doctests: run_doctest cime_config/cam_config.py # CAM autogen doctests: @@ -64,6 +64,8 @@ run_doctest cime_config/cam_build_cache.py run_doctest cime_config/create_readnl_files.py # Registry generator doctests: run_doctest src/data/generate_registry_data.py +# ParamGen atm_in namelist writer doctests: +run_doctest cime_config/ # CAM config unit tests: run_unittest test/unit/cam_config_unit_tests.py # Registry generator unit tests: @@ -72,6 +74,8 @@ run_unittest test/unit/test_registry.py run_unittest test/unit/create_readnl_files_tests.py # Physics variable init (phys_init) generator unit tests: run_unittest test/unit/write_init_unit_tests.py +# ParamGen atm_in namelist writer unit tests: +run_unittest test/unit/test_atm_in_paramgen.py # Report if [ ${NUMERRORS} -gt 0 ]; then diff --git a/test/unit/cam_config_unit_tests.py b/test/unit/cam_config_unit_tests.py index ed7dbd38..d1d1d64d 100644 --- a/test/unit/cam_config_unit_tests.py +++ b/test/unit/cam_config_unit_tests.py @@ -37,7 +37,7 @@ # pylint: disable=wrong-import-position from cam_autogen import CamAutoGenError from cam_config import ConfigCAM -from cam_config import CamConfigTypeError, CamConfigValError +from cam_config_classes import CamConfigTypeError, CamConfigValError # pylint: enable=wrong-import-position #++++++++++++++++++++++++++++++++++++++++++ @@ -68,7 +68,8 @@ def __init__(self): "COMP_ROOT_DIR_ATM" : "/a/third/made-up/path", "CAM_CPPDEFS" : "UNSET", "NTHRDS_ATM" : 1, - "RUN_STARTDATE" : "101" + "RUN_STARTDATE" : "101", + "DEBUG" : False } def get_value(self, key): diff --git a/test/unit/sample_files/atm_in_files/test_attr_in b/test/unit/sample_files/atm_in_files/test_attr_in new file mode 100644 index 00000000..ffc2bd1f --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_attr_in @@ -0,0 +1,13 @@ +&bird_sounds_nl + duck_quack = .false. 
+ turkey_leg = 3.14d0 +/ + +&fairy_tales + straw_into_gold = 'Rumpelstiltskin' +/ + +&scary_stories + monkey_paw = 0 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml new file mode 100644 index 00000000..5eb75f53 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml @@ -0,0 +1,35 @@ + + + + + + + + logical + sci_fi + sci_fi_sounds_nl + + Does it go "pew-pew"? + + + .FALSE. + .TRUE. + + + + + integer + sci_fi + sci_fi_sounds_nl + + What are you doing with the + warp drive? + + + 0 + 1 + 1000000 + + + + diff --git a/test/unit/sample_files/atm_in_files/test_extra_nml_same_group.xml b/test/unit/sample_files/atm_in_files/test_extra_nml_same_group.xml new file mode 100644 index 00000000..2704bc1f --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_extra_nml_same_group.xml @@ -0,0 +1,36 @@ + + + + + + + + logical + sci_fi + + bird_sounds_nl + + Does it go "pew-pew"? + + + .FALSE. + .TRUE. + + + + + integer + sci_fi + sci_fi_sounds_nl + + What are you doing with the + warp drive? + + + 0 + 1 + 1000000 + + + + diff --git a/test/unit/sample_files/atm_in_files/test_extra_nml_same_var.xml b/test/unit/sample_files/atm_in_files/test_extra_nml_same_var.xml new file mode 100644 index 00000000..bad1f8ed --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_extra_nml_same_var.xml @@ -0,0 +1,48 @@ + + + + + + + + logical + sci_fi + sci_fi_sounds_nl + + Its a sci-fi duck! + + + true + False + + + + + logical + sci_fi + sci_fi_sounds_nl + + Does it go "pew-pew"? + + + .FALSE. + .TRUE. + + + + + integer + sci_fi + sci_fi_sounds_nl + + What are you doing with the + warp drive? + + + 0 + 1 + 1000000 + + + + diff --git a/test/unit/sample_files/atm_in_files/test_missing_elems.xml b/test/unit/sample_files/atm_in_files/test_missing_elems.xml new file mode 100644 index 00000000..77ec316e --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_missing_elems.xml @@ -0,0 +1,57 @@ + + + + + + + + + testing + bird_sounds_nl + + Do ducks quack? + + + true + False + + + + + real + testing + bird_sounds_nl + + How many kgs of turkey legs have + you eaten? + + + 3.14d0 + + + + + integer + more_testing + scary_stories + + How many wishes do you want? + Default: 0 + + + 0 + 3 + 5 + + + + + + fairy_tales + + Rumpelstiltskin + + + + + diff --git a/test/unit/sample_files/atm_in_files/test_multi_attr_in b/test/unit/sample_files/atm_in_files/test_multi_attr_in new file mode 100644 index 00000000..085ba7f5 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_multi_attr_in @@ -0,0 +1,13 @@ +&bird_sounds_nl + duck_quack = .false. + turkey_leg = 3.14d0 +/ + +&fairy_tales + straw_into_gold = 'Rumpelstiltskin' +/ + +&scary_stories + monkey_paw = 5 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_multi_xml_in b/test/unit/sample_files/atm_in_files/test_multi_xml_in new file mode 100644 index 00000000..a834d490 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_multi_xml_in @@ -0,0 +1,18 @@ +&bird_sounds_nl + duck_quack = .false. + turkey_leg = 3.14d0 +/ + +&fairy_tales + straw_into_gold = 'Rumpelstiltskin' +/ + +&scary_stories + monkey_paw = 0 +/ + +&sci_fi_sounds_nl + laser_beam = .true. 
+ warp_drive = 0 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_simple_atm_in b/test/unit/sample_files/atm_in_files/test_simple_atm_in new file mode 100644 index 00000000..18b6e6b2 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_simple_atm_in @@ -0,0 +1,13 @@ +&bird_sounds_nl + duck_quack = .true. + turkey_leg = 3.14d0 +/ + +&fairy_tales + straw_into_gold = 'Rumpelstiltskin' +/ + +&scary_stories + monkey_paw = 0 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml b/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml new file mode 100644 index 00000000..db3b3fac --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml @@ -0,0 +1,60 @@ + + + + + + + + logical + testing + bird_sounds_nl + + Do ducks quack? + + + true + False + + + + + real + testing + bird_sounds_nl + + How many kgs of turkey legs have + you eaten? + + + 3.14d0 + + + + + integer + more_testing + scary_stories + + How many wishes do you want? + Default: 0 + + + 0 + 3 + 5 + + + + + char*128 + more_testing + fairy_tales + + What do you think my name is? + + + Rumpelstiltskin + + + + diff --git a/test/unit/sample_files/atm_in_files/test_user_in b/test/unit/sample_files/atm_in_files/test_user_in new file mode 100644 index 00000000..a00925bf --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_in @@ -0,0 +1,13 @@ +&bird_sounds_nl + duck_quack = .true. + turkey_leg = 22.7 +/ + +&fairy_tales + straw_into_gold = "Rapunzel" +/ + +&scary_stories + monkey_paw = 0 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_bad_comment b/test/unit/sample_files/atm_in_files/test_user_nl_bad_comment new file mode 100644 index 00000000..5c3b252a --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_bad_comment @@ -0,0 +1,10 @@ +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value + +/* +Also add a multi-line comment block here +to make sure it works properly + +turkey_leg = 22.7 + +straw_into_gold = "Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_bad_format b/test/unit/sample_files/atm_in_files/test_user_nl_bad_format new file mode 100644 index 00000000..f73a2ab4 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_bad_format @@ -0,0 +1,11 @@ +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value + +/* +Also add a multi-line comment block here +to make sure it works properly +*/ + +turkey_leg 22.7 + +straw_into_gold = "Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_double_var b/test/unit/sample_files/atm_in_files/test_user_nl_double_var new file mode 100644 index 00000000..cd1dd895 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_double_var @@ -0,0 +1,13 @@ +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value + +/* +Also add a multi-line comment block here +to make sure it works properly +*/ + +turkey_leg = 22.7 + +turkey_leg = 27.2 + +straw_into_gold="Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_simple b/test/unit/sample_files/atm_in_files/test_user_nl_simple new file mode 100644 index 00000000..805b9fe3 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_simple @@ -0,0 +1,14 @@ +! Users should add all user specific namelist changes below in the form of +! 
namelist_var = new_namelist_value + +/* +Also add a multi-line comment block here +to make sure it works properly +*/ + +!Really make sure comments are properly handled: +!turkey_leg = 22.7 + +turkey_leg = 22.7 + + straw_into_gold="Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var b/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var new file mode 100644 index 00000000..1006e561 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var @@ -0,0 +1,13 @@ +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value + +/* +Also add a multi-line comment block here +to make sure it works properly +*/ + +banana_peel = "slippery" + +turkey_leg = 22.7 + + straw_into_gold="Rapunzel" diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py new file mode 100644 index 00000000..1a55a1d9 --- /dev/null +++ b/test/unit/test_atm_in_paramgen.py @@ -0,0 +1,670 @@ +""" +Python unit testing collection for the +AtmInParamGen namelist-generation class, +including its error-handling processes. + +To run these unit tests, simply type: + +python test_atm_in_paramgen.py + +or (for more verbose output): + +python test_atm_in_paramgen.py -v + +which will currently run XXX tests, all of which should pass. +""" + +#---------------------------------------- +#Import required python libraries/modules: +#---------------------------------------- +import logging +import os +import os.path +import sys +import glob +import filecmp +import xml.etree.ElementTree as ET + +#Python unit-testing library: +import unittest + +#Add directory to python path: +_TEST_DIR = os.path.abspath(os.path.dirname(__file__)) +_CAM_ROOT_DIR = os.path.join(_TEST_DIR, os.pardir, os.pardir) +_CIME_CONF_DIR = os.path.abspath(os.path.join(_CAM_ROOT_DIR, "cime_config")) + +_SAMPLES_DIR = os.path.join(os.path.join(_TEST_DIR, "sample_files"), "atm_in_files") +_PRE_TMP_DIR = os.path.join(_TEST_DIR, "tmp") +_TMP_DIR = os.path.join(_PRE_TMP_DIR, "atm_in_paramgen") + +#Check for all necessary directories: +if not os.path.exists(_CIME_CONF_DIR): + EMSG = "Cannot find cime_config directory where 'atm_in_paramgen.py' should be located." + raise ImportError(EMSG) + +if not os.path.exists(_SAMPLES_DIR): + raise ImportError("Cannot find sample files directory") + +#Add "cime_config" directory to python path: +sys.path.append(_CIME_CONF_DIR) + +#Import CAM configure objects: +# pylint: disable=wrong-import-position +from atm_in_paramgen import AtmInParamGen +from cam_config_classes import CamConfigValError +# pylint: enable=wrong-import-position + +################# +#Helper functions +################# + +def remove_files(file_list): + """Remove files in if they exist""" + for fpath in file_list: + if os.path.exists(fpath): + os.remove(fpath) + # End if + # End for + +################# + +#++++++++++++++++++++++++++++++++++++++++++ +#Create "fake" CIME case to test Config_CAM +#++++++++++++++++++++++++++++++++++++++++++ + +class FakeCase: + + # pylint: disable=too-few-public-methods + """ + Fake CIME case class with variables needed to test + the "Config_CAM" object. 
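+    Only the get_value method is implemented; it returns entries from a
+    small dictionary of case settings, or None for an unknown key.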
+ """ + + def __init__(self): + + + #Create dictionary (so get_value works properly): + self.conf_opts = { + "ATM_GRID" : "f19_f19_mg17", + "ATM_NX" : 180, + "ATM_NY" : 90, + "COMP_OCN" : "socn", + "COMP_ATM" : "cam", + "EXEROOT" : "/some/made-up/path", + "CASEROOT" : "/another/made-up/path", + "CAM_CONFIG_OPTS" : "-dyn none --physics-suites something;otherthing", + "COMP_ROOT_DIR_ATM" : "/a/third/made-up/path", + "CAM_CPPDEFS" : "UNSET", + "NTHRDS_ATM" : 1, + "RUN_STARTDATE" : "101", + "feel_lucky" : 1 #For testing + } + + def get_value(self, key): + + """ + Function used to return value + from conf_opts dictionary, + with the key as input. + """ + + if key in self.conf_opts: + val = self.conf_opts[key] + else: + val = None + + return val + + +#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +#Main AtmInParamGen testing routine, used when script is run directly +#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +class AtmInParamGenTestRoutine(unittest.TestCase): + + """ + Runs all AtmInParamGen tests, to ensure + that the class (and error-handling) methods + are running properly. + """ + + @classmethod + def setUpClass(cls): + + """Clean output directory (tmp) before running tests""" + #Does "tmp" directory exist? If not then create it: + if not os.path.exists(_PRE_TMP_DIR): + os.mkdir(_PRE_TMP_DIR) + + #Now check if ""atm_in_paramgen"" directory exists: + if not os.path.exists(_TMP_DIR): + os.mkdir(_TMP_DIR) + + #Clear out all files: + remove_files(glob.iglob(os.path.join(_TMP_DIR, '*.*'))) + + #Run inherited setup method: + super().setUpClass() + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a namelist can be built with a proper + #XML namelist definition file: + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_from_xml(self): + + """ + Check that AtmInParamGen can properly parse + an XML namelist defition file and generate + the correct "atm_in" fortran namelist file. + """ + + # Create fake CIME case: + fcase = FakeCase() + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_simple_atm_in") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, {}) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a namelist can be built with a proper + #XML namelist definition file and corresponding + #attributes/guards: + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_from_xml_using_attrs(self): + + """ + Check that AtmInParamGen can properly parse + an XML namelist defition file and generate + the correct "atm_in" fortran namelist file + when namelist attributes/guards are being used. 
+ """ + + # Create fake CIME case: + fcase = FakeCase() + + # Create namelist attribute dictionary: + nml_attr_dict = {"bird" : "goose", "never_read" : "0"} + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_attr_in") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, nml_attr_dict) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_attr_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a namelist can be built with a proper + #XML namelist definition file and multiple + #corresponding attributes/guards: + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_from_xml_using_multi_attrs(self): + + """ + Check that AtmInParamGen can properly parse + an XML namelist defition file and generate + the correct "atm_in" fortran namelist file + when multiple namelist attributes/guards are + being used. + """ + + # Create fake CIME case: + fcase = FakeCase() + + # Create namelist attribute dictionary: + nml_attr_dict = {"bird" : "goose", "never_read" : "1"} + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_multi_attr_in") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, nml_attr_dict) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_multi_attr_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a namelist with missing, required + #XML elements/tags fails with the correct error + #message + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_xml_missing_elems(self): + + """ + Check that AtmInParamGen throws the correct + error message when an XML namelist file + is missing required namelist entry elements. 
+ """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_missing_elems.xml") + + # Attempt to run ParamGen: + with self.assertRaises(CamConfigValError) as cerr: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Check exception message + emsg = "The XML namelist definition file:\n" + emsg += f"{xml_test_fil}\n" + emsg += "has namelist entries that are missing required elements.\n" + emsg += "Those entries and missing elements are:\n" + emsg += "duck_quack : type\n" + emsg += "straw_into_gold : type, desc, category\n" + + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that ParamGen can properly concantenate + #multiple XML namelist defition files into a single + #atm_in namelist file + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_mutli_xml_namelist_defs(self): + + """ + Check that using multiple XML namelist + definition files that are then appended + together works as expected. + """ + + # Create fake CIME case: + fcase = FakeCase() + + # Create namelist attribute dictionary: + nml_attr_dict = {"bird" : "goose", "spaceship" : "x-wing"} + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_multi_xml_in") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the extra PG object to the other: + pg_test.append_atm_in_pg(pg_ext) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, nml_attr_dict) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_multi_xml_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that attempting to append a ParamGen object + #that itself was the combination of multiple XML + #namelist defition files fails with the appropriate + #error + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_mutli_xml_append_multi(self): + + """ + Check that appending a ParamGen + object that is itself a + combination of multiple namelist + definition file-derived ParamGen + objects throws an error and that + the error message is correct. 
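+        (Each AtmInParamGen object records which namelist definition
+        file(s) it was built from; only an object tied to a single file
+        may be appended to another.)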
+ """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + third_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_same_group.xml") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_multi_xml_in") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + pg_third = AtmInParamGen.from_namelist_xml(third_xml_fil) + + # Append the extra PG object to the other: + pg_test.append_atm_in_pg(pg_ext) + + # Try to append the combined PG object to the third object: + with self.assertRaises(CamConfigValError) as cerr: + pg_third.append_atm_in_pg(pg_test) + + # Check exception message: + emsg = "ParamGen object being appended to another must" + emsg += " be associated with only one namelist definition file." + emsg += "\nInstead it is associated with the following files:\n" + emsg += f"{xml_test_fil}\n{extra_xml_fil}" + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that trying to combine multiple XML namelist + #defition files with the same namelist group + #fails with the appropriate error + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_mutli_xml_same_nl_group(self): + + """ + Check that using multiple XML namelist + definition files that have the same + namelist group throws an error and + that the error message is correct. + """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_same_group.xml") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the extra PG object to the other: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_atm_in_pg(pg_ext) + + # Check exception message: + emsg = f"Both\n'{xml_test_fil}'\nand\n'{extra_xml_fil}'\nhave" + emsg += " the following conflicting namelist groups:\n" + emsg += "bird_sounds_nl" + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that trying to combine multiple XML + #namelist defition files with the same namelist + #variable fails with the appropriate error + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_mutli_xml_same_nl_var(self): + + """ + Check that using multiple XML namelist + definition files that have the same + namelist entry id throws an error and + that the error message is correct. 
+ """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_same_var.xml") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the extra PG object to the other: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_atm_in_pg(pg_ext) + + # Check exception message: + emsg = f"Both\n'{xml_test_fil}'\nand\n'{extra_xml_fil}'\nhave" + emsg += " the following conflicting namelist variables:\n" + emsg += "duck_quack" + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file properly modifies + #an associated atm_in file + #++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_mode_from_user_nl_cam(self): + + """ + Check that AtmInParamGen can properly change + the value of a namelist entry based on + a provided user_nl_cam file. + """ + + # Create fake CIME case: + fcase = FakeCase() + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get "user_nl_cam" file path: + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_simple") + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_user_in") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Append user_nl_cam file: + pg_test.append_user_nl_file(user_nl_fil) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, {}) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_user_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with an un-closed + #block comment fails with the appropriate error + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_user_nl_unclosed_comment(self): + + """ + Check that a user_nl_cam file with an + un-closed block comment throws an error + and that the error message is correct. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get "user_nl_cam" file path: + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_bad_comment") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Attempt to append user_nl_cam file: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_user_nl_file(user_nl_fil) + + # Check exception message: + emsg = f"Un-closed comment block! 
Please check '{user_nl_fil}'" + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with an improperly + #formatted namelist entry fails with the + #appropriate error + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_user_nl_bad_format_entry(self): + + """ + Check that a user_nl_cam file with a + namelist entry that is missing an equals + sign throws an error and that the error + message is correct. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get "user_nl_cam" file path: + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_bad_format") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Attempt to append user_nl_cam file: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_user_nl_file(user_nl_fil) + + # Check exception message: + emsg = f"Cannot parse the following line in '{user_nl_fil}' :\n'turkey_leg 22.7\n'" + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with a namelist + #entry that is not currently present within the + #AtmInParamGen object fails with the appropriate + #error + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_user_nl_undefined_entry(self): + + """ + Check that a user_nl_cam file with a + namelist entry that has yet to be defined + in an namelist definition file throws an + error and that the error message is correct. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get "user_nl_cam" file path: + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_undefined_var") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Attempt to append user_nl_cam file: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_user_nl_file(user_nl_fil) + + # Check exception message: + emsg = "Variable 'banana_peel' not found in any namelist definition files." + emsg += f" Please double-check '{user_nl_fil}'." + self.assertEqual(emsg, str(cerr.exception)) + + #++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with a repeated + #namelist entry fails with the appropriate + #error message. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_namelist_user_nl_double_entry(self): + + """ + Check that a user_nl_cam file with a + namelist entry that has is included twice + in the file throws an error and that the + error message is correct. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Get "user_nl_cam" file path: + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_double_var") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Attempt to append user_nl_cam file: + with self.assertRaises(CamConfigValError) as cerr: + pg_test.append_user_nl_file(user_nl_fil) + + # Check exception message: + emsg = f"Namelist variable 'turkey_leg' set more than once in '{user_nl_fil}'" + emsg += "\nPlease set each variable only once." 
+ self.assertEqual(emsg, str(cerr.exception)) + +################################################# +#Run unit tests if this script is called directly +################################################# + +if __name__ == "__main__": + unittest.main() + +############ +#End of file +############ From 5a0bac79c43920960f654286c9edaf61a00f5518 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 8 Feb 2022 10:36:57 -0700 Subject: [PATCH 11/33] Fix some bugs to allow CAMDEN to build/run. --- Externals_CAM.cfg | 2 +- cime_config/cam_autogen.py | 8 +++++--- cime_config/cam_build_cache.py | 6 ++++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/Externals_CAM.cfg b/Externals_CAM.cfg index 449426ad..2fe3eef2 100644 --- a/Externals_CAM.cfg +++ b/Externals_CAM.cfg @@ -2,7 +2,7 @@ local_path = ccpp_framework protocol = git repo_url = https://github.com/gold2718/ccpp-framework -tag = CPF_0.2.030 +tag = CPF_0.2.032 required = True [cosp2] diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index 5d1097a5..75882fe1 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -604,6 +604,9 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, preproc_cache_str = 'UNSET' # end if + # Initialize namelist generation logical: + do_gen_nl = False + if os.path.exists(genccpp_dir): do_gen_ccpp = force or build_cache.ccpp_mismatch(sdfs, scheme_files, host_files, @@ -612,14 +615,13 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, else: os.makedirs(genccpp_dir) do_gen_ccpp = True + do_gen_nl = True # End if create_nl_file = os.path.join(_CIME_CONFIG_DIR, "create_readnl_files.py") - if xml_files: + if not do_gen_nl: do_gen_nl = force or build_cache.xml_nl_mismatch(create_nl_file, xml_files) - else: - do_gen_nl = False # end if if do_gen_nl: args = [] diff --git a/cime_config/cam_build_cache.py b/cime_config/cam_build_cache.py index 7d263845..441ad77f 100644 --- a/cime_config/cam_build_cache.py +++ b/cime_config/cam_build_cache.py @@ -371,8 +371,10 @@ def write(self): new_xml_entry(ccpp, 'scheme_namelist_meta_file', sfile, FileStatus.sha1sum(sfile)) # end for - scheme_nlgroups = ET.SubElement(ccpp, 'scheme_namelist_groups') - scheme_nlgroups.text = " ".join(self.__scheme_nl_groups) + if self.__scheme_nl_groups: + scheme_nlgroups = ET.SubElement(ccpp, 'scheme_namelist_groups') + scheme_nlgroups.text = " ".join(self.__scheme_nl_groups) + #end if new_xml_entry(ccpp, 'create_nl_file', self.__create_nl_file.file_path, self.__create_nl_file.file_hash) From a8e128e154fff5d442e77367a9071e1758104cce Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 8 Feb 2022 12:06:16 -0700 Subject: [PATCH 12/33] Fix python doctests. --- cime_config/cam_autogen.py | 8 ++++---- cime_config/cam_config.py | 6 +++--- test/run_tests.sh | 2 +- test/unit/sample_files/write_init_files/suite_bad.xml | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index 75882fe1..b2891bac 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -500,7 +500,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, 2. 
Check that the correct error is raised when a scheme's metadata file cannot be found: - >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "bad", \ + >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "bad_suite", \ TEST_ATM_ROOT, TEST_BLDROOT, TEST_REG_DIR, \ TEST_REGFILES, TEST_SOURCE_MODS_DIR, \ False) #doctest: +ELLIPSIS @@ -510,7 +510,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, 3. Check that generate_physics_suites works properly when good inputs are provided: - >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "simple", \ + >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "simple_suite", \ TEST_ATM_ROOT, TEST_BLDROOT, TEST_REG_DIR, \ TEST_REGFILES, TEST_SOURCE_MODS_DIR, \ False) #doctest: +ELLIPSIS @@ -555,7 +555,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, scheme_files = [] xml_files = {} # key is scheme, value is xml file path for sdf in phys_suites_str.split(';'): - sdf_path = suite_name_path_dict[sdf] + sdf_path = suite_name_path_dict.get(sdf) if not sdf_path: emsg = "ERROR: Unable to find SDF for suite '{}'" raise CamAutoGenError(emsg.format(sdf)) @@ -820,7 +820,7 @@ def ccpp_mismatch(self, sdfs, scheme_files, host_files, # actual generation routines when performing doctests: return False - def xml_nl_mismatch(self, xml_files): + def xml_nl_mismatch(self, create_nl_file, xml_files): # Always return False, in order to avoid running the # actual generation routines when performing doctests: return False diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 1b31f035..4ecc6cfd 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -66,15 +66,15 @@ class ConfigCAM: >>> FCONFIG.create_config("test_int", "test object description", 5) Traceback (most recent call last): - ... - CamConfigValError: ERROR: The CAM config variable, 'test_int', already exists! Any new config variable must be given a different name + ... + cam_config_classes.CamConfigValError: ERROR: The CAM config variable, 'test_int', already exists! Any new config variable must be given a different name 3. Check that a configure object's given value must be either a string, integer or list: >>> FCONFIG.create_config("test_dict", "test_object_description", {"x": "y"}) Traceback (most recent call last): ... - CamConfigTypeError: ERROR: The input value for new CAM config variable, 'test_dict', must be an integer, string, or list, not + cam_config_classes.CamConfigTypeError: ERROR: The input value for new CAM config variable, 'test_dict', must be an integer, string, or list, not """ diff --git a/test/run_tests.sh b/test/run_tests.sh index 9cd05709..2656dd2d 100755 --- a/test/run_tests.sh +++ b/test/run_tests.sh @@ -65,7 +65,7 @@ run_doctest cime_config/create_readnl_files.py # Registry generator doctests: run_doctest src/data/generate_registry_data.py # ParamGen atm_in namelist writer doctests: -run_doctest cime_config/ +run_doctest cime_config/atm_in_paramgen.py # CAM config unit tests: run_unittest test/unit/cam_config_unit_tests.py # Registry generator unit tests: diff --git a/test/unit/sample_files/write_init_files/suite_bad.xml b/test/unit/sample_files/write_init_files/suite_bad.xml index b1dd7a54..447b131b 100644 --- a/test/unit/sample_files/write_init_files/suite_bad.xml +++ b/test/unit/sample_files/write_init_files/suite_bad.xml @@ -6,7 +6,7 @@ doctests, to test that a missing metadata file raises the correct error. 
--> - + bad_scheme From 39a9776956e0cc5387a1f2fde330034d9a340ba0 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 8 Feb 2022 15:06:27 -0700 Subject: [PATCH 13/33] Fix python unit tests. --- cime_config/cam_config.py | 2 +- test/unit/cam_config_unit_tests.py | 186 ++++-------------- .../write_init_files/host_var_host.meta | 2 +- .../simple_build_cache_template.xml | 2 +- .../write_init_files/simple_host.meta | 2 +- .../write_init_files/temp_adjust.meta | 6 +- .../write_init_files/temp_adjust_4D.meta | 6 +- .../write_init_files/temp_adjust_bvd.meta | 6 +- .../temp_adjust_no_horiz.meta | 6 +- .../write_init_files/temp_adjust_noreq.meta | 6 +- .../write_init_files/temp_adjust_param.meta | 6 +- .../write_init_files/temp_adjust_scalar.meta | 6 +- 12 files changed, 65 insertions(+), 171 deletions(-) diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 4ecc6cfd..5ad9f6f9 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -838,7 +838,7 @@ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict): #If not, then throw an error: emsg = "physics_suite specified in user_nl_cam, '{}', does not\n" emsg += "match the suite listed in CAM_CONFIG_OPTS: '{}'" - raise CamConfigValError(emsg.format(user_nl_pg_dict['physics_suite'], + raise CamConfigValError(emsg.format(phys_nl_val, phys_suites[0])) else: diff --git a/test/unit/cam_config_unit_tests.py b/test/unit/cam_config_unit_tests.py index d1d1d64d..1acca407 100644 --- a/test/unit/cam_config_unit_tests.py +++ b/test/unit/cam_config_unit_tests.py @@ -64,7 +64,7 @@ def __init__(self): "COMP_ATM" : "cam", "EXEROOT" : "/some/made-up/path", "CASEROOT" : "/another/made-up/path", - "CAM_CONFIG_OPTS" : "-dyn none --physics-suites adiabatic;kessler", + "CAM_CONFIG_OPTS" : "-dyn none --physics-suites mango;papaya", "COMP_ROOT_DIR_ATM" : "/a/third/made-up/path", "CAM_CPPDEFS" : "UNSET", "NTHRDS_ATM" : 1, @@ -308,24 +308,19 @@ def test_config_ccpp_phys_set_check_single_suite(self): #Set "new" physics_suites value with one physics suite: - self.test_config_cam.set_value("physics_suites", "kessler") + self.test_config_cam.set_value("physics_suites", "papaya") #Create (empty) namelist attribute dictionary: cam_nml_attr_dict = {} - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite = "kessler"\n') + #Create fake 'atm_in' ParamGen dictionary: + phys_nl_pg_dict = {'physics_suite': {'values': 'papaya'}} #Run ccpp_phys_set config method: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") + self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, phys_nl_pg_dict) #Check that dictonary entries are correct: - self.assertEqual(cam_nml_attr_dict["phys_suite"], "kessler") - - #Remove text file: - os.remove("test.txt") + self.assertEqual(cam_nml_attr_dict["phys_suite"], "papaya") #Set physics_suites back to its original value: self.test_config_cam.set_value("physics_suites", cam_config_suites_orig) @@ -346,19 +341,14 @@ def test_config_ccpp_phys_set_check_multi_suite(self): #Create namelist attribute dictionary: cam_nml_attr_dict = {} - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite = "adiabatic"\n') + #Create fake 'atm_in' ParamGen dictionary: + phys_nl_pg_dict = {'physics_suite': {'values': 'mango'}} #Run ccpp_phys_set config method: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") + 
self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, phys_nl_pg_dict) #Check that dictonary entries are correct: - self.assertEqual(cam_nml_attr_dict["phys_suite"], "adiabatic") - - #Remove text file: - os.remove("test.txt") + self.assertEqual(cam_nml_attr_dict["phys_suite"], "mango") #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ #Check "ccpp_phys_set" missing "physics_suite" error-handling @@ -375,160 +365,67 @@ def test_config_ccpp_phys_set_missing_phys(self): #Create namelist attribute dictionary: cam_nml_attr_dict = {} + #Create fake 'atm_in' ParamGen dictionary: + phys_nl_pg_dict = {'physics_suite': {'values': 'UNSET'}} + #Set error message: ermsg = "No 'physics_suite' variable is present in user_nl_cam.\n" - ermsg += "This is required if more than one suite is listed\n" - ermsg += "in CAM_CONFIG_OPTS." - - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') + ermsg += "This is required because more than one suite is listed\n" + ermsg += "in CAM_CONFIG_OPTS: 'mango;papaya'" #Expect "CamConfigValError": with self.assertRaises(CamConfigValError) as valerr: #Run ccpp_phys_set config method, which should fail #due to missing "physics_suite" namelist variable: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") + self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, phys_nl_pg_dict) #Check that error message matches what's expected: self.assertEqual(ermsg, str(valerr.exception)) - #Remove text file: - os.remove("test.txt") - - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - #Check "ccpp_phys_set" multiple namelist entries error-handling - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + #Check "ccpp_phys_set" non-matching physics_suite error-handling + #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - def test_config_ccpp_phys_set_two_phys(self): + def test_config_ccpp_phys_set_no_physics_suite_match(self): """ Check that "ccpp_phys_set" throws the proper - error if there is more than one CCPP suite and - more than one "physics_suite" namelist variable. + error if the "physics_suite" namelist variable + value doesn't match any of the options listed + in "CAM_CONFIG_OPTS". """ #Create namelist attribute dictionary: cam_nml_attr_dict = {} - #Set error message: - ermsg = "More than one 'physics_suite' variable is present in user_nl_cam.\n" - ermsg += "Only one 'physics_suite' line is allowed." 
- - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite = "adiabatic"\n') - test_fil.write('physics_suite = "kessler"\n') - - #Expect "CamConfigValError": - with self.assertRaises(CamConfigValError) as valerr: - #Run ccpp_phys_set config method, which should fail - #due to multiple "physics_suite" namelist variable: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") - - #Check that error message matches what's expected: - self.assertEqual(ermsg, str(valerr.exception)) - - #Remove text file: - os.remove("test.txt") - - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - #Check "ccpp_phys_set" missing equals-sign error-handling - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - - def test_config_ccpp_phys_set_missing_equals(self): - - """ - Check that "ccpp_phys_set" throws the proper - error if there is a missing equals (=) sign - after the "physics_suite" namelist variable. - """ - - #Create namelist attribute dictionary: - cam_nml_attr_dict = {} + #Create fake 'atm_in' ParamGen dictionary: + phys_nl_pg_dict = {'physics_suite': {'values': 'starfruit'}} #Set error message: - ermsg = "No equals (=) sign was found with the 'physics_suite' variable." - - - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite "adiabatic"\n') + ermsg = "physics_suite specified in user_nl_cam, 'starfruit', doesn't match any suites\n" + ermsg += "listed in CAM_CONFIG_OPTS: 'mango;papaya'" #Expect "CamConfigValError": with self.assertRaises(CamConfigValError) as valerr: #Run ccpp_phys_set config method, which should fail - #due to a missing equals sign in the namelist entry: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") - - #Check that error message matches what's expected: - self.assertEqual(ermsg, str(valerr.exception)) - - #Remove text file: - os.remove("test.txt") - - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - #Check "ccpp_phys_set" multiple equals-signs error-handling - #++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - - def test_config_ccpp_phys_set_two_equals(self): - - """ - Check that "ccpp_phys_set" throws the proper - error if there is more than one equals (=) sign - after the "physics_suite" namelist variable. - """ - - #Create namelist attribute dictionary: - cam_nml_attr_dict = {} - - #Set error message: - ermsg = "There must only be one equals (=) sign in the 'physics_suite' namelist line." 
- - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite == "adiabatic"\n') - - #Expect "CamConfigValError": - with self.assertRaises(CamConfigValError) as valerr: - #Run ccpp_phys_set config method, which should fail - #due to an incorrect number of equal signs in the - #namelist entry: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") + #due to a mis-match between the "physics_suite" namelist + #variable and the physics suite options listed in the + #physics_suites config variable: + self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, phys_nl_pg_dict) #Check that error message matches what's expected: self.assertEqual(ermsg, str(valerr.exception)) - #Remove text file: - os.remove("test.txt") + #----- + #Same test, but with only one physics suite available: + #----- - #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - #Check "ccpp_phys_set" non-matching physics_suite error-handling - #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - - def test_config_ccpp_phys_set_no_physics_suite_match(self): - - """ - Check that "ccpp_phys_set" throws the proper - error if the "physics_suite" namelist variable - value doesn't match any of the options listed - in "CAM_CONFIG_OPTS". - """ - - #Create namelist attribute dictionary: - cam_nml_attr_dict = {} - - #Set error message: - ermsg = "physics_suite specified in user_nl_cam, 'cam6', doesn't match any suites\n" - ermsg += "listed in CAM_CONFIG_OPTS" + #Set "new" physics_suites value with one physics suite: + self.test_config_cam.set_value("physics_suites", "papaya") - #Create namelist file: - with open("test.txt", "w", encoding='UTF-8') as test_fil: - test_fil.write('!Namelist test file\n') - test_fil.write('physics_suite = "cam6"\n') + #Set new error message: + ermsg = "physics_suite specified in user_nl_cam, 'starfruit', does not\n" + ermsg += "match the suite listed in CAM_CONFIG_OPTS: 'papaya'" #Expect "CamConfigValError": with self.assertRaises(CamConfigValError) as valerr: @@ -536,14 +433,11 @@ def test_config_ccpp_phys_set_no_physics_suite_match(self): #due to a mis-match between the "physics_suite" namelist #variable and the physics suite options listed in the #physics_suites config variable: - self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, "test.txt") + self.test_config_cam.ccpp_phys_set(cam_nml_attr_dict, phys_nl_pg_dict) #Check that error message matches what's expected: self.assertEqual(ermsg, str(valerr.exception)) - #Remove text file: - os.remove("test.txt") - ################################################# #Run unit tests if this script is called directly diff --git a/test/unit/sample_files/write_init_files/host_var_host.meta b/test/unit/sample_files/write_init_files/host_var_host.meta index 6ab3f971..66745d6e 100644 --- a/test/unit/sample_files/write_init_files/host_var_host.meta +++ b/test/unit/sample_files/write_init_files/host_var_host.meta @@ -32,7 +32,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/simple_build_cache_template.xml b/test/unit/sample_files/write_init_files/simple_build_cache_template.xml index cb784b1a..c665190a 100644 --- a/test/unit/sample_files/write_init_files/simple_build_cache_template.xml +++ b/test/unit/sample_files/write_init_files/simple_build_cache_template.xml 
@@ -10,7 +10,7 @@ - + TAG3 UNSET kind_phys=REAL64 diff --git a/test/unit/sample_files/write_init_files/simple_host.meta b/test/unit/sample_files/write_init_files/simple_host.meta index 0a50a1e9..3794185e 100644 --- a/test/unit/sample_files/write_init_files/simple_host.meta +++ b/test/unit/sample_files/write_init_files/simple_host.meta @@ -32,7 +32,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust.meta b/test/unit/sample_files/write_init_files/temp_adjust.meta index 09344292..d132fdf1 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust.meta @@ -41,7 +41,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -59,7 +59,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -77,7 +77,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_4D.meta b/test/unit/sample_files/write_init_files/temp_adjust_4D.meta index 103a0298..5b543393 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_4D.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_4D.meta @@ -41,7 +41,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -59,7 +59,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -77,7 +77,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_bvd.meta b/test/unit/sample_files/write_init_files/temp_adjust_bvd.meta index abc2b9ed..2c48d913 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_bvd.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_bvd.meta @@ -41,7 +41,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -59,7 +59,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -77,7 +77,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_no_horiz.meta b/test/unit/sample_files/write_init_files/temp_adjust_no_horiz.meta index 5a7ec3a9..d3393591 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_no_horiz.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_no_horiz.meta @@ -41,7 +41,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error 
message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -59,7 +59,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -77,7 +77,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_noreq.meta b/test/unit/sample_files/write_init_files/temp_adjust_noreq.meta index 2bca7fb3..9003b023 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_noreq.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_noreq.meta @@ -27,7 +27,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -45,7 +45,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -63,7 +63,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_param.meta b/test/unit/sample_files/write_init_files/temp_adjust_param.meta index 03c1bc0f..89ad88ec 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_param.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_param.meta @@ -48,7 +48,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -66,7 +66,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -84,7 +84,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 diff --git a/test/unit/sample_files/write_init_files/temp_adjust_scalar.meta b/test/unit/sample_files/write_init_files/temp_adjust_scalar.meta index bfd1d968..bd727e1e 100644 --- a/test/unit/sample_files/write_init_files/temp_adjust_scalar.meta +++ b/test/unit/sample_files/write_init_files/temp_adjust_scalar.meta @@ -41,7 +41,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -59,7 +59,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 @@ -77,7 +77,7 @@ [ errmsg ] standard_name = ccpp_error_message long_name = Error message for error handling in CCPP - units = 1 + units = none dimensions = () type = character kind = len=512 From 2c5b8765307ac2ca6d70879fa983a2c07125f7bc Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 9 Feb 2022 13:41:39 -0700 Subject: [PATCH 14/33] Fix pylint concerns. 
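For reference, a minimal usage sketch of the AtmInParamGen workflow exercised
by test_atm_in_paramgen.py; the file names and the attribute dictionary are
placeholders, and "case" stands in for a CIME case object (the unit tests use
the FakeCase stand-in):

    from atm_in_paramgen import AtmInParamGen

    # Build a ParamGen object from an XML namelist definition file:
    pg = AtmInParamGen.from_namelist_xml("namelist_definition_example.xml")

    # Overlay any user-supplied values from a user_nl_cam file:
    pg.append_user_nl_file("user_nl_cam")

    # Resolve values for this case; the dictionary supplies the namelist
    # attributes (guards) used to select among XML default values:
    pg.reduce_atm_in(case, {"phys_suite": "mango"})

    # Write the resolved Fortran namelist file:
    pg.write("atm_in")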
--- cime_config/atm_in_paramgen.py | 297 +++++++++++++++-------------- cime_config/buildlib | 10 +- cime_config/buildnml | 25 +-- cime_config/cam_autogen.py | 10 +- cime_config/cam_build_cache.py | 32 ++-- cime_config/cam_config.py | 26 +-- cime_config/cam_config_classes.py | 24 +-- src/data/generate_registry_data.py | 225 +++++++++++----------- src/data/write_init_files.py | 28 +-- test/.pylintrc | 5 +- test/pylint_test.sh | 1 + test/run_tests.sh | 4 +- 12 files changed, 333 insertions(+), 354 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 652fde8f..f9aa24f7 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -12,7 +12,6 @@ import os import os.path import sys -import re from collections import OrderedDict #CAM specific config error: from cam_config_classes import CamConfigValError @@ -28,11 +27,123 @@ if not os.path.exists(_CIME_ROOT): raise SystemExit("ERROR: Cannot find 'cime' directory. Did you run checkout_externals?") sys.path.append(os.path.join(_CIME_ROOT, "scripts", "lib", "CIME", "ParamGen")) +#pylint: disable=wrong-import-position from paramgen import ParamGen +#pylint: enable=wrong-import-position -############################## +################# +#HELPER FUNCTIONS +################# + +def _is_nml_logical_true(varname, var_val): + + """ + Checks if a "logical" XML namelist value is true or + false. + ---------- + varname -> The name of the variable being checked + var_val -> The value of the variable being checked + + doctests: + + 1. Check that a True value returns true: + >>> _is_nml_logical_true("test", True) + True + + 2. Check that a "true" value returns true: + >>> _is_nml_logical_true("test", "true") + True + + 3. Check that a ".true." value returns true: + >>> _is_nml_logical_true("test", ".true.") + True + + 4. Check that a "1" value returns true: + >>> _is_nml_logical_true("test", "1") + True + + 5. Check that a 1 (integer) value returns true: + >>> _is_nml_logical_true("test", 1) + True + + 6. Check that a False value returns false: + >>> _is_nml_logical_true("test", False) + False + + 7. Check that a "FALSE" value returns false: + >>> _is_nml_logical_true("test", "FALSE") + False + + 8. Check that a ".False." value returns false: + >>> _is_nml_logical_true("test", ".False.") + False + + 9. Check that a "0" value returns false: + >>> _is_nml_logical_true("test", "0") + False + + 10. Check that a 0 (integer) value returns false: + >>> _is_nml_logical_true("test", 0) + False + + 11. Check that a bad string value returns the correct error: + >>> _is_nml_logical_true("test", "this_wont_work") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigValError:... + XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 'this_wont_work' + + 12. Check that a bad integer value returns the correct error: + >>> _is_nml_logical_true("test", 3) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigValError:... + XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 3 + + 13. Check that a non-boolean, string or integer type returns an error: + >>> _is_nml_logical_true("test", 13.03) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + cam_config_classes.CamConfigTypeError:... + XML namelist variable 'test' must have a value that is either a boolean, string, or integer, not float. 
+ + """ + + if isinstance(var_val, bool): + return var_val + if isinstance(var_val, str): + if var_val.lower() in {"true", ".true.", "1"}: + return True + if var_val.lower() in {"false", ".false.", "0"}: + return False + + #Raise error if no match was found: + emsg = f"\nXML namelist logical variable, '{varname}'" + emsg += ", must have a value of true, false, 1, or 0, not" + emsg += f" '{var_val}'" + raise CamConfigValError(emsg) + + if isinstance(var_val, int): + if var_val == 1: + return True + if var_val == 0: + return False + + #Raise error if no match was found: + emsg = f"\nXML namelist logical variable, '{varname}'" + emsg += ", must have a value of true, false, 1, or 0, not" + emsg += f" {var_val}" + raise CamConfigValError(emsg) + + #Type is un-recognizeda, so raise an error: + emsg = f"\nXML namelist variable '{varname}' must" + emsg += " have a value that is either a boolean, string, or integer," + emsg += f" not {type(var_val).__name__}." + raise CamConfigTypeError(emsg) + +################################################################ # MAIN "atm_in" ParamGen class -############################## +################################################################ class AtmInParamGen(ParamGen): """ @@ -60,13 +171,17 @@ def __init__(self, pg_data_dict): #Create namelist var/group dictionary, #which used by the "append_user_nl_file" #method: - self._var_group_dict = {} + self.var_group_dict = {} #Create empty dictionaries that will contain #the namelist definition file and the set #of all namelist groups and variables: - self._nml_def_groups = {} - self._nml_def_vars = {} + self.nml_def_groups = {} + self.nml_def_vars = {} + + #Set variables needed for ParamGen "reduction": + self.__case = None + self.__atm_attr_dict = None #### @@ -102,8 +217,8 @@ def from_namelist_xml(cls, nml_xml_file): #---------------- #Initialize file->group/var set dictionary: - atm_in_pg._nml_def_groups[nml_xml_file] = set() - atm_in_pg._nml_def_vars[nml_xml_file] = set() + atm_in_pg.nml_def_groups[nml_xml_file] = set() + atm_in_pg.nml_def_vars[nml_xml_file] = set() #Create namelist variable/group dictionary #and associated sets: @@ -112,7 +227,7 @@ def from_namelist_xml(cls, nml_xml_file): for var in atm_in_pg._data[nml_group]: #Check if variable already exists in dictionary: - if var in atm_in_pg._var_group_dict: + if var in atm_in_pg.var_group_dict: #No duplicate variables are allowed, even if #in separate namelist groups, so raise an error. #Please note that this error should always be @@ -120,17 +235,17 @@ def from_namelist_xml(cls, nml_xml_file): #point something has gone seriously wrong: emsg = f"Namelist entry id '{var}' exists" emsg += f" in namelist group '{nml_group}'" - emsg += f" and '{self.__var_group_dict[var]}'\n" + emsg += f" and '{atm_in_pg.var_group_dict[var]}'\n" emsg += "Namelist variables can belong to only one group." 
raise SystemError(emsg) - else: - #If not, then add variable and group to dictionary: - atm_in_pg._var_group_dict[var] = nml_group - #Add namelist groups and variables to their - #respective sets: - atm_in_pg._nml_def_groups[nml_xml_file].add(nml_group) - atm_in_pg._nml_def_vars[nml_xml_file].add(var) + #If not, then add variable and group to dictionary: + atm_in_pg.var_group_dict[var] = nml_group + + #Add namelist groups and variables to their + #respective sets: + atm_in_pg.nml_def_groups[nml_xml_file].add(nml_group) + atm_in_pg.nml_def_vars[nml_xml_file].add(var) #---------------- @@ -194,23 +309,23 @@ def append_atm_in_pg(self, atm_pg_obj): #Make sure there is only one XML file associated with #input PG object: - if len(atm_pg_obj._nml_def_groups.keys()) > 1: + if len(atm_pg_obj.nml_def_groups.keys()) > 1: emsg = "ParamGen object being appended to another must" emsg += " be associated with only one namelist definition file." emsg += "\nInstead it is associated with the following files:\n" - emsg += "\n".join(atm_pg_obj._nml_def_groups.keys()) + emsg += "\n".join(atm_pg_obj.nml_def_groups.keys()) raise CamConfigValError(emsg) - else: - #Extract namelist definition file name: - input_file = next(iter(atm_pg_obj._nml_def_groups)) + + #Extract namelist definition file name: + input_file = next(iter(atm_pg_obj.nml_def_groups)) #Extract the group and variable sets from input PG object: - input_groups = atm_pg_obj._nml_def_groups[input_file] - input_vars = atm_pg_obj._nml_def_vars[input_file] + input_groups = atm_pg_obj.nml_def_groups[input_file] + input_vars = atm_pg_obj.nml_def_vars[input_file] #Check that there are no matching namelist groups: #------------------------------------------------ - for nml_file, nml_groups in self._nml_def_groups.items(): + for nml_file, nml_groups in self.nml_def_groups.items(): #Determine if any namelist groups are the same #between the two objects: @@ -227,7 +342,7 @@ def append_atm_in_pg(self, atm_pg_obj): #Check that there are no matching namelist variables: #------------------------------------------------ - for nml_file, nml_vars in self._nml_def_vars.items(): + for nml_file, nml_vars in self.nml_def_vars.items(): #Determine if any namelist groups are the same #between the two objects: @@ -243,8 +358,8 @@ def append_atm_in_pg(self, atm_pg_obj): #------------------------------------------------ #Add input PG object dictionaries to this object's dicts: - self._nml_def_groups.update(atm_pg_obj._nml_def_groups) - self._nml_def_vars.update(atm_pg_obj._nml_def_vars) + self.nml_def_groups.update(atm_pg_obj.nml_def_groups) + self.nml_def_vars.update(atm_pg_obj.nml_def_vars) #Append input PG object to this object: self.append(atm_pg_obj) @@ -261,7 +376,7 @@ def append_user_nl_file(self, user_nl_file): """ _data = OrderedDict() - with open(user_nl_file,'r') as user_file: + with open(user_nl_file,'r', encoding='utf-8') as user_file: within_comment_block = False for line in user_file: if len(line)>1: @@ -281,17 +396,17 @@ def append_user_nl_file(self, user_nl_file): line_j = ' '.join(line_s) # now parse the line: - if ("=" in line_j): + if "=" in line_j: line_ss = line_j.split("=") var_str = (line_ss[0]).strip() # the first element is the parameter name val_str = ' '.join(line_ss[1:]) # the rest is tha value string if '!' 
in val_str: - val_str = val_str.split("!")[0] # discard the comment in val str, if one exists + val_str = val_str.split("!", maxsplit=1)[0] # discard the comment in val str, if one exists #Check if variable already exists in group dictionary: - if var_str in self._var_group_dict: + if var_str in self.var_group_dict: #Extract namelist group list for variable: - data_group = self._var_group_dict[var_str] + data_group = self.var_group_dict[var_str] else: #Raise error that namelist variable isn't listed in @@ -357,7 +472,7 @@ def write(self, output_path): quote_set = {"'", '"'} #single and double quotes # Write Fortran namelist file: - with open(os.path.join(output_path), 'w') as atm_in_fil: + with open(os.path.join(output_path), 'w', encoding='utf-8') as atm_in_fil: #Loop through namelist groups in alphabetical order: for nml_group in sorted(self._data): # Write namelist group: @@ -369,7 +484,7 @@ def write(self, output_path): val = self._data[nml_group][var]["values"].strip() #If no value is set then move to the next variable: - if val==None: + if val is None: continue #Extract variable type: @@ -383,21 +498,21 @@ def write(self, output_path): if var_type in num_bool_set: if var_type == 'logical': #If logical, then write the associated truth value: - if self._is_nml_logical_true(var, val): + if _is_nml_logical_true(var, val): atm_in_fil.write(f" {var} = .true.\n") else: atm_in_fil.write(f" {var} = .false.\n") else: #If a number, then write value as-is: - atm_in_fil.write(" {} = {}\n".format(var, val)) + atm_in_fil.write(f" {var} = {val}\n") elif "char*" in var_type: #Value is a string, so check if is already inside quotes: if val[0] in quote_set and val[-1] == val[0]: #If so, then write string value as-is: - atm_in_fil.write(" {} = {}\n".format(var, val)) + atm_in_fil.write(f" {var} = {val}\n") else: #If not, then write string with added quotes: - atm_in_fil.write(" {} = '{}'\n".format(var, val)) + atm_in_fil.write(f" {var} = '{val}'\n") else: #This is an un-recognized type option, so raise an error: emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" @@ -452,109 +567,5 @@ def __expand_func(self, varname): #Return value if found: return val - #### - - def _is_nml_logical_true(self, varname, var_val): - - """ - Checks if a "logical" XML namelist value is true or - false. - ---------- - varname -> The name of the variable being checked - var_val -> The value of the variable being checked - - doctests: - - 1. Check that a True value returns true: - >>> AtmInParamGen({})._is_nml_logical_true("test", True) - True - - 2. Check that a "true" value returns true: - >>> AtmInParamGen({})._is_nml_logical_true("test", "true") - True - - 3. Check that a ".true." value returns true: - >>> AtmInParamGen({})._is_nml_logical_true("test", ".true.") - True - - 4. Check that a "1" value returns true: - >>> AtmInParamGen({})._is_nml_logical_true("test", "1") - True - - 5. Check that a 1 (integer) value returns true: - >>> AtmInParamGen({})._is_nml_logical_true("test", 1) - True - - 6. Check that a False value returns false: - >>> AtmInParamGen({})._is_nml_logical_true("test", False) - False - - 7. Check that a "FALSE" value returns false: - >>> AtmInParamGen({})._is_nml_logical_true("test", "FALSE") - False - - 8. Check that a ".False." value returns false: - >>> AtmInParamGen({})._is_nml_logical_true("test", ".False.") - False - - 9. Check that a "0" value returns false: - >>> AtmInParamGen({})._is_nml_logical_true("test", "0") - False - - 10. 
Check that a 0 (integer) value returns false: - >>> AtmInParamGen({})._is_nml_logical_true("test", 0) - False - - 11. Check that a bad string value returns the correct error: - >>> AtmInParamGen({})._is_nml_logical_true("test", "this_wont_work") # doctest: +ELLIPSIS - Traceback (most recent call last): - ... - cam_config_classes.CamConfigValError:... - XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 'this_wont_work' - - 12. Check that a bad integer value returns the correct error: - >>> AtmInParamGen({})._is_nml_logical_true("test", 3) # doctest: +ELLIPSIS - Traceback (most recent call last): - ... - cam_config_classes.CamConfigValError:... - XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 3 - - 13. Check that a non-boolean, string or integer type returns an error: - >>> AtmInParamGen({})._is_nml_logical_true("test", 13.03) # doctest: +ELLIPSIS - Traceback (most recent call last): - ... - cam_config_classes.CamConfigTypeError:... - XML namelist variable 'test' must have a value that is either a boolean, string, or integer, not float. - - """ - - if isinstance(var_val, bool): - return var_val - elif isinstance(var_val, str): - if var_val.lower() in {"true", ".true.", "1"}: - return True - elif var_val.lower() in {"false", ".false.", "0"}: - return False - else: - emsg = f"\nXML namelist logical variable, '{varname}'" - emsg += ", must have a value of true, false, 1, or 0, not" - emsg += f" '{var_val}'" - raise CamConfigValError(emsg) - elif isinstance(var_val, int): - if var_val == 1: - return True - elif var_val == 0: - return False - else: - emsg = f"\nXML namelist logical variable, '{varname}'" - emsg += ", must have a value of true, false, 1, or 0, not" - emsg += f" {var_val}" - raise CamConfigValError(emsg) - else: - emsg = f"\nXML namelist variable '{varname}' must" - emsg += " have a value that is either a boolean, string, or integer," - emsg += f" not {type(var_val).__name__}." - raise CamConfigTypeError(emsg) - ############ #End of file diff --git a/cime_config/buildlib b/cime_config/buildlib index 34c8c053..603b2479 100755 --- a/cime_config/buildlib +++ b/cime_config/buildlib @@ -143,17 +143,17 @@ def _build_cam(): complib = os.path.join(libroot, "libatm.a") makefile = os.path.join(casetools, "Makefile") - cmd = "{} complib -j {} MODEL=cam COMPLIB={} -f {} {} " \ - .format(gmake, gmake_j, complib, makefile, - get_standard_makefile_args(case)) + cmd = f"{gmake} complib -j {gmake_j} MODEL=cam COMPLIB={complib}" + cmd += f" -f {makefile} {get_standard_makefile_args(case)} " # Add C Pre-Processor (CPP) definitions, if present: if config.cpp_defs: - cmd += " USER_CPPDEFS='{}'".format(' '.join(config.cpp_defs)) + ccpp_defs_str = ' '.join(config.cpp_defs) + cmd += f" USER_CPPDEFS='{ccpp_defs_str}'" retcode, out, err = run_cmd(cmd) _LOGGER.info("%s: \n\n output:\n %s \n\n err:\n\n%s\n", cmd, out, err) - expect(retcode == 0, "Command {} failed with rc={}".format(cmd, retcode)) + expect(retcode == 0, f"Command {cmd} failed with rc={retcode}") ############################################################################### diff --git a/cime_config/buildnml b/cime_config/buildnml index ffdab68b..1ea4c340 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -50,8 +50,7 @@ class CamBuildnmlError(ValueError): # This simplifies the filename mangling for different cases. 
def _create_ic_filename(inst_string, i_or_r, run_refcase, run_refdate, run_reftod): - return "{}.cam{}.{}.{}-{}.nc".format( \ - run_refcase, inst_string, i_or_r, run_refdate, run_reftod) + return f"{run_refcase}.cam{inst_string}.{i_or_r}.{run_refdate}-{run_reftod}.nc" ################## @@ -83,24 +82,6 @@ def nml_attr_set(config): #Return namelist attribute dictionary: return cam_nml_attr_dict -def _get_namelist_groups(nmlgen, skip_groups=None): - """Return all groups in except those from """ - all_groups = set() - for child in nmlgen._definition.tree.getroot(): - for item in child: - if item.tag == 'group': - all_groups.add(item.text) - # end if - # end for - # end for - if skip_groups: - if not isinstance(skip_groups, set): - skip_groups = set(skip_groups) - # end if - all_groups -= skip_groups - # end if - return sorted(all_groups) - ################# #PRIMARY FUNCTION ################# @@ -162,7 +143,7 @@ def buildnml(case, caseroot, compname): # Determine directory for user modified namelist_definition_cam.xml: user_xml_dir = os.path.join(caseroot, "SourceMods", "src.cam") expect(os.path.isdir(user_xml_dir), - "user_xml_dir %s does not exist " %user_xml_dir) + f"user_xml_dir {user_xml_dir} does not exist ") # User definition *replaces* existing definition: user_definition = os.path.join(user_xml_dir, "namelist_definition_cam.xml") @@ -180,7 +161,7 @@ def buildnml(case, caseroot, compname): # Check that the standard namelist file actually exists: expect(os.path.isfile(definition_file), - "Namelist XML file '{}' not found!".format(definition_file)) + f"Namelist XML file '{definition_file}' not found!") # Create the ParamGen object: pg_atm = AtmInParamGen.from_namelist_xml(definition_file) diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index b2891bac..1c7f6d6d 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -198,7 +198,7 @@ def _find_schemes_in_sdf(suite_part): NB: This function is recursive as schemes may be nested inside other suite objects (e.g., group, subcycle) """ - scheme_list = list() # Attempt to retain ordering + scheme_list = [] # Attempt to retain ordering for section in suite_part: item_type = section.tag.lower() if item_type == 'scheme': @@ -432,7 +432,7 @@ def generate_registry(data_search, build_cache, atm_root, bldroot, registry_files = [registry_file] genreg_dir = os.path.join(bldroot, "cam_registry") # Create empty registry file objects list: - reg_files_list = list() + reg_files_list = [] # Figure out if we need to generate new data source and metadata files gen_reg_file = os.path.join(_REG_GEN_DIR, "generate_registry_data.py") if os.path.exists(genreg_dir): @@ -625,9 +625,9 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, # end if if do_gen_nl: args = [] - for scheme in xml_files: + for scheme, xml_file in xml_files.items(): args.extend(["--namelist-file-arg", - f"{scheme}:{xml_files[scheme]}"]) + f"{scheme}:{xml_file}"]) # end for args.append("--namelist-read-mod") args.append("cam_ccpp_scheme_namelists") @@ -875,7 +875,7 @@ def ic_names(self): TEST_REG_DIR = os.path.join(TEST_BLDROOT, "cam_registry") # For generate_init_routines: - TEST_REGFILES = list() + TEST_REGFILES = [] TEST_CAP_DATAFILE = os.path.join("test_bldroot", "ccpp", "capfiles.txt") # Create testing buildroot directory: diff --git a/cime_config/cam_build_cache.py b/cime_config/cam_build_cache.py index 441ad77f..61eef1de 100644 --- a/cime_config/cam_build_cache.py +++ b/cime_config/cam_build_cache.py @@ -53,7 +53,7 @@ def 
new_entry_from_xml(item): emsg = f"ERROR: No path for {item.tag} XML item" raise ValueError(emsg) else: - raise ValueError("ERROR: Invalid {} XML item".format(item.tag)) + raise ValueError(f"ERROR: Invalid {item.tag} XML item") # end if return new_entry @@ -346,12 +346,10 @@ def write(self): rgen_entry = ET.SubElement(registry, 'reg_gen_file') rgen_entry.text = rgen_file # end for - for stdname in self.__ic_names: - for ic_name in self.__ic_names[stdname]: - ic_entry = ET.SubElement(registry, 'ic_name_entry') - ic_entry.set('standard_name', stdname) - ic_entry.text = ic_name - # end for + for stdname, ic_name in self.__ic_names.items(): + ic_entry = ET.SubElement(registry, 'ic_name_entry') + ic_entry.set('standard_name', stdname) + ic_entry.text = ic_name # end for # CCPP ccpp = ET.SubElement(new_cache, 'CCPP') @@ -380,9 +378,9 @@ def write(self): self.__create_nl_file.file_hash) preproc = ET.SubElement(ccpp, 'preproc_defs') preproc.text = self.__preproc_defs - for kind_def in self.__kind_types: + for kind_def, kind_type in self.__kind_types.items(): kind_type = ET.SubElement(ccpp, 'kind_type') - kind_type.text = f"{kind_def}={self.__kind_types[kind_def]}" + kind_type.text = f"{kind_def}={kind_type}" # end for new_cache_tree = ET.ElementTree(new_cache) new_cache_tree.write(self.__build_cache) @@ -683,35 +681,35 @@ def ic_names(self): TEST_SCHEME = os.path.join(TEST_SOURCE_MODS_DIR, "temp_adjust_scalar.meta") # Generate test build caches from template: - f1 = open(BUILD_CACHE, 'rt') + f1 = open(BUILD_CACHE, 'rt', encoding='utf-8') data = f1.read() data = data.replace("TAG1", "").replace("TAG2", "").replace("TAG3", "") f1.close() - f1 = open(BUILD_CACHE, 'w') + f1 = open(BUILD_CACHE, 'w', encoding='utf-8') f1.write(data) f1.close() - f1 = open(BAD_BUILD_CACHE, 'rt') + f1 = open(BAD_BUILD_CACHE, 'rt', encoding='utf-8') data = f1.read() data = data.replace("TAG1", "").replace("TAG2", "").replace("TAG3", "") f1.close() - f1 = open(BAD_BUILD_CACHE, 'w') + f1 = open(BAD_BUILD_CACHE, 'w', encoding='utf-8') f1.write(data) f1.close() - f1 = open(BAD_BUILD_CACHE_REG, 'rt') + f1 = open(BAD_BUILD_CACHE_REG, 'rt', encoding='utf-8') data = f1.read() data = data.replace("TAG1", "").replace("TAG2", "").replace("TAG3", "") f1.close() - f1 = open(BAD_BUILD_CACHE_REG, 'w') + f1 = open(BAD_BUILD_CACHE_REG, 'w', encoding='utf-8') f1.write(data) f1.close() - f1 = open(BAD_BUILD_CACHE_CCPP, 'rt') + f1 = open(BAD_BUILD_CACHE_CCPP, 'rt', encoding='utf-8') data = f1.read() data = data.replace("TAG1", "").replace("TAG2", "").replace("TAG3", "") f1.close() - f1 = open(BAD_BUILD_CACHE_CCPP, 'w') + f1 = open(BAD_BUILD_CACHE_CCPP, 'w', encoding='utf-8') f1.write(data) f1.close() diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 5ad9f6f9..b767e95c 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -109,7 +109,7 @@ def __init__(self, case, case_log): # If only "UNSET" is present in the list, then convert to # empty list: if len(self.__cppdefs) == 1 and "UNSET" in self.__cppdefs: - self.__cppdefs = list() + self.__cppdefs = [] # The following translation is hard-wired for backwards compatibility # to support the differences between how the config_grids specifies the @@ -142,14 +142,14 @@ def __init__(self, case, case_log): elif not user_dyn_opt: user_dyn_opt = None elif user_dyn_opt not in dyn_valid_vals: - emsg = "ERROR: '{}' is not a valid dycore,".format(user_dyn_opt) - emsg += "\n Valid values: {}".format(dyn_valid_vals) + emsg = f"ERROR: '{user_dyn_opt}' is not a valid 
dycore," + emsg += f"\n Valid values: {dyn_valid_vals}" raise CamConfigValError(emsg) # End if (no else, dyn is valid #----------------------------------------------- # Create empty dictonary - self.__config_dict = dict() + self.__config_dict = {} # Create namelist group list, starting with default namelist groups self.__nml_groups = ['cam_initfiles_nl', @@ -588,11 +588,11 @@ def print_config(self, obj_name, case_log): if obj_name in self.config_dict: obj = self.config_dict[obj_name] else: - raise CamConfigValError("ERROR: Invalid configuration name, '{}'".format(obj_name)) + raise CamConfigValError(f"ERROR: Invalid configuration name, '{obj_name}'") # Print variable to logger - case_log.debug("{}".format(obj.desc)) - case_log.debug("{} = {}".format(obj.name, obj.value)) + case_log.debug(f"{obj.desc}") + case_log.debug(f"{obj.name} = {obj.value}") #++++++++++++++++++++++++ @@ -614,7 +614,7 @@ def print_all(self, case_log): # Also print CPP definitions, if any: if self.__cppdefs: - case_log.debug("\nCAM CPP Defs: {}".format(" ".join(self.__cppdefs))) + case_log.debug(f"\nCAM CPP Defs: {' '.join(self.__cppdefs)}") # Print additional separator (to help separate this output from # additional CIME output) @@ -632,7 +632,7 @@ def set_value(self, obj_name, val): if obj_name in self.config_dict: obj = self.config_dict[obj_name] else: - raise CamConfigValError("ERROR: Invalid configuration name, '{}'".format(obj_name)) + raise CamConfigValError(f"ERROR: Invalid configuration name, '{obj_name}'") # Next, check that the given value is either an integer or a string if not isinstance(val, (int, str)): @@ -682,7 +682,7 @@ def add_cppdef(self, cppname, value=None): #This is done because a CPP definition should only be set once, #in order to avoid variable overwriting or other un-expected #compiler behaviors: - if any([re.match(check_str+r"($|=)", cppdef.strip()) for cppdef in self.__cppdefs]): + if any(re.match(check_str+r"($|=)", cppdef.strip()) for cppdef in self.__cppdefs): #If match is found, then raise an error: emsg = "ERROR: CPP definition '{}' has already been set" raise CamConfigValError(emsg.format(cppname.upper())) @@ -693,7 +693,7 @@ def add_cppdef(self, cppname, value=None): cpp_str = check_str else: # Create CPP definition flag string: - cpp_str = "{}={}".format(check_str, value) + cpp_str = f"{check_str}={value}" # Add string to CPP definition list: self.__cppdefs.append(cpp_str) @@ -710,7 +710,7 @@ def get_value(self, obj_name): if obj_name in self.config_dict: obj = self.config_dict[obj_name] else: - raise CamConfigValError("ERROR: Invalid configuration name, '{}'".format(obj_name)) + raise CamConfigValError(f"ERROR: Invalid configuration name, '{obj_name}'") # If it does, then return the object's value return obj.value @@ -766,7 +766,7 @@ def generate_cam_src(self, gen_fort_indent): self.__atm_root, self.__bldroot, reg_dir, reg_files, source_mods_dir, force_ccpp) - phys_dirs, force_init, cap_datafile, nl_groups, capgen_db = retvals + phys_dirs, force_init, _, nl_groups, capgen_db = retvals # Add in the namelist groups from schemes self.__nml_groups.extend(nl_groups) diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py index 6a77df92..a0e7a7e6 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -24,7 +24,7 @@ class CamConfigValError(ValueError): (e.g., log user errors without backtrace)""" # pylint: disable=useless-super-delegation def __init__(self, message): - super(CamConfigValError, self).__init__(message) + 
super().__init__(message) # pylint: enable=useless-super-delegation ############################################################################### @@ -33,8 +33,8 @@ class CamConfigTypeError(TypeError): """Class used to handle CAM config type errors (e.g., log user errors without backtrace)""" # pylint: disable=useless-super-delegation - def __init_(self, message): - super(CamConfigTypeError, self).__init__(message) + def __init__(self, message): + super().__init__(message) # pylint: enable=useless-super-delegation ############################################################################### @@ -324,7 +324,7 @@ def _check_string_val(name, val, valid_vals=None): # Internal generic CAM configure class ############################################################################### -class _ConfigGen(object): +class _ConfigGen: """ Generic configuration class used to @@ -393,7 +393,7 @@ def __init__(self, name, desc, is_nml_attr=False): elif isinstance(desc, list): derr = [type(x) for x in desc] else: - derr = "{} ({})".format(type(desc), desc) + derr = f"{type(desc).__name__} ({desc})" # end if raise CamConfigTypeError(emsg.format(name, derr)) # end if @@ -401,7 +401,7 @@ def __init__(self, name, desc, is_nml_attr=False): # Add name, description, and namelist attribute logical to object self.__name = name if isinstance(desc, str): - self.__desc = "# {}".format(desc) + self.__desc = f"# {desc}" elif isinstance(desc, list): self.__desc = "# " + "\n# ".join(desc) # end if @@ -468,7 +468,7 @@ class ConfigInteger(_ConfigGen): def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): # Add generic attributes - super(ConfigInteger, self).__init__(name, desc, is_nml_attr=is_nml_attr) + super().__init__(name, desc, is_nml_attr=is_nml_attr) # Add valid_vals to object self.__valid_vals = valid_vals @@ -577,7 +577,7 @@ class ConfigString(_ConfigGen): def __init__(self, name, desc, val, valid_vals=None, is_nml_attr=False): # Add generic attributes - super(ConfigString, self).__init__(name, desc, is_nml_attr=is_nml_attr) + super().__init__(name, desc, is_nml_attr=is_nml_attr) # If ok, then add valid_vals to object self.__valid_vals = valid_vals @@ -721,7 +721,7 @@ class ConfigList(_ConfigGen): def __init__(self, name, desc, val, valid_type=None, valid_vals=None): # Add generic attributes - super(ConfigList, self).__init__(name, desc, is_nml_attr=False) + super().__init__(name, desc, is_nml_attr=False) # Check if valid_type is not None if valid_type is not None: @@ -818,13 +818,13 @@ def __check_type(self, val): if bad_val_types: if len(bad_val_types) > 1: emsg = "ERROR: The following list entries, provided for variable," - emsg += " '{}', are not {}s, but instead are:\n".format(self.name, good_type) + emsg += f" '{self.name}', are not {good_type}s, but instead are:\n" else: emsg = "ERROR: The following list entry, provided for variable," - emsg += " '{}', is not a {}, but instead is: ".format(self.name, good_type) + emsg += f" '{self.name}', is not a {good_type}, but instead is: " # end if for key_str, type_str in bad_val_types.items(): - emsg += "'{}': type='{}'\n".format(key_str, type_str) + emsg += f"'{key_str}': type='{type_str}'\n" # end for raise CamConfigValError(emsg) # End if diff --git a/src/data/generate_registry_data.py b/src/data/generate_registry_data.py index f1c67a5c..37415301 100755 --- a/src/data/generate_registry_data.py +++ b/src/data/generate_registry_data.py @@ -86,7 +86,7 @@ class TypeRegistry(dict): def __init__(self): """Initialize TypeRegistry object with intrinsic 
Fortran types""" - super(TypeRegistry, self).__init__() + super().__init__() self['character'] = TypeEntry('character', None) self['complex'] = TypeEntry('complex', None) self['integer'] = TypeEntry('integer', None) @@ -192,22 +192,20 @@ def __init__(self, elem_node, local_name, dimensions, known_types, def write_metadata(self, outfile): """Write out this variable as CCPP metadata""" - outfile.write('[ {} ]\n'.format(self.local_name)) - outfile.write(' {} = {}\n'.format('standard_name', self.standard_name)) + outfile.write(f'[ {self.local_name} ]\n') + outfile.write(f' standard_name = {self.standard_name}\n') if self.long_name: - outfile.write(' {} = {}\n'.format('long_name', self.long_name)) + outfile.write(f' long_name = {self.long_name}\n') # end if - outfile.write(' {} = {}\n'.format('units', self.units)) + outfile.write(f' units = {self.units}\n') if self.is_ddt: - outfile.write(' {} = {}\n'.format('type', self.var_type)) + outfile.write(f' type = {self.var_type}\n') elif self.kind: - outfile.write(' {} = {} | {} = {}\n'.format('type', self.var_type, - 'kind', self.kind)) + outfile.write(f' type = {self.var_type} | kind = {self.kind}\n') else: - outfile.write(' {} = {}\n'.format('type', self.var_type)) + outfile.write(f' type = {self.var_type}\n') # end if - outfile.write(' {} = {}\n'.format('dimensions', - self.dimension_string)) + outfile.write(f' dimensions = {self.dimension_string}\n') def write_initial_value(self, outfile, indent, init_var, ddt_str, tstep_init=False): @@ -218,11 +216,11 @@ def write_initial_value(self, outfile, indent, init_var, ddt_str, if self.local_index_name_str: #Then write variable with local index name: # pylint: disable=no-member - var_name = '{}{}'.format(ddt_str, self.local_index_name_str) + var_name = f'{ddt_str}{self.local_index_name_str}' # pylint: enable=no-member else: #Otherwise, use regular local variable name: - var_name = '{}{}'.format(ddt_str, self.local_name) + var_name = f'{ddt_str}{self.local_name}' if self.allocatable == VarBase.__pointer_type_str: if self.initial_value == VarBase.__pointer_def_init: init_val = '' @@ -250,34 +248,34 @@ def write_initial_value(self, outfile, indent, init_var, ddt_str, if tstep_init: if self.var_type.lower() == 'real': if self.kind: - outfile.write('{} = 0._{}'.format(var_name, self.kind), indent) + outfile.write(f'{var_name} = 0._{self.kind}', indent) else: - outfile.write('{} = 0.0'.format(var_name), indent) + outfile.write(f'{var_name} = 0.0', indent) elif self.var_type.lower() == 'integer': if self.kind: - outfile.write('{} = 0_{}'.format(var_name, self.kind), indent) + outfile.write(f'{var_name} = 0_{self.kind}', indent) else: - outfile.write('{} = 0'.format(var_name), indent) + outfile.write(f'{var_name} = 0'.format(var_name), indent) elif self.var_type.lower() == 'character': if self.kind: - outfile.write('{} = {}_""'.format(var_name, self.kind), indent) + outfile.write(f'{var_name} = {self.kind}_""', indent) else: - outfile.write('{} = ""'.format(var_name), indent) + outfile.write(f'{var_name} = ""', indent) elif self.var_type.lower() == 'complex': if self.kind: - outfile.write('{} = (0._{}, 0._{})'.format(var_name, self.kind, self.kind), indent) + outfile.write(f'{var_name} = (0._{self.kind}, 0._{self.kind})', indent) else: - outfile.write('{} = (0.0, 0.0)'.format(var_name), indent) + outfile.write(f'{var_name} = (0.0, 0.0)', indent) elif self.var_type.lower() == 'logical': - outfile.write('{} = .false.'.format(var_name), indent) + outfile.write('{var_name} = .false.', indent) else: emsg = 'Variable 
"{}" is of type "{}", which is not a supported type\n' emsg += 'for use with "phys_timestep_init_zero".' raise TypeError(emsg.format(var_name, self.var_type)) # end if elif init_val: - outfile.write("if ({}) then".format(init_var), indent) - outfile.write("{} = {}".format(var_name, init_val), indent+1) + outfile.write(f"if ({init_var}) then", indent) + outfile.write(f"{var_name} = {init_val}", indent+1) outfile.write("end if", indent) # end if @@ -427,13 +425,13 @@ def __init__(self, elem_node, parent_name, dimensions, known_types, #with the correct index variable name: local_index_string = ','.join(my_local_index) local_index_name_str = \ - '{}({})'.format(parent_name, local_index_string) + f'{parent_name}({local_index_string})' else: emsg = "Cannot find element dimension, '{}' in {}({})" raise CCPPError(emsg.format(index_name, parent_name, ', '.join(dimensions))) # end if - local_name = '{}({})'.format(parent_name, self.index_string) + local_name = f'{parent_name}({self.index_string})' super().__init__(elem_node, local_name, my_dimensions, known_types, parent_type, units_default=parent_units, @@ -578,15 +576,15 @@ def __init__(self, var_node, known_types, vdict, logger): # end if # Maybe fix up type string if self.module: - self.__type_string = 'type({})'.format(self.var_type) + self.__type_string = f'type({self.var_type})' elif self.kind: - self.__type_string = '{}({})'.format(self.var_type, self.kind) + self.__type_string = f'{self.var_type}({self.kind})' else: - self.__type_string = '{}'.format(self.var_type) + self.__type_string = f'{self.var_type}' # end if if logger: - dmsg = 'Found registry Variable, {} ({})' - logger.debug(dmsg.format(self.local_name, self.standard_name)) + dmsg = f'Found registry Variable, {self.local_name} ({self.standard_name})' + logger.debug(dmsg) # end if def write_metadata(self, outfile): @@ -657,9 +655,9 @@ def write_definition(self, outfile, access, indent, # Initial value if self.initial_value: if self.allocatable == "pointer": - init_str = " => {}".format(self.initial_value) + init_str = f" => {self.initial_value}" elif not (self.allocatable[0:11] == 'allocatable'): - init_str = " = {}".format(self.initial_value) + init_str = f" = {self.initial_value}" # end if (no else, do not initialize allocatable fields) else: init_str = "" @@ -671,11 +669,9 @@ def write_definition(self, outfile, access, indent, convert_to_long_name(self.standard_name)) # end if outfile.write(comment, indent) - outfile.write("{}{}{}{} :: {}{}{}".format(type_str, acc_str, - all_str, pro_str, - self.local_name, - self.__def_dims_str, - init_str), indent) + var_dec_str = f"{type_str}{acc_str}{all_str}{pro_str} :: " + var_dec_str += f"{self.local_name}{self.__def_dims_str}{init_str}" + outfile.write(var_dec_str, indent) def write_allocate_routine(self, outfile, indent, init_var, reall_var, ddt_str): @@ -693,7 +689,7 @@ def write_allocate_routine(self, outfile, indent, my_ddt = self.is_ddt if my_ddt: # This is a DDT object, allocate entries subi = indent - sub_ddt_str = '{}{}%'.format(ddt_str, self.local_name) + sub_ddt_str = f'{ddt_str}{self.local_name}%' if dimension_string: subi += 1 emsg = "Arrays of DDT objects not implemented" @@ -704,7 +700,7 @@ def write_allocate_routine(self, outfile, indent, init_var, reall_var, sub_ddt_str) else: # Do we need to allocate this variable? 
- lname = '{}{}'.format(ddt_str, self.local_name) + lname = f'{ddt_str}{self.local_name}' if self.allocatable == "pointer": all_type = 'associated' elif self.allocatable == "allocatable": @@ -713,21 +709,19 @@ def write_allocate_routine(self, outfile, indent, all_type = '' # end if if all_type: - outfile.write("if ({}({})) then".format(all_type, lname), - indent) - outfile.write("if ({}) then".format(reall_var), indent+1) - outfile.write("deallocate({})".format(lname), indent+2) + outfile.write(f"if ({all_type}({lname})) then", indent) + outfile.write(f"if ({reall_var}) then", indent+1) + outfile.write(f"deallocate({lname})", indent+2) if self.allocatable == "pointer": - outfile.write("nullify({})".format(lname), indent+2) + outfile.write(f"nullify({lname})".format(lname), indent+2) # end if outfile.write("else", indent+1) - emsg = 'subname//": {} is already {}'.format(lname, all_type) + emsg = f'subname//": {lname} is already {all_type}' emsg += ', cannot allocate"' - outfile.write("call endrun({})".format(emsg), indent+2) + outfile.write(f"call endrun({emsg})", indent+2) outfile.write("end if", indent+1) outfile.write("end if", indent) - outfile.write("allocate({}{})".format(lname, dimension_string), - indent) + outfile.write(f"allocate({lname}{dimension_string})", indent) # end if if self.allocatable != "parameter": # Initialize the variable @@ -762,7 +756,7 @@ def write_tstep_init_routine(self, outfile, indent, if my_ddt: # This is a DDT object, initalize individual entries subi = indent - sub_ddt_str = '{}{}%'.format(ddt_str, self.local_name) + sub_ddt_str = f'{ddt_str}{self.local_name}%' if dimension_string: emsg = "Arrays of DDT objects not implemented" raise ParseInternalError(emsg) @@ -822,7 +816,7 @@ class VarDict(OrderedDict): def __init__(self, name, ttype, logger): """Initialize a registry variable dictionary""" - super(VarDict, self).__init__() + super().__init__() self.__name = name self.__type = ttype self.__logger = logger @@ -940,11 +934,11 @@ def variable_list(self): def write_metadata(self, outfile): """Write out the variables in this dictionary as CCPP metadata""" outfile.write('[ccpp-table-properties]\n') - outfile.write(' name = {}\n'.format(self.name)) - outfile.write(' type = {}\n'.format(self.module_type)) + outfile.write(f' name = {self.name}\n') + outfile.write(f' type = {self.module_type}\n') outfile.write('[ccpp-arg-table]\n') - outfile.write(' name = {}\n'.format(self.name)) - outfile.write(' type = {}\n'.format(self.module_type)) + outfile.write(f' name = {self.name}\n') + outfile.write(f' type = {self.module_type}\n') for var in self.variable_list(): var.write_metadata(outfile) # end if @@ -1049,10 +1043,10 @@ def variable_list(self): def write_metadata(self, outfile): """Write out this DDT as CCPP metadata""" outfile.write('[ccpp-table-properties]\n') - outfile.write(' name = {}\n'.format(self.ddt_type)) + outfile.write(f' name = {self.ddt_type}\n') outfile.write(' type = ddt\n') outfile.write('[ccpp-arg-table]\n') - outfile.write(' name = {}\n'.format(self.ddt_type)) + outfile.write(f' name = {self.ddt_type}\n') outfile.write(' type = ddt\n') for var in self.__data: var.write_metadata(outfile) @@ -1072,18 +1066,18 @@ def write_definition(self, outfile, access, indent): # end if my_acc = 'private' if self.private else 'public' if self.extends: - acc_str = ', extends({})'.format(self.extends.type_type) + acc_str = f', extends({self.extends.type_type})' elif self.bindC: acc_str = ', bind(C)' elif my_acc != access: - acc_str = ', {}'.format(my_acc) + acc_str 
= f', {my_acc}' else: acc_str = '' # end if # Write the CCPP header write_ccpp_table_header(self.ddt_type, outfile) # Write the type definition - outfile.write("type{} :: {}".format(acc_str, self.ddt_type), indent) + outfile.write(f"type{acc_str} :: {self.ddt_type}", indent) maxtyp = max([len(x.type_string) for x in self.__data]) maxacc = max([len(x.access) for x in self.__data if x.access != 'private']) @@ -1093,7 +1087,7 @@ def write_definition(self, outfile, access, indent): maxtyp=maxtyp, maxacc=maxacc, maxall=maxall, has_protect=False) # end if - outfile.write("end type {}".format(self.ddt_type), indent) + outfile.write(f"end type {self.ddt_type}", indent) outfile.write("", 0) @property @@ -1189,7 +1183,7 @@ def add_ddt(self, newddt, logger=None): logger.debug(dmsg) # end if if self.__known_types.known_type(newddt.ddt_type): - raise CCPPError('Duplicate DDT entry, {}'.format(newddt.ddt_type)) + raise CCPPError(f'Duplicate DDT entry, {newddt.ddt_type}') # end if self.__ddts[newddt.ddt_type] = newddt self.__known_types.add_type(newddt.ddt_type, @@ -1201,9 +1195,9 @@ def variable_list(self): def write_metadata(self, outdir, logger): """Write out the variables in this file as CCPP metadata""" - ofilename = os.path.join(outdir, "{}.meta".format(self.name)) - logger.info("Writing registry metadata file, {}".format(ofilename)) - with open(ofilename, "w") as outfile: + ofilename = os.path.join(outdir, f"{self.name}.meta") + logger.info(f"Writing registry metadata file, {ofilename}") + with open(ofilename, "w", encoding='utf-8') as outfile: # Write DDTs defined in this file for ddt in self.__ddts.values(): ddt.write_metadata(outfile) @@ -1224,9 +1218,9 @@ def dim_sort_key(cls, dim_name): def write_source(self, outdir, indent, logger): """Write out source code for the variables in this file""" - ofilename = os.path.join(outdir, "{}.F90".format(self.name)) - logger.info("Writing registry source file, {}".format(ofilename)) - file_desc = "Variables for registry source file, {}".format(self.name) + ofilename = os.path.join(outdir, f"{self.name}.F90") + logger.info(f"Writing registry source file, {ofilename}") + file_desc = f"Variables for registry source file, {self.name}" with FortranWriter(ofilename, "w", file_desc, self.name, indent=indent) as outfile: # Use statements (if any) @@ -1259,7 +1253,7 @@ def write_source(self, outdir, indent, logger): mod = module[0] mtype = module[1] pad = ' '*(maxlen - len(mod)) - outfile.write('use {},{} only: {}'.format(mod, pad, mtype), 1) + outfile.write(f'use {mod},{pad} only: {mtype}'.format(mod, pad, mtype), 1) # end for # More boilerplate outfile.write("", 0) @@ -1273,10 +1267,8 @@ def write_source(self, outdir, indent, logger): # Write data management subroutine declarations outfile.write('', 0) outfile.write('!! 
public interfaces', 0) - outfile.write('public :: {}'.format(self.allocate_routine_name()), - 1) - outfile.write('public :: {}'.format(self.tstep_init_routine_name()), - 1) + outfile.write(f'public :: {self.allocate_routine_name()}', 1) + outfile.write(f'public :: {self.tstep_init_routine_name()}', 1) # end of module header outfile.end_module_header() outfile.write("", 0) @@ -1288,11 +1280,11 @@ def write_source(self, outdir, indent, logger): def allocate_routine_name(self): """Return the name of the allocate routine for this module""" - return 'allocate_{}_fields'.format(self.name) + return f'allocate_{self.name}_fields' def tstep_init_routine_name(self): """Return the name of the physics timestep init routine for this module""" - return "{}_tstep_init".format(self.name) + return f"{self.name}_tstep_init" def write_allocate_routine(self, outfile): """Write a subroutine to allocate all the data in this module""" @@ -1300,13 +1292,13 @@ def write_allocate_routine(self, outfile): args = list(self.__var_dict.known_dimensions) args.sort(key=File.dim_sort_key) # Attempt at a consistent interface init_var = 'set_init_val' - args.append('{}_in'.format(init_var)) + args.append(f'{init_var}_in') reall_var = 'reallocate' - args.append('{}_in'.format(reall_var)) - outfile.write('subroutine {}({})'.format(subname, ', '.join(args)), 1) + args.append(f'{reall_var}_in') + outfile.write(f'subroutine {subname}({", ".join(args)})', 1) # Use statements nanmods = 'nan => shr_infnan_nan, assignment(=)' - outfile.write('use shr_infnan_mod, only: {}'.format(nanmods), 2) + outfile.write(f'use shr_infnan_mod, only: {nanmods}', 2) outfile.write('use cam_abortutils, only: endrun', 2) # Dummy arguments outfile.write('!! Dummy arguments', 2) @@ -1318,31 +1310,31 @@ def write_allocate_routine(self, outfile): typ = 'integer' opt = ', ' # end if - outfile.write('{}{}intent(in) :: {}'.format(typ, opt, arg), 2) + outfile.write(f'{typ}{opt}intent(in) :: {arg}', 2) # end for outfile.write('', 0) outfile.write('!! Local variables', 2) - outfile.write('logical :: {}'.format(init_var), 2) - outfile.write('logical :: {}'.format(reall_var), 2) - subn_str = 'character(len=*), parameter :: subname = "{}"' - outfile.write(subn_str.format(subname), 2) + outfile.write(f'logical :: {init_var}', 2) + outfile.write(f'logical :: {reall_var}', 2) + subn_str = f'character(len=*), parameter :: subname = "{subname}"' + outfile.write(subn_str, 2) outfile.write('', 0) outfile.write('! 
Set optional argument values', 2) - outfile.write('if (present({}_in)) then'.format(init_var), 2) - outfile.write('{iv} = {iv}_in'.format(iv=init_var), 3) + outfile.write(f'if (present({init_var}_in)) then', 2) + outfile.write(f'{init_var} = {init_var}_in', 3) outfile.write('else', 2) - outfile.write('{} = .true.'.format(init_var), 3) + outfile.write(f'{init_var} = .true.', 3) outfile.write('end if', 2) - outfile.write('if (present({}_in)) then'.format(reall_var), 2) - outfile.write('{iv} = {iv}_in'.format(iv=reall_var), 3) + outfile.write(f'if (present({reall_var}_in)) then', 2) + outfile.write(f'{reall_var} = {reall_var}_in', 3) outfile.write('else', 2) - outfile.write('{} = .false.'.format(reall_var), 3) + outfile.write(f'{reall_var} = .false.', 3) outfile.write('end if', 2) outfile.write('', 0) for var in self.__var_dict.variable_list(): var.write_allocate_routine(outfile, 2, init_var, reall_var, '') # end for - outfile.write('end subroutine {}'.format(subname), 1) + outfile.write(f'end subroutine {subname}', 1) def write_tstep_init_routine(self, outfile): """ @@ -1351,16 +1343,16 @@ def write_tstep_init_routine(self, outfile): """ subname = self.tstep_init_routine_name() outfile.write('', 0) - outfile.write('subroutine {}()'.format(subname), 1) + outfile.write(f'subroutine {subname}()', 1) outfile.write('', 0) outfile.write('!! Local variables', 2) - subn_str = 'character(len=*), parameter :: subname = "{}"' - outfile.write(subn_str.format(subname), 2) + subn_str = f'character(len=*), parameter :: subname = "{subname}"' + outfile.write(subn_str, 2) for var in self.__var_dict.variable_list(): var.write_tstep_init_routine(outfile, 2, '') # end for outfile.write('', 0) - outfile.write('end subroutine {}'.format(subname), 1) + outfile.write(f'end subroutine {subname}', 1) @property def name(self): @@ -1394,7 +1386,7 @@ def file_path(self): def __str__(self): """Return printable string for this File object""" - return "".format(self.file_type, self.name) + return f"" ############################################################################### def parse_command_line(args, description): @@ -1440,22 +1432,22 @@ def metadata_file_to_files(file_path, known_types, dycore, config, run_env): mfiles = [] if os.path.exists(file_path): if run_env.logger: - run_env.logger.info("Parsing metadata_file, '{}'".format(file_path)) + run_env.logger.info(f"Parsing metadata_file, '{file_path}'") # end if meta_tables = parse_metadata_file(file_path, known_ddts, run_env) else: - emsg = "Metadata file, '{}', does not exist" - raise CCPPError(emsg.format(file_path)) + emsg = f"Metadata file, '{file_path}', does not exist" + raise CCPPError(emsg) # end if # Create a File object with no Variables for mtable in meta_tables: htype = mtable.table_type hname = mtable.table_name if htype not in ('host', 'module', 'ddt'): - emsg = "Metadata type, '{}' not supported." - raise CCPPError(emsg.format(htype)) + emsg = f"Metadata type, '{htype}' not supported." 
+ raise CCPPError(emsg) # end if - section = ''.format(hname, htype) + section = f'' sect_xml = ET.fromstring(section) mfile = File(sect_xml, known_types, dycore, config, run_env.logger, gen_code=False, file_path=file_path) @@ -1465,20 +1457,20 @@ def metadata_file_to_files(file_path, known_types, dycore, config, run_env): if sections: # CCPP Framework will check for a single section mheader = sections[0] else: - emsg = "Missing metadata section ([ccpp-arg-table]) for {}" - raise CCPPError(emsg.format(hname)) + emsg = f"Missing metadata section ([ccpp-arg-table]) for {hname}" + raise CCPPError(emsg) # end if for var in mheader.variable_list(loop_vars=False, consts=False): prop = var.get_prop_value('local_name') - vnode_str = '{" ".join(vdims)}' vnode_str += '' # end if vnode_str += '\n' @@ -1504,10 +1496,10 @@ def metadata_file_to_files(file_path, known_types, dycore, config, run_env): # end for if htype == 'ddt': # We defined the variables, now create the DDT for them. - vnode_str = ''.format(hname) + vnode_str = f'' for var in mheader.variable_list(loop_vars=False, consts=False): prop = var.get_prop_value('standard_name') - vnode_str += '\n {}'.format(prop) + vnode_str += f'\n {prop}' # end for vnode_str += '\n' var_node = ET.fromstring(vnode_str) @@ -1541,8 +1533,7 @@ def write_registry_files(registry, dycore, config, outdir, src_mod, src_root, for section in registry: sec_name = section.get('name') if sec_name: - logger.info("Parsing {}, {}, from registry".format(section.tag, - sec_name)) + logger.info(f"Parsing {section.tag}, {sec_name}, from registry") # end if if section.tag == 'file': files.append(File(section, known_types, dycore, config, logger)) @@ -1692,12 +1683,12 @@ def gen_registry(registry_file, dycore, config, outdir, indent, # end if # end for try: - emsg = "Invalid registry file, {}".format(registry_file) + emsg = f"Invalid registry file, {registry_file}" file_ok = validate_xml_file(registry_file, 'registry', version, logger, schema_path=schema_dir, error_on_noxmllint=error_on_no_validate) except CCPPError as ccpperr: - cemsg = "{}".format(ccpperr).split('\n')[0] + cemsg = f"{ccpperr}".split('\n', maxsplit=1)[0] if cemsg[0:12] == 'Execution of': xstart = cemsg.find("'") if xstart >= 0: @@ -1719,7 +1710,7 @@ def gen_registry(registry_file, dycore, config, outdir, indent, ic_names = None else: library_name = registry.get('name') - emsg = "Parsing registry, {}".format(library_name) + emsg = f"Parsing registry, {library_name}" logger.debug(emsg) reg_dir = os.path.dirname(registry_file) files = write_registry_files(registry, dycore, config, outdir, src_mod, @@ -1753,5 +1744,5 @@ def main(): ############################################################################### if __name__ == "__main__": - __RETCODE, _FILES = main() + __RETCODE, _FILES, _IC_NAMES = main() sys.exit(__RETCODE) diff --git a/src/data/write_init_files.py b/src/data/write_init_files.py index 62df00d4..7d2c4d35 100644 --- a/src/data/write_init_files.py +++ b/src/data/write_init_files.py @@ -11,7 +11,7 @@ import os.path # CCPP Framework import statements -from ccpp_state_machine import CCPP_STATE_MACH, RUN_PHASE_NAME +from ccpp_state_machine import CCPP_STATE_MACH from fortran_tools import FortranWriter from var_props import is_horizontal_dimension, is_vertical_dimension @@ -150,11 +150,11 @@ def write_init_files(cap_database, ic_names, outdir, # Write public parameters: retvals = write_ic_params(outfile, host_vars, ic_names) - stdname_list, ic_names, ic_max_len, stdname_max_len = retvals + ic_names, 
ic_max_len, stdname_max_len = retvals # Write initial condition arrays: - write_ic_arrays(outfile, stdname_list, ic_names, - ic_max_len, stdname_max_len, host_vars) + write_ic_arrays(outfile, ic_names, ic_max_len, + stdname_max_len, host_vars) # Add "contains" statement: outfile.end_module_header() @@ -183,10 +183,10 @@ def write_init_files(cap_database, ic_names, outdir, # end if # Log file creation: - logger.info("Writing initial conditions source file, {}".format(ofilename)) + logger.info(f"Writing initial conditions source file, {ofilename}") # Open file using CCPP's FortranWriter: - file_desc = "Initial conditions source file, {}".format(phys_input_filename) + file_desc = f"Initial conditions source file, {phys_input_filename}" with FortranWriter(ofilename, "w", file_desc, phys_input_fname_str, indent=indent) as outfile: @@ -233,7 +233,7 @@ class CamInitWriteError(ValueError): (e.g., log user errors without backtrace)""" # pylint: disable=useless-super-delegation def __init__(self, message): - super(CamInitWriteError, self).__init__(message) + super().__init__(message) # pylint: enable=useless-super-delegation ################# @@ -250,7 +250,7 @@ def _find_and_add_host_variable(stdname, host_dict, var_dict, missing_vars): """ hvar = host_dict.find_variable(stdname) if hvar: - if (hvar.source.type != 'host'): + if hvar.source.type != 'host': var_dict[stdname] = hvar # end if (other variables not readable) else: @@ -372,12 +372,12 @@ def write_ic_params(outfile, host_vars, ic_names): outfile.blank_line() - return stdname_list, ic_names, max_loclen, max_slen + return ic_names, max_loclen, max_slen ###### -def write_ic_arrays(outfile, stdname_list, ic_name_dict, - ic_max_len, stdname_max_len, host_vars): +def write_ic_arrays(outfile, ic_name_dict, ic_max_len, + stdname_max_len, host_vars): """ Write initial condition arrays to store @@ -442,7 +442,7 @@ def write_ic_arrays(outfile, stdname_list, ic_name_dict, if index == num_input_vars-1: suffix = " /)" # end if - outfile.write("{}{}".format(stdname_str, suffix), 2) + outfile.write(f"{stdname_str}{suffix}", 2) # end for outfile.blank_line() @@ -463,7 +463,7 @@ def write_ic_arrays(outfile, stdname_list, ic_name_dict, if index == num_input_vars-1: suffix = f" /), (/{max_ic_num}, phys_var_num/))" # end if - outfile.write("{}{}".format(ic_name_str, suffix), 2) + outfile.write(f"{ic_name_str}{suffix}", 2) # end for outfile.blank_line() @@ -974,7 +974,7 @@ def write_phys_check_subroutine(outfile, host_dict, host_vars, host_imports, var_locname = hvar.call_string(host_dict) # Set "if-statement" call string: - call_string_key = "case ('{}')".format(var_stdname) + call_string_key = f"case ('{var_stdname}')" # Extract vertical level variable: levnm, call_check_field, reason = get_dimension_info(hvar) diff --git a/test/.pylintrc b/test/.pylintrc index 90588cb8..5bd70f35 100644 --- a/test/.pylintrc +++ b/test/.pylintrc @@ -55,10 +55,7 @@ confidence= # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -#Note: If python 3.5 is ever dropped as a testing option -# then 'consider-using-f-string' should be re-enabled -JN - -disable=import-error, too-many-statements, too-many-lines, too-many-locals, bad-whitespace, consider-using-f-string +disable=import-error, too-many-statements, too-many-lines, too-many-locals, bad-whitespace # Enable the message, report, category or checker with the given id(s). 
You can # either give multiple identifier separated by comma (,) or put this option diff --git a/test/pylint_test.sh b/test/pylint_test.sh index 7721bc28..1a152b27 100755 --- a/test/pylint_test.sh +++ b/test/pylint_test.sh @@ -25,3 +25,4 @@ ${pylintcmd} ${CAMROOT}/cime_config/buildlib ${pylintcmd} ${CAMROOT}/cime_config/buildnml ${pylintcmd} ${CAMROOT}/src/data/generate_registry_data.py ${pylintcmd} ${CAMROOT}/src/data/write_init_files.py +${pylintcmd} ${CAMROOT}/cime_config/atm_in_paramgen.py diff --git a/test/run_tests.sh b/test/run_tests.sh index 2656dd2d..f8c44803 100755 --- a/test/run_tests.sh +++ b/test/run_tests.sh @@ -79,7 +79,7 @@ run_unittest test/unit/test_atm_in_paramgen.py # Report if [ ${NUMERRORS} -gt 0 ]; then - echo "${NUMERRORS} out of ${NUMTESTS} tests FAILED" + echo "${NUMERRORS} out of ${NUMTESTS} test collections FAILED" else - echo "All ${NUMTESTS} tests PASSED!" + echo "All ${NUMTESTS} test collections PASSED!" fi From df3393cb30eff3d0e24976c50cf73ac7f9fa321c Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 9 Feb 2022 15:18:16 -0700 Subject: [PATCH 15/33] Add new 'AtmInParamGenError' error class, and fix build cache bug. --- cime_config/atm_in_paramgen.py | 45 ++++++++++++++++--------------- cime_config/cam_build_cache.py | 10 ++++--- test/unit/test_atm_in_paramgen.py | 18 ++++++------- 3 files changed, 39 insertions(+), 34 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index f9aa24f7..d6535248 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -13,9 +13,6 @@ import os.path import sys from collections import OrderedDict -#CAM specific config error: -from cam_config_classes import CamConfigValError -from cam_config_classes import CamConfigTypeError #---------------- # Import ParamGen @@ -31,9 +28,15 @@ from paramgen import ParamGen #pylint: enable=wrong-import-position -################# +################################################################ + +class AtmInParamGenError(ValueError): + """Class used to handle atm_in ParamGen errors + (e.g., log user errors without backtrace)""" + +################################################################ #HELPER FUNCTIONS -################# +################################################################ def _is_nml_logical_true(varname, var_val): @@ -90,21 +93,21 @@ def _is_nml_logical_true(varname, var_val): >>> _is_nml_logical_true("test", "this_wont_work") # doctest: +ELLIPSIS Traceback (most recent call last): ... - cam_config_classes.CamConfigValError:... + atm_in_paramgen.AtmInParamGenError:... XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 'this_wont_work' 12. Check that a bad integer value returns the correct error: >>> _is_nml_logical_true("test", 3) # doctest: +ELLIPSIS Traceback (most recent call last): ... - cam_config_classes.CamConfigValError:... + atm_in_paramgen.AtmInParamGenError:... XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 3 13. Check that a non-boolean, string or integer type returns an error: >>> _is_nml_logical_true("test", 13.03) # doctest: +ELLIPSIS Traceback (most recent call last): ... - cam_config_classes.CamConfigTypeError:... + atm_in_paramgen.AtmInParamGenError:... XML namelist variable 'test' must have a value that is either a boolean, string, or integer, not float. 
""" @@ -121,7 +124,7 @@ def _is_nml_logical_true(varname, var_val): emsg = f"\nXML namelist logical variable, '{varname}'" emsg += ", must have a value of true, false, 1, or 0, not" emsg += f" '{var_val}'" - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) if isinstance(var_val, int): if var_val == 1: @@ -133,13 +136,13 @@ def _is_nml_logical_true(varname, var_val): emsg = f"\nXML namelist logical variable, '{varname}'" emsg += ", must have a value of true, false, 1, or 0, not" emsg += f" {var_val}" - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #Type is un-recognizeda, so raise an error: emsg = f"\nXML namelist variable '{varname}' must" emsg += " have a value that is either a boolean, string, or integer," emsg += f" not {type(var_val).__name__}." - raise CamConfigTypeError(emsg) + raise AtmInParamGenError(emsg) ################################################################ # MAIN "atm_in" ParamGen class @@ -213,7 +216,7 @@ def from_namelist_xml(cls, nml_xml_file): emsg += "Those entries and missing elements are:\n" for entry_id, missing_elems in missing_elems.items(): emsg += f"{entry_id} : {', '.join(missing_elems)}\n" - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #---------------- #Initialize file->group/var set dictionary: @@ -314,7 +317,7 @@ def append_atm_in_pg(self, atm_pg_obj): emsg += " be associated with only one namelist definition file." emsg += "\nInstead it is associated with the following files:\n" emsg += "\n".join(atm_pg_obj.nml_def_groups.keys()) - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #Extract namelist definition file name: input_file = next(iter(atm_pg_obj.nml_def_groups)) @@ -336,7 +339,7 @@ def append_atm_in_pg(self, atm_pg_obj): emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" emsg += " the following conflicting namelist groups:\n" emsg += ", ".join(same_groups) - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #------------------------------------------------ @@ -353,7 +356,7 @@ def append_atm_in_pg(self, atm_pg_obj): emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" emsg += " the following conflicting namelist variables:\n" emsg += ", ".join(same_vars) - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #------------------------------------------------ @@ -413,7 +416,7 @@ def append_user_nl_file(self, user_nl_file): #anywhere in a definition file: emsg = "Variable '{}' not found in any namelist definition files." emsg += " Please double-check '{}'." - raise CamConfigValError(emsg.format(var_str, user_nl_file)) + raise AtmInParamGenError(emsg.format(var_str, user_nl_file)) #Add the namelist group if not already in data dict: if not data_group in _data: @@ -423,17 +426,17 @@ def append_user_nl_file(self, user_nl_file): if var_str in _data[data_group]: emsg = "Namelist variable '{}' set more than once in '{}'" emsg += "\nPlease set each variable only once." - raise CamConfigValError(emsg.format(var_str, user_nl_file)) + raise AtmInParamGenError(emsg.format(var_str, user_nl_file)) #Enter the parameter in the dictionary: _data[data_group][var_str] = {'values':val_str} else: emsg = "Cannot parse the following line in '{}' :\n'{}'" - raise CamConfigValError(emsg.format(user_nl_file, line)) + raise AtmInParamGenError(emsg.format(user_nl_file, line)) #Check if there is unclosed block: if within_comment_block: - raise CamConfigValError(f"Un-closed comment block! Please check '{user_nl_file}'") + raise AtmInParamGenError(f"Un-closed comment block! 
Please check '{user_nl_file}'") #Create new ParamGen object: pg_user = ParamGen(_data) @@ -492,7 +495,7 @@ def write(self, output_path): var_type = self._data[nml_group][var]["type"].strip() else: emsg = f"Namelist entry '{var}' is missing required 'type' element." - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) #Check if variable value is a number or boolean: if var_type in num_bool_set: @@ -517,7 +520,7 @@ def write(self, output_path): #This is an un-recognized type option, so raise an error: emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" emsg += "Acceptable namelist types are: logical, integer, real, or char*N." - raise CamConfigValError(emsg) + raise AtmInParamGenError(emsg) # Add space for next namelist group: atm_in_fil.write('/\n\n') diff --git a/cime_config/cam_build_cache.py b/cime_config/cam_build_cache.py index 61eef1de..4fc8e78e 100644 --- a/cime_config/cam_build_cache.py +++ b/cime_config/cam_build_cache.py @@ -346,10 +346,12 @@ def write(self): rgen_entry = ET.SubElement(registry, 'reg_gen_file') rgen_entry.text = rgen_file # end for - for stdname, ic_name in self.__ic_names.items(): - ic_entry = ET.SubElement(registry, 'ic_name_entry') - ic_entry.set('standard_name', stdname) - ic_entry.text = ic_name + for stdname, ic_names in self.__ic_names.items(): + for ic_name in ic_names: + ic_entry = ET.SubElement(registry, 'ic_name_entry') + ic_entry.set('standard_name', stdname) + ic_entry.text = ic_name + # end for # end for # CCPP ccpp = ET.SubElement(new_cache, 'CCPP') diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index 1a55a1d9..083681f2 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -51,7 +51,7 @@ #Import CAM configure objects: # pylint: disable=wrong-import-position from atm_in_paramgen import AtmInParamGen -from cam_config_classes import CamConfigValError +from atm_in_paramgen import AtmInParamGenError # pylint: enable=wrong-import-position ################# @@ -304,7 +304,7 @@ def test_namelist_xml_missing_elems(self): xml_test_fil = os.path.join(_SAMPLES_DIR, "test_missing_elems.xml") # Attempt to run ParamGen: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Check exception message @@ -404,7 +404,7 @@ def test_mutli_xml_append_multi(self): pg_test.append_atm_in_pg(pg_ext) # Try to append the combined PG object to the third object: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_third.append_atm_in_pg(pg_test) # Check exception message: @@ -438,7 +438,7 @@ def test_mutli_xml_same_nl_group(self): pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) # Append the extra PG object to the other: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_atm_in_pg(pg_ext) # Check exception message: @@ -471,7 +471,7 @@ def test_mutli_xml_same_nl_var(self): pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) # Append the extra PG object to the other: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_atm_in_pg(pg_ext) # Check exception message: @@ -552,7 +552,7 @@ def test_namelist_user_nl_unclosed_comment(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Attempt to append user_nl_cam file: - with 
self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_user_nl_file(user_nl_fil) # Check exception message: @@ -584,7 +584,7 @@ def test_namelist_user_nl_bad_format_entry(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Attempt to append user_nl_cam file: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_user_nl_file(user_nl_fil) # Check exception message: @@ -617,7 +617,7 @@ def test_namelist_user_nl_undefined_entry(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Attempt to append user_nl_cam file: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_user_nl_file(user_nl_fil) # Check exception message: @@ -650,7 +650,7 @@ def test_namelist_user_nl_double_entry(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Attempt to append user_nl_cam file: - with self.assertRaises(CamConfigValError) as cerr: + with self.assertRaises(AtmInParamGenError) as cerr: pg_test.append_user_nl_file(user_nl_fil) # Check exception message: From 3fdec6012918f74d6a78c977758fb3d425b3fc04 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 11 Feb 2022 14:49:00 -0700 Subject: [PATCH 16/33] Add the ability to write array/list entries to atm_in. Also add all namelist definition XML files needed by the SE dycore. Finally, add relevant tests, and fix some bugs. --- cime_config/atm_in_paramgen.py | 130 +++- cime_config/buildnml | 80 +- cime_config/cam_autogen.py | 4 +- cime_config/cam_config.py | 74 +- cime_config/namelist_definition_cam.xml | 172 ++--- src/data/namelist_definition_air_comp.xml | 86 +++ .../se/namelist_definition_se_dycore.xml | 691 ++++++++++++++++++ src/dynamics/se/native_mapping.F90 | 7 +- .../tests/namelist_definition_analy_ic.xml | 28 + .../sample_files/atm_in_files/test_attr_in | 6 +- .../atm_in_files/test_extra_nml_def.xml | 40 +- .../atm_in_files/test_multi_attr_in | 6 +- .../atm_in_files/test_multi_xml_in | 19 +- .../atm_in_files/test_simple_atm_in | 6 +- .../atm_in_files/test_simple_nml_def.xml | 12 + .../sample_files/atm_in_files/test_user_in | 4 + test/unit/test_atm_in_paramgen.py | 1 + 17 files changed, 1194 insertions(+), 172 deletions(-) create mode 100644 src/data/namelist_definition_air_comp.xml create mode 100644 src/dynamics/se/namelist_definition_se_dycore.xml create mode 100644 src/dynamics/tests/namelist_definition_analy_ic.xml diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index d6535248..32100391 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -12,6 +12,7 @@ import os import os.path import sys +import re from collections import OrderedDict #---------------- @@ -385,6 +386,10 @@ def append_user_nl_file(self, user_nl_file): if len(line)>1: line_s = line.split() + #If line is empty then go to next line: + if not line_s: + continue + # check if within comment block. if (not within_comment_block) and line.strip()[0:2] == "/*": within_comment_block = True @@ -438,12 +443,16 @@ def append_user_nl_file(self, user_nl_file): if within_comment_block: raise AtmInParamGenError(f"Un-closed comment block! 
Please check '{user_nl_file}'") - #Create new ParamGen object: - pg_user = ParamGen(_data) + #End with - #Append new user_nl_cam object to main atm_in namelist object: - self.append(pg_user) + #Check if any user_nl_cam data is present: + if _data: + #If so, then create new ParamGen object: + pg_user = ParamGen(_data) + #Append new user_nl_cam object to main atm_in namelist object: + self.append(pg_user) + #End if #### def write(self, output_path): @@ -455,24 +464,17 @@ def write(self, output_path): """ - #Compile regular expression to determine if variable value - #is a number or Fortran logical. - - #All "values" stored in ParamGen are strings. However, booleans and numbers - #(either integers or reals) shouldn't have wrapping quotes when written to the - #fortran namelist. Thus the value needs to be evaluated to see if it is actually - #a fortran boolean, integer, or real. This done using the following regular expressions: - #-------------------------------------------------------------------------------------- - # Make sure ParamGen object has been reduced: if not self.reduced: emsg = "ParamGen object for atm_in must be reduced before being " emsg += "written to file. Please check CAM's buildnml script." - raise SystemError(emsg) + raise AtmInParamGenError(emsg) + + #Create a regex value to check for an array type: + arr_type_regex = re.compile(r"[(][ ]*([0-9 ,]+)[ ]*[)]") #Create sets for string evaluation below: - num_bool_set = {"integer", "real", "logical"} #types that don't need quotes - quote_set = {"'", '"'} #single and double quotes + num_set = {"integer", "real"} #types that don't need special handling # Write Fortran namelist file: with open(os.path.join(output_path), 'w', encoding='utf-8') as atm_in_fil: @@ -497,30 +499,96 @@ def write(self, output_path): emsg = f"Namelist entry '{var}' is missing required 'type' element." raise AtmInParamGenError(emsg) - #Check if variable value is a number or boolean: - if var_type in num_bool_set: + #Check if an array type: + array_type = arr_type_regex.search(var_type) + + if array_type: + #Grab all text before array regex match: + var_type = var_type[:array_type.start()].strip() + + #Split the value into its array elements, + #this assumes that a comma (,) is the element + #delimiter: + array_elems = val.split(",") + + #Write beginning of namelist entry: + nml_str = f" {var} = " + + #Check if variable type is a logical: + if var_type == 'logical': + #loop over array elements: + for elem in array_elems: + if _is_nml_logical_true(var, elem): + elem_str = ".true., " + else: + elem_str = ".false., " + #End if + #Write to namelist string: + nml_str += elem_str + #End for + #Check if it is a number: + elif var_type in num_set: + #loop over array elements: + for elem in array_elems: + #Write to namelist string: + nml_str += f"{elem}, " + #End for + #check if it is a character: + elif "char*" in var_type: + #loop over array elements: + for elem_with_space in array_elems: + #Remove any extra white space: + elem = elem_with_space.strip() + + #Remove all quotes in the string, as they + #sometimes added by ParamGen during the "reduce" phase: + elem = elem.replace("'", "") + elem = elem.replace('"', "") + + #Add surrounding quotes: + elem_str = f'"{elem}", ' + #Write to namelist entry string: + nml_str += elem_str + #End for + else: + #This is an un-recognized type option, so raise an error: + emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" + emsg += "Acceptable namelist types are: logical, integer, real, or char*N." 
+ raise AtmInParamGenError(emsg) + #End if + + #There will always be a trailing comma and space (, ) so find it: + last_comma_idx = nml_str.rfind(", ") + + #Write final string to file: + atm_in_fil.write(nml_str[:last_comma_idx]+"\n") + + else: #Not an array + #Check if variable type is a logical: if var_type == 'logical': #If logical, then write the associated truth value: if _is_nml_logical_true(var, val): atm_in_fil.write(f" {var} = .true.\n") else: atm_in_fil.write(f" {var} = .false.\n") - else: + #End if + elif var_type in num_set: #If a number, then write value as-is: atm_in_fil.write(f" {var} = {val}\n") - elif "char*" in var_type: - #Value is a string, so check if is already inside quotes: - if val[0] in quote_set and val[-1] == val[0]: - #If so, then write string value as-is: - atm_in_fil.write(f" {var} = {val}\n") + elif "char*" in var_type: + #Remove all quotes in the string, as they + #sometimes added by ParamGen during the "reduce" phase: + val = val.replace("'", "") + val = val.replace('"', "") + #Add entry to atm_in file: + atm_in_fil.write(f' {var} = "{val}"\n') else: - #If not, then write string with added quotes: - atm_in_fil.write(f" {var} = '{val}'\n") - else: - #This is an un-recognized type option, so raise an error: - emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" - emsg += "Acceptable namelist types are: logical, integer, real, or char*N." - raise AtmInParamGenError(emsg) + #This is an un-recognized type option, so raise an error: + emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" + emsg += "Acceptable namelist types are: logical, integer, real, or char*N." + raise AtmInParamGenError(emsg) + #End if + #End if (array type) # Add space for next namelist group: atm_in_fil.write('/\n\n') diff --git a/cime_config/buildnml b/cime_config/buildnml index 1ea4c340..39c799c9 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -7,6 +7,7 @@ import sys import os import shutil import logging +import glob _CIMEROOT = os.environ.get("CIMEROOT") if _CIMEROOT is None: @@ -136,35 +137,66 @@ def buildnml(case, caseroot, compname): cam_nml_dict = nml_attr_set(config) - #-------------------------- - # Construct ParamGen object: - #-------------------------- + #------------------------------------------------ + # Extract dictionary of namelist definition files + #------------------------------------------------ + + xml_nml_fils = config.xml_nml_def_files - # Determine directory for user modified namelist_definition_cam.xml: + #-------------------------------------------------- + # Check for namelist definition files in SourceMods + #-------------------------------------------------- + + # Check for SourceMods directory: user_xml_dir = os.path.join(caseroot, "SourceMods", "src.cam") expect(os.path.isdir(user_xml_dir), f"user_xml_dir {user_xml_dir} does not exist ") - # User definition *replaces* existing definition: - user_definition = os.path.join(user_xml_dir, "namelist_definition_cam.xml") - if os.path.isfile(user_definition): - # Print message to logger: - _LOGGER.info(" ...found namelist definition file in %s", - user_xml_dir) - - # Set definition file: - definition_file = user_definition - else: - # If no user-defined file exists, then use standard file: - definition_file = os.path.join(_CIME_CONFIG_PATH, - "namelist_definition_cam.xml") - - # Check that the standard namelist file actually exists: - expect(os.path.isfile(definition_file), - f"Namelist XML file '{definition_file}' not found!") - - # Create the ParamGen 
object: - pg_atm = AtmInParamGen.from_namelist_xml(definition_file) + # Search for any possible host model namelist definition files: + nml_def_fils = glob.glob(os.path.join(user_xml_dir, + "namelist_definition_*.xml")) + + # Loop over all found files: + for nml_def_fil in nml_def_fils: + if os.path.basename(nml_def_fil) in xml_nml_fils: + # Print message to logger: + _LOGGER.info(" ...found namelist definition file %s in %s", + os.path.basename(nml_def_fil), user_xml_dir) + # Replace file path with SourceMods path + xml_nml_fils[os.path.basename(nml_def_fil)] = nml_def_fil + + + # Now search for CCPP namelist definition files: + ccpp_nml_fils = glob.glob(os.path.join(user_xml_dir, + "*_namelist.xml")) + + # Loop over all found files: + for ccpp_nml_fil in ccpp_nml_fils: + if os.path.basename(ccpp_nml_fil) in ccpp_nml_fils: + # Print message to logger: + _LOGGER.info(" ...found namelist definition file %s in %s", + os.path.basename(ccpp_nml_fil), user_xml_dir) + # Replace file path with SourceMods path: + xml_nml_fils[os.path.basename(ccpp_nml_fil)] = ccpp_nml_fil + + #-------------------------- + # Construct ParamGen objects: + #-------------------------- + + pg_atm_list = [] + + for xml_fil in xml_nml_fils.values(): + pg_obj = AtmInParamGen.from_namelist_xml(xml_fil) + pg_atm_list.append(pg_obj) + + #-------------------------------------- + # Combine all ParamGen objects together: + #--------------------------------------- + + pg_atm = pg_atm_list[0] + + for pg_obj in pg_atm_list[1:]: + pg_atm.append_atm_in_pg(pg_obj) #------------------------ # Loop over all instances: diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index 1c7f6d6d..a64b3259 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -514,7 +514,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, TEST_ATM_ROOT, TEST_BLDROOT, TEST_REG_DIR, \ TEST_REGFILES, TEST_SOURCE_MODS_DIR, \ False) #doctest: +ELLIPSIS - (['.../test_bldroot/ccpp_physics', '.../test_bldroot/ccpp'], False, '.../test_bldroot/ccpp/ccpp_datatable.xml', [], None) + (['.../test_bldroot/ccpp_physics', '.../test_bldroot/ccpp'], False, '.../test_bldroot/ccpp/ccpp_datatable.xml', dict_values([]), None) """ # Physics source gets copied into blddir @@ -699,7 +699,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, # End if return [physics_blddir, genccpp_dir], do_gen_ccpp, cap_output_file, \ - nl_groups, capgen_db + xml_files.values(), capgen_db ############################################################################### def generate_init_routines(build_cache, bldroot, force_ccpp, force_init, diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index b767e95c..7656df3f 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -14,6 +14,7 @@ import sys import argparse import os.path +from collections import OrderedDict #----------------------------------- # Import CAM-specific python modules @@ -151,13 +152,15 @@ def __init__(self, case, case_log): # Create empty dictonary self.__config_dict = {} - # Create namelist group list, starting with default namelist groups - self.__nml_groups = ['cam_initfiles_nl', - 'cam_logfile_nl', - 'physics_nl', - 'qneg_nl', - 'vert_coord_nl', - 'ref_pres_nl'] + # Save local (cime_config) directory path: + cime_conf_path = os.path.dirname(os.path.abspath(__file__)) + + # Create empty XML namelist definition files dictionary: + self.__xml_nml_def_files = OrderedDict() + + #Add the default host model namelist: + 
self.__xml_nml_def_files['namelist_definition_cam.xml'] = \ + os.path.join(cime_conf_path, 'namelist_definition_cam.xml') #---------------------------------------------------- # Set CAM start date (needed for namelist generation) @@ -166,6 +169,11 @@ def __init__(self, case, case_log): # Remove dashes from CIME-provided start date: start_date_cam = start_date.replace('-','') + # Remove leading zeros: + while start_date_cam[0] == "0": + start_date_cam = start_date_cam[1:] + # End while + self.create_config("ic_ymd", "Start date of model run.", start_date_cam, is_nml_attr=True) @@ -234,17 +242,33 @@ def __init__(self, case, case_log): # Dynamical core self.create_config("dyn", dyn_desc, "se", dyn_valid_vals, is_nml_attr=True) + + #Determine location of period (".") in atm_grid string: + dot_idx = atm_grid.find(".") + # Horizontal grid - self.create_config("hgrid", hgrid_desc, atm_grid, - se_grid_re, is_nml_attr=True) + if dot_idx == -1: + self.create_config("hgrid", hgrid_desc, atm_grid, + se_grid_re, is_nml_attr=True) + else: + self.create_config("hgrid", hgrid_desc, atm_grid[:dot_idx], + se_grid_re, is_nml_attr=True) + #End if # Source code directories self.create_config("dyn_src_dirs", dyn_dirs_desc, ["se",os.path.join("se","dycore")], valid_list_type="str") - # Add SE namelist groups to nmlgen list - self.__nml_groups.append("air_composition_nl") - self.__nml_groups.append("dyn_se_nl") + # Set paths for the SE dycore and "air composition" + # namelist definition files: + se_dyn_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", + "dynamics", "se", "namelist_definition_se_dycore.xml") + air_comp_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", + "data", "namelist_definition_air_comp.xml") + + #Add NML definition files to dictionary: + self.__xml_nml_def_files['namelist_definition_se_dycore.xml'] = se_dyn_nml_fil + self.__xml_nml_def_files['namelist_definition_air_comp.xml'] = air_comp_nml_fil # Add required CPP definitons: self.add_cppdef("_MPI") @@ -384,8 +408,13 @@ def __init__(self, case, case_log): # Set "analytic_ic" to True (1): analy_ic_val = 1 #Use Analytic ICs - # Add analytic_ic to namelist group list: - self.__nml_groups.append("analytic_ic_nl") + #Add analytic IC namelist definition file to dictionary: + analy_ic_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", + "dynamics", "tests", + "namelist_definition_analy_ic.xml") + + #Add NML definition files to dictionary: + self.__xml_nml_def_files['namelist_definition_analy_ic.xml'] = analy_ic_nml_fil #Add new CPP definition: self.add_cppdef("ANALYTIC_IC") @@ -459,9 +488,12 @@ def config_dict(self): return self.__config_dict @property - def nml_groups(self): - """Return the namelist groups list of this object.""" - return self.__nml_groups + def xml_nml_def_files(self): + """ + Return a list of all XML namelist definition files + stored by this object. 
+ """ + return self.__xml_nml_def_files @property def cpp_defs(self): @@ -766,9 +798,11 @@ def generate_cam_src(self, gen_fort_indent): self.__atm_root, self.__bldroot, reg_dir, reg_files, source_mods_dir, force_ccpp) - phys_dirs, force_init, _, nl_groups, capgen_db = retvals - # Add in the namelist groups from schemes - self.__nml_groups.extend(nl_groups) + phys_dirs, force_init, _, nml_fils, capgen_db = retvals + + # Add namelist definition files to dictionary: + for nml_fil in nml_fils: + self.__xml_nml_def_files[os.path.basename(nml_fil)] = nml_fil #Convert physics directory list into a string: phys_dirs_str = ';'.join(phys_dirs) diff --git a/cime_config/namelist_definition_cam.xml b/cime_config/namelist_definition_cam.xml index 2d3e8fdd..2c08d265 100644 --- a/cime_config/namelist_definition_cam.xml +++ b/cime_config/namelist_definition_cam.xml @@ -17,53 +17,53 @@ $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L26_c180105.nc $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L30_c180105.nc $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L32_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.23x0.31_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.47x0.63_L26_c061106.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-10-01_0.5x0.625_L26_c031204.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1987-01-01_0.9x1.25_L26_c060703.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1987-01-01_0.9x1.25_L26_c060703.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.9x1.25_L26_c051205.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_1.9x2.5_L26_c070408.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_1.9x2.5_L26_c070408.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L26_c040809.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L26_c110309.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L26_c110309.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L26_c091007.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0001-01-01_4x5_L26_c060608.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.23x0.31_L30_c110527.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.47x0.63_L30_c100929.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L30_c100618.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L30_c090306.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0001-01-01_4x5_L26_c060608.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L26_c030918.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.23x0.31_L30_c110527.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.47x0.63_L30_c100929.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L30_c100618.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L30_c090306.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L30_c070109.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L30_c110309.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L30_c110309.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L30_c100831.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_4x5_L30_c090108.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L30_c081013.nc - 
$DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L32_c141031.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L32_c150407.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam4_0000-01-01_10x15_L32_c170914.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.47x0.63_L26_APE_c080227.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L26_c161230.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L30_c170103.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L32_c170103.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_4x5_L30_c090108.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L30_c081013.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L32_c141031.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L32_c150407.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam4_0000-01-01_10x15_L32_c170914.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.47x0.63_L26_APE_c080227.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L26_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L26_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L26_c161230.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L30_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L30_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L30_c170103.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L32_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L32_c161020.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L32_c170103.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_0.9x1.25_L30_c080724.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L26_c080114.nc $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L30_c080215.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_10x15_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_4x5_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_1.9x2.5_L30_c121015.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L26_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L30_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L26_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L30_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L26_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L30_c081104.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_10x15_L30_c121015.nc + $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_4x5_L30_c121015.nc + 
$DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_1.9x2.5_L30_c121015.nc $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_4x5_L26_c060217.nc $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_10x15_L26_c060216.nc $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-02-01_0.9x1.25_L66_c040928.nc @@ -80,61 +80,61 @@ $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_L81_c110906.nc $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_4x5_L81_c160630.nc $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_10x15_L81_c141027.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_aqua_4x5_L126_c170705.nc + $DIN_LOC_ROOT/atm/waccm/ic/waccmx_aqua_4x5_L126_c170705.nc $DIN_LOC_ROOT/atm/waccm/ic/fx2000_0.9x1.25_126lev_0002-01-01-00000_c181221.nc $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_1.9x2.5_130lev_2000_c181115.nc $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_0.9x1.25_130lev_2000_c190122.nc $DIN_LOC_ROOT/atm/waccm/ic/FC6X2000_f05_spinup01.cam.i.0002-01-01-00000_c190711.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_4x5_L130_c180803.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_1.9x2.5_L130_c180803.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/T341clim01.cam2.i.0024-01-01-00000.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_256x512_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_128x256_L26_c030918.nc + $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_4x5_L130_c180803.nc + $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_1.9x2.5_L130_c180803.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/T341clim01.cam2.i.0024-01-01-00000.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_256x512_L26_c030918.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_128x256_L26_c030918.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_128x256_L26_c040422.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_T42_L26_c031110.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_T42_L26_c031110.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L30_c090102.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L30_c090102.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L32_c170510.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L26_c091218.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L32_c170510.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L26_c091218.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_48x96_L26_c040420.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L30_c100426.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L30_c100426.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_32x64_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_32x64_L30_c090107.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L26_c030228.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_32x64_L30_c090107.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L26_c030228.nc $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_8x16_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L30_c090102.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01_ne5np4_L30.140707.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L32_c171020.nc - 
$DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam6_ne30np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L32_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L30_c171024.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_1850-01-01_ne240np4_L26_c110314.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_0000-09-01_ne240np4_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01-ne240np4_L30_c111004.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne5np4_L26_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne16np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne30np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne60np4_L26_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne120np4_L26_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne240np4_L26_c170613.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne5np4_L30_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne16np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne30np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne120np4_L30_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne5np4_L32_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne16np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne30np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne120np4_L32_c170908.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne240np4_L32_c170908.nc + $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L30_c090102.nc + $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01_ne5np4_L30.140707.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L26_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L30_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L32_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L26_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L30_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam6_ne30np4_L32_c171023.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L26_c171018.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L30_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L32_c171020.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L26_c171018.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L30_c171024.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L32_c171023.nc + $DIN_LOC_ROOT/atm/cam/inic/homme/cami_1850-01-01_ne240np4_L26_c110314.nc + $DIN_LOC_ROOT/atm/cam/inic/homme/cami_0000-09-01_ne240np4_L26_c061106.nc + $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01-ne240np4_L30_c111004.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne5np4_L26_c170517.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne16np4_L26_c170417.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne30np4_L26_c170417.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne60np4_L26_c171023.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne120np4_L26_c170419.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne240np4_L26_c170613.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne5np4_L30_c170517.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne16np4_L30_c170417.nc + 
$DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne30np4_L30_c170417.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne120np4_L30_c170419.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne5np4_L32_c170517.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne16np4_L32_c170509.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne30np4_L32_c170509.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne120np4_L32_c170908.nc + $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne240np4_L32_c170908.nc $DIN_LOC_ROOT/atm/cam/inic/se/f2000_conus_ne30x8_L32_c190712.nc $DIN_LOC_ROOT/atm/waccm/ic/wa3_ne5np4_1950_spinup.cam2.i.1960-01-01-00000_c150810.nc $DIN_LOC_ROOT/atm/waccm/ic/waccm5_1850_ne30np4_L70_0001-01-11-00000_c151217.nc @@ -165,7 +165,7 @@ Default: none - none + UNSET_PATH @@ -191,10 +191,6 @@ UNSET_PATH - UNSET_PATH - UNSET_PATH - UNSET_PATH - UNSET_PATH $DIN_LOC_ROOT/atm/cam/topo/topo-from-cami_0000-01-01_256x512_L26_c030918.nc $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_128x256_c050520.nc $DIN_LOC_ROOT/atm/cam/topo/T42_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20180111.nc @@ -232,6 +228,10 @@ $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171018.nc $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc $DIN_LOC_ROOT/atm/cam/topo/se/ne30x8_conus_nc3000_Co060_Fi001_MulG_PF_nullRR_Nsw042_20190710.nc + UNSET_PATH + UNSET_PATH + UNSET_PATH + UNSET_PATH diff --git a/src/data/namelist_definition_air_comp.xml b/src/data/namelist_definition_air_comp.xml new file mode 100644 index 00000000..312d09dd --- /dev/null +++ b/src/data/namelist_definition_air_comp.xml @@ -0,0 +1,86 @@ + + + + + + + + char*80(20) + physconst + air_composition_nl + + List of major species of dry air. If not set then the composition of dry + air is considered fixed at tropospheric conditions and the properties of + dry air are constant. If set then the list of major species is assumed to + have 'N2' listed last. This information is currently used only for + computing the variable properties of air in WACCM-X configurations. + Default if WACCM-X: + + ['O_mixing_ratio_wrt_dry_air', 'O2_mixing_ratio_wrt_dry_air', + 'H_mixing_ratio_wrt_dry_air', 'N2_mixing_ratio_wrt_dry_air'] + + Otherwise default is None. + + + "" + + O_mixing_ratio_wrt_dry_air, + O2_mixing_ratio_wrt_dry_air, + H_mixing_ratio_wrt_dry_air, + N2_mixing_ratio_wrt_dry_air + + + + + char*80(20) + physconst + air_composition_nl + + List of water species that are included in "moist" air. This is currently + used only by the SE dycore to generalize the computation of the moist air + mass and thermodynamic properties. 
+ Default if CAM4, CAM5, or Kessler physics is used: + + ['specific_humidity', + 'cloud_liquid_water_mixing_ratio_wrt_dry_air', + 'rain_mixing_ratio_wrt_dry_air'] + + Default if CAM6 physics is used: + + ['specific_humidity', + 'cloud_liquid_water_mixing_ratio_wrt_dry_air', + 'cloud_ice_mixing_ratio_wrt_dry_air', + 'rain_mixing_ratio_wrt_dry_air', + 'snow_mixing_ratio_wrt_dry_air'] + + Otherwise default is: ['specific_humidity'] + + + + specific_humidity + + + specific_humidity, + cloud_liquid_water_mixing_ratio_wrt_dry_air, + rain_mixing_ratio_wrt_dry_air + + + specific_humidity, + cloud_liquid_water_mixing_ratio_wrt_dry_air, + rain_mixing_ratio_wrt_dry_air + + + specific_humidity, + cloud_liquid_water_mixing_ratio_wrt_dry_air, + rain_mixing_ratio_wrt_dry_air + + + specific_humidity, + cloud_liquid_water_mixing_ratio_wrt_dry_air, + cloud_ice_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air, + rain_mixing_ratio_wrt_dry_air + + + + + diff --git a/src/dynamics/se/namelist_definition_se_dycore.xml b/src/dynamics/se/namelist_definition_se_dycore.xml new file mode 100644 index 00000000..44064a0d --- /dev/null +++ b/src/dynamics/se/namelist_definition_se_dycore.xml @@ -0,0 +1,691 @@ + + + + + + + + integer + se + dyn_se_nl + + Set for refined exodus meshes (variable viscosity). + Viscosity in namelist specified for regions with a resolution equivilant + to a uniform grid with se_ne = se_fine_ne. + Default: -1 (not used) + + + -1 + 120 + + + + integer + se + dyn_se_nl + 0,1,2 + + CAM physics forcing option: + 0: tendencies + 1: adjustments + 2: hybrid + + + 2 + + + + real + se + dyn_se_nl + + Scalar viscosity with variable coefficient. + Use variable hyperviscosity based on element area limited by + se_max_hypervis_courant. + + + 0 + + + + real + se + dyn_se_nl + + Use tensor hyperviscosity. + Citation: Guba, O., Taylor, M. A., Ullrich, P. A., Overfelt, J. R., and + Levy, M. N.: The spectral element method (SEM) on variable-resolution + grids: evaluating grid sensitivity and resolution-aware numerical + viscosity, Geosci. Model Dev., 7, 2803-2816, + doi:10.5194/gmd-7-2803-2014, 2014. + Default: 0 (i.e., not used) + + + 0 + 3.0D0 + + + + integer + se + dyn_se_nl + + Number of hyperviscosity subcycles per dynamics timestep. + + + 3 + 4 + + + + integer + se + dyn_se_nl + + Number of hyperviscosity subcycles per dynamics timestep in sponge del2 diffusion. + + + 1 + 4 + + + + integer + se + dyn_se_nl + + Variable to specify the vertical index at which the + Rayleigh friction term is centered (the peak value). + Default: 2 + + + 2 + + + + real + se + dyn_se_nl + + Rayleigh friction parameter to determine the width of the profile. If set + to 0 then a width is chosen by the algorithm (see rayleigh_friction.F90). + Default: 0.5. + + + 0.5 + 3 + + + + real + se + dyn_se_nl + + Rayleigh friction parameter to determine the approximate value of the decay + time (days) at model top. If 0.0 then no Rayleigh friction is applied. + Default: 0. + + + 0.0 + + + + real + se + dyn_se_nl + + Used by SE dycore to apply sponge layer diffusion to u, v, and T for + stability of WACCM configurations. The diffusion is modeled on 3D molecular + diffusion and thermal conductivity by using actual molecular diffusion and + thermal conductivity coefficients multiplied by the value of + se_molecular_diff. + + If set <= 0.0 then the code is not activated. If set > 0.0 then + the molecular diffusion and thermal conductivity coefficients will be + multiplied by a factor of se_molecular_diff. + + Default: 0. 
+ + + 0.0 + 100.0 + + + + integer + se + dyn_se_nl + + Number of hyperviscosity subcycles done in tracer advection code. + + + 1 + 2 + + + + integer + se + dyn_se_nl + 0,4,8 + + Limiter used for horizontal tracer advection: + 0: None + 4: Sign-preserving limiter. + 8: Monotone limiter. + + + 8 + + + + real + se + dyn_se_nl + + Upper bound for Courant number, used to limit se_hypervis_power. + Default: 1.0e99 (i.e., not used) unless se_refined_mesh=TRUE + + + 1.0e99 + 1.9 + + + + char*256 + se + dyn_se_nl + + Filename of exodus file to read grid from (generated by CUBIT or SQuadGen). + + + none + $DIN_LOC_ROOT/atm/cam/coords/ne0np4CONUS.ne30x8.g + $DIN_LOC_ROOT/atm/cam/coords/ne0np4EQFACE.ne5x4.g + + + + integer + se + dyn_se_nl + + Number of elements along a cube edge. + Must match value of grid. Set this to zero to use a refined mesh. + Default: 0 + + + 0 + 5 + 16 + 30 + 60 + 120 + 240 + + + + integer + se + dyn_se_nl + + Number of PEs to be used by SE dycore. + Default: -1 = Number of PEs used by CAM. + + + -1 + + + + integer + se + dyn_se_nl + + Number of dynamics steps per physics timestep. + + + 2 + 1 + 3 + 3 + 12 + 5 + 5 + 6 + 7 + + + + integer + se + dyn_se_nl + 0,1,2 + + Scaling of temperature increment for different levels of + thermal energy consistency. + 0: no scaling + 1: scale increment for cp consistency between dynamics and physics + 2: do 1 as well as take into account condensate effect on thermal energy + + + 1 + + + + real + se + dyn_se_nl + + Hyperviscosity coefficient for u,v, T [m^4/s]. + If < 0, se_nu is automatically set. + + + -1 + 1.0e13 + + + + real + se + dyn_se_nl + + Hyperviscosity applied to divergence component of winds [m^4/s]. + If < 0, uses se_nu_p. + + + -1 + 1.5625e13 + + + + real + se + dyn_se_nl + + Hyperviscosity coefficient applied to pressure-level thickness [m^4/s]. + If < 0, se_nu_p is automatically set. + + + -1 + 1.5625e13 + + + + real + se + dyn_se_nl + + Second-order viscosity applied only near the model top [m^2/s]. + + + 5.0e5 + 1.0e6 + + 0.0 + 2.0e5 + + + + logical + se + dyn_se_nl + + Hyperscosity for T and dp is applied to (T-Tref) and (dp-dp_ref) where + Xref are reference states where the effect of topography has been removed + (Simmons and Jiabin, 1991, QJRMS, Section 2a). + If TRUE dp_ref is dynamic smoothed reference state derived by Patrick Callaghan + (Lauritzen et al., 2018, JAMES, Appendix A.2) and temperature reference state + based on Simmons and Jiabin (1991) but using smoothed dp_ref. + If FALSE Tref is static reference state (Simmons and Jiabin) and dp_ref state + derived from hydrostatic balance. + + + .false. + + + + logical + se + dyn_se_nl + + If TRUE the continous equations the dynamical core is based on will conserve a + comprehensive moist total energy + If FALSE the continous equations the dynamical core is based on will conserve + a total energy based on cp for dry air and no condensates (same total energy as + CAM physics uses). + For more details see Lauritzen et al., (2018;DOI:10.1029/2017MS001257) + + + .true. + + + + logical + se + dyn_se_nl + + If TRUE the CSLAM algorithm will work for Courant number larger than 1 with + a low-order increment for tracer mass more than one grid cell width away + + + .true. 
+ + + + integer + se + dyn_se_nl + + Number of SE rsplit time-steps CSLAM supercycles + rsplit/se_fvm_supercycling must be an integer + + + -1 + + + + integer + se + dyn_se_nl + + Number of SE rsplit time-steps CSLAM supercycles in the jet region + (which is specified by kmin_jet and kmax_jet) + rsplit/se_fvm_supercycling_jet must be an integer + se_fvm_supercycling_jet/se_fvm_supercyling must be an integer + + + -1 + + + + integer + se + dyn_se_nl + + Min level index where CSLAM runs with se_fvm_supercycling_jet + (if se_fvm_supercycling_jet.ne.se_fvm_supercycling) or + min index where are Courant number increment is active + (se_large_Courant_incr=.true.) + + + -1 + + + + integer + se + dyn_se_nl + + Max level index where CSLAM runs with se_fvm_supercycling_jet + (if se_fvm_supercycling_jet.ne.se_fvm_supercycling) or + max index where are Courant number increment is active + (se_large_Courant_incr=.true.) + + + -1 + + + + integer + se + dyn_se_nl + + Tracer advection is done every qsplit dynamics timesteps. + + + 1 + + + + logical + se + dyn_se_nl + + TRUE specified use of a refined grid (mesh) for this run. + + + .false. + .true. + .true. + + + + integer + se + dyn_se_nl + + Vertically lagrangian code vertically remaps every rsplit tracer timesteps. + + + 3 + 6 + 6 + + + + integer + se + dyn_se_nl + + Frequency with which diagnostic output is written to log (output every + statefreq dynamics timesteps). + + + 0 + + + + integer + se + dyn_se_nl + + Time stepping method for SE dycore + se_tstep_type=1 RK2 followed by qsplit-1 Leapfrog steps; second-order accurate in time (CESM1.2.0 setting) + se_tstep_type=2 RK2-SSP 3 stage (as used by tracers) + se_tstep_type=3 classic Runga-Kutta (RK) 3 stage + se_tstep_type=4 Kinnmark&Gray Runga-Kutta (RK) 4 stage + + + 4 + + + + integer + se + dyn_se_nl + + Number of tracers to include in logfile diagnostics for SE dycore + + + 3 + + + + char*32 + se + dyn_se_nl + + CAM-SE vertical remapping of temperature: + + "thermal_energy_over_P": Map cp*T (thermal energy conserving) using a pressure coordinate. + "Tv_over_logP" : Map virtual temperature using a log pressure coordinate. + + Default: "thermal_energy_over_P" + + + thermal_energy_over_P + + + + char*32 + se + dyn_se_nl + + CAM-SE vertical remap algorithm for u,v,T, and water species: + + "PPM_bc_mirror": PPM vertical remap with mirroring at the boundaries (solid wall boundary conditions, high-order throughout) + "PPM_bc_PCoM" : PPM vertical remap without mirroring at the boundaries (no boundary conditions enforced, first-order at two cells bordering top and bottom boundaries) + "PPM_bc_linear_extrapolation": PPM with linear extrapolation in ghost cells (code from A. 
Bradley, DOE) + + The following options use the FV3 vertical remapping algorithm: + + "FV3_PPM": Monotone PPM + "FV3_CS" : Monotonic cubic spline with 2*delta_z oscillations removed + "FV3_CS_2dz_filter": Selectively monotonic cubic spline, where local extrema are retained, with 2*delta_z oscillations removed + "FV3_non_monotone_CS_2dz_filter": Non-monotonic (linear) cubic spline with 2*delta_z oscillations removed; + + Default: "FV3_CS" + + + FV3_CS + + + + char*32 + se + dyn_se_nl + + CAM-SE vertical remap algorithm for non-water tracers: + + "PPM_bc_mirror": PPM vertical remap with mirroring at the boundaries (solid wall boundary conditions, high-order throughout) + "PPM_bc_PCoM" : PPM vertical remap without mirroring at the boundaries (no boundary conditions enforced, first-order at two cells bordering top and bottom boundaries) + "PPM_bc_linear_extrapolation": PPM with linear extrapolation in ghost cells (code from A. Bradley, DOE) + + The following options use the FV3 vertical remapping algorithm: + + "FV3_PPM": Monotone PPM + "FV3_CS" : Monotonic cubic spline with 2*delta_z oscillations removed + "FV3_non_monotone_CS_2dz_filter": Non-monotonic (linear) cubic spline with 2*delta_z oscillations removed; + + Default: "PPM_bc_linear_extrapolation" + + + PPM_bc_linear_extrapolation + + + + logical + se + dyn_se_nl + + Set .true. to allow writing SE dynamics fields to the restart file using the + unstructured grid format. This allows the restart file to be used as an + initial file, but its use as a restart file will introduce roundoff size + differences into the simulation. + + + .false. + + + + + + + integer + se + dyn_se_nl + + Number of equally-spaced horizontal physics points per spectral + element. A number greater than zero will define [se_fv_nphys] equally + spaced physics points in each direction (e.g., se_fv_nphys = 3 will + result in 9 equally-spaced physics points per element). + Default: 0 = feature disabled, use dynamics GLL points. + + + 0 + 2 + 3 + 4 + + + + + + + char*16 + se + dyn_se_nl + no,SCRIP + + If 'SCRIP', write a NetCDF file with the grid in SCRIP format. + If using a finite-volume physics grid, write the FVM grid, otherwise + write the native GLL grid. + Note that if this option is used, the simulation will exit after writing. + + + no + + + + char*256 + se + dyn_se_nl + + Name of grid file to write if se_write_grid_file is set. + Default: Set according to active grid + + + UNSET + + + + logical + se + dyn_se_nl + + Set to true to write the SEMapping.nc file. + + + .false. + + + + char*256(5) + se + native_mapping_nl + + List of SCRIP grid filenames each representing a destination grid. If provided during a CAM simulation running the spectral element dycore, mapping files will be created from the native cubed-sphere grid to each destination grid. Both native mapping (using the internal spectral element basis functions) and bilinear maps are created. + Default: UNSET + + + UNSET + + + + + + + integer + se + dyn_se_nl + + Number of threads to use for loops over elements. + + + 0 + + + + integer + se + dyn_se_nl + + Number of threads to use when processing vertical loops. Normally + equal to se_tracer_num_threads. + + + 0 + + + + integer + se + dyn_se_nl + + Number of threads to use when processing loops over threads. + Normally equal to se_vert_num_threads. 
+ + + 0 + + + diff --git a/src/dynamics/se/native_mapping.F90 b/src/dynamics/se/native_mapping.F90 index 63b9aa7c..ed5a11e8 100644 --- a/src/dynamics/se/native_mapping.F90 +++ b/src/dynamics/se/native_mapping.F90 @@ -27,7 +27,8 @@ module native_mapping subroutine native_mapping_readnl(NLFileName) - use shr_nl_mod, only: find_group_name => shr_nl_find_group_name + use shr_nl_mod, only: find_group_name => shr_nl_find_group_name + use runtime_obj, only: unset_str character(len=*), intent(in) :: NLFileName @@ -42,7 +43,7 @@ subroutine native_mapping_readnl(NLFileName) do_native_mapping=.false. do nf=1,maxoutgrids - native_mapping_outgrids(nf)='' + native_mapping_outgrids(nf) = unset_str enddo if(masterproc) then @@ -60,7 +61,7 @@ subroutine native_mapping_readnl(NLFileName) if(ierr/=0) then call endrun(sub//': namelist read returns an error condition for native_mapping_nl') end if - if(len_trim(native_mapping_outgrids(1))==0) exist=.false. + if(trim(native_mapping_outgrids(1)) == unset_str) exist=.false. end if close(unitn) end if diff --git a/src/dynamics/tests/namelist_definition_analy_ic.xml b/src/dynamics/tests/namelist_definition_analy_ic.xml new file mode 100644 index 00000000..ad6ef6ed --- /dev/null +++ b/src/dynamics/tests/namelist_definition_analy_ic.xml @@ -0,0 +1,28 @@ + + + + + + + + char*80 + dyn_test + analytic_ic_nl + none,held_suarez_1994,moist_baroclinic_wave_dcmip2016,dry_baroclinic_wave_dcmip2016,dry_baroclinic_wave_jw2006 + + Specify the type of analytic initial conditions for an initial run. + held_suarez_1994: Initial conditions specified in Held and Suarez (1994) + moist_baroclinic_wave_dcmip2016: Moist baroclinic wave as used in the DCMIP 2016 experiments + dry_baroclinic_wave_dcmip2016: Dry baroclinic wave as used in the DCMIP 2016 experiments + dry_baroclinic_wave_jw2006: Dry baroclinic wave as specified in Jablonowski and Williamson (2006) + + + none + held_suarez_1994 + held_suarez_1994 + moist_baroclinic_wave_dcmip2016 + moist_baroclinic_wave_dcmip2016 + + + + diff --git a/test/unit/sample_files/atm_in_files/test_attr_in b/test/unit/sample_files/atm_in_files/test_attr_in index ffc2bd1f..09218b02 100644 --- a/test/unit/sample_files/atm_in_files/test_attr_in +++ b/test/unit/sample_files/atm_in_files/test_attr_in @@ -4,7 +4,11 @@ / &fairy_tales - straw_into_gold = 'Rumpelstiltskin' + straw_into_gold = "Rumpelstiltskin" +/ + +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" / &scary_stories diff --git a/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml index 5eb75f53..c7579b69 100644 --- a/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml +++ b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml @@ -22,8 +22,8 @@ sci_fi sci_fi_sounds_nl - What are you doing with the - warp drive? + What are you doing with the + warp drive? 0 @@ -32,4 +32,40 @@ + + integer(110) + sci_fi + sci_fi_sounds_nl + + Computer! Do something computery! + + + + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0, + 0,1,0,1,0,0,1,0,1,0 + + + + + + char*256 + sci_fi + sci_fi_sounds_nl + + HAL, where is your hard drive? 
+ + + $DIN_LOC_ROOT/I/CANT/LET/YOU/DO/THAT/DAVE + + diff --git a/test/unit/sample_files/atm_in_files/test_multi_attr_in b/test/unit/sample_files/atm_in_files/test_multi_attr_in index 085ba7f5..512e77fc 100644 --- a/test/unit/sample_files/atm_in_files/test_multi_attr_in +++ b/test/unit/sample_files/atm_in_files/test_multi_attr_in @@ -4,7 +4,11 @@ / &fairy_tales - straw_into_gold = 'Rumpelstiltskin' + straw_into_gold = "Rumpelstiltskin" +/ + +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" / &scary_stories diff --git a/test/unit/sample_files/atm_in_files/test_multi_xml_in b/test/unit/sample_files/atm_in_files/test_multi_xml_in index a834d490..1b5cd59b 100644 --- a/test/unit/sample_files/atm_in_files/test_multi_xml_in +++ b/test/unit/sample_files/atm_in_files/test_multi_xml_in @@ -4,7 +4,11 @@ / &fairy_tales - straw_into_gold = 'Rumpelstiltskin' + straw_into_gold = "Rumpelstiltskin" +/ + +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" / &scary_stories @@ -12,7 +16,20 @@ / &sci_fi_sounds_nl + hal_hard_drive = "/NOT/HERE/I/CANT/LET/YOU/DO/THAT/DAVE" laser_beam = .true. + lets_ask_computer = 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0 warp_drive = 0 / diff --git a/test/unit/sample_files/atm_in_files/test_simple_atm_in b/test/unit/sample_files/atm_in_files/test_simple_atm_in index 18b6e6b2..cd4aaea4 100644 --- a/test/unit/sample_files/atm_in_files/test_simple_atm_in +++ b/test/unit/sample_files/atm_in_files/test_simple_atm_in @@ -4,7 +4,11 @@ / &fairy_tales - straw_into_gold = 'Rumpelstiltskin' + straw_into_gold = "Rumpelstiltskin" +/ + +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" / &scary_stories diff --git a/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml b/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml index db3b3fac..703451ea 100644 --- a/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml +++ b/test/unit/sample_files/atm_in_files/test_simple_nml_def.xml @@ -57,4 +57,16 @@ + + char*256(4) + more_testing + famous_folk + + They were in movies. + + + "Chico", "Harpo", "Groucho", "Zeppo" + + + diff --git a/test/unit/sample_files/atm_in_files/test_user_in b/test/unit/sample_files/atm_in_files/test_user_in index a00925bf..61b2ae54 100644 --- a/test/unit/sample_files/atm_in_files/test_user_in +++ b/test/unit/sample_files/atm_in_files/test_user_in @@ -7,6 +7,10 @@ straw_into_gold = "Rapunzel" / +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" +/ + &scary_stories monkey_paw = 0 / diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index 083681f2..d119ed73 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -97,6 +97,7 @@ def __init__(self): "CAM_CPPDEFS" : "UNSET", "NTHRDS_ATM" : 1, "RUN_STARTDATE" : "101", + "DIN_LOC_ROOT" : "/NOT/HERE", "feel_lucky" : 1 #For testing } From 7dd3d598e9ee1bfe5c6c910293939da8dcb4ba69 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 11 Feb 2022 15:16:50 -0700 Subject: [PATCH 17/33] Add ParamGen and namelist reader generator tests to Github Actions. 
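[Editor's note: before the CI changes below, here is a minimal, standalone sketch (not part of any commit) of the array-entry formatting that the preceding patch (16/33) adds to AtmInParamGen.write(). The "marx_bros" entry and its value are taken from the new test sample files; the rest simply mirrors the patched logic for a "char*N(M)" type.]

    import re

    # Same pattern the patched write() compiles to detect array types:
    arr_type_regex = re.compile(r"[(][ ]*([0-9 ,]+)[ ]*[)]")

    var = "marx_bros"
    var_type = "char*256(4)"
    val = '"Chico", "Harpo", "Groucho", "Zeppo"'

    array_type = arr_type_regex.search(var_type)
    if array_type:
        # Text before the parenthesized dimension gives the base type:
        base_type = var_type[:array_type.start()].strip()   # -> "char*256"
        nml_str = f" {var} = "
        if "char*" in base_type:
            for elem in val.split(","):
                # Strip whitespace and any quotes added during ParamGen's "reduce" phase:
                elem = elem.strip().replace("'", "").replace('"', "")
                nml_str += f'"{elem}", '
        # Drop the trailing ", ", just as write() does before writing the line:
        print(nml_str[:nml_str.rfind(", ")])
        # ->  marx_bros = "Chico", "Harpo", "Groucho", "Zeppo"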
--- .github/workflows/pr_open_sync_workflow.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr_open_sync_workflow.yml b/.github/workflows/pr_open_sync_workflow.yml index d7b7a1f6..e8817be6 100644 --- a/.github/workflows/pr_open_sync_workflow.yml +++ b/.github/workflows/pr_open_sync_workflow.yml @@ -35,7 +35,9 @@ jobs: pip install pytest # Run "checkout_externals": - name: Checkout externals - run: manage_externals/checkout_externals --externals Externals_CAM.cfg ccpp-framework + run: | + manage_externals/checkout_externals --externals Externals_CAM.cfg ccpp-framework + manage_externals/checkout_externals --externals Externals.cfg cime # Run python unit and doctests: - name: python unit tests run: | @@ -47,12 +49,20 @@ jobs: python cime_config/cam_autogen.py # CAM build cache doctests: python cime_config/cam_build_cache.py + # Namelist reader generator doctests: + python cime_config/create_readnl_files.py # Registry generator doctests: pytest src/data/generate_registry_data.py --doctest-modules + # ParamGen atm_in namelist writer doctests: + pytest cime_config/atm_in_paramgen.py --doctest-modules # Registry generator unit tests: python test/unit/test_registry.py + # Namelist reader autogeneration unit tests + python test/unit/create_readnl_files_tests.py # Physics variable init (phys_init) generator unit tests: python test/unit/write_init_unit_tests.py + # ParamGen atm_in namelist writer unit tests: + python test/unit/test_atm_in_paramgen.py ##### From 7bedb19a6db236850615435d52bf76a1e74442f7 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 11 Feb 2022 16:00:49 -0700 Subject: [PATCH 18/33] Remove python 3.6 from testing list (doesn't work with ParamGen), and fix testing bug for python 3.10 --- .github/workflows/pr_open_sync_workflow.yml | 2 +- test/unit/create_readnl_files_tests.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/pr_open_sync_workflow.yml b/.github/workflows/pr_open_sync_workflow.yml index e8817be6..eaa987bb 100644 --- a/.github/workflows/pr_open_sync_workflow.yml +++ b/.github/workflows/pr_open_sync_workflow.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: #All of these python versions will be used to run tests: - python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python-version: ["3.7", "3.8", "3.9", "3.10"] fail-fast: false steps: # Acquire github action routines: diff --git a/test/unit/create_readnl_files_tests.py b/test/unit/create_readnl_files_tests.py index 8a1a5cc3..be632014 100644 --- a/test/unit/create_readnl_files_tests.py +++ b/test/unit/create_readnl_files_tests.py @@ -353,7 +353,7 @@ def test_bad_namelist_def(self): '--namelist-read-mod', reader_mod] with self.assertRaises(NamelistError) as nerr: with self.assertLogs("test_bad_namelist_def", - level='ERROR') as cmp_log: + level='INFO') as cmp_log: namelist_obj = gen_namelist_files(args, _TMP_DIR, logger) # end with # end with From b4a3629e676768e0aef4e3b71824d28b20982cca Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 8 Mar 2022 16:05:14 -0700 Subject: [PATCH 19/33] Make modifications associated with restricting the python version used in CAMDEN to python 3.7 and later (Github issue #156). Also update the "checkout" Github Action to Version 3. 
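[Editor's note: a cut-down illustration (not part of the commit) of why this patch can drop the old Python-version branch around argparse in ConfigCAM.parse_config_opts(): with Python >= 3.7 now required, allow_abbrev=False is always available, so abbreviated flags such as "--phys" are rejected instead of being ambiguously matched against --physics-suites and --phys_kind. The argument list below is a reduced subset of the real options and the suite name is only an example value.]

    import argparse

    # Reduced version of the parser built in ConfigCAM.parse_config_opts():
    parser = argparse.ArgumentParser(prog="ConfigCAM", allow_abbrev=False)
    parser.add_argument("--physics-suites", type=str, required=True)
    parser.add_argument("--phys_kind", type=str, required=False, default="REAL64")

    args = parser.parse_args(["--physics-suites", "kessler"])
    print(args.physics_suites, args.phys_kind)   # -> kessler REAL64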
--- .github/scripts/branch_pr_issue_closer.py | 10 ++++---- .github/scripts/pr_mod_file_tests.py | 6 ++--- .github/workflows/branch_push_workflow.yml | 2 +- .github/workflows/pr_open_sync_workflow.yml | 4 ++-- cime_config/cam_config.py | 26 +++++++++++++-------- cime_config/cam_config_classes.py | 5 +--- test/unit/cam_config_unit_tests.py | 7 ------ test/unit/test_registry.py | 8 +------ test/unit/write_init_unit_tests.py | 8 +------ 9 files changed, 30 insertions(+), 46 deletions(-) diff --git a/.github/scripts/branch_pr_issue_closer.py b/.github/scripts/branch_pr_issue_closer.py index b91914de..3dcdfdd9 100755 --- a/.github/scripts/branch_pr_issue_closer.py +++ b/.github/scripts/branch_pr_issue_closer.py @@ -101,7 +101,7 @@ def end_script(msg): """ Prints message to screen, and then exits script. """ - print("\n{}\n".format(msg)) + print(f"\n{msg}\n") print("Issue closing check has completed successfully.") sys.exit(0) @@ -173,7 +173,7 @@ def _main_prog(): first_word = post_msg_word_list[0] #Print merged pr number to screen: - print("Merged PR: {}".format(first_word)) + print(f"Merged PR: {first_word}") try: #Try assuming the word is just a number: @@ -364,7 +364,7 @@ def _main_prog(): end_script(endmsg) #Print project name associated with merged PR: - print("merged PR project name: {}".format(proj_mod_name)) + print(f"merged PR project name: {proj_mod_name}") #++++++++++++++++++++++++++++++++++++++++ #Extract repo project "To do" card issues @@ -467,7 +467,7 @@ def _main_prog(): cam_issue = cam_repo.get_issue(number=issue_num) #Close issue: cam_issue.edit(state='closed') - print("Issue #{} has been closed.".format(issue_num)) + print(f"Issue #{issue_num} has been closed.") else: #Extract card id from id dictionary: if issue_num in proj_issue_card_ids: @@ -498,7 +498,7 @@ def _main_prog(): #Close Pull Request: cam_pull.edit(state='closed') - print("Pull Request #{} has been closed.".format(pull_num)) + print(f"Pull Request #{pull_num} has been closed.") #++++++++++ #End script diff --git a/.github/scripts/pr_mod_file_tests.py b/.github/scripts/pr_mod_file_tests.py index 7cab01a0..8fe3c86b 100755 --- a/.github/scripts/pr_mod_file_tests.py +++ b/.github/scripts/pr_mod_file_tests.py @@ -220,9 +220,9 @@ def _main_prog(): print("+++++++++++++++++++++++++++++++++++++++++++++++") #Raise test failure exception: - fail_msg = "One or more files are below allowed pylint " \ - "score of {}.\nPlease see pylint message(s) " \ - "above for possible fixes." + fail_msg = "One or more files are below allowed pylint " + fail_msg += f"score of {pylev}.\nPlease see pylint message(s) " + fail_msg += "above for possible fixes." 
raise PrModTestFail(fail_msg) else: #All tests have passed, so exit normally: diff --git a/.github/workflows/branch_push_workflow.yml b/.github/workflows/branch_push_workflow.yml index aee716d9..5dd2b836 100644 --- a/.github/workflows/branch_push_workflow.yml +++ b/.github/workflows/branch_push_workflow.yml @@ -20,7 +20,7 @@ jobs: runs-on: ubuntu-latest steps: # Acquire github action routines - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Acquire specific version of python - name: Set up Python 3.10 uses: actions/setup-python@v1 diff --git a/.github/workflows/pr_open_sync_workflow.yml b/.github/workflows/pr_open_sync_workflow.yml index eaa987bb..5a8c16ec 100644 --- a/.github/workflows/pr_open_sync_workflow.yml +++ b/.github/workflows/pr_open_sync_workflow.yml @@ -17,7 +17,7 @@ jobs: fail-fast: false steps: # Acquire github action routines: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # Acquire specific version of python: - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -75,7 +75,7 @@ jobs: runs-on: ubuntu-latest steps: # acquire github action routines - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 # acquire specific version of python - name: Set up Python 3.8 uses: actions/setup-python@v1 diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 7656df3f..966cdf3e 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -87,6 +87,13 @@ def __init__(self, case, case_log): and associated dictionary. """ + # Check if using python 3.7 or later. If not, + # then end build here: + if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] < 7): + emsg = "CAM requires python 3.7 or later, currently using python version" + emsg += f" {sys.version_info[0]}.{sys.version_info[1]}" + raise SystemError(emsg) + # Read in needed case variables atm_grid = case.get_value("ATM_GRID") # Atmosphere (CAM) grid cam_config_opts = case.get_value("CAM_CONFIG_OPTS") # CAM configuration options @@ -527,31 +534,30 @@ def parse_config_opts(cls, config_opts, test_mode=False): """ cco_str = "CAM_CONFIG_OPTS" - #Don't allow abbreviations if using python 3.5 or greater: - if sys.version_info[0] < 3 or (sys.version_info[0] == 3 and sys.version_info[1] < 5): - parser = argparse.ArgumentParser(description=cco_str, - prog="ConfigCAM", - epilog="Allowed values of "+cco_str) - else: - parser = argparse.ArgumentParser(description=cco_str, - prog="ConfigCAM", allow_abbrev=False, - epilog="Allowed values of "+cco_str) - + #Create parser object: + parser = argparse.ArgumentParser(description=cco_str, + prog="ConfigCAM", allow_abbrev=False, + epilog="Allowed values of "+cco_str) + #Add argument options: parser.add_argument("--physics-suites", "-physics-suites", type=str, required=True, metavar='', help="""Semicolon-separated list of Physics Suite Definition Files (SDFs)""") + parser.add_argument("--dyn", "-dyn", metavar='', type=str, required=False, default="", help="""Name of dycore""") + parser.add_argument("--analytic_ic", "-analytic_ic", action='store_true', required=False, help="""Flag to turn on Analytic Initial Conditions (ICs).""") + parser.add_argument("--dyn_kind", "-dyn_kind", type=str, required=False, default="REAL64", help="""Fortran kind used in dycore for type real.""") + parser.add_argument("--phys_kind", "-phys_kind", type=str, required=False, default="REAL64", help="""Fortran kind used in physics for type real.""") diff --git a/cime_config/cam_config_classes.py 
b/cime_config/cam_config_classes.py index a0e7a7e6..3b5e603e 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -12,9 +12,6 @@ import re from collections import OrderedDict -# Determine regular rexpression type (for later usage in check_string_val) -REGEX_TYPE = type(re.compile(r" ")) - ############################################################################### # Error-handling classes ############################################################################### @@ -296,7 +293,7 @@ def _check_string_val(name, val, valid_vals=None): emsg += "does not match any of the valid values: '{}'" return emsg.format(val, name, valid_vals) # End if - elif isinstance(valid_vals, REGEX_TYPE): + elif isinstance(valid_vals, re.Pattern): # If a regular expression object, then check that # value is matched by the expression if valid_vals.match(val) is None: diff --git a/test/unit/cam_config_unit_tests.py b/test/unit/cam_config_unit_tests.py index 1acca407..89cf9f45 100644 --- a/test/unit/cam_config_unit_tests.py +++ b/test/unit/cam_config_unit_tests.py @@ -161,13 +161,6 @@ def test_config_print_config_check(self): is skipped. """ - if sys.version_info[0] < 3: - raise unittest.SkipTest("This test doesn't work with Python 2") - - if sys.version_info[1] < 4: - raise unittest.SkipTest("This test requires Python version 3.4 or later") - - #Create new logger for print_config test: print_log = logging.getLogger("print_config") diff --git a/test/unit/test_registry.py b/test/unit/test_registry.py index f32ac812..f3c194dd 100644 --- a/test/unit/test_registry.py +++ b/test/unit/test_registry.py @@ -28,13 +28,7 @@ _TMP_DIR = os.path.join(__TEST_DIR, "tmp") _SRC_MOD_DIR = os.path.join(_TMP_DIR, "SourceMods") -# Find python version -PY3 = sys.version_info[0] > 2 -if PY3: - __FILE_OPEN = (lambda x: open(x, 'r', encoding='utf-8')) -else: - __FILE_OPEN = (lambda x: open(x, 'r')) -# End if +__FILE_OPEN = (lambda x: open(x, 'r', encoding='utf-8')) if not os.path.exists(__REGISTRY_DIR): raise ImportError("Cannot find registry directory") diff --git a/test/unit/write_init_unit_tests.py b/test/unit/write_init_unit_tests.py index e1501679..3972571e 100644 --- a/test/unit/write_init_unit_tests.py +++ b/test/unit/write_init_unit_tests.py @@ -30,13 +30,7 @@ _SRC_MOD_DIR = os.path.join(_PRE_TMP_DIR, "SourceMods") _INC_SEARCH_DIRS = [_SRC_MOD_DIR, __REGISTRY_DIR] -# Find python version -PY3 = sys.version_info[0] > 2 -if PY3: - __FILE_OPEN = (lambda x: open(x, 'r', encoding='utf-8')) -else: - __FILE_OPEN = (lambda x: open(x, 'r', encoding='utf-8')) -# End if +__FILE_OPEN = (lambda x: open(x, 'r', encoding='utf-8')) #Check for all necessary directories: if not os.path.exists(__CCPP_DIR): From e46823aeed27c48d67d113765bd745fc11695e06 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Wed, 9 Mar 2022 12:20:28 -0700 Subject: [PATCH 20/33] Fix pylint errors, and update python versions used in Github Actions. 
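Most of the pylint cleanups in this patch follow a handful of recurring
idioms: literal list/dict constructors, f-strings in place of str.format(),
maxsplit=1 when only the first line of a message is compared, and explicit
file encodings to satisfy pylint's unspecified-encoding check. A short
illustrative sketch of those idioms (the values below are hypothetical, not
taken from the diff):

    retcode = 0
    err_text = "Invalid registry file\nextra detail"
    pyfiles = []                                      # literal instead of list()
    proj_issues_count = {}                            # literal instead of dict()
    amsg = f"Test failure: retcode={retcode}"         # f-string instead of str.format()
    first_line = err_text.split('\n', maxsplit=1)[0]  # only the first line is needed
    with open("example.txt", "w", encoding="utf-8") as ofile:  # explicit encoding
        ofile.write(amsg)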
--- .github/scripts/branch_pr_issue_closer.py | 16 +- .github/scripts/pr_mod_file_tests.py | 14 +- .github/workflows/pr_open_sync_workflow.yml | 4 +- test/unit/test_registry.py | 156 +++++++++--------- test/unit/write_init_unit_tests.py | 174 ++++++++++---------- 5 files changed, 178 insertions(+), 186 deletions(-) diff --git a/.github/scripts/branch_pr_issue_closer.py b/.github/scripts/branch_pr_issue_closer.py index 3dcdfdd9..c7d35e4e 100755 --- a/.github/scripts/branch_pr_issue_closer.py +++ b/.github/scripts/branch_pr_issue_closer.py @@ -53,9 +53,9 @@ def project_card_move(oa_token, column_id, card_id): """ #create required argument strings from inputs: - github_oa_header = ''' "Authorization: token {0}" '''.format(oa_token) - github_url_str = '''https://api.github.com/projects/columns/cards/{0}/moves'''.format(card_id) - json_post_inputs = ''' '{{"position":"top", "column_id":{}}}' '''.format(column_id) + github_oa_header = f''' "Authorization: token {oa_token}" ''' + github_url_str = f'''https://api.github.com/projects/columns/cards/{card_id}/moves''' + json_post_inputs = f''' '{{"position":"top", "column_id":{column_id}}}' ''' #Create curl command line string: curl_cmdline = '''curl -H '''+github_oa_header+''' -H "Accept: application/vnd.github.inertia-preview+json" -X POST -d '''+\ @@ -267,10 +267,10 @@ def _main_prog(): issue_pattern = re.compile(r'#[0-9]+(\s|,|$)|.') #Create new "close" issues list: - close_issues = list() + close_issues = [] #Create new "closed" PR list: - close_pulls = list() + close_pulls = [] #Search text right after keywords for possible issue numbers: for match in word_matches: @@ -371,13 +371,13 @@ def _main_prog(): #++++++++++++++++++++++++++++++++++++++++ #Initalize issue counting dictionary: - proj_issues_count = dict() + proj_issues_count = {} #Initalize issue id to project card id dictionary: - proj_issue_card_ids = dict() + proj_issue_card_ids = {} #Initialize list for issues that have already been closed: - already_closed_issues = list() + already_closed_issues = [] #Loop over all repo projects: for project in projects: diff --git a/.github/scripts/pr_mod_file_tests.py b/.github/scripts/pr_mod_file_tests.py index 8fe3c86b..9fb1ed0a 100755 --- a/.github/scripts/pr_mod_file_tests.py +++ b/.github/scripts/pr_mod_file_tests.py @@ -16,8 +16,6 @@ import sys import os -import subprocess -import shlex import argparse from stat import S_ISREG @@ -63,7 +61,7 @@ def _file_is_python(filename): #If no ".py" extension exists, then #open the file and look for a shabang #that contains the word "python". - with open(filename, "r") as mod_file: + with open(filename, "r", encoding='utf-8') as mod_file: #Loop over lines in file: for line in mod_file: @@ -170,7 +168,7 @@ def _main_prog(): #++++++++++++++++++++++++++++++ #Create empty list to store python files: - pyfiles = list() + pyfiles = [] #Extract Github file objects: file_obj_list = pull_req.get_files() @@ -224,10 +222,10 @@ def _main_prog(): fail_msg += f"score of {pylev}.\nPlease see pylint message(s) " fail_msg += "above for possible fixes." 
raise PrModTestFail(fail_msg) - else: - #All tests have passed, so exit normally: - print("All pylint tests passed!") - sys.exit(0) + + #All tests have passed, so exit normally: + print("All pylint tests passed!") + sys.exit(0) #If no python files exist in PR, then exit script: else: diff --git a/.github/workflows/pr_open_sync_workflow.yml b/.github/workflows/pr_open_sync_workflow.yml index 5a8c16ec..3226bd92 100644 --- a/.github/workflows/pr_open_sync_workflow.yml +++ b/.github/workflows/pr_open_sync_workflow.yml @@ -77,10 +77,10 @@ jobs: # acquire github action routines - uses: actions/checkout@v3 # acquire specific version of python - - name: Set up Python 3.8 + - name: Set up Python 3.10 uses: actions/setup-python@v1 with: - python-version: '3.8' # Use python 3.8 + python-version: "3.10" # Use python 3.8 # install required python packages - name: Install dependencies run: | diff --git a/test/unit/test_registry.py b/test/unit/test_registry.py index f3c194dd..16bfb902 100644 --- a/test/unit/test_registry.py +++ b/test/unit/test_registry.py @@ -99,20 +99,20 @@ def test_good_simple_registry(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure: retcode={}".format(retcode) + amsg = f"Test failure: retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure: Found {} files, expected 1".format(flen) + amsg = f"Test failure: Found {flen} files, expected 1" self.assertEqual(flen, 1, msg=amsg) # Make sure each output file was created - amsg = "{} does not exist".format(out_meta) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) @@ -141,12 +141,11 @@ def test_good_ddt_registry(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure for dycore = {}, retcode={}".format(dycore, - retcode) + amsg = f"Test failure for dycore = {dycore}, retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure for {} dycore: Found {} files, expected 1" - self.assertEqual(flen, 1, msg=amsg.format(dycore, flen)) + amsg = f"Test failure for {dycore} dycore: Found {flen} files, expected 1" + self.assertEqual(flen, 1, msg=amsg) # Make sure each output file was created if os.path.exists(gen_meta): os.rename(gen_meta, out_meta) @@ -154,15 +153,15 @@ def test_good_ddt_registry(self): if os.path.exists(gen_source): os.rename(gen_source, out_source) # End if - amsg = "{} does not exist".format(out_meta) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - 
amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) # End for @@ -190,20 +189,20 @@ def test_good_ddt_registry2(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure: retcode={}".format(retcode) + amsg = f"Test failure: retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure: Found {} files, expected 1".format(flen) + amsg = f"Test failure: Found {flen} files, expected 1" self.assertEqual(flen, 1, msg=amsg) - amsg = "{} does not exist".format(out_meta) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) @@ -230,20 +229,20 @@ def test_good_array(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure: retcode={}".format(retcode) + amsg = f"Test failure: retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure: Found {} files, expected 1".format(flen) + amsg = f"Test failure: Found {flen} files, expected 1" self.assertEqual(flen, 1, msg=amsg) - amsg = "{} does not exist".format(out_meta) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) @@ -268,27 +267,27 @@ def test_good_metadata_file_registry(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure for SE dycore, retcode={}".format(retcode) + amsg = f"Test failure for SE dycore, retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure for SE dycore: Found {} files, expected 2" - self.assertEqual(flen, 2, msg=amsg.format(flen)) - amsg = "{} does not exist".format(out_meta) + amsg = f"Test failure for SE dycore: Found {flen} files, expected 2" + self.assertEqual(flen, 2, msg=amsg) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does 
not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) # Check that the metadata file has the correct number of variables mfile = files[1] mvars = mfile.variable_list() num_vars = len(mvars) - amsg = "Expected 14 metadata variables, found {}".format(num_vars) + amsg = f"Expected 14 metadata variables, found {num_vars}" self.assertEqual(num_vars, 14, msg=amsg) def test_diff_src_root_metadata_file_registry(self): @@ -322,27 +321,27 @@ def test_diff_src_root_metadata_file_registry(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure for SE dycore, retcode={}".format(retcode) + amsg = f"Test failure for SE dycore, retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure for SE dycore: Found {} files, expected 2" - self.assertEqual(flen, 2, msg=amsg.format(flen)) - amsg = "{} does not exist".format(out_meta) + amsg = f"Test failure for SE dycore: Found {flen} files, expected 2" + self.assertEqual(flen, 2, msg=amsg) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) # Check that the metadata file has the correct number of variables mfile = files[1] mvars = mfile.variable_list() num_vars = len(mvars) - amsg = "Expected 14 metadata variables, found {}".format(num_vars) + amsg = f"Expected 14 metadata variables, found {num_vars}" self.assertEqual(num_vars, 14, msg=amsg) def test_SourceMods_metadata_file_registry(self): @@ -380,27 +379,27 @@ def test_SourceMods_metadata_file_registry(self): error_on_no_validate=True) # Check return code - amsg = "Test failure for SE dycore, retcode={}".format(retcode) + amsg = f"Test failure for SE dycore, retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure for SE dycore: Found {} files, expected 2" - self.assertEqual(flen, 2, msg=amsg.format(flen)) - amsg = "{} does not exist".format(out_meta) + amsg = f"Test failure for SE dycore: Found {flen} files, expected 2" + self.assertEqual(flen, 2, msg=amsg) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) # Check that the metadata file has the correct number of variables mfile = files[1] mvars = mfile.variable_list() num_vars = len(mvars) - amsg = "Expected 14 metadata variables, 
found {}".format(num_vars) + amsg = f"Expected 16 metadata variables, found {num_vars}" self.assertEqual(num_vars, 16, msg=amsg) def test_good_complete_registry(self): @@ -430,22 +429,22 @@ def test_good_complete_registry(self): error_on_no_validate=True) # Check return code - amsg = "Test failure: retcode={}".format(retcode) + amsg = f"Test failure: retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure: Found {} files, expected 2".format(flen) + amsg = f"Test failure: Found {flen} files, expected 2" self.assertEqual(flen, 2, msg=amsg) # Make sure each output file was created - amsg = "{} does not exist".format(out_meta) + amsg = f"{out_meta} does not exist" self.assertTrue(os.path.exists(out_meta), msg=amsg) - amsg = "{} does not exist".format(out_source) + amsg = f"{out_source} does not exist" self.assertTrue(os.path.exists(out_source), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) @@ -453,7 +452,7 @@ def test_good_complete_registry(self): mfile = files[1] mvars = mfile.variable_list() num_vars = len(mvars) - amsg = "Expected 14 metadata variables, found {}".format(num_vars) + amsg = f"Expected 14 metadata variables, found {num_vars}" self.assertEqual(num_vars, 14, msg=amsg) def test_no_metadata_file_registry(self): @@ -490,10 +489,9 @@ def test_no_metadata_file_registry(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check exception message - xml_loc = os.path.abspath(bad_filename) - emsg = ("Metadata file, '{}', does not exist".format(xml_loc)) - self.assertEqual(emsg.format(out_source_name), - str(verr.exception).split('\n')[0]) + bad_meta = os.path.abspath(bad_filename) + emsg = (f"Metadata file, '{bad_meta}', does not exist") + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -537,18 +535,18 @@ def test_parameter(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check return code - amsg = "Test failure: retcode={}".format(retcode) + amsg = f"Test failure: retcode={retcode}" self.assertEqual(retcode, 0, msg=amsg) flen = len(files) - amsg = "Test failure: Found {} files, expected 1".format(flen) + amsg = f"Test failure: Found {flen} files, expected 1" self.assertEqual(flen, 1, msg=amsg) # Make sure each output file was created self.assertTrue(os.path.exists(out_meta)) self.assertTrue(os.path.exists(out_source)) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(out_meta, in_meta) + amsg = f"{out_meta} does not match {in_meta}" self.assertTrue(filecmp.cmp(out_meta, in_meta, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(out_source, in_source) + amsg = f"{out_source} does not match {in_source}" self.assertTrue(filecmp.cmp(out_source, in_source, shallow=False), msg=amsg) @@ -583,10 +581,8 @@ def test_bad_registry_version(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check exception message - xml_loc = os.path.join(_TMP_DIR, "reg_bad_version.xml") - emsg = ("Invalid registry file, {}".format(xml_loc)) - 
self.assertEqual(emsg.format(out_source_name), - str(verr.exception).split('\n')[0]) + emsg = (f"Invalid registry file, {filename}") + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -629,10 +625,8 @@ def test_missing_standard_name(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check exception message - xml_loc = os.path.join(_TMP_DIR, "reg_no_std_name.xml") - emsg = ("Invalid registry file, {}".format(xml_loc)) - self.assertEqual(emsg.format(out_source_name), - str(verr.exception).split('\n')[0]) + emsg = (f"Invalid registry file, {filename}") + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -675,10 +669,8 @@ def test_bad_dimensions(self): loglevel=logging.ERROR, error_on_no_validate=True) # Check exception message - xml_loc = os.path.join(_TMP_DIR, "reg_bad_dimensions.xml") - emsg = ("Invalid registry file, {}".format(xml_loc)) - self.assertEqual(emsg.format(out_source_name), - str(verr.exception).split('\n')[0]) + emsg = (f"Invalid registry file, {filename}") + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -725,7 +717,7 @@ def test_unknown_dimensions(self): # End with # Check exception message emsg = "Dimension, 'vertical_dimension', not found for 'u'" - self.assertEqual(emsg.format(out_source_name), str(verr.exception)) + self.assertEqual(emsg, str(verr.exception)) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -771,7 +763,7 @@ def test_no_init_value(self): # End with # Check exception message emsg = "parameter, 'u', does not have an initial value" - self.assertEqual(emsg.format(out_source_name), str(verr.exception)) + self.assertEqual(emsg, str(verr.exception)) # Make sure no output files were created self.assertFalse(os.path.exists(out_meta)) self.assertFalse(os.path.exists(out_source)) @@ -1157,8 +1149,8 @@ def test_bad_metadata_file_dup_section(self): run_env) # Check exception message emsg = "module, 'physics_types_simple', table already contains " - emsg += "'physics_types_simple', at {}:36".format(infilename) - self.assertEqual(emsg, str(verr.exception).split('\n')[0]) + emsg += f"'physics_types_simple', at {infilename}:36" + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) def test_bad_metadata_file_no_table(self): """Test response to bad metadata file with no table. 
@@ -1177,7 +1169,7 @@ def test_bad_metadata_file_no_table(self): run_env) # Check exception message emsg = "Missing metadata section ([ccpp-arg-table]) for physics_types_simple" - self.assertEqual(emsg, str(verr.exception).split('\n')[0]) + self.assertEqual(emsg, str(verr.exception).split('\n', maxsplit=1)[0]) if __name__ == '__main__': unittest.main() diff --git a/test/unit/write_init_unit_tests.py b/test/unit/write_init_unit_tests.py index 3972571e..e35f603f 100644 --- a/test/unit/write_init_unit_tests.py +++ b/test/unit/write_init_unit_tests.py @@ -57,10 +57,8 @@ # pylint: disable=wrong-import-position from ccpp_capgen import capgen -from ccpp_database_obj import CCPPDatabaseObj from framework_env import CCPPFrameworkEnv from generate_registry_data import gen_registry -from parse_tools import CCPPError import write_init_files as write_init # pylint: enable=wrong-import-position @@ -157,10 +155,10 @@ def test_simple_reg_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _ , files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types = ['kind_phys=REAL64'] @@ -184,20 +182,20 @@ def test_simple_reg_write_init(self): phys_input_filename=pi_name) # Check return message: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_in, check_init_out, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_in, phys_input_out, shallow=False), msg=amsg) @@ -239,10 +237,10 @@ def test_no_reqvar_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types = ['kind_phys=REAL64'] @@ -266,20 +264,20 @@ def test_no_reqvar_write_init(self): phys_input_filename=pi_name) # Check return message: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg 
= "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_in, check_init_out, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_in, phys_input_out, shallow=False), msg=amsg) @@ -349,20 +347,20 @@ def test_protected_reg_write_init(self): phys_input_filename=pi_name) # Check return message: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_in, check_init_out, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_in, phys_input_out, shallow=False), msg=amsg) @@ -406,10 +404,10 @@ def test_host_input_var_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types = ['kind_phys=REAL64'] @@ -432,17 +430,21 @@ def test_host_input_var_write_init(self): phys_check_filename=vic_name, phys_input_filename=pi_name) + # Check return message + rmsg = "" + self.assertEqual(rmsg, retmsg) + # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_in, check_init_out, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_in, phys_input_out, shallow=False), msg=amsg) @@ -514,10 +516,10 @@ def test_no_horiz_var_write_init(self): phys_input_filename=pi_name) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, 
shallow=False), msg=amsg) @@ -589,10 +591,10 @@ def test_scalar_var_write_init(self): phys_input_filename=pi_name) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -667,10 +669,10 @@ def test_4d_var_write_init(self): self.assertEqual(rmsg, retmsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -711,10 +713,10 @@ def test_ddt_reg_write_init(self): remove_files([out_source, out_meta, cap_datafile, check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types=['kind_phys=REAL64'] @@ -737,20 +739,20 @@ def test_ddt_reg_write_init(self): phys_input_filename=pi_name) # Check return code: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_in, check_init_out, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_in, phys_input_out, shallow=False), msg=amsg) @@ -819,20 +821,20 @@ def test_ddt2_reg_write_init(self): phys_input_filename=pi_name) # Check return code: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" 
self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -874,10 +876,10 @@ def test_ddt_array_reg_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types=['kind_phys=REAL64'] @@ -900,20 +902,20 @@ def test_ddt_array_reg_write_init(self): phys_input_filename=pi_name) # Check return code: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -955,10 +957,10 @@ def test_meta_file_reg_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types=['kind_phys=REAL64'] @@ -981,20 +983,20 @@ def test_meta_file_reg_write_init(self): phys_input_filename=pi_name) # Check return code: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -1036,10 +1038,10 @@ def test_parameter_reg_write_init(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 
'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types=['kind_phys=REAL64'] @@ -1062,20 +1064,20 @@ def test_parameter_reg_write_init(self): phys_input_filename=pi_name) # Check return code: - amsg = "Test failure: retmsg={}".format(retmsg) + amsg = f"Test failure: retmsg={retmsg}" self.assertEqual(retmsg, '', msg=amsg) # Make sure each output file was created: - amsg = "{} does not exist".format(check_init_out) + amsg = f"{check_init_out} does not exist" self.assertTrue(os.path.exists(check_init_out), msg=amsg) - amsg = "{} does not exist".format(phys_input_out) + amsg = f"{phys_input_out} does not exist" self.assertTrue(os.path.exists(phys_input_out), msg=amsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) @@ -1120,10 +1122,10 @@ def test_bad_vertical_dimension(self): check_init_out, phys_input_out]) # Generate registry files: - _, files, _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, - _SRC_MOD_DIR, _CAM_ROOT, - loglevel=logging.ERROR, - error_on_no_validate=True) + _ = gen_registry(filename, 'se', {}, _TMP_DIR, 3, + _SRC_MOD_DIR, _CAM_ROOT, + loglevel=logging.ERROR, + error_on_no_validate=True) # Generate CCPP capgen files: kind_types=['kind_phys=REAL64'] @@ -1150,10 +1152,10 @@ def test_bad_vertical_dimension(self): self.assertEqual(emsg, retmsg) # For each output file, make sure it matches input file - amsg = "{} does not match {}".format(check_init_out, check_init_in) + amsg = f"{check_init_out} does not match {check_init_in}" self.assertTrue(filecmp.cmp(check_init_out, check_init_in, shallow=False), msg=amsg) - amsg = "{} does not match {}".format(phys_input_out, phys_input_in) + amsg = f"{phys_input_out} does not match {phys_input_in}" self.assertTrue(filecmp.cmp(phys_input_out, phys_input_in, shallow=False), msg=amsg) From 6aef116ce998f512fea6a12c5885ac0a318555ad Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Thu, 14 Apr 2022 09:34:31 -0600 Subject: [PATCH 21/33] Add array index duplication checking, along with some code review requests. 
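The duplicate checking added in this patch tracks, for each array variable
set in user_nl_cam, one set of already-used indices per array dimension (the
__set_index_vals structure described in the diff below). A minimal sketch of
that bookkeeping; the function and variable names here are illustrative and
not the patch's actual API:

    def record_index(set_index_vals, var_name, dim, index):
        """Record that 'index' of dimension 'dim' was set for 'var_name'.
        Return False if that index was already used (a duplicate entry)."""
        dim_sets = set_index_vals.setdefault(var_name, [])
        # Grow the per-dimension list of index sets as needed:
        while len(dim_sets) <= dim:
            dim_sets.append(set())
        if index in dim_sets[dim]:
            return False
        dim_sets[dim].add(index)
        return True

    # Example usage with a hypothetical namelist array variable:
    used = {}
    assert record_index(used, "fincl1", 0, 3)
    assert not record_index(used, "fincl1", 0, 3)  # same index twice -> duplicate

Note that the new sample files (e.g. test_user_nl_allow_dupl_var) suggest some
variables are allowed to repeat entries, so treat this only as the core idea.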
--- cime_config/atm_in_paramgen.py | 975 ++++++++++++++++-- cime_config/buildnml | 61 +- cime_config/user_nl_cam | 11 +- .../atm_in_files/test_multi_xml_in | 4 + .../atm_in_files/test_nl_duplicate_atm_in | 40 + .../atm_in_files/test_third_nml_def.xml | 23 + .../sample_files/atm_in_files/test_user_in | 2 +- .../atm_in_files/test_user_nl_allow_dupl_var | 22 + ...nl_bad_comment => test_user_nl_bad_equals} | 6 +- ...er_nl_double_var => test_user_nl_dupl_var} | 8 +- ...r_nl_bad_format => test_user_nl_no_equals} | 5 - .../atm_in_files/test_user_nl_simple | 11 +- .../atm_in_files/test_user_nl_undefined_var | 5 - test/unit/test_atm_in_paramgen.py | 620 +++++++++-- 14 files changed, 1535 insertions(+), 258 deletions(-) create mode 100644 test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in create mode 100644 test/unit/sample_files/atm_in_files/test_third_nml_def.xml create mode 100644 test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var rename test/unit/sample_files/atm_in_files/{test_user_nl_bad_comment => test_user_nl_bad_equals} (55%) rename test/unit/sample_files/atm_in_files/{test_user_nl_double_var => test_user_nl_dupl_var} (58%) rename test/unit/sample_files/atm_in_files/{test_user_nl_bad_format => test_user_nl_no_equals} (66%) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 32100391..c0ec2a11 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -21,14 +21,29 @@ _CIME_CONF_DIR = os.path.abspath(os.path.dirname(__file__)) _CIME_ROOT = os.path.join(_CIME_CONF_DIR, os.pardir, "cime") +_PARAMGEN_ROOT = os.path.join(_CIME_ROOT, "scripts", "lib", "CIME", "ParamGen") -if not os.path.exists(_CIME_ROOT): - raise SystemExit("ERROR: Cannot find 'cime' directory. Did you run checkout_externals?") -sys.path.append(os.path.join(_CIME_ROOT, "scripts", "lib", "CIME", "ParamGen")) +if not os.path.exists(_PARAMGEN_ROOT): + _EMSG = f"ERROR: Cannot find '{_PARAMGEN_ROOT}' directory. Did you run checkout_externals?" + raise SystemExit(_EMSG) +#End if +sys.path.append(_PARAMGEN_ROOT) #pylint: disable=wrong-import-position from paramgen import ParamGen #pylint: enable=wrong-import-position +#Regular expression used by "remove_user_nl_comment" function: +_QUOTE_REGEX = re.compile(r"\".*?\"|'.*?'") + +#Regular expression used by the "write" and "append_user_nl_file" +#methods to determine if the variable is an array, and what +#the dimensions of the array are: +_ARRAY_TYPE_REGEX = re.compile(r"[(][ ]*([0-9 ,]+)[ ]*[)]") + +#Regular expression used to determine array indices in +#"find_arr_indices" function: +_ARR_INDEX_REGEX = re.compile(r"\((.+?)\)") + ################################################################ class AtmInParamGenError(ValueError): @@ -104,7 +119,7 @@ def _is_nml_logical_true(varname, var_val): atm_in_paramgen.AtmInParamGenError:... XML namelist logical variable, 'test', must have a value of true, false, 1, or 0, not 3 - 13. Check that a non-boolean, string or integer type returns an error: + 13. Check that an unsupported type returns an error: >>> _is_nml_logical_true("test", 13.03) # doctest: +ELLIPSIS Traceback (most recent call last): ... 
@@ -115,29 +130,36 @@ def _is_nml_logical_true(varname, var_val): if isinstance(var_val, bool): return var_val + #End if if isinstance(var_val, str): if var_val.lower() in {"true", ".true.", "1"}: return True + #End if if var_val.lower() in {"false", ".false.", "0"}: return False + #End if #Raise error if no match was found: emsg = f"\nXML namelist logical variable, '{varname}'" emsg += ", must have a value of true, false, 1, or 0, not" emsg += f" '{var_val}'" raise AtmInParamGenError(emsg) + #End if if isinstance(var_val, int): if var_val == 1: return True + #End if if var_val == 0: return False + #End if #Raise error if no match was found: emsg = f"\nXML namelist logical variable, '{varname}'" emsg += ", must have a value of true, false, 1, or 0, not" emsg += f" {var_val}" raise AtmInParamGenError(emsg) + #End if #Type is un-recognizeda, so raise an error: emsg = f"\nXML namelist variable '{varname}' must" @@ -145,6 +167,232 @@ def _is_nml_logical_true(varname, var_val): emsg += f" not {type(var_val).__name__}." raise AtmInParamGenError(emsg) +##### + +def remove_user_nl_comment(user_string): + + """ + Searches a one-line input string for a comment delimiter, + and then returns the string with all text after the delimiter + removed. + ---------- + user_string -> String that will be searched and processed for comments + + doctests: + + 1. Check that a string with no comment delimiters returns full string: + >>> remove_user_nl_comment("bananas") + 'bananas' + + 2. Check that a string with no comments outside quotes returns full string: + >>> remove_user_nl_comment(" '!ban!anas!' ") + " '!ban!anas!' " + + 3. Check that a string with no quotes but a comment returns string with no comment: + >>> remove_user_nl_comment("bananas !But not apples") + 'bananas ' + + 4. Check that a string with quotes and a comment returns string sans comment: + >>> remove_user_nl_comment(" 'bananas' !But not apples") + " 'bananas' " + + 5. Check that a string with a quoted comment and real comments returns proper string: + >>> remove_user_nl_comment(" '!ba!na!nas!' !But not apples") + " '!ba!na!nas!' " + + 6. Check that a string with a quoted comment and a real comment with multiple delimiters + returns the proper string: + >>> remove_user_nl_comment(" '!bananas!' !But not! apples!") + " '!bananas!' " + + 7. Check that a string with a quoted comment and a commented quote returns the proper string: + >>> remove_user_nl_comment(' "!bananas" !"But not apples" ') + ' "!bananas" ' + + 8. Check that a string with quotes inside quotes and multiple delimiters returns + the proper string: + >>> remove_user_nl_comment(''' "!bana'!'anas""other''fruit!" !But not '!Apples!' ''') + ' "!bana\\'!\\'anas""other\\'\\'fruit!" ' + + 9. Check that an array of strings returns the proper string: + >>> remove_user_nl_comment(" 'bananas', 'apples', 'kiwis' ") + " 'bananas', 'apples', 'kiwis' " + + 10. Check that an array of strings with a comment returns the proper string: + >>> remove_user_nl_comment(" 'bananas', 'apples', 'kiwis', !, and coconuts") + " 'bananas', 'apples', 'kiwis', " + + 11. Check that an array of of strings with comment delimiters and an actual comment + returns the proper string: + >>> remove_user_nl_comment(' , "!bananas", "app!les", "kiwis!", !And "Coconuts"!') + ' , "!bananas", "app!les", "kiwis!", ' + + 12. Check that a line with no comments or strings returns the proper string: + >>> remove_user_nl_comment('5') + '5' + + 13. 
Check that a line with a comment but no internal strings returns the proper string: + >>> remove_user_nl_comment(' .true. !And not .false.') + ' .true. ' + + 14. Check that an array of values with no comment returns the proper string: + >>> remove_user_nl_comment('13.0d0, 15.0d0, 1100.35d0') + '13.0d0, 15.0d0, 1100.35d0' + + 15. Check that an array of values with a comment returns the proper string: + >>> remove_user_nl_comment('13.0d0,! 15.0d0, 1100.35d0') + '13.0d0,' + """ + + #Create empty set for comment-delimiting indices: + comment_delim_indices = set() + + #Search for all comment delimiters (currently just "!"): + for char_idx, char in enumerate(user_string): + if char == "!": + #Add character index to set: + comment_delim_indices.add(char_idx) + #End if + #End for + + #If no comments are present, then return string as-is: + if not comment_delim_indices: + return user_string + #End if + + #Next, check if any single or double quotes are present: + if not "'" in user_string and not '"' in user_string: + #If no quotes, then cut-off string at first delimiter: + return user_string[:sorted(comment_delim_indices)[0]] + #End if + + #Create empty set for all character indices inside quotes: + quote_text_indices = set() + + #Search for all text within quotes: + quoted_text_matches = _QUOTE_REGEX.finditer(user_string) + + #Loop over all matches: + for quote_match in quoted_text_matches: + #Extract min/max indices of match: + index_span = quote_match.span(0) + #Add all indices to set: + for index in range(index_span[0], index_span[1]): + quote_text_indices.add(index) + #End for + #End for + + #Find all comment delimiters outside of quotes: + non_quote_comment = comment_delim_indices.difference(quote_text_indices) + + if not non_quote_comment: + #All comment delimiters are within quotes, + #so return string as-is: + return user_string + #End if + + #Find first comment delimiter outside of quotes. + #Everything to the right of it is part of the comment: + return user_string[:sorted(non_quote_comment)[0]] + +##### + +def user_nl_str_to_int(string, var_name): + + """ + Checks if a string can be converted + into an integer, and if not reports + the relevant error. This function + is only used in the "check_user_nl_var" + function below. + ---------- + string -> string to convert to integer. + var_name -> name of the array variable + associated with the string. + + doctests: + + 1. Check that a string with an integer can be + converted properly: + >>> user_nl_str_to_int("5", "banana") + 5 + + 2. Check that a string with a non-integer can be + convergted properly: + >>> user_nl_str_to_int("a", "banana") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError:... + Invalid array index entry 'a' used for variable 'banana' in 'user_nl_cam'. + + """ + + #Attempt the conversion of the string to an integer: + try: + integer_val = int(string) + except ValueError: + emsg = f"\nInvalid array index entry '{string}' " + emsg += f"used for variable '{var_name}' in 'user_nl_cam'." + raise AtmInParamGenError(emsg) + #End except + + #Return relevant integer value: + return integer_val + +##### + +def check_dim_index(var_name, index_val, dim_size): + + """ + Checks that the user-specified index for the given + variables is within the dimension size limit as + specified in the namelist definition file. + ---------- + var_name -> Name of the array variable + associated with the string. + index_val -> Index value provided by user + dim_size -> Maximum variable dimension size. 
+ + doctests: + + 1. Check that an in-bounds index value + returns nothing: + >>> check_dim_index("banana", 5, 15) + + 2. Check that an index value that is + too small returns the proper error: + >>> check_dim_index("banana", 0, 15) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError:... + Variable 'banana' has index 0 in 'user_nl_cam', which is less than one (1), the minimal index value allowed. + + 3. Check that an index value that is + too large returns the proper error: + >>> check_dim_index("banana", 20, 15) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError:... + Variable 'banana' has index 20 in 'user_nl_cam', which is greater than the max dimension size of 15 + + """ + + #Make sure index is greater than zero: + if index_val <= 0: + emsg = f"\nVariable '{var_name}' has index {index_val}" + emsg += " in 'user_nl_cam', which is less than one (1)," + emsg +=f" the minimal index value allowed." + raise AtmInParamGenError(emsg) + #End if + + #Make sure index is not greater than max value: + if index_val > dim_size: + emsg = f"\nVariable '{var_name}' has index {index_val}" + emsg += " in 'user_nl_cam', which is greater than the" + emsg +=f" max dimension size of {dim_size}" + raise AtmInParamGenError(emsg) + #End if + ################################################################ # MAIN "atm_in" ParamGen class ################################################################ @@ -172,21 +420,30 @@ def __init__(self, pg_data_dict): #Initialize ParamGen directly: super().__init__(pg_data_dict) - #Create namelist var/group dictionary, - #which used by the "append_user_nl_file" + #Create a namelist var/group dictionary, + #which is used by the "append_user_nl_file" #method: - self.var_group_dict = {} + self.__var_group_dict = {} #Create empty dictionaries that will contain - #the namelist definition file and the set + #the namelist definition files and the set #of all namelist groups and variables: - self.nml_def_groups = {} - self.nml_def_vars = {} + self.__nml_def_groups = {} + self.__nml_def_vars = {} #Set variables needed for ParamGen "reduction": self.__case = None self.__atm_attr_dict = None + #Initialize data structure for duplicate array + #checking in user_nl_cam files. 
This structure + #is organized like so: + # dict(var_name : list of sets) + # list size = number of array dimensions specified + # set contains the array indices specified for that + # dimension: + self.__set_index_vals = {} + #### @classmethod @@ -200,10 +457,10 @@ def from_namelist_xml(cls, nml_xml_file): """ #Create ParamGen object using base class: - _pg_xml = ParamGen.from_xml_nml(nml_xml_file, no_duplicates=True) + pg_xml = ParamGen.from_xml_nml(nml_xml_file, no_duplicates=True) #Initialize new "atm_in" object: - atm_in_pg = AtmInParamGen(_pg_xml.data) + atm_in_pg = AtmInParamGen(pg_xml.data) #Check if the new ParamGen object has all of the required #namelist elements: @@ -217,12 +474,14 @@ def from_namelist_xml(cls, nml_xml_file): emsg += "Those entries and missing elements are:\n" for entry_id, missing_elems in missing_elems.items(): emsg += f"{entry_id} : {', '.join(missing_elems)}\n" + #End for raise AtmInParamGenError(emsg) + #End if #---------------- #Initialize file->group/var set dictionary: - atm_in_pg.nml_def_groups[nml_xml_file] = set() - atm_in_pg.nml_def_vars[nml_xml_file] = set() + atm_in_pg.__nml_def_groups[nml_xml_file] = set() + atm_in_pg.__nml_def_vars[nml_xml_file] = set() #Create namelist variable/group dictionary #and associated sets: @@ -231,7 +490,7 @@ def from_namelist_xml(cls, nml_xml_file): for var in atm_in_pg._data[nml_group]: #Check if variable already exists in dictionary: - if var in atm_in_pg.var_group_dict: + if var in atm_in_pg.__var_group_dict: #No duplicate variables are allowed, even if #in separate namelist groups, so raise an error. #Please note that this error should always be @@ -239,18 +498,20 @@ def from_namelist_xml(cls, nml_xml_file): #point something has gone seriously wrong: emsg = f"Namelist entry id '{var}' exists" emsg += f" in namelist group '{nml_group}'" - emsg += f" and '{atm_in_pg.var_group_dict[var]}'\n" + emsg += f" and '{atm_in_pg.__var_group_dict[var]}'\n" emsg += "Namelist variables can belong to only one group." 
raise SystemError(emsg) + #End if #If not, then add variable and group to dictionary: - atm_in_pg.var_group_dict[var] = nml_group + atm_in_pg.__var_group_dict[var] = nml_group #Add namelist groups and variables to their #respective sets: - atm_in_pg.nml_def_groups[nml_xml_file].add(nml_group) - atm_in_pg.nml_def_vars[nml_xml_file].add(var) - + atm_in_pg.__nml_def_groups[nml_xml_file].add(nml_group) + atm_in_pg.__nml_def_vars[nml_xml_file].add(var) + #End for + #End for #---------------- #Return object: @@ -310,63 +571,462 @@ def append_atm_in_pg(self, atm_pg_obj): atm_pg_obj -> An AtmInParamGen object """ + #Loop over all XML files associated with input atm_pg object: + for input_file in atm_pg_obj.__nml_def_groups: + + #Extract the group and variable sets from input PG object: + input_groups = atm_pg_obj.__nml_def_groups[input_file] + input_vars = atm_pg_obj.__nml_def_vars[input_file] + + #Check that there are no matching namelist groups: + #------------------------------------------------ + for nml_file, nml_groups in self.__nml_def_groups.items(): + + #Determine if any namelist groups are the same + #between the two objects: + same_groups = nml_groups.intersection(input_groups) + + #If so, then raise an error (as all namelist groups must be unique): + if same_groups: + emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" + emsg += " the following conflicting namelist groups:\n" + emsg += ", ".join(same_groups) + raise AtmInParamGenError(emsg) + #End if + #End for + + #------------------------------------------------ + + #Check that there are no matching namelist variables: + #------------------------------------------------ + for nml_file, nml_vars in self.__nml_def_vars.items(): + + #Determine if any namelist groups are the same + #between the two objects: + same_vars = nml_vars.intersection(input_vars) + + #If so, then raise an error (as all namelist variable ids must be unique): + if same_vars: + emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" + emsg += " the following conflicting namelist variables:\n" + emsg += ", ".join(same_vars) + raise AtmInParamGenError(emsg) + #End if + #End for + #------------------------------------------------ + + #End for (input files used to create input atm_pb object) + + #Add input PG object dictionaries to this object's dicts: + self.__nml_def_groups.update(atm_pg_obj.__nml_def_groups) + self.__nml_def_vars.update(atm_pg_obj.__nml_def_vars) + + #Also combine PG object var-group dictionary needed for + #appending "user_nl_cam": + self.__var_group_dict.update(atm_pg_obj.__var_group_dict) - #Make sure there is only one XML file associated with - #input PG object: - if len(atm_pg_obj.nml_def_groups.keys()) > 1: - emsg = "ParamGen object being appended to another must" - emsg += " be associated with only one namelist definition file." - emsg += "\nInstead it is associated with the following files:\n" - emsg += "\n".join(atm_pg_obj.nml_def_groups.keys()) + #Append input PG object to this object: + self.append(atm_pg_obj) + + #### + + def check_user_nl_var(self, var_str): + + """ + Checks whether the variable string + is for a specific set of array + indices: + ---------- + var_str -> variable name string. + + outputs: + ---------- + is_array -> Logical for whether variable + is an array. + var_name -> Name of variable + (with array indices stripped). + arr_indxs -> List of lists, with one list + for each array dimension. Each + dimension list contains all duplicated + indices for that dimension. 
+ data_group -> Namelist group for that particular + variable. + + """ + + #Iinitialize variable name: + var_name = var_str + + #Initialize array index list: + arr_indxs = [] + + #Check for array syntax, i.e. parantheses: + array_syntax_match = _ARR_INDEX_REGEX.search(var_str) + + #Extract variable name: + if array_syntax_match: + var_name = var_str[:array_syntax_match.start(0)] + else: + var_name = var_str + #End if + + #Check that variable actually exists in ParamGen object: + if var_name in self.__var_group_dict: + #Extract namelist group list for variable: + data_group = self.__var_group_dict[var_name] + + else: + #Raise error that namelist variable isn't listed in + #anywhere in a definition file: + emsg = f"Variable '{var_name}' not found in any namelist definition files." + emsg += " Please double-check 'user_nl_cam'." raise AtmInParamGenError(emsg) + #End if - #Extract namelist definition file name: - input_file = next(iter(atm_pg_obj.nml_def_groups)) + #Extract variable type from ParamGen Object: + var_type = self._data[data_group][var_name]["type"] - #Extract the group and variable sets from input PG object: - input_groups = atm_pg_obj.nml_def_groups[input_file] - input_vars = atm_pg_obj.nml_def_vars[input_file] + #Search for array dimension specifications in type: + array_type_dims = _ARRAY_TYPE_REGEX.search(var_type) - #Check that there are no matching namelist groups: - #------------------------------------------------ - for nml_file, nml_groups in self.nml_def_groups.items(): + #Determine if variable is actually an array or not: + if array_type_dims: + is_array = True + else: + is_array = False + #End if - #Determine if any namelist groups are the same - #between the two objects: - same_groups = nml_groups.intersection(input_groups) + #Exit function here if no array indices were used in user_nl_cam file: + if not array_syntax_match: - #If so, then raise an error (as all namelist groups must be unique): - if same_groups: - emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" - emsg += " the following conflicting namelist groups:\n" - emsg += ", ".join(same_groups) - raise AtmInParamGenError(emsg) + #No parantheses used, so no indices need to be checked: + return is_array, var_name, arr_indxs, data_group + #End if + + #If variable is not an array, but array indices are being + #used in user_nl_cam, then throw an error: + if not is_array: + emsg = f"Variable '{var_name}' is not an array, but array" + emsg += " dimensions are being specified in 'user_nl_cam'." 
+ raise AtmInParamGenError(emsg) + #End if + + #Extract array dimension information from variable type + #as listed in the associated namelist definition file: + #---------------------------------------------------- + + #Pull out dimensions string: + array_dim_text = array_type_dims.group(1) + + #Split text by number of commas (which should indicate dimensions): + array_dims_list = array_dim_text.split(",") - #------------------------------------------------ + #Extract total number of dimensions: + num_arr_dims = len(array_dims_list) - #Check that there are no matching namelist variables: - #------------------------------------------------ - for nml_file, nml_vars in self.nml_def_vars.items(): + #Create new list of max dim size: + max_dim_sizes = [] + for dim_size in array_dims_list: + max_dim_sizes.append(user_nl_str_to_int(dim_size, var_name)) + #End for + + #---------------------------------------------------- + + #Now extract all text inside variable quotes: + user_array_text = array_syntax_match.group(1) + + #Split text by number of commas (which should indicate dimensions): + user_dim_text = user_array_text.split(",") + + #Check that the user hasn't listed more dimensions + #than is acutally present in the variable: + if len(user_dim_text) > num_arr_dims: + #Set proper grammar: + if num_arr_dims == 1: + dim_err_str = "dimension." + else: + dim_err_str = "dimensions." + #End if + emsg = f"Variable '{var_name}' has {len(user_dim_text)}" + emsg += " dimensions used in 'user_nl_cam', but is defined" + emsg += f" to only have {num_arr_dims} "+dim_err_str + raise AtmInParamGenError(emsg) + #End if + + #Loop over dimensions: + for dim_idx, array_index_text in enumerate(user_dim_text): + #Create new array list entry: + arr_indxs.append([]) + + #check for colons: + array_idx_bnds = array_index_text.split(":") + + #Determine number of colons by number of list elements: + num_colons = len(array_idx_bnds) - 1 + + if num_colons == 0: + #No colons are present, so the text should only be a number: + index_val = user_nl_str_to_int(array_idx_bnds[0], var_name) + + #Check index value: + check_dim_index(var_name, index_val, max_dim_sizes[dim_idx]) + + #Add number to array index list: + arr_indxs[dim_idx].append(index_val) + + elif num_colons == 1: + #One colon is present, so now check if there are specified index bounds: + if all(array_idx_bnds): + + #Both array bounds are specified: + index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) + index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) + + #Check index values: + check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) + check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) + + #Make sure first value is smaller than the second: + if index_max_val < index_min_val: + emsg = f"Bad indexing, min index value '{index_min_val}'" + emsg += f" greater than max index value '{index_max_val}'" + emsg += f" for variable '{var_name}' in 'user_nl_cam'." 
+ raise AtmInParamGenError(emsg) + #End if + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(index_min_val, index_max_val+1)]) + elif array_idx_bnds[0]: + + #Only minimum array bound specified: + index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) + + #Check index value: + check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(index_min_val, max_dim_sizes[dim_idx]+1)]) + + elif array_idx_bnds[1]: + + #Only maximum array bounds specified: + index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) + + #Check index value: + check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in range(1, index_max_val+1)]) + + else: + + #Only a single colon provided. In this case provide a special index + #that indicates that specific indices can still be provided, but that the + #whole array dimension cannot be written again: + arr_indxs[dim_idx].append(-1) - #Determine if any namelist groups are the same - #between the two objects: - same_vars = nml_vars.intersection(input_vars) + #End if (index bounds) - #If so, then raise an error (as all namelist variable ids must be unique): - if same_vars: - emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" - emsg += " the following conflicting namelist variables:\n" - emsg += ", ".join(same_vars) + elif num_colons == 2: + + #Two colons are present, which means a stride value should be present as + #the last numerical value. If one is not present, then throw an error: + if not array_idx_bnds[2]: + emsg = f"Two colons were provided for variable '{var_name}'" + emsg += " in 'user_nl_cam', but no stride value was provided." + emsg += "\nPlease provide either a stride value, or remove the" + emsg += "extra colon." + raise AtmInParamGenError(emsg) + #End if + + if all(array_idx_bnds): + + #A min/max/stride value has been provided: + index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) + index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) + index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) + + #Check index values: + check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) + check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) + check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) + + #Make sure first value is smaller than the second: + if index_max_val < index_min_val: + emsg = f"Bad indexing, min index value '{index_min_val}'" + emsg += f" greater than max index value '{index_max_val}'" + emsg += f" for variable '{var_name}' in 'user_nl_cam'." 
+ raise AtmInParamGenError(emsg) + #End if + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(index_min_val, index_max_val+1, index_stride)]) + + elif array_idx_bnds[0]: + + #Only minimum array bound specified: + index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) + index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) + + #Check index value: + check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) + check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(index_min_val, + max_dim_sizes[dim_idx]+1, + index_stride)]) + + elif array_idx_bnds[1]: + + #Only maximum array bounds specified: + index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) + index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) + + #Check index value: + check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) + check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(1, index_max_val+1, index_stride)]) + + else: + + #Only a stride provided, so cover the entire array dimension + #using the provided stride: + + #Extract and check stride values: + index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) + check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) + + #Add index range to array index list: + arr_indxs[dim_idx].extend([idx for idx in + range(1, + max_dim_sizes[dim_idx]+1, + index_stride)]) + + #End if (index bounds) + + else: + + #Not sure what to do with three or more colons, so die here: + emsg = f"Variable '{var_name}' has {num_colons} colons (:) " + emsg += "listed in its dimension indexing in 'user_nl_cam'." + emsg += " Only up to two colons are supported." raise AtmInParamGenError(emsg) - #------------------------------------------------ + #End if (number of colons) + #End for (dimensions) - #Add input PG object dictionaries to this object's dicts: - self.nml_def_groups.update(atm_pg_obj.nml_def_groups) - self.nml_def_vars.update(atm_pg_obj.nml_def_vars) + #Return relevant variables: + return is_array, var_name, arr_indxs, data_group - #Append input PG object to this object: - self.append(atm_pg_obj) + #### + + def check_array_indices(self, var_name, arr_index_list): + + """ + Checks whether the list of array indices has already + been set for the given variable, and if so, raises + an error. + ---------- + var_name -> Name of array variable being modified. + + arr_index_list -> A list of lists of array indices + with the first list representing + the dimensions, and the second list + containing the array indices being + set for that dimension. 
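+
+        A minimal sketch of the intended behavior (the variable name
+        and indices below are only illustrative):
+
+            self.check_array_indices("marx_bros", [[1, 2, 3]])
+
+        records indices 1 through 3 for the first dimension of
+        'marx_bros'; a later call that re-uses any of those indices
+        for the same dimension is treated as a duplicate and raises
+        AtmInParamGenError.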
+ + """ + + #Initialize duplicated index flag, + #We won't know the answer one way or the other + #until either the end of the loop below, or + #until certain conditions are met, so for now + #initialize as "None": + is_arr_dupl = None + + #Initialize "possible" array duplication flag: + possible_dupl = False + + #Check if variable name exists in dictionary: + if not (var_name in self.__set_index_vals): + #Create a new entry with an empty list, + #it should then be filled out in the loop below: + self.__set_index_vals[var_name] = [] + + #Also set duplication to "False": + is_arr_dupl = False + #End if + + #Loop over each separate dimension list: + for dim_indx, dim_arr_indxs in enumerate(arr_index_list): + + #Initialize duplicated index list (for last dimension checked): + dup_indx_list = [] + + #Check if dimension index exists for variable dictionary: + if dim_indx == len(self.__set_index_vals[var_name]): + #Create a new set of array indices for the new dimensions: + self.__set_index_vals[var_name].append(set(dim_arr_indxs)) + + #Since a new dimension is being specified, this is not a duplicate: + is_arr_dupl = False + else: + #Loop over all array indices: + for arr_indx in dim_arr_indxs: + #Check if array index has already been explicitly called: + if arr_indx in self.__set_index_vals[var_name][dim_indx]: + #Add array index to list of duplicated values: + dup_indx_list.append(arr_indx) + + #This line is possibly a duplication, + #but will need to finish the loop to be sure: + possible_dupl = True + #End if + + #Add index to "set index" set for variable: + self.__set_index_vals[var_name][dim_indx].add(arr_indx) + #End for (array indices) + + #If there were no duplicates at this dimension, then this entry + #is not a duplicate: + if not possible_dupl: + is_arr_dupl = False + #End if + + #End if (new dimension) + #End for (dimensions) + + #If the duplication flag hasn't been set yet, then set it now: + if is_arr_dupl is None: + is_arr_dupl = possible_dupl + #End if + + #Now raise an error if there is array duplication: + if is_arr_dupl: + if any(dup == -1 for dup in dup_indx_list): + #This is a special case where a non-bounded + #colon (:) was repeated twice, so write + #the error message accordingly: + emsg = f"Variable '{var_name}' has all values" + emsg += " being set multiple times for" + emsg += f" dimension {dim_indx+1}." + else: + emsg = f"Variable '{var_name}' has values" + emsg += " at the following indices being" + emsg += " set multiple times for dimension" + emsg += f" ({dim_indx+1}) :\n" + emsg += ", ".join(str(dup) for dup in dup_indx_list) + #End if + raise AtmInParamGenError(emsg) + #End if #### @@ -375,74 +1035,159 @@ def append_user_nl_file(self, user_nl_file): Reads in user_nl_cam files and converts them to the proper ParamGen syntax. ---------- - user_nl_file -> path (str) to user_nl_cam file + user_nl_file -> Path (str) to user_nl_cam file. 
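+
+        As a rough illustration, entries take the form
+        "variable = value(s)"; the examples below are borrowed from
+        the unit-test sample files and are not defaults:
+
+            turkey_leg = 22.7
+            straw_into_gold = "Rapunzel"   !in-line comments are stripped
+            marx_bros(2:3) = 'Harpo', 'Groucho'
+            lets_ask_computer = 0, 1, 0,
+                                1, 0, 1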
""" + #Create ordered dictionary to store namelist groups, + #variables, and values from user_nl_XXX file: _data = OrderedDict() + + #Initialize flag preventing duplicate namelist entries: + no_duplicates = True + + #Initialize flag to mark whether a variable is an array: + is_array = False + + #Initialize flag to mark whether the line is an array continuation line: + is_continue_line = False + + #Open user_nl_cam file: with open(user_nl_file,'r', encoding='utf-8') as user_file: - within_comment_block = False - for line in user_file: + for line_num, line in enumerate(user_file): if len(line)>1: + #Split line into a list of words/characters: line_s = line.split() #If line is empty then go to next line: if not line_s: continue + #End if - # check if within comment block. - if (not within_comment_block) and line.strip()[0:2] == "/*": - within_comment_block = True + #Check if a comment delimiter is somewhere in the string: + if '!' in line: + #Check if the entire line is a comment: + if line_s[0][0] == "!": + #Check if this comment is the duplicate keyword: + if "allow_duplicate_namliest_entries" in line_s: + #Next check if a user has set variable to True: + for word in line_s: + if word.lower() == "true": + #Allow duplicate namelist entries: + no_duplicates = False + break + #End if + #End for + #End if + #Continue to next line in file: + continue + #End if + #Otherwise simply remove any part of the line that is commented out: + line = remove_user_nl_comment(line) + #End if - if within_comment_block and line.strip()[-2:] == "*/": - within_comment_block = False - continue + #Check ifthe first character on the line is a comma (,): + if line.strip()[0] == ",": + #Is this an array variable: + if is_array: + #Was a continuation line already provided: + if is_continue_line: + #Two commas were used in a row with a newline + #in-between. Technically this is allowed, + #but in practice it is VERY likely a mistake, + #so raise an error here: + emsg = f"Line number {line_num+1} in 'user_nl_cam'" + emsg += " starts with a comma (,) but the" + emsg += " previous line ended with a comma." + emsg += "\nPlease remove one of the commas." + raise AtmInParamGenError(emsg) + #End if - if not within_comment_block and line_s[0][0] != "!": # not a single comment line either + #If not, then set it to be a continuation line: + is_continue_line = True + else: + #The previous variable is not an array, so throw an error: + emsg = f"Line number {line_num+1} in 'user_nl_cam'" + emsg += " starts with a comma (,) but the" + emsg += " associated namelist variable is not an array." + raise AtmInParamGenError(emsg) + #End if + #End if - #Join string elements back together: - line_j = ' '.join(line_s) + #Now parse the line: + if "=" in line and (line.strip()[0] != "=") and not (is_array and is_continue_line): + line_ss = line.split("=") # Split line into before/after equals sign + var_str = (line_ss[0]).strip() # the first element is the variable name - # now parse the line: - if "=" in line_j: - line_ss = line_j.split("=") - var_str = (line_ss[0]).strip() # the first element is the parameter name - val_str = ' '.join(line_ss[1:]) # the rest is tha value string - if '!' 
in val_str: - val_str = val_str.split("!", maxsplit=1)[0] # discard the comment in val str, if one exists + #Check if this variable is an array, and if so, + #then return what the variable name is, what indices (if any) + #are being specified, and what namelist (data) group it belongs to: + is_array, var_name, arr_indxs, data_group = self.check_user_nl_var(var_str) - #Check if variable already exists in group dictionary: - if var_str in self.var_group_dict: - #Extract namelist group list for variable: - data_group = self.var_group_dict[var_str] + #Are there array indices specified: + if arr_indxs: - else: - #Raise error that namelist variable isn't listed in - #anywhere in a definition file: - emsg = "Variable '{}' not found in any namelist definition files." - emsg += " Please double-check '{}'." - raise AtmInParamGenError(emsg.format(var_str, user_nl_file)) - - #Add the namelist group if not already in data dict: - if not data_group in _data: - _data[data_group] = {} - - #Check if variable already exists in data dictionary: - if var_str in _data[data_group]: - emsg = "Namelist variable '{}' set more than once in '{}'" - emsg += "\nPlease set each variable only once." - raise AtmInParamGenError(emsg.format(var_str, user_nl_file)) - - #Enter the parameter in the dictionary: - _data[data_group][var_str] = {'values':val_str} + if no_duplicates: + #Check if any duplicate array indices are present: + self.check_array_indices(var_name, arr_indxs) + #End if + + #ParamGen will think this is a "new" parameter variable, so we need + #to add a type in order for the "write" method to work properly. The + #type can be copied directly from the original variable using "var_name": + var_type = self._data[data_group][var_name]["type"] else: - emsg = "Cannot parse the following line in '{}' :\n'{}'" - raise AtmInParamGenError(emsg.format(user_nl_file, line)) + #Variable doesn't need a type specified: + var_type = None + #End if (array indices) + + #Extract value string: + val_str = ' '.join(line_ss[1:]) # the rest is tha value string + + #Check if value string ends in array continuation: + if is_array: + #Check if the string ends in a comma (,): + if val_str.strip()[-1] == ",": + #If so, then make "array_continue_line" fully true: + is_continue_line = True + #End if + #End if - #Check if there is unclosed block: - if within_comment_block: - raise AtmInParamGenError(f"Un-closed comment block! Please check '{user_nl_file}'") + #Add the namelist group if not already in data dict: + if not data_group in _data: + _data[data_group] = {} + #End if + + #Check if variable already exists in data dictionary: + if var_str in _data[data_group] and no_duplicates: + emsg = "Namelist variable '{}' set more than once in '{}'" + emsg += "\nPlease set each variable only once." 
+ raise AtmInParamGenError(emsg.format(var_str, user_nl_file)) + #End if + + #Enter the parameter in the dictionary: + if var_type: + _data[data_group][var_str] = {'values':val_str, 'type':var_type} + else: + _data[data_group][var_str] = {'values':val_str} + #end if + elif (is_array and is_continue_line): + #This is an array continuation line, so append the line to previous + #variable's value as-is: + _data[data_group][var_str]['values'] += line + + #Check if the line does NOT end in a comma (,): + if not line.strip()[-1] == ",": + #Notify loop to check the next line for a comma: + is_continue_line = False + #End if + else: + emsg = "Cannot parse the following line in '{}' :\n'{}'" + raise AtmInParamGenError(emsg.format(user_nl_file, line)) + #End if ("=" sign check) + #End if (len(line) > 1) + #End for #End with #Check if any user_nl_cam data is present: @@ -469,9 +1214,7 @@ def write(self, output_path): emsg = "ParamGen object for atm_in must be reduced before being " emsg += "written to file. Please check CAM's buildnml script." raise AtmInParamGenError(emsg) - - #Create a regex value to check for an array type: - arr_type_regex = re.compile(r"[(][ ]*([0-9 ,]+)[ ]*[)]") + #End if #Create sets for string evaluation below: num_set = {"integer", "real"} #types that don't need special handling @@ -491,6 +1234,7 @@ def write(self, output_path): #If no value is set then move to the next variable: if val is None: continue + #End if #Extract variable type: if "type" in self._data[nml_group][var]: @@ -498,9 +1242,10 @@ def write(self, output_path): else: emsg = f"Namelist entry '{var}' is missing required 'type' element." raise AtmInParamGenError(emsg) + #End if #Check if an array type: - array_type = arr_type_regex.search(var_type) + array_type = _ARRAY_TYPE_REGEX.search(var_type) if array_type: #Grab all text before array regex match: @@ -540,7 +1285,7 @@ def write(self, output_path): #Remove any extra white space: elem = elem_with_space.strip() - #Remove all quotes in the string, as they + #Remove all quotes in the string, as they are #sometimes added by ParamGen during the "reduce" phase: elem = elem.replace("'", "") elem = elem.replace('"', "") @@ -634,6 +1379,8 @@ def __expand_func(self, varname): else: #Assume the XML attribute/guard is an empty string: val = "" + #End if + #End if #Return value if found: return val diff --git a/cime_config/buildnml b/cime_config/buildnml index 39c799c9..c9fb79bb 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -83,6 +83,37 @@ def nml_attr_set(config): #Return namelist attribute dictionary: return cam_nml_attr_dict +################## + +def nml_file_dict_update(nml_fil_dict, nml_def_fils): + + """ + Updates a dictionary of namelist definition + files using a list of provided file paths. + ---------- + nml_fil_dict -> Dictionary of nml definition files. + It is assumed to be organized as + {file name : file path} + + nml_def_fils -> List of path-like variables associated + with specific namelist definition files. 
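+
+    Returns the updated dictionary.  As a sketch of the intended
+    behavior (paths here are hypothetical): if nml_fil_dict already
+    contains an entry for "namelist_definition_cam.xml" and a file
+    with that same base name appears in nml_def_fils (e.g. from a
+    SourceMods directory), the entry's value is replaced with the
+    newly found path.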
+ + """ + + # Loop over all found files: + for nml_def_fil in nml_def_fils: + fil_name = os.path.basename(nml_def_fil) + if fil_name in nml_fil_dict: + fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) + # Print message to logger: + _LOGGER.info(" ...found namelist definition file %s in %s", + fil_base, fil_dir) + # Replace file path with SourceMods path + nml_fil_dict[fil_base] = nml_def_fil + + # Return updated dictionary: + return nml_fil_dict + ################# #PRIMARY FUNCTION ################# @@ -141,7 +172,7 @@ def buildnml(case, caseroot, compname): # Extract dictionary of namelist definition files #------------------------------------------------ - xml_nml_fils = config.xml_nml_def_files + xml_nml_dict = config.xml_nml_def_files #-------------------------------------------------- # Check for namelist definition files in SourceMods @@ -156,28 +187,15 @@ def buildnml(case, caseroot, compname): nml_def_fils = glob.glob(os.path.join(user_xml_dir, "namelist_definition_*.xml")) - # Loop over all found files: - for nml_def_fil in nml_def_fils: - if os.path.basename(nml_def_fil) in xml_nml_fils: - # Print message to logger: - _LOGGER.info(" ...found namelist definition file %s in %s", - os.path.basename(nml_def_fil), user_xml_dir) - # Replace file path with SourceMods path - xml_nml_fils[os.path.basename(nml_def_fil)] = nml_def_fil - - # Now search for CCPP namelist definition files: ccpp_nml_fils = glob.glob(os.path.join(user_xml_dir, "*_namelist.xml")) - # Loop over all found files: - for ccpp_nml_fil in ccpp_nml_fils: - if os.path.basename(ccpp_nml_fil) in ccpp_nml_fils: - # Print message to logger: - _LOGGER.info(" ...found namelist definition file %s in %s", - os.path.basename(ccpp_nml_fil), user_xml_dir) - # Replace file path with SourceMods path: - xml_nml_fils[os.path.basename(ccpp_nml_fil)] = ccpp_nml_fil + # Combine file lists: + nml_def_fils.append(ccpp_nml_fils) + + # Update namelist definition file dict with new files: + xml_nml_dict = nml_file_dict_update(xml_nml_dict, nml_def_fils) #-------------------------- # Construct ParamGen objects: @@ -185,7 +203,7 @@ def buildnml(case, caseroot, compname): pg_atm_list = [] - for xml_fil in xml_nml_fils.values(): + for xml_fil in xml_nml_dict.values(): pg_obj = AtmInParamGen.from_namelist_xml(xml_fil) pg_atm_list.append(pg_obj) @@ -293,9 +311,6 @@ def buildnml(case, caseroot, compname): create_namelist_infile(case, user_nl_file, namelist_infile, "\n".join(infile_lines)) - # Convert namelist input file to list: - #namelist_infile_list = [namelist_infile] - #------------------------------------------- # Add user_nl_cam entries to ParamGen object: #------------------------------------------- diff --git a/cime_config/user_nl_cam b/cime_config/user_nl_cam index 22cfcb92..232bc57b 100644 --- a/cime_config/user_nl_cam +++ b/cime_config/user_nl_cam @@ -1,3 +1,8 @@ -! Users should add all user specific namelist changes below in the form of -! namelist_var = new_namelist_value - +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value +! +! Also, the variable below (which should remain commented) +! is a special variable that allows duplicate namelist +! entries in this file, if set to "True". Otherwise +! any duplicate namelist entries will result in an error. +! 
allow_duplicate_namliest_entries = False diff --git a/test/unit/sample_files/atm_in_files/test_multi_xml_in b/test/unit/sample_files/atm_in_files/test_multi_xml_in index 1b5cd59b..ae2f8d04 100644 --- a/test/unit/sample_files/atm_in_files/test_multi_xml_in +++ b/test/unit/sample_files/atm_in_files/test_multi_xml_in @@ -33,3 +33,7 @@ warp_drive = 0 / +&space_stations + space_station = "skylab" +/ + diff --git a/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in b/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in new file mode 100644 index 00000000..b8b889ea --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in @@ -0,0 +1,40 @@ +&bird_sounds_nl + duck_quack = .true. + turkey_leg = 27.2 +/ + +&fairy_tales + straw_into_gold = "!Rapunzel!" +/ + +&famous_folk + marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" +/ + +&scary_stories + monkey_paw = 0 +/ + +&sci_fi_sounds_nl + hal_hard_drive = "/NOT/HERE/I/CANT/LET/YOU/DO/THAT/DAVE" + laser_beam = .false. + lets_ask_computer = 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, + 0, 1, 0, 1, 0, 0, 1, 0, 1, 0 + lets_ask_computer(15:20) = 3 + lets_ask_computer(16) = 4 + lets_ask_computer(1:) = 1 + lets_ask_computer(30:36:2) = 5 + lets_ask_computer(:50) = 2 + warp_drive = 0 +/ + diff --git a/test/unit/sample_files/atm_in_files/test_third_nml_def.xml b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml new file mode 100644 index 00000000..010219b0 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml @@ -0,0 +1,23 @@ + + + + + + + + char*128 + sci_fi + space_stations + + Which station is this? + + + skylab + iss + deathstar + ds9 + bablyon5 + + + + diff --git a/test/unit/sample_files/atm_in_files/test_user_in b/test/unit/sample_files/atm_in_files/test_user_in index 61b2ae54..d05306a3 100644 --- a/test/unit/sample_files/atm_in_files/test_user_in +++ b/test/unit/sample_files/atm_in_files/test_user_in @@ -8,7 +8,7 @@ / &famous_folk - marx_bros = "Chico", "Harpo", "Groucho", "Zeppo" + marx_bros = "mario", "luigi", "wario", "karl" / &scary_stories diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var b/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var new file mode 100644 index 00000000..ea54a426 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var @@ -0,0 +1,22 @@ +! Users should add all user specific namelist changes below in the form of +! namelist_var = new_namelist_value +! allow_duplicate_namliest_entries = True + +turkey_leg = 22.7 + +turkey_leg = 27.2 + +straw_into_gold="!Rapunzel!" !Let down your hair! 
+ +!Array indexing checks: + +lets_ask_computer(1:) = 1 + +lets_ask_computer(:50) = 2 + +lets_ask_computer(15:20) = 3 + +lets_ask_computer(16) = 4 + +lets_ask_computer(30:36:2) = 5 + diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_bad_comment b/test/unit/sample_files/atm_in_files/test_user_nl_bad_equals similarity index 55% rename from test/unit/sample_files/atm_in_files/test_user_nl_bad_comment rename to test/unit/sample_files/atm_in_files/test_user_nl_bad_equals index 5c3b252a..1b32d440 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_bad_comment +++ b/test/unit/sample_files/atm_in_files/test_user_nl_bad_equals @@ -1,10 +1,6 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -/* -Also add a multi-line comment block here -to make sure it works properly - turkey_leg = 22.7 -straw_into_gold = "Rapunzel" +=straw_into_gold = "Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_double_var b/test/unit/sample_files/atm_in_files/test_user_nl_dupl_var similarity index 58% rename from test/unit/sample_files/atm_in_files/test_user_nl_double_var rename to test/unit/sample_files/atm_in_files/test_user_nl_dupl_var index cd1dd895..6a07b6de 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_double_var +++ b/test/unit/sample_files/atm_in_files/test_user_nl_dupl_var @@ -1,13 +1,9 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -/* -Also add a multi-line comment block here -to make sure it works properly -*/ - turkey_leg = 22.7 turkey_leg = 27.2 -straw_into_gold="Rapunzel" +straw_into_gold="!Rapunzel!" !Let down your hair! + diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_bad_format b/test/unit/sample_files/atm_in_files/test_user_nl_no_equals similarity index 66% rename from test/unit/sample_files/atm_in_files/test_user_nl_bad_format rename to test/unit/sample_files/atm_in_files/test_user_nl_no_equals index f73a2ab4..589890b5 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_bad_format +++ b/test/unit/sample_files/atm_in_files/test_user_nl_no_equals @@ -1,11 +1,6 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -/* -Also add a multi-line comment block here -to make sure it works properly -*/ - turkey_leg 22.7 straw_into_gold = "Rapunzel" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_simple b/test/unit/sample_files/atm_in_files/test_user_nl_simple index 805b9fe3..97f72dd7 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_simple +++ b/test/unit/sample_files/atm_in_files/test_user_nl_simple @@ -1,14 +1,13 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -/* -Also add a multi-line comment block here -to make sure it works properly -*/ - !Really make sure comments are properly handled: !turkey_leg = 22.7 turkey_leg = 22.7 - straw_into_gold="Rapunzel" + straw_into_gold="Rapunzel" !Does this also work? + +marx_bros = "mario", "luigi", + "wario" + , "karl" diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var b/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var index 1006e561..9445c094 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var +++ b/test/unit/sample_files/atm_in_files/test_user_nl_undefined_var @@ -1,11 +1,6 @@ ! 
Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -/* -Also add a multi-line comment block here -to make sure it works properly -*/ - banana_peel = "slippery" turkey_leg = 22.7 diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index d119ed73..a59dffdb 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -341,15 +341,20 @@ def test_mutli_xml_namelist_defs(self): # Get XML file paths: xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + third_xml_fil = os.path.join(_SAMPLES_DIR, "test_third_nml_def.xml") # Get expected atm_in file: atm_in_output = os.path.join(_SAMPLES_DIR, "test_multi_xml_in") # Create the ParamGen objects: - pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + pg_third = AtmInParamGen.from_namelist_xml(third_xml_fil) - # Append the extra PG object to the other: + # Append two ParamGen objects together: + pg_ext.append(pg_third) + + # Append the final PG objects together: pg_test.append_atm_in_pg(pg_ext) # Set all ParamGen namelist values: @@ -370,51 +375,6 @@ def test_mutli_xml_namelist_defs(self): self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ msg=amsg) - #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that attempting to append a ParamGen object - #that itself was the combination of multiple XML - #namelist defition files fails with the appropriate - #error - #++++++++++++++++++++++++++++++++++++++++++++++++ - - def test_mutli_xml_append_multi(self): - - """ - Check that appending a ParamGen - object that is itself a - combination of multiple namelist - definition file-derived ParamGen - objects throws an error and that - the error message is correct. - """ - - # Get XML file paths: - xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") - extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") - third_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_same_group.xml") - - # Get expected atm_in file: - atm_in_output = os.path.join(_SAMPLES_DIR, "test_multi_xml_in") - - # Create the ParamGen objects: - pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) - pg_third = AtmInParamGen.from_namelist_xml(third_xml_fil) - - # Append the extra PG object to the other: - pg_test.append_atm_in_pg(pg_ext) - - # Try to append the combined PG object to the third object: - with self.assertRaises(AtmInParamGenError) as cerr: - pg_third.append_atm_in_pg(pg_test) - - # Check exception message: - emsg = "ParamGen object being appended to another must" - emsg += " be associated with only one namelist definition file." 
- emsg += "\nInstead it is associated with the following files:\n" - emsg += f"{xml_test_fil}\n{extra_xml_fil}" - self.assertEqual(emsg, str(cerr.exception)) - #++++++++++++++++++++++++++++++++++++++++++++++++ #Check that trying to combine multiple XML namelist #defition files with the same namelist group @@ -530,36 +490,6 @@ def test_namelist_mode_from_user_nl_cam(self): self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ msg=amsg) - #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with an un-closed - #block comment fails with the appropriate error - #+++++++++++++++++++++++++++++++++++++++++++++++ - - def test_namelist_user_nl_unclosed_comment(self): - - """ - Check that a user_nl_cam file with an - un-closed block comment throws an error - and that the error message is correct. - """ - - # Get XML file path: - xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") - - # Get "user_nl_cam" file path: - user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_bad_comment") - - # Create the ParamGen object: - pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - - # Attempt to append user_nl_cam file: - with self.assertRaises(AtmInParamGenError) as cerr: - pg_test.append_user_nl_file(user_nl_fil) - - # Check exception message: - emsg = f"Un-closed comment block! Please check '{user_nl_fil}'" - self.assertEqual(emsg, str(cerr.exception)) - #++++++++++++++++++++++++++++++++++++++++++++++++ #Check that a user_nl_cam file with an improperly #formatted namelist entry fails with the @@ -571,25 +501,38 @@ def test_namelist_user_nl_bad_format_entry(self): """ Check that a user_nl_cam file with a namelist entry that is missing an equals - sign throws an error and that the error + sign or has an equals sign at the beginning of + the line throws an error and that the error message is correct. """ # Get XML file path: xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") - # Get "user_nl_cam" file path: - user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_bad_format") + # Get "user_nl_cam" file paths: + user_nl_no_equals = os.path.join(_SAMPLES_DIR, "test_user_nl_no_equals") + user_nl_bad_equals = os.path.join(_SAMPLES_DIR, "test_user_nl_bad_equals") # Create the ParamGen object: pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - # Attempt to append user_nl_cam file: + # Attempt to append user_nl_cam file with line that + # contains no equals ('=') sign: with self.assertRaises(AtmInParamGenError) as cerr: - pg_test.append_user_nl_file(user_nl_fil) + pg_test.append_user_nl_file(user_nl_no_equals) # Check exception message: - emsg = f"Cannot parse the following line in '{user_nl_fil}' :\n'turkey_leg 22.7\n'" + emsg = f"Cannot parse the following line in '{user_nl_no_equals}' :\n'turkey_leg 22.7\n'" + self.assertEqual(emsg, str(cerr.exception)) + + # Now attempt to append user_nl_cam file with with line + # that contains an equal('=') sign at the beginning of the line: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file(user_nl_bad_equals) + + # Check exception message: + emsg = f"Cannot parse the following line in '{user_nl_bad_equals}'" + emsg += """ :\n'=straw_into_gold = "Rapunzel"\n'""" self.assertEqual(emsg, str(cerr.exception)) #++++++++++++++++++++++++++++++++++++++++++++++++ @@ -623,10 +566,10 @@ def test_namelist_user_nl_undefined_entry(self): # Check exception message: emsg = "Variable 'banana_peel' not found in any namelist definition files." 
- emsg += f" Please double-check '{user_nl_fil}'." + emsg += " Please double-check 'user_nl_cam'." self.assertEqual(emsg, str(cerr.exception)) - #++++++++++++++++++++++++++++++++++++++++++++++++ + #+++++++++++++++++++++++++++++++++++++++++++++++ #Check that a user_nl_cam file with a repeated #namelist entry fails with the appropriate #error message. @@ -638,17 +581,34 @@ def test_namelist_user_nl_double_entry(self): Check that a user_nl_cam file with a namelist entry that has is included twice in the file throws an error and that the - error message is correct. + error message is correct. Also check that + the "allow_dupl" flag works as expected + and that no error is thrown, and the + correct output is written. """ - # Get XML file path: + # Create fake CIME case: + fcase = FakeCase() + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_nl_duplicate_atm_in") + + # Get XML file paths: xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") # Get "user_nl_cam" file path: - user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_double_var") + user_nl_fil = os.path.join(_SAMPLES_DIR, "test_user_nl_dupl_var") - # Create the ParamGen object: + # Get "user_nl_cam" file that allow duplicates: + user_nl_fil_allow_dupl = os.path.join(_SAMPLES_DIR, "test_user_nl_allow_dupl_var") + + # Create the ParamGen objects: pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append ParamGenObjects together: + pg_test.append_atm_in_pg(pg_ext) # Attempt to append user_nl_cam file: with self.assertRaises(AtmInParamGenError) as cerr: @@ -659,6 +619,486 @@ def test_namelist_user_nl_double_entry(self): emsg += "\nPlease set each variable only once." self.assertEqual(emsg, str(cerr.exception)) + # Now attempt to append user_nl_cam file, but + # with duplicates allowed: + pg_test.append_user_nl_file(user_nl_fil_allow_dupl) + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, {}) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_nl_duplicate_atm_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a line starting with a comma + #for a non-array namelist entry fails correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_non_array_comma(self): + + """ + Check that a user_nl_cam file with + a namelist entry that is not an array, + followed by a comma-leading section of + text, fails with the appropriate error. 
+ """ + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("duck_quack = 1 \n") + nl_file.write(", .false.") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Line number 2 in 'user_nl_cam'" + emsg += " starts with a comma (,) but the" + emsg += " associated namelist variable is not an array." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with an array + #variable with an ending and starting comma + #fails correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_double_comma(self): + + """ + Check that a user_nl_cam file with a + namelist entry that is an array, but + that has both a trailing and leading + comma separated only with a newline + and blank spaces fails with the + appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros = 'mario', 'luigi',\n") + nl_file.write(", 'wario', 'karl'") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Line number 2 in 'user_nl_cam'" + emsg += " starts with a comma (,) but the" + emsg += " previous line ended with a comma." + emsg += "\nPlease remove one of the commas." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains + #a non-array namelist variable with array + #dimensions specified fails correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_non_array_dims(self): + + """ + Check that a user_nl_cam file with + a namelist variable that is not + an array, but is given specific array + dimensions, fails with the appropriate + error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("duck_quack(5:6) = .true.") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'duck_quack' is not an array, but array" + emsg += " dimensions are being specified in 'user_nl_cam'." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains + #a array namelist variable with too many array + #dimensions specified fails correctly. 
+ #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_too_many_array_dims(self): + + """ + Check that a user_nl_cam file with + a namelist variable that is an array, + but that is listed with too many dimension, + fails with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(5,8) = .true.") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'marx_bros' has 2 dimensions" + emsg += " used in 'user_nl_cam', but is defined" + emsg += f" to only have 1 dimension." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains + #a array namelist variable with specified array + #indices in the wrong (max:min) order fails + #correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_bad_index_order(self): + + """ + Check that a user_nl_cam file with a + namelist variable that is an arry + with specified indexes that are out of + order (max:min instead of min:max) fails + with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(4:1) = 'Gummo'") + # End with + + # Attempt to append user_nl_cam file with line that + # contains no equals ('=') sign: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = f"Bad indexing, min index value '4'" + emsg += f" greater than max index value '1'" + emsg += f" for variable 'marx_bros' in 'user_nl_cam'." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #Try again with a stride index: + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(4:1:2) = 'Gummo'") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = f"Bad indexing, min index value '4'" + emsg += f" greater than max index value '1'" + emsg += f" for variable 'marx_bros' in 'user_nl_cam'." + self.assertEqual(emsg, str(cerr.exception)) + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains an + #array variable with two colons for a specific + #array dimension, but no stride value, fails + #with the appropriate error. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_no_stride_val(self): + + """ + Check that a user_nl_cam file with + a namelist variable that is an array, + and that has two colons for a given + array dimension, but no specified + stride value, fails with the appropriate + error. 
+ """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(2::) = 'Gummo'") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = f"Two colons were provided for variable 'marx_bros'" + emsg += " in 'user_nl_cam', but no stride value was provided." + emsg += "\nPlease provide either a stride value, or remove the" + emsg += "extra colon." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains an + #array variable with three colons for a specific + #array dimension fails with the appropriate error. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_three_colons(self): + + """ + Check that a user_nl_cam file + with a namelist variable that is + an array, and that has three colons + for a given array dimension, fails + with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(1:2:3:4) = 'Gummo'") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = f"Variable 'marx_bros' has 3 colons (:) " + emsg += "listed in its dimension indexing in 'user_nl_cam'." + emsg += " Only up to two colons are supported." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains an + #array variable with a colon dimension, and then + #that same array variable is listed again with + #a colon, fails with the appropriate error. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_colon_dupl(self): + + """ + Check that a user_nl_cam file + with a namelist variable that is + an array with a colon dimension, + and that is listed twice in the file, + fails with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(:) = 'Gummo'\n") + nl_file.write("marx_bros(:) = 'Groucho'") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'marx_bros' has all values" + emsg += " being set multiple times for" + emsg += " dimension 1." 
+ self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains an + #array variable with the same exact array index + #referenced twice fails with the appropriate error + #+++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_arr_index_dupl(self): + + """ + Check that a user_nl_cam file with + a namelist variable that has an + array index that is specified + twice (i.e. not with just a colon) + fails with the appropriate error. + """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the PG objects together: + pg_test.append_atm_in_pg(pg_ext) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write("marx_bros(:) = 'Gummo'\n") + nl_file.write("marx_bros(4) = 'Groucho'\n\n") + nl_file.write("lets_ask_computer(:) = 2\n") + nl_file.write("lets_ask_computer(7) = 3\n") + nl_file.write("lets_ask_computer(7) = 4 !Should fail here") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'lets_ask_computer' has values" + emsg += " at the following indices being" + emsg += " set multiple times for dimension" + emsg += " (1) :\n" + emsg += "7" + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains an + #array variable with a specified array index range + #referenced twice fails with the appropriate error + #+++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_arr_index_range_dupl(self): + + """ + check that a user_nl_cam file with a + namelist variable that has an array + index range (e.g. 
min:max) that covers + a previous index range for that same + variable fails with the appropriate error + """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the PG objects together: + pg_test.append_atm_in_pg(pg_ext) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w") as nl_file: + nl_file.write('marx_bros = "Gummo"\n') + nl_file.write('marx_bros(2:) = "Groucho"\n') + nl_file.write('marx_bros(1) = "Karl"\n') + nl_file.write('lets_ask_computer = 2\n') + nl_file.write('lets_ask_computer(5:20) = 3\n') + nl_file.write('lets_ask_computer(4) = 4\n') + nl_file.write("lets_ask_computer(:11) = 5 !Should fail here") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'lets_ask_computer' has values" + emsg += " at the following indices being" + emsg += " set multiple times for dimension" + emsg += " (1) :\n" + emsg += "4, 5, 6, 7, 8, 9, 10, 11" + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + ################################################# #Run unit tests if this script is called directly ################################################# From 27bf2af34a240bcf47e998dd268ed50c71bc4851 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 15 Apr 2022 14:54:49 -0600 Subject: [PATCH 22/33] Add remaining code review requests. --- cime_config/atm_in_paramgen.py | 177 +++++++----- cime_config/cam_autogen.py | 76 +---- cime_config/cam_config.py | 265 +++++++++++------- cime_config/namelist_definition_cam.xml | 23 -- src/data/namelist_definition_air_comp.xml | 47 ++-- .../atm_in_files/test_user_nl_simple | 7 +- 6 files changed, 308 insertions(+), 287 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index c0ec2a11..491d0c19 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -63,6 +63,9 @@ def _is_nml_logical_true(varname, var_val): varname -> The name of the variable being checked var_val -> The value of the variable being checked + Returns a boolean that matches the value of the + input logical. + doctests: 1. Check that a True value returns true: @@ -178,6 +181,8 @@ def remove_user_nl_comment(user_string): ---------- user_string -> String that will be searched and processed for comments + Returns the input string, but with any commented text removed. + doctests: 1. Check that a string with no comment delimiters returns full string: @@ -393,6 +398,92 @@ def check_dim_index(var_name, index_val, dim_size): raise AtmInParamGenError(emsg) #End if +##### + +def _get_nml_value_str(var_name, var_type, var_val): + + """ + Converts namelist variable inputs into their + correct Fortran namelist value format + ---------- + var_name -> Variable name (used for error message) + var_type -> Variable type (logial, integer, float, character) + var_val -> Variable value to convert + + returns the fortran namelist-formatted variable + value. + + doctests: + + 1. Check that a true logical variable outputs the correct value: + >>> _get_nml_value_str("banana", "logical", "true") + '.true.' + + 2. 
Check that a false logical variable outputs the correct value: + >>> _get_nml_value_str("banana", "logical", "0") + '.false.' + + 3. Check that an integer variable outputs the correct value: + >>> _get_nml_value_str("banana", "integer", 5) + '5' + + 4. Check that a float variable outputs the correct value: + >>> _get_nml_value_str("banana", "real", "5d5") + '5d5' + + 5. Check that a character variable with no quotes outputs + the correct value: + >>> _get_nml_value_str("banana", "char*10", "apple") + '"apple"' + + 6. Check that a character variable with quotes outputs + the correct value: + >>> _get_nml_value_str("banana", "char*250", " 'apple' ") + '"apple"' + + 7. Check that a character variable with double quotes + outputs the correct value: + >>> _get_nml_value_str("banana", "char*N", ' "apple" ') + '"apple"' + + 8. Check that a variable with an unknown type returns + the proper error: + >>> _get_nml_value_str("banana", "apple", "true") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: Namelist type 'apple' for entry 'banana' is un-recognized. + Acceptable namelist types are: logical, integer, real, or char*N. + """ + + #Create set for variable types + #that don't need special handling: + num_set = {"integer", "real"} + + #Check variable type: + if var_type == 'logical': + #If logical, then write the associated truth value: + if _is_nml_logical_true(var_name, var_val): + return ".true." + else: + return ".false." + #End if + elif var_type in num_set: + #If a number, then write value as-is: + return f"{var_val}" + elif "char*" in var_type: + #Remove all quotes in the string, as they + #sometimes added by ParamGen during the "reduce" phase: + var_val = var_val.replace("'", "") + var_val = var_val.replace('"', "") + #Return with double quotes: + return f'"{var_val.strip()}"' + else: + #This is an un-recognized type option, so raise an error: + emsg = f"Namelist type '{var_type}' for entry '{var_name}' is un-recognized.\n" + emsg += "Acceptable namelist types are: logical, integer, real, or char*N." 
+ raise AtmInParamGenError(emsg) + #End if + ################################################################ # MAIN "atm_in" ParamGen class ################################################################ @@ -1259,48 +1350,15 @@ def write(self, output_path): #Write beginning of namelist entry: nml_str = f" {var} = " - #Check if variable type is a logical: - if var_type == 'logical': - #loop over array elements: - for elem in array_elems: - if _is_nml_logical_true(var, elem): - elem_str = ".true., " - else: - elem_str = ".false., " - #End if - #Write to namelist string: - nml_str += elem_str - #End for - #Check if it is a number: - elif var_type in num_set: - #loop over array elements: - for elem in array_elems: - #Write to namelist string: - nml_str += f"{elem}, " - #End for - #check if it is a character: - elif "char*" in var_type: - #loop over array elements: - for elem_with_space in array_elems: - #Remove any extra white space: - elem = elem_with_space.strip() - - #Remove all quotes in the string, as they are - #sometimes added by ParamGen during the "reduce" phase: - elem = elem.replace("'", "") - elem = elem.replace('"', "") - - #Add surrounding quotes: - elem_str = f'"{elem}", ' - #Write to namelist entry string: - nml_str += elem_str - #End for - else: - #This is an un-recognized type option, so raise an error: - emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" - emsg += "Acceptable namelist types are: logical, integer, real, or char*N." - raise AtmInParamGenError(emsg) - #End if + #loop over array elements: + for elem in array_elems: + + #Get properly-formatted variable value: + nml_val = _get_nml_value_str(var, var_type, elem) + + #Add value string (with comma) to namelist string: + nml_str += (nml_val + ", ") + #End for #There will always be a trailing comma and space (, ) so find it: last_comma_idx = nml_str.rfind(", ") @@ -1309,34 +1367,19 @@ def write(self, output_path): atm_in_fil.write(nml_str[:last_comma_idx]+"\n") else: #Not an array - #Check if variable type is a logical: - if var_type == 'logical': - #If logical, then write the associated truth value: - if _is_nml_logical_true(var, val): - atm_in_fil.write(f" {var} = .true.\n") - else: - atm_in_fil.write(f" {var} = .false.\n") - #End if - elif var_type in num_set: - #If a number, then write value as-is: - atm_in_fil.write(f" {var} = {val}\n") - elif "char*" in var_type: - #Remove all quotes in the string, as they - #sometimes added by ParamGen during the "reduce" phase: - val = val.replace("'", "") - val = val.replace('"', "") - #Add entry to atm_in file: - atm_in_fil.write(f' {var} = "{val}"\n') - else: - #This is an un-recognized type option, so raise an error: - emsg = f"Namelist type '{var_type}' for entry '{var}' is un-recognized.\n" - emsg += "Acceptable namelist types are: logical, integer, real, or char*N." 
- raise AtmInParamGenError(emsg) - #End if - #End if (array type) + #Get properly-formatted variable value: + nml_val = _get_nml_value_str(var, var_type, val) + + #Write variable to namelist file: + atm_in_fil.write(f' {var} = {nml_val}\n') + + #End if (array type) + #End for (namelist variables) # Add space for next namelist group: atm_in_fil.write('/\n\n') + #End for (namelist groups) + #End with (open atm_in file) #### diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index a64b3259..7d7ecbaf 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -318,54 +318,6 @@ def _find_metadata_files(source_dirs, scheme_finder): # Return meta_files dictionary: return meta_files -############################################################################### -def _find_CCPP_SDF_paths(source_dirs): -############################################################################### - """ - Find all CCPP Suite Definiition File (SDF) paths given a list - of directories, assuming every SDF has the form "suite_*.xml". - - Currently this function assumes that suite files are only in the - provided directories, and will do no sub-directory searching. It - also assumes that if an SDF has already found then all other - SDFs with the same name can be ignored. This means that user-modifiable - directories (e.g. "SourceMods/src.cam") should always be listed first - in the provided "source_dirs" list. - """ - - # Create empty dictionary to store SDF filenames and paths - SDF_files = {} - - for direc in source_dirs: - - # Search for SDFs in directory: - SDF_list = glob.glob(os.path.join(direc, "suite_*.xml")) - - # Move on to next directory if empty: - if not SDF_list: - continue - - # Loop over SDF paths: - for SDF in SDF_list: - - # Check if filename is already in dictionary: - if not os.path.basename(SDF) in SDF_files: - # If not, then add to dictionary: - SDF_files[os.path.basename(SDF)] = SDF - # End if - # End for - # End for - - # Raise error if no SDF files are found: - if not SDF_files: - emsg = "ERROR: No CCPP Suite Definition Files (SDF)s of the " - emsg = "type 'suite_*.xml' were found in the provided directories:" - emsg += "\n".join(source_dirs) - raise CamAutoGenError(emsg) - - # Return SDF file paths: - return SDF_files.values() - ############################################################################### def _update_genccpp_dir(utility_files, genccpp_dir): ############################################################################### @@ -500,7 +452,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, 2. Check that the correct error is raised when a scheme's metadata file cannot be found: - >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "bad_suite", \ + >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "bad", \ TEST_ATM_ROOT, TEST_BLDROOT, TEST_REG_DIR, \ TEST_REGFILES, TEST_SOURCE_MODS_DIR, \ False) #doctest: +ELLIPSIS @@ -510,7 +462,7 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, 3. 
Check that generate_physics_suites works properly when good inputs are provided: - >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "simple_suite", \ + >>> generate_physics_suites(TestBuildCache, "UNSET", "cam", "simple", \ TEST_ATM_ROOT, TEST_BLDROOT, TEST_REG_DIR, \ TEST_REGFILES, TEST_SOURCE_MODS_DIR, \ False) #doctest: +ELLIPSIS @@ -525,37 +477,15 @@ def generate_physics_suites(build_cache, preproc_defs, host_name, # Collect all source directories source_search = [source_mods_dir, os.path.join(atm_root, "src", "physics", "ncar_ccpp")] - # Finda all possible CCPP suite names and SDF paths: - SDF_paths = _find_CCPP_SDF_paths(source_search) # Find all metadata files, organize by scheme name all_scheme_files = _find_metadata_files(source_search, find_scheme_names) - # Create dictionary of SDF suite names->paths - suite_name_path_dict = {} - for SDF_path in SDF_paths: - # Open SDF file - _, suite = read_xml_file(SDF_path) - - # Extract suite name - suite_name = suite.get('name') - - # Check if suite is already present in dictioanry - if suite_name in suite_name_path_dict: - #If so, then raise an error, because we are unsure which SDF to use - emsg = "Two SDFs have the same suite name: '{}'. The two SDFs are:\n" - emsg += "{}\n{}" - raise CamAutoGenError(emsg.format(suite_name, SDF_path, - suite_name_path_dict[suite_name])) - - # Add suite to dictionary - suite_name_path_dict[suite_name] = SDF_path - # Find the SDFs specified for this model build sdfs = [] scheme_files = [] xml_files = {} # key is scheme, value is xml file path for sdf in phys_suites_str.split(';'): - sdf_path = suite_name_path_dict.get(sdf) + sdf_path = _find_file(f"suite_{sdf}.xml", source_search) if not sdf_path: emsg = "ERROR: Unable to find SDF for suite '{}'" raise CamAutoGenError(emsg.format(sdf)) diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 966cdf3e..0fddb2ba 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -31,6 +31,100 @@ from cam_autogen import generate_registry, generate_physics_suites from cam_autogen import generate_init_routines +############################################################################### +#HELPER FUNCTIONS +############################################################################### + +def get_atm_hgrid(atm_grid_str): + + """ + Processes the provided atmospheric grid string + to determine what dynamical core and horizontal + grid regex are being used for this model run. + + Inputs: + atm_grid_str -> The "ATM_GRID" string provided by CIME + + Outputs: + dycore -> A string which specifies the dycore being used. + hgrid_re -> A regular expression that matches the provided grid string. + + Doctests: + + 1. Check that a FV grid returns the correct results: + >>> get_atm_hgrid("1.9x2.5") + ('fv', re.compile('[0-9][0-9.]*x[0-9][0-9.]*')) + + 2. Check that an SE grid returns the correct results: + >>> get_atm_hgrid("ne5np4.pg2") + ('se', re.compile('ne[0-9]+np[1-8](.*)(pg[1-9])?')) + + 3. Check that an FV3 grid returns the correct results: + >>> get_atm_hgrid("C96") + ('fv3', re.compile('C[0-9]+')) + + 4. Check that an MPAS grid returns the correct results: + >>> get_atm_hgrid("mpasa480") + ('mpas', re.compile('mpasa[0-9]+')) + + 5. Check that a null dycore returns the correct results: + >>> get_atm_hgrid("null") + ('none', None) + + 6. Check that a horizontal grid with with no matches fails + with the correct error message: + >>> get_atm_hgrid("1.9xC96") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... 
+ cam_config_classes.CamConfigValError: ERROR: The specified CAM horizontal grid, '1.9xC96', does not match any known format. + """ + + # Create regex expressions to search for the different dynamics grids + eul_grid_re = re.compile(r"T[0-9]+") # Eulerian dycore + fv_grid_re = re.compile(r"[0-9][0-9.]*x[0-9][0-9.]*") # FV dycore + se_grid_re = re.compile(r"ne[0-9]+np[1-8](.*)(pg[1-9])?") # SE dycore + fv3_grid_re = re.compile(r"C[0-9]+") # FV3 dycore + mpas_grid_re = re.compile(r"mpasa[0-9]+") # MPAS dycore (not totally sure about this pattern) + + # Check if specified grid matches any of the pre-defined grid options. + # If so, then add both the horizontal grid and dynamical core + # to the configure object + if fv_grid_re.match(atm_grid_str) is not None: + + #Finite Volume (FV) dycore: + return "fv", fv_grid_re + + elif se_grid_re.match(atm_grid_str) is not None: + + #Spectral Element (SE) dycore: + return "se", se_grid_re + + elif fv3_grid_re.match(atm_grid_str) is not None: + + #Finite Volume Cubed-Sphere (FV3) dycore: + return "fv3", fv3_grid_re + + elif mpas_grid_re.match(atm_grid_str) is not None: + + #Model for Prediction Across Scales (MPAS) dycore: + return "mpas", mpas_grid_re + + elif eul_grid_re.match(atm_grid_str) is not None: + + #Eulerian Spectral (eul) dycore: + return "eul", eul_grid_re + + elif atm_grid_str == "null": + + #Null dycore: + return "none", None + + else: + emsg = "ERROR: The specified CAM horizontal grid, '{}', " + emsg += "does not match any known format." + raise CamConfigValError(emsg.format(atm_grid_str)) + #End if + ############################################################################### # MAIN CAM CONFIGURE OBJECT ############################################################################### @@ -166,8 +260,7 @@ def __init__(self, case, case_log): self.__xml_nml_def_files = OrderedDict() #Add the default host model namelist: - self.__xml_nml_def_files['namelist_definition_cam.xml'] = \ - os.path.join(cime_conf_path, 'namelist_definition_cam.xml') + self._add_xml_nml_file(cime_conf_path, 'namelist_definition_cam.xml') #---------------------------------------------------- # Set CAM start date (needed for namelist generation) @@ -227,28 +320,16 @@ def __init__(self, case, case_log): "These directories are assumed to be located under", "src/dynamics, with a slash ('/') indicating directory hierarchy."] - # Create regex expressions to search for the different dynamics grids - eul_grid_re = re.compile(r"T[0-9]+") # Eulerian dycore - fv_grid_re = re.compile(r"[0-9][0-9.]*x[0-9][0-9.]*") # FV dycore - se_grid_re = re.compile(r"ne[0-9]+np[1-8](.*)(pg[1-9])?") # SE dycore - fv3_grid_re = re.compile(r"C[0-9]+") # FV3 dycore - mpas_grid_re = re.compile(r"mpasa[0-9]+") # MPAS dycore (not totally sure about this pattern) - - # Check if specified grid matches any of the pre-defined grid options. 
- # If so, then add both the horizontal grid and dynamical core - # to the configure object - if fv_grid_re.match(atm_grid) is not None: - # Dynamical core - self.create_config("dyn", dyn_desc, "fv", - dyn_valid_vals, is_nml_attr=True) - # Horizontal grid - self.create_config("hgrid", hgrid_desc, atm_grid, - fv_grid_re, is_nml_attr=True) - elif se_grid_re.match(atm_grid) is not None: - # Dynamical core - self.create_config("dyn", dyn_desc, "se", - dyn_valid_vals, is_nml_attr=True) + #Determine dynmaical core and grid-matching regex to use for validation: + dycore, grid_regex = get_atm_hgrid(atm_grid) + + #Add dynamical core to config object: + self.create_config("dyn", dyn_desc, dycore, + dyn_valid_vals, is_nml_attr=True) + + #Add horizontal grid to config object: + if dycore == "se": #Determine location of period (".") in atm_grid string: dot_idx = atm_grid.find(".") @@ -256,26 +337,34 @@ def __init__(self, case, case_log): # Horizontal grid if dot_idx == -1: self.create_config("hgrid", hgrid_desc, atm_grid, - se_grid_re, is_nml_attr=True) + grid_regex, is_nml_attr=True) else: self.create_config("hgrid", hgrid_desc, atm_grid[:dot_idx], - se_grid_re, is_nml_attr=True) + grid_regex, is_nml_attr=True) #End if + else: + + #Add horizontal grid as-is: + self.create_config("hgrid", hgrid_desc, atm_grid, + grid_regex, is_nml_attr=True) + #End if + + #Add dycore-specific settings: + #------------ + if dycore == "se": # Source code directories self.create_config("dyn_src_dirs", dyn_dirs_desc, ["se",os.path.join("se","dycore")], valid_list_type="str") # Set paths for the SE dycore and "air composition" # namelist definition files: - se_dyn_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", - "dynamics", "se", "namelist_definition_se_dycore.xml") - air_comp_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", - "data", "namelist_definition_air_comp.xml") + se_dyn_nml_path = os.path.join(cime_conf_path, os.pardir, "src", "dynamics", "se") + air_comp_nml_path = os.path.join(cime_conf_path, os.pardir, "src", "data") #Add NML definition files to dictionary: - self.__xml_nml_def_files['namelist_definition_se_dycore.xml'] = se_dyn_nml_fil - self.__xml_nml_def_files['namelist_definition_air_comp.xml'] = air_comp_nml_fil + self._add_xml_nml_file(se_dyn_nml_path, "namelist_definition_se_dycore.xml") + self._add_xml_nml_file(air_comp_nml_path, "namelist_definition_air_comp.xml") # Add required CPP definitons: self.add_cppdef("_MPI") @@ -284,33 +373,8 @@ def __init__(self, case, case_log): # Add OpenMP CPP definitions, if needed: if nthrds > 1: self.add_cppdef("_OPENMP") - - elif fv3_grid_re.match(atm_grid) is not None: - # Dynamical core - self.create_config("dyn", dyn_desc, "fv3", - dyn_valid_vals, is_nml_attr=True) - # Horizontal grid - self.create_config("hgrid", hgrid_desc, atm_grid, - fv3_grid_re, is_nml_attr=True) - - elif mpas_grid_re.match(atm_grid) is not None: - # Dynamical core - self.create_config("dyn", dyn_desc, "mpas", - dyn_valid_vals, is_nml_attr=True) - # Horizontal grid - self.create_config("hgrid", hgrid_desc, atm_grid, - mpas_grid_re, is_nml_attr=True) - - elif eul_grid_re.match(atm_grid) is not None: - # Dynamical core - self.create_config("dyn", dyn_desc, "eul", - dyn_valid_vals, is_nml_attr=True) - # Horizontal grid - self.create_config("hgrid", hgrid_desc, atm_grid, - eul_grid_re, is_nml_attr=True) - - # If using the Eulerian dycore, then add wavenumber variables - + #End if + elif dycore == "eul": # Wavenumber variable descriptions trm_desc = "Maximum Fourier wavenumber." 
trn_desc = "Highest degree of the Legendre polynomials for m=0." @@ -320,34 +384,19 @@ def __init__(self, case, case_log): self.create_config("trm", trm_desc, 1, (1, None)) self.create_config("trn", trn_desc, 1, (1, None)) self.create_config("trk", trk_desc, 1, (1, None)) - - elif atm_grid == "null": - # Dynamical core - self.create_config("dyn", dyn_desc, "none", - dyn_valid_vals, is_nml_attr=True) - # Atmospheric grid - self.create_config("hgrid", hgrid_desc, atm_grid, - None, is_nml_attr=True) - + elif dycore == "none": # Source code directories self.create_config("dyn_src_dirs", dyn_dirs_desc, ["none"], valid_list_type="str") - - else: - emsg = "ERROR: The specified CAM horizontal grid, '{}', " - emsg += "does not match any known format." - raise CamConfigValError(emsg.format(atm_grid)) #End if - - # Extract dynamics option - dyn = self.get_value("dyn") + #------------ # If user-specified dynamics option is present, # check that it matches the grid-derived value - if user_dyn_opt is not None and user_dyn_opt != dyn: + if user_dyn_opt is not None and user_dyn_opt != dycore: emsg = "ERROR: User-specified dynamics option, '{}', " emsg += "does not match dycore expected from case grid: '{}'" - raise CamConfigValError(emsg.format(user_dyn_opt, dyn)) + raise CamConfigValError(emsg.format(user_dyn_opt, dycore)) # End if #--------------------------------------- @@ -355,7 +404,7 @@ def __init__(self, case, case_log): #--------------------------------------- #Set horizontal dimension variables: - if dyn == "se": + if dycore == "se": # Determine location of "np" in atm_grid string: np_idx = atm_grid.find("np") @@ -405,6 +454,7 @@ def __init__(self, case, case_log): nlon_desc = ["Number of unique longitude points in rectangular lat/lon grid.", "Total number of columns for unstructured grids."] self.create_config("nlon", nlon_desc, case_nx) + #End if #--------------------------------------- # Set initial and/or boundary conditions @@ -416,18 +466,18 @@ def __init__(self, case, case_log): analy_ic_val = 1 #Use Analytic ICs #Add analytic IC namelist definition file to dictionary: - analy_ic_nml_fil = os.path.join(cime_conf_path, os.pardir, "src", - "dynamics", "tests", - "namelist_definition_analy_ic.xml") + analy_ic_nml_path = os.path.join(cime_conf_path, os.pardir, "src", + "dynamics", "tests") #Add NML definition files to dictionary: - self.__xml_nml_def_files['namelist_definition_analy_ic.xml'] = analy_ic_nml_fil + self._add_xml_nml_file(analy_ic_nml_path, "namelist_definition_analy_ic.xml") #Add new CPP definition: self.add_cppdef("ANALYTIC_IC") else: analy_ic_val = 0 #Don't use Analytic ICs + #End if analy_ic_desc = ["Switch to turn on analytic initial conditions for the dynamics state: ", "0 => no ", @@ -860,7 +910,7 @@ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict): """ #Extract physics suites list: - phys_suites = self.get_value('physics_suites').split(';') + phys_suites = [x.strip() for x in self.get_value('physics_suites').split(';')] #Determine current value of "physics_suite" namelist variable: phys_nl_val = phys_nl_pg_dict['physics_suite']['values'].strip() @@ -871,48 +921,65 @@ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict): if phys_nl_val != 'UNSET': #If so, then check that user-provided suite matches #suite in physics_suites config list: - if phys_nl_val == phys_suites[0].strip(): + if phys_nl_val == phys_suites[0]: #If so, then set attribute to phys_suites value: - cam_nml_attr_dict["phys_suite"] = phys_suites[0].strip() + 
phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0] + cam_nml_attr_dict["phys_suite"] = phys_suites[0] else: #If not, then throw an error: emsg = "physics_suite specified in user_nl_cam, '{}', does not\n" emsg += "match the suite listed in CAM_CONFIG_OPTS: '{}'" raise CamConfigValError(emsg.format(phys_nl_val, phys_suites[0])) + #End if else: #If not, then just set the attribute and nl value to phys_suites value: - phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0].strip() - cam_nml_attr_dict["phys_suite"] = phys_suites[0].strip() + phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0] + cam_nml_attr_dict["phys_suite"] = phys_suites[0] + #End if else: #Check if "physics_suite" has been set by the user: if phys_nl_val != 'UNSET': #If so, then check if user-provided value is present in the #physics_suites config list: - match_found = False - for phys_suite in phys_suites: - if phys_nl_val == phys_suite.strip(): - #If a match is found, then set attribute and leave loop: - cam_nml_attr_dict["phys_suite"] = phys_suite.strip() - match_found = True - break - - #Check that a match was found, if not, then throw an error: - if not match_found: + if phys_nl_val in phys_suites: + phys_nl_pg_dict['physics_suite']['values'] = phys_nl_val + cam_nml_attr_dict["phys_suite"] = phys_nl_val + else: emsg = "physics_suite specified in user_nl_cam, '{}', doesn't match any suites\n" emsg += "listed in CAM_CONFIG_OPTS: '{}'" raise CamConfigValError(emsg.format(phys_nl_val, self.get_value('physics_suites'))) + #End if else: #If not, then throw an error, because one needs to be specified: emsg = "No 'physics_suite' variable is present in user_nl_cam.\n" emsg += "This is required because more than one suite is listed\n" - emsg += "in CAM_CONFIG_OPTS: '{}'" - raise CamConfigValError(emsg.format(self.get_value('physics_suites'))) + emsg += f"in CAM_CONFIG_OPTS: '{self.get_value('physics_suites')}'" + raise CamConfigValError(emsg) + #End if + #End if + + #++++++++++++++++++++++++ + + def _add_xml_nml_file(self, path, filename): + + """ + Utility function to add XML namelist + definition file path to file list. + + Inputs: + + path -> Path to XML namelist file + filename -> XML namelist definition filename + + """ + #Combine file name with path and add to list: + self.__xml_nml_def_files[filename] = os.path.join(path, filename) ############################################################################### #IGNORE EVERYTHING BELOW HERE UNLESS RUNNING TESTS ON CAM_CONFIG! diff --git a/cime_config/namelist_definition_cam.xml b/cime_config/namelist_definition_cam.xml index 2c08d265..c39861c8 100644 --- a/cime_config/namelist_definition_cam.xml +++ b/cime_config/namelist_definition_cam.xml @@ -403,29 +403,6 @@ - - - - char*8 - diagnostics - qneg_nl - summary,timestep,off - - Control the writing of qneg3 and qneg4 warning messages. - 'summary' causes a summary of QNEG3 and QNEG4 errors to be - printed at the end of the run - 'timestep' causes a summary of QNEG3 and QNEG4 errors to be printed at the - end of each timestep. The total is reset at the end of each timestep. - 'off' causes the qneg3 and qneg4 warnings to be supressed. - Note that these settings do not affect the availability of qneg - history variables. 
- Default: summary - - - summary - - - diff --git a/src/data/namelist_definition_air_comp.xml b/src/data/namelist_definition_air_comp.xml index 312d09dd..542c6556 100644 --- a/src/data/namelist_definition_air_comp.xml +++ b/src/data/namelist_definition_air_comp.xml @@ -41,44 +41,45 @@ mass and thermodynamic properties. Default if CAM4, CAM5, or Kessler physics is used: - ['specific_humidity', - 'cloud_liquid_water_mixing_ratio_wrt_dry_air', - 'rain_mixing_ratio_wrt_dry_air'] + ['water_vapor_specific_humidity', + 'cloud_liquid_water_mixing_ratio_of_moist_air', + 'rain_water_mixing_ratio'] Default if CAM6 physics is used: - ['specific_humidity', - 'cloud_liquid_water_mixing_ratio_wrt_dry_air', - 'cloud_ice_mixing_ratio_wrt_dry_air', - 'rain_mixing_ratio_wrt_dry_air', - 'snow_mixing_ratio_wrt_dry_air'] + ['water_vapor_specific_humidity', + 'cloud_liquid_water_mixing_ratio_of_moist_air', + 'cloud_ice_water_mixing_ratio_of_moist_air', + 'rain_water_mixing_ratio', + 'snow_water_mixing_ratio'] - Otherwise default is: ['specific_humidity'] + Otherwise default is: ['water_vapor_specific_humidity'] - specific_humidity + water_vapor_specific_humidity - specific_humidity, - cloud_liquid_water_mixing_ratio_wrt_dry_air, - rain_mixing_ratio_wrt_dry_air + water_vapor_specific_humidity, + cloud_liquid_water_mixing_ratio_of_moist_air, + rain_water_mixing_ratio - specific_humidity, - cloud_liquid_water_mixing_ratio_wrt_dry_air, - rain_mixing_ratio_wrt_dry_air + water_vapor_specific_humidity, + cloud_liquid_water_mixing_ratio_of_moist_air, + rain_water_mixing_ratio - specific_humidity, - cloud_liquid_water_mixing_ratio_wrt_dry_air, - rain_mixing_ratio_wrt_dry_air + water_vapor_specific_humidity, + cloud_liquid_water_mixing_ratio_of_moist_air, + rain_water_mixing_ratio - specific_humidity, - cloud_liquid_water_mixing_ratio_wrt_dry_air, - cloud_ice_mixing_ratio_wrt_dry_air, rain_mixing_ratio_wrt_dry_air, - rain_mixing_ratio_wrt_dry_air + water_vapor_specific_humidity, + cloud_liquid_water_mixing_ratio_of_moist_air, + cloud_ice_water_mixing_ratio_of_moist_air, + rain_water_mixing_ratio, + _water_mixing_ratio diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_simple b/test/unit/sample_files/atm_in_files/test_user_nl_simple index 97f72dd7..f8a1ec1c 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_simple +++ b/test/unit/sample_files/atm_in_files/test_user_nl_simple @@ -1,11 +1,14 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -!Really make sure comments are properly handled: -!turkey_leg = 22.7 +!Make sure comments are properly handled: +!turkey_leg = 1.23456789 turkey_leg = 22.7 +!Now REALLY make sure comments are properly handled: +!turkey_leg = 1.23456789 !more comments! + straw_into_gold="Rapunzel" !Does this also work? marx_bros = "mario", "luigi", From b7f8c15cf2c61964640cecc18e5a7e7653c2075c Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 15 Apr 2022 15:54:00 -0600 Subject: [PATCH 23/33] Fix pylint errors. 
--- cime_config/atm_in_paramgen.py | 81 ++++++++++----------- cime_config/buildnml | 12 ++-- cime_config/cam_config.py | 31 ++++---- test/unit/test_atm_in_paramgen.py | 115 +++++++++++++++--------------- 4 files changed, 123 insertions(+), 116 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 491d0c19..d7350cac 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -254,10 +254,10 @@ def remove_user_nl_comment(user_string): #Search for all comment delimiters (currently just "!"): for char_idx, char in enumerate(user_string): - if char == "!": - #Add character index to set: - comment_delim_indices.add(char_idx) - #End if + if char == "!": + #Add character index to set: + comment_delim_indices.add(char_idx) + #End if #End for #If no comments are present, then return string as-is: @@ -335,10 +335,10 @@ def user_nl_str_to_int(string, var_name): #Attempt the conversion of the string to an integer: try: integer_val = int(string) - except ValueError: + except ValueError as verr: emsg = f"\nInvalid array index entry '{string}' " emsg += f"used for variable '{var_name}' in 'user_nl_cam'." - raise AtmInParamGenError(emsg) + raise AtmInParamGenError(emsg) from verr #End except #Return relevant integer value: @@ -386,7 +386,7 @@ def check_dim_index(var_name, index_val, dim_size): if index_val <= 0: emsg = f"\nVariable '{var_name}' has index {index_val}" emsg += " in 'user_nl_cam', which is less than one (1)," - emsg +=f" the minimal index value allowed." + emsg += " the minimal index value allowed." raise AtmInParamGenError(emsg) #End if @@ -461,29 +461,33 @@ def _get_nml_value_str(var_name, var_type, var_val): #Check variable type: if var_type == 'logical': - #If logical, then write the associated truth value: + #If logical, then see if it is "True": if _is_nml_logical_true(var_name, var_val): return ".true." - else: - return ".false." #End if - elif var_type in num_set: + #If not true, then must be false: + return ".false." + #End if + + if var_type in num_set: #If a number, then write value as-is: return f"{var_val}" - elif "char*" in var_type: + #End if + + if "char*" in var_type: #Remove all quotes in the string, as they #sometimes added by ParamGen during the "reduce" phase: var_val = var_val.replace("'", "") var_val = var_val.replace('"', "") #Return with double quotes: return f'"{var_val.strip()}"' - else: - #This is an un-recognized type option, so raise an error: - emsg = f"Namelist type '{var_type}' for entry '{var_name}' is un-recognized.\n" - emsg += "Acceptable namelist types are: logical, integer, real, or char*N." - raise AtmInParamGenError(emsg) #End if + #If one makes it here, then this is an un-recognized type option, so raise an error: + emsg = f"Namelist type '{var_type}' for entry '{var_name}' is un-recognized.\n" + emsg += "Acceptable namelist types are: logical, integer, real, or char*N." 
+ raise AtmInParamGenError(emsg) + ################################################################ # MAIN "atm_in" ParamGen class ################################################################ @@ -781,11 +785,7 @@ def check_user_nl_var(self, var_str): array_type_dims = _ARRAY_TYPE_REGEX.search(var_type) #Determine if variable is actually an array or not: - if array_type_dims: - is_array = True - else: - is_array = False - #End if + is_array = bool(array_type_dims) #Exit function here if no array indices were used in user_nl_cam file: if not array_syntax_match: @@ -886,8 +886,7 @@ def check_user_nl_var(self, var_str): #End if #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(index_min_val, index_max_val+1)]) + arr_indxs[dim_idx].extend(list(range(index_min_val, index_max_val+1))) elif array_idx_bnds[0]: #Only minimum array bound specified: @@ -897,8 +896,7 @@ def check_user_nl_var(self, var_str): check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(index_min_val, max_dim_sizes[dim_idx]+1)]) + arr_indxs[dim_idx].extend(list(range(index_min_val, max_dim_sizes[dim_idx]+1))) elif array_idx_bnds[1]: @@ -909,7 +907,7 @@ def check_user_nl_var(self, var_str): check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in range(1, index_max_val+1)]) + arr_indxs[dim_idx].extend(list(range(1, index_max_val+1))) else: @@ -953,8 +951,9 @@ def check_user_nl_var(self, var_str): #End if #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(index_min_val, index_max_val+1, index_stride)]) + arr_indxs[dim_idx].extend(list(range(index_min_val, + index_max_val+1, + index_stride))) elif array_idx_bnds[0]: @@ -967,10 +966,9 @@ def check_user_nl_var(self, var_str): check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(index_min_val, - max_dim_sizes[dim_idx]+1, - index_stride)]) + arr_indxs[dim_idx].extend(list(range(index_min_val, + max_dim_sizes[dim_idx]+1, + index_stride))) elif array_idx_bnds[1]: @@ -983,8 +981,7 @@ def check_user_nl_var(self, var_str): check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(1, index_max_val+1, index_stride)]) + arr_indxs[dim_idx].extend(list(range(1, index_max_val+1, index_stride))) else: @@ -996,10 +993,9 @@ def check_user_nl_var(self, var_str): check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) #Add index range to array index list: - arr_indxs[dim_idx].extend([idx for idx in - range(1, + arr_indxs[dim_idx].extend(list(range(1, max_dim_sizes[dim_idx]+1, - index_stride)]) + index_stride))) #End if (index bounds) @@ -1047,7 +1043,7 @@ def check_array_indices(self, var_name, arr_index_list): possible_dupl = False #Check if variable name exists in dictionary: - if not (var_name in self.__set_index_vals): + if not var_name in self.__set_index_vals: #Create a new entry with an empty list, #it should then be filled out in the loop below: self.__set_index_vals[var_name] = [] @@ -1056,6 +1052,10 @@ def check_array_indices(self, var_name, arr_index_list): is_arr_dupl = False #End if + #Initialize dimension index for use in error-handling at + #end of function: + dim_indx = 0 + #Loop over each separate dimension list: 
for dim_indx, dim_arr_indxs in enumerate(arr_index_list): @@ -1307,9 +1307,6 @@ def write(self, output_path): raise AtmInParamGenError(emsg) #End if - #Create sets for string evaluation below: - num_set = {"integer", "real"} #types that don't need special handling - # Write Fortran namelist file: with open(os.path.join(output_path), 'w', encoding='utf-8') as atm_in_fil: #Loop through namelist groups in alphabetical order: diff --git a/cime_config/buildnml b/cime_config/buildnml index c9fb79bb..c8fb499c 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -102,14 +102,16 @@ def nml_file_dict_update(nml_fil_dict, nml_def_fils): # Loop over all found files: for nml_def_fil in nml_def_fils: - fil_name = os.path.basename(nml_def_fil) - if fil_name in nml_fil_dict: - fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) - # Print message to logger: + fil_name = os.path.basename(nml_def_fil) + if fil_name in nml_fil_dict: + fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) + # Print message to logger: _LOGGER.info(" ...found namelist definition file %s in %s", - fil_base, fil_dir) + fil_base, fil_dir) # Replace file path with SourceMods path nml_fil_dict[fil_base] = nml_def_fil + #End if + #End fof # Return updated dictionary: return nml_fil_dict diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index 0fddb2ba..e87cc38b 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -87,44 +87,51 @@ def get_atm_hgrid(atm_grid_str): mpas_grid_re = re.compile(r"mpasa[0-9]+") # MPAS dycore (not totally sure about this pattern) # Check if specified grid matches any of the pre-defined grid options. - # If so, then add both the horizontal grid and dynamical core - # to the configure object + # If so, then add both the horizontal grid regex and dynamical core + # to the configure object: + if fv_grid_re.match(atm_grid_str) is not None: #Finite Volume (FV) dycore: return "fv", fv_grid_re + #End if - elif se_grid_re.match(atm_grid_str) is not None: + if se_grid_re.match(atm_grid_str) is not None: #Spectral Element (SE) dycore: return "se", se_grid_re + #End if - elif fv3_grid_re.match(atm_grid_str) is not None: + if fv3_grid_re.match(atm_grid_str) is not None: #Finite Volume Cubed-Sphere (FV3) dycore: return "fv3", fv3_grid_re + #End if - elif mpas_grid_re.match(atm_grid_str) is not None: + if mpas_grid_re.match(atm_grid_str) is not None: #Model for Prediction Across Scales (MPAS) dycore: return "mpas", mpas_grid_re + #End if - elif eul_grid_re.match(atm_grid_str) is not None: + if eul_grid_re.match(atm_grid_str) is not None: #Eulerian Spectral (eul) dycore: return "eul", eul_grid_re + #End if - elif atm_grid_str == "null": + if atm_grid_str == "null": #Null dycore: return "none", None - - else: - emsg = "ERROR: The specified CAM horizontal grid, '{}', " - emsg += "does not match any known format." - raise CamConfigValError(emsg.format(atm_grid_str)) #End if + #If one has made it here, then the horizontal grid is not recognized, + #so throw an error: + emsg = "ERROR: The specified CAM horizontal grid, '{}', " + emsg += "does not match any known format." 
+ raise CamConfigValError(emsg.format(atm_grid_str)) + ############################################################################### # MAIN CAM CONFIGURE OBJECT ############################################################################### diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index a59dffdb..9bc9cf25 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -110,9 +110,10 @@ def get_value(self, key): """ if key in self.conf_opts: - val = self.conf_opts[key] + val = self.conf_opts[key] else: - val = None + val = None + #End if return val @@ -661,7 +662,7 @@ def test_user_nl_non_array_comma(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("duck_quack = 1 \n") nl_file.write(", .false.") # End with @@ -704,7 +705,7 @@ def test_user_nl_double_comma(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros = 'mario', 'luigi',\n") nl_file.write(", 'wario', 'karl'") # End with @@ -747,7 +748,7 @@ def test_user_nl_non_array_dims(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("duck_quack(5:6) = .true.") # End with @@ -786,7 +787,7 @@ def test_user_nl_too_many_array_dims(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(5,8) = .true.") # End with @@ -828,7 +829,7 @@ def test_user_nl_bad_index_order(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(4:1) = 'Gummo'") # End with @@ -850,7 +851,7 @@ def test_user_nl_bad_index_order(self): #Try again with a stride index: # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(4:1:2) = 'Gummo'") # End with @@ -890,7 +891,7 @@ def test_user_nl_no_stride_val(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(2::) = 'Gummo'") # End with @@ -932,7 +933,7 @@ def test_user_nl_three_colons(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(1:2:3:4) = 'Gummo'") # End with @@ -974,7 +975,7 @@ def test_user_nl_colon_dupl(self): pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(:) = 'Gummo'\n") nl_file.write("marx_bros(:) = 'Groucho'") # End with @@ -1021,7 +1022,7 @@ def 
test_user_nl_arr_index_dupl(self): pg_test.append_atm_in_pg(pg_ext) # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: nl_file.write("marx_bros(:) = 'Gummo'\n") nl_file.write("marx_bros(4) = 'Groucho'\n\n") nl_file.write("lets_ask_computer(:) = 2\n") @@ -1053,51 +1054,51 @@ def test_user_nl_arr_index_dupl(self): def test_user_nl_arr_index_range_dupl(self): - """ - check that a user_nl_cam file with a - namelist variable that has an array - index range (e.g. min:max) that covers - a previous index range for that same - variable fails with the appropriate error - """ - - # Get XML file paths: - xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") - extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") - - # Create the ParamGen objects: - pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) - - # Append the PG objects together: - pg_test.append_atm_in_pg(pg_ext) - - # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w") as nl_file: - nl_file.write('marx_bros = "Gummo"\n') - nl_file.write('marx_bros(2:) = "Groucho"\n') - nl_file.write('marx_bros(1) = "Karl"\n') - nl_file.write('lets_ask_computer = 2\n') - nl_file.write('lets_ask_computer(5:20) = 3\n') - nl_file.write('lets_ask_computer(4) = 4\n') - nl_file.write("lets_ask_computer(:11) = 5 !Should fail here") - # End with - - # Attempt to append user_nl_cam file: - with self.assertRaises(AtmInParamGenError) as cerr: - pg_test.append_user_nl_file("user_nl_tmp") - # End with - - # Check exception message: - emsg = "Variable 'lets_ask_computer' has values" - emsg += " at the following indices being" - emsg += " set multiple times for dimension" - emsg += " (1) :\n" - emsg += "4, 5, 6, 7, 8, 9, 10, 11" - self.assertEqual(emsg, str(cerr.exception)) - - #Remove temporary user_nl_cam file - os.remove("user_nl_tmp") + """ + check that a user_nl_cam file with a + namelist variable that has an array + index range (e.g. 
min:max) that covers + a previous index range for that same + variable fails with the appropriate error + """ + + # Get XML file paths: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + extra_xml_fil = os.path.join(_SAMPLES_DIR, "test_extra_nml_def.xml") + + # Create the ParamGen objects: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + pg_ext = AtmInParamGen.from_namelist_xml(extra_xml_fil) + + # Append the PG objects together: + pg_test.append_atm_in_pg(pg_ext) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: + nl_file.write('marx_bros = "Gummo"\n') + nl_file.write('marx_bros(2:) = "Groucho"\n') + nl_file.write('marx_bros(1) = "Karl"\n') + nl_file.write('lets_ask_computer = 2\n') + nl_file.write('lets_ask_computer(5:20) = 3\n') + nl_file.write('lets_ask_computer(4) = 4\n') + nl_file.write("lets_ask_computer(:11) = 5 !Should fail here") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'lets_ask_computer' has values" + emsg += " at the following indices being" + emsg += " set multiple times for dimension" + emsg += " (1) :\n" + emsg += "4, 5, 6, 7, 8, 9, 10, 11" + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") ################################################# #Run unit tests if this script is called directly From 27c9b8788eab75276b9eeb20d84cd855735d91c8 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 15 Apr 2022 16:00:42 -0600 Subject: [PATCH 24/33] Fix additional pylint errors. --- cime_config/buildnml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cime_config/buildnml b/cime_config/buildnml index c8fb499c..9f521873 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -107,9 +107,9 @@ def nml_file_dict_update(nml_fil_dict, nml_def_fils): fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) # Print message to logger: _LOGGER.info(" ...found namelist definition file %s in %s", - fil_base, fil_dir) + fil_name, fil_dir) # Replace file path with SourceMods path - nml_fil_dict[fil_base] = nml_def_fil + nml_fil_dict[fil_name] = nml_def_fil #End if #End fof From ea79c2a2a3498ee98dcda1456a2258e7f45f5030 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 19 Apr 2022 09:15:20 -0600 Subject: [PATCH 25/33] Fix bugs found during integration testing. 
--- cime_config/buildnml | 3 ++- cime_config/config_component.xml | 4 ++-- src/data/namelist_definition_air_comp.xml | 4 ++-- src/dynamics/se/dp_coupling.F90 | 8 +++---- src/dynamics/se/test_fvm_mapping.F90 | 24 +++++++++---------- .../tests/namelist_definition_analy_ic.xml | 4 ++-- 6 files changed, 24 insertions(+), 23 deletions(-) diff --git a/cime_config/buildnml b/cime_config/buildnml index 9f521873..d68d5a69 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -102,6 +102,7 @@ def nml_file_dict_update(nml_fil_dict, nml_def_fils): # Loop over all found files: for nml_def_fil in nml_def_fils: + print(nml_def_fil) fil_name = os.path.basename(nml_def_fil) if fil_name in nml_fil_dict: fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) @@ -194,7 +195,7 @@ def buildnml(case, caseroot, compname): "*_namelist.xml")) # Combine file lists: - nml_def_fils.append(ccpp_nml_fils) + nml_def_fils.extend(ccpp_nml_fils) # Update namelist definition file dict with new files: xml_nml_dict = nml_file_dict_update(xml_nml_dict, nml_def_fils) diff --git a/cime_config/config_component.xml b/cime_config/config_component.xml index 6f9fd5fd..9a5831f3 100644 --- a/cime_config/config_component.xml +++ b/cime_config/config_component.xml @@ -164,8 +164,8 @@ -phys tj2016 -analytic_ic -phys held_suarez -phys kessler -chem terminator -analytic_ic --> - --physics-suites kessler_cam --analytic_ic - --physics-suites held_suarez --analytic_ic + --physics-suites kessler --analytic_ic + --physics-suites held_suarez_1994 --analytic_ic --dyn none --physics-suites adiabatic diff --git a/src/data/namelist_definition_air_comp.xml b/src/data/namelist_definition_air_comp.xml index 542c6556..9436b486 100644 --- a/src/data/namelist_definition_air_comp.xml +++ b/src/data/namelist_definition_air_comp.xml @@ -59,7 +59,7 @@ water_vapor_specific_humidity - + water_vapor_specific_humidity, cloud_liquid_water_mixing_ratio_of_moist_air, rain_water_mixing_ratio @@ -79,7 +79,7 @@ cloud_liquid_water_mixing_ratio_of_moist_air, cloud_ice_water_mixing_ratio_of_moist_air, rain_water_mixing_ratio, - _water_mixing_ratio + snow_water_mixing_ratio diff --git a/src/dynamics/se/dp_coupling.F90 b/src/dynamics/se/dp_coupling.F90 index 46264f0d..39d30da2 100644 --- a/src/dynamics/se/dp_coupling.F90 +++ b/src/dynamics/se/dp_coupling.F90 @@ -428,9 +428,9 @@ subroutine p_d_coupling(cam_runtime_opts, phys_state, phys_tend, dyn_in, tl_f, t do ilyr = 1, pver dp_phys(blk_ind(1),ilyr,ie) = real(phys_state%pdeldry(icol,ilyr), r8) - T_tmp(blk_ind(1),ilyr,ie) = real(phys_tend%dtdt(icol,ilyr), r8) - uv_tmp(blk_ind(1),1,ilyr,ie) = real(phys_tend%dudt(icol,ilyr), r8) - uv_tmp(blk_ind(1),2,ilyr,ie) = real(phys_tend%dvdt(icol,ilyr), r8) + T_tmp(blk_ind(1),ilyr,ie) = real(phys_tend%dTdt_total(icol,ilyr), r8) + uv_tmp(blk_ind(1),1,ilyr,ie) = real(phys_tend%dudt_total(icol,ilyr), r8) + uv_tmp(blk_ind(1),2,ilyr,ie) = real(phys_tend%dvdt_total(icol,ilyr), r8) do m = 1, pcnst dq_tmp(blk_ind(1),ilyr,m,ie) = & (real(phys_state%q(icol,ilyr,m), r8) - q_prev(icol,ilyr,m)) @@ -821,7 +821,7 @@ subroutine thermodynamic_consistency(phys_state, phys_tend, ncols, pver) ! 
call get_cp(1,ncols,1,pver,1,1,pcnst,phys_state%q(1:ncols,1:pver,:),.true.,inv_cp) - phys_tend%dtdt(1:ncols,1:pver) = phys_tend%dtdt(1:ncols,1:pver)*cpair*inv_cp + phys_tend%dTdt_total(1:ncols,1:pver) = phys_tend%dTdt_total(1:ncols,1:pver)*cpair*inv_cp end if end subroutine thermodynamic_consistency diff --git a/src/dynamics/se/test_fvm_mapping.F90 b/src/dynamics/se/test_fvm_mapping.F90 index dc1f3bf7..c59153a4 100644 --- a/src/dynamics/se/test_fvm_mapping.F90 +++ b/src/dynamics/se/test_fvm_mapping.F90 @@ -156,7 +156,7 @@ subroutine test_mapping_overwrite_tendencies(phys_state,phys_tend,ncols,q_prev,f do ie=1,nelemd !xxx fvm(ie)%c(:,:,:,ntrac) = 0.0_r8 end do - + phys_state%pdel(1:ncols,:) = phys_state%pdeldry(1:ncols,:) !make sure there is no conversion from wet to dry do nq=ntrac,ntrac m_cnst = nq @@ -173,17 +173,17 @@ subroutine test_mapping_overwrite_tendencies(phys_state,phys_tend,ncols,q_prev,f do icol=1,ncols do k=ntrac,ntrac - phys_tend%dudt(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) - phys_tend%dvdt(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) - phys_tend%dtdt(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) + phys_tend%dudt_total(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) + phys_tend%dvdt_total(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) + phys_tend%dTdt_total(icol,k) = test_func(phys_state%lat(icol), phys_state%lon(icol), k, k) end do enddo name = 'p2d_u' - call outfld(trim(name),phys_tend%dudt(:ncols,:),ncols,lchnk) + call outfld(trim(name),phys_tend%dudt_total(:ncols,:),ncols,lchnk) name = 'p2d_v' - call outfld(trim(name),phys_tend%dvdt(:ncols,:),ncols,lchnk) + call outfld(trim(name),phys_tend%dvdt_total(:ncols,:),ncols,lchnk) name = 'p2d_ptend' - call outfld(trim(name),phys_tend%dtdt(:ncols,:),ncols,lchnk) + call outfld(trim(name),phys_tend%dtdt_total(:ncols,:),ncols,lchnk) do icol=1,ncols @@ -326,7 +326,7 @@ subroutine test_mapping_overwrite_dyn_state(elem,fvm) ! end do call outfld(TRIM(name), RESHAPE(fvm(ie)%c(1:nc,1:nc,:,m_cnst),(/nc*nc,nlev/)), nc*nc, ie) end do - + elem(ie)%state%Qdp(:,:,:,:,:) = 0.0_r8 !for testing the p2d map do k=1,num_fnc do j=1,np @@ -360,9 +360,9 @@ subroutine test_mapping_overwrite_dyn_state(elem,fvm) case (swest) fvm(ie)%c(0,0,:,k) = fvm(ie)%c(0,1,:,k) case (seast) - fvm(ie)%c(nc+1,0,:,k) = fvm(ie)%c(0,nc,:,k) + fvm(ie)%c(nc+1,0,:,k) = fvm(ie)%c(0,nc,:,k) case (neast) - fvm(ie)%c(nc+1,nc+1,:,k) = fvm(ie)%c(nc,nc+1,:,k) + fvm(ie)%c(nc+1,nc+1,:,k) = fvm(ie)%c(nc,nc+1,:,k) end select end do end if @@ -524,13 +524,13 @@ function test_func(lat_in, lon_in, k, funcnum) result(fout) fout = 0.5_r8 * ( tanh( 3.0_r8*abs(lat)-pi ) + 1.0_r8) case(4) fout = 2.0_r8+cos(5.0_r8+40*lon)!1.0e-8_r8 - fout = -0.5_r8-0.5_r8*(cos(16*lon)*(sin(2_r8*lat)**16)) + fout = -0.5_r8-0.5_r8*(cos(16*lon)*(sin(2_r8*lat)**16)) case(5) ! ! approximately Y^2_2 spherical harmonic ! fout = sin(lon)*cos(40*lat)!1.0e-8_r8 - fout = 0.5_r8*(cos(16*lon)*(sin(2_r8*lat)**16)) + fout = 0.5_r8*(cos(16*lon)*(sin(2_r8*lat)**16)) case(6) ! ! 
approximately Y32_16 spherical harmonic diff --git a/src/dynamics/tests/namelist_definition_analy_ic.xml b/src/dynamics/tests/namelist_definition_analy_ic.xml index ad6ef6ed..0c17db2f 100644 --- a/src/dynamics/tests/namelist_definition_analy_ic.xml +++ b/src/dynamics/tests/namelist_definition_analy_ic.xml @@ -19,8 +19,8 @@ none held_suarez_1994 - held_suarez_1994 - moist_baroclinic_wave_dcmip2016 + held_suarez_1994 + moist_baroclinic_wave_dcmip2016 moist_baroclinic_wave_dcmip2016 From 4229b2f2d13b0050972b40ee3762ee526fea5004 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 20 May 2022 16:09:58 -0600 Subject: [PATCH 26/33] Add Courtney's code review requests. --- cime_config/atm_in_paramgen.py | 94 ++++++++++++++----- cime_config/buildnml | 1 - cime_config/cam_config_classes.py | 2 +- cime_config/user_nl_cam | 2 +- .../atm_in_files/test_multi_xml_in | 4 + .../atm_in_files/test_third_nml_def.xml | 14 +++ .../atm_in_files/test_user_nl_allow_dupl_var | 2 +- test/unit/test_atm_in_paramgen.py | 85 ++++++++++++++++- 8 files changed, 175 insertions(+), 29 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index d7350cac..944ff495 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -172,7 +172,7 @@ def _is_nml_logical_true(varname, var_val): ##### -def remove_user_nl_comment(user_string): +def remove_user_nl_comment(user_string, comment_delim="!"): """ Searches a one-line input string for a comment delimiter, @@ -180,6 +180,8 @@ def remove_user_nl_comment(user_string): removed. ---------- user_string -> String that will be searched and processed for comments + comment_delim -> Optional variable that sets the character type being used + as a comment delimiter. Defaults to the standard "!" fortran comment. Returns the input string, but with any commented text removed. @@ -247,14 +249,22 @@ def remove_user_nl_comment(user_string): 15. Check that an array of values with a comment returns the proper string: >>> remove_user_nl_comment('13.0d0,! 15.0d0, 1100.35d0') '13.0d0,' + + 16. Check that a line that only contains a comment returns an empty string: + >>> remove_user_nl_comment('! bananas and 13.0d0 5 .true. !@$#%*?') + '' + + 17. Check that a line with an alternative comment delimiter returns the proper string: + >>> remove_user_nl_comment('bananas #and 13.0d0 5 .true. !@$#%*?', comment_delim='#') + 'bananas ' """ #Create empty set for comment-delimiting indices: comment_delim_indices = set() - #Search for all comment delimiters (currently just "!"): + #Search for all comment delimiters: for char_idx, char in enumerate(user_string): - if char == "!": + if char == comment_delim: #Add character index to set: comment_delim_indices.add(char_idx) #End if @@ -322,8 +332,16 @@ def user_nl_str_to_int(string, var_name): >>> user_nl_str_to_int("5", "banana") 5 - 2. Check that a string with a non-integer can be - convergted properly: + 2. Check that a string with a float fails with + the correct error: + >>> user_nl_str_to_int("5.2", "banana") # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError:... + Invalid array index entry '5.2' used for variable 'banana' in 'user_nl_cam'. + + 3. Check that a string with a non-number fails with + the correct error: >>> user_nl_str_to_int("a", "banana") # doctest: +ELLIPSIS Traceback (most recent call last): ... 
@@ -407,7 +425,7 @@ def _get_nml_value_str(var_name, var_type, var_val): correct Fortran namelist value format ---------- var_name -> Variable name (used for error message) - var_type -> Variable type (logial, integer, float, character) + var_type -> Variable type to convert to (logical, integer, real, character) var_val -> Variable value to convert returns the fortran namelist-formatted variable @@ -427,10 +445,15 @@ def _get_nml_value_str(var_name, var_type, var_val): >>> _get_nml_value_str("banana", "integer", 5) '5' - 4. Check that a float variable outputs the correct value: + 4. Check that a real variable outputs the correct value: >>> _get_nml_value_str("banana", "real", "5d5") '5d5' + 5. Check that a real variable with an integer value outputs + the correct value: + >>> _get_nml_value_str("banana", "real", 5) + '5.d0' + 5. Check that a character variable with no quotes outputs the correct value: >>> _get_nml_value_str("banana", "char*10", "apple") @@ -470,12 +493,18 @@ def _get_nml_value_str(var_name, var_type, var_val): #End if if var_type in num_set: - #If a number, then write value as-is: + #Check if the variable value is an integer, but is being + #used for a real-type variaable: + if var_type == "real" and isinstance(var_val, int): + return f"{var_val}.d0" + #End if + + #Otherwise, simply write value as-is: return f"{var_val}" #End if if "char*" in var_type: - #Remove all quotes in the string, as they + #Remove all quotes in the string, as they are #sometimes added by ParamGen during the "reduce" phase: var_val = var_val.replace("'", "") var_val = var_val.replace('"', "") @@ -749,13 +778,13 @@ def check_user_nl_var(self, var_str): """ - #Iinitialize variable name: + #Initialize variable name: var_name = var_str #Initialize array index list: arr_indxs = [] - #Check for array syntax, i.e. parantheses: + #Check for array syntax, i.e. parentheses: array_syntax_match = _ARR_INDEX_REGEX.search(var_str) #Extract variable name: @@ -829,18 +858,25 @@ def check_user_nl_var(self, var_str): #Split text by number of commas (which should indicate dimensions): user_dim_text = user_array_text.split(",") - #Check that the user hasn't listed more dimensions - #than is acutally present in the variable: - if len(user_dim_text) > num_arr_dims: + #Check that the user hasn't listed the wrong number of dimensions: + num_user_dims = len(user_dim_text) + if num_user_dims != num_arr_dims: #Set proper grammar: + if num_user_dims == 1: + user_dim_str = "dimension" + else: + user_dim_str = "dimensions" + #End if if num_arr_dims == 1: - dim_err_str = "dimension." + array_dim_str = "dimension." else: - dim_err_str = "dimensions." + array_dim_str = "dimensions." 
#End if - emsg = f"Variable '{var_name}' has {len(user_dim_text)}" - emsg += " dimensions used in 'user_nl_cam', but is defined" - emsg += f" to only have {num_arr_dims} "+dim_err_str + + #Raise error with proper message: + emsg = f"Variable '{var_name}' has {num_user_dims}" + emsg += f" {user_dim_str} used in 'user_nl_cam', but is defined" + emsg += f" to have {num_arr_dims} {array_dim_str}" raise AtmInParamGenError(emsg) #End if @@ -1131,7 +1167,7 @@ def append_user_nl_file(self, user_nl_file): """ #Create ordered dictionary to store namelist groups, - #variables, and values from user_nl_XXX file: + #variables, and values from user_nl_cam file: _data = OrderedDict() #Initialize flag preventing duplicate namelist entries: @@ -1160,7 +1196,7 @@ def append_user_nl_file(self, user_nl_file): #Check if the entire line is a comment: if line_s[0][0] == "!": #Check if this comment is the duplicate keyword: - if "allow_duplicate_namliest_entries" in line_s: + if "allow_duplicate_namelist_entries" in line_s: #Next check if a user has set variable to True: for word in line_s: if word.lower() == "true": @@ -1177,7 +1213,7 @@ def append_user_nl_file(self, user_nl_file): line = remove_user_nl_comment(line) #End if - #Check ifthe first character on the line is a comma (,): + #Check if the first character on the line is a comma (,): if line.strip()[0] == ",": #Is this an array variable: if is_array: @@ -1233,7 +1269,7 @@ def append_user_nl_file(self, user_nl_file): #End if (array indices) #Extract value string: - val_str = ' '.join(line_ss[1:]) # the rest is tha value string + val_str = ' '.join(line_ss[1:]) # the rest is the value string #Check if value string ends in array continuation: if is_array: @@ -1263,6 +1299,18 @@ def append_user_nl_file(self, user_nl_file): _data[data_group][var_str] = {'values':val_str} #end if elif (is_array and is_continue_line): + #See if there is an equals sign outside of quotes by treating it like + #a comment delimiter, and seeing if characters in the string are removed: + no_equals_line = remove_user_nl_comment(line, comment_delim='=') + if len(no_equals_line) != len(line): + #This looks like the start of a new namelist entry without the + #proper ending of the previous entry. So raise an error here: + emsg = f"Line number {line_num+1} in 'user_nl_cam' appears" + emsg += " to be starting a new namelist entry,\nbut" + emsg += " the previous entry has a trailing comma (,). Please fix." + raise AtmInParamGenError(emsg) + #End if + #This is an array continuation line, so append the line to previous #variable's value as-is: _data[data_group][var_str]['values'] += line diff --git a/cime_config/buildnml b/cime_config/buildnml index d68d5a69..bc1e1a41 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -102,7 +102,6 @@ def nml_file_dict_update(nml_fil_dict, nml_def_fils): # Loop over all found files: for nml_def_fil in nml_def_fils: - print(nml_def_fil) fil_name = os.path.basename(nml_def_fil) if fil_name in nml_fil_dict: fil_dir = os.path.dirname(os.path.abspath(nml_def_fil)) diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py index 3b5e603e..784fb566 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -1,5 +1,5 @@ """ -Location of interal python classes used by the +Location of internal python classes used by the "ConfigCAM" class to generate, store, and pass-on any CAM configuration variables to other components of the build system. 
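Illustrative sketch (not part of the patch above): the trailing-comma check added in append_user_nl_file() reuses remove_user_nl_comment() with comment_delim='=' to ask whether an equals sign occurs outside of quoted text, which would mean a new namelist entry is starting. A minimal, standalone version of that quote-aware search is shown below; the helper name first_unquoted is hypothetical, and the quote handling is simplified relative to the regex-based routine in the patch (it does not treat Fortran doubled quotes specially).

    def first_unquoted(line, delim):
        """Return the index of the first `delim` outside single/double quotes, or -1."""
        quote_char = None                  # quote character we are currently inside, if any
        for idx, char in enumerate(line):
            if quote_char:
                if char == quote_char:
                    quote_char = None      # closing quote found
            elif char in ("'", '"'):
                quote_char = char          # opening quote found
            elif char == delim:
                return idx                 # delimiter sits outside any quoted region
        return -1

    # An '=' outside quotes marks the start of a new namelist entry:
    assert first_unquoted("duck_quack = .false.", "=") == 11
    # ...while an '=' buried inside a quoted string value is ignored:
    assert first_unquoted("'a = b', 'c = d'", "=") == -1
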
diff --git a/cime_config/user_nl_cam b/cime_config/user_nl_cam index 232bc57b..21f5e312 100644 --- a/cime_config/user_nl_cam +++ b/cime_config/user_nl_cam @@ -5,4 +5,4 @@ ! is a special variable that allows duplicate namelist ! entries in this file, if set to "True". Otherwise ! any duplicate namelist entries will result in an error. -! allow_duplicate_namliest_entries = False +! allow_duplicate_namelist_entries = False diff --git a/test/unit/sample_files/atm_in_files/test_multi_xml_in b/test/unit/sample_files/atm_in_files/test_multi_xml_in index ae2f8d04..52d6a493 100644 --- a/test/unit/sample_files/atm_in_files/test_multi_xml_in +++ b/test/unit/sample_files/atm_in_files/test_multi_xml_in @@ -1,3 +1,7 @@ +&alien_invasions + body_snatchers = "looks human", "human", "still looks human", "alien" +/ + &bird_sounds_nl duck_quack = .false. turkey_leg = 3.14d0 diff --git a/test/unit/sample_files/atm_in_files/test_third_nml_def.xml b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml index 010219b0..fda6522b 100644 --- a/test/unit/sample_files/atm_in_files/test_third_nml_def.xml +++ b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml @@ -20,4 +20,18 @@ + + char*32(2,2) + sci_fi + alien_invasions + + Is it human, or a body snatcher? + + + + "looks human", "human", + "still looks human", "alien" + + + diff --git a/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var b/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var index ea54a426..01bfff3e 100644 --- a/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var +++ b/test/unit/sample_files/atm_in_files/test_user_nl_allow_dupl_var @@ -1,6 +1,6 @@ ! Users should add all user specific namelist changes below in the form of ! namelist_var = new_namelist_value -! allow_duplicate_namliest_entries = True +! allow_duplicate_namelist_entries = True turkey_leg = 22.7 diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index 9bc9cf25..de4078d1 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -725,6 +725,47 @@ def test_user_nl_double_comma(self): #Remove temporary user_nl_cam file os.remove("user_nl_tmp") + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file with an array + #variable with an ending comma followed by a new + #namelist variable fails correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_trailing_comma(self): + + """ + Check that a user_nl_cam file with a + namelist entry that is an array, but + ends with a trailing comma, fails + with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: + nl_file.write("marx_bros = 'mario', 'luigi',\n") + nl_file.write("duck_quack = .false.") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Line number 2 in 'user_nl_cam' appears" + emsg += " to be starting a new namelist entry,\nbut" + emsg += " the previous entry has a trailing comma (,). Please fix." 
+ self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + #+++++++++++++++++++++++++++++++++++++++++++++++ #Check that a user_nl_cam file that contains #a non-array namelist variable with array @@ -799,7 +840,47 @@ def test_user_nl_too_many_array_dims(self): # Check exception message: emsg = "Variable 'marx_bros' has 2 dimensions" emsg += " used in 'user_nl_cam', but is defined" - emsg += f" to only have 1 dimension." + emsg += f" to have 1 dimension." + self.assertEqual(emsg, str(cerr.exception)) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + + #+++++++++++++++++++++++++++++++++++++++++++++++ + #Check that a user_nl_cam file that contains + #a array namelist variable with too few array + #dimensions specified fails correctly. + #+++++++++++++++++++++++++++++++++++++++++++++++ + + def test_user_nl_too_few_array_dims(self): + + """ + Check that a user_nl_cam file with + a namelist variable that is an array, + but that is listed with too many dimension, + fails with the appropriate error. + """ + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_third_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: + nl_file.write("body_snatchers(1) = Alien!!!") + # End with + + # Attempt to append user_nl_cam file: + with self.assertRaises(AtmInParamGenError) as cerr: + pg_test.append_user_nl_file("user_nl_tmp") + # End with + + # Check exception message: + emsg = "Variable 'body_snatchers' has 1 dimension" + emsg += " used in 'user_nl_cam', but is defined" + emsg += f" to have 2 dimensions." self.assertEqual(emsg, str(cerr.exception)) #Remove temporary user_nl_cam file @@ -816,7 +897,7 @@ def test_user_nl_bad_index_order(self): """ Check that a user_nl_cam file with a - namelist variable that is an arry + namelist variable that is an array with specified indexes that are out of order (max:min instead of min:max) fails with the appropriate error. From 34fbe6874d13ccbb8f22dc192cdeaea252615b19 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 10 Jun 2022 16:22:55 -0600 Subject: [PATCH 27/33] Update ParamGen branch to head of CIME, and implement related code mods. 
--- .gitignore | 1 + Externals.cfg | 37 ++++++++++++++++++------------ cime_config/atm_in_paramgen.py | 2 +- cime_config/buildlib | 5 ++-- cime_config/buildnml | 10 ++++---- cime_config/cam_config.py | 1 + cime_config/create_readnl_files.py | 7 +++--- 7 files changed, 37 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index 943cd484..d88504dd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # Ignore externals ccpp_framework +ccs_config chem_proc cime share diff --git a/Externals.cfg b/Externals.cfg index ec3386d9..b0bfd2b8 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -1,27 +1,34 @@ -[cice] -tag = cice5_20210802 +[ccs_config] +tag = ccs_config_cesm0.0.28 +protocol = git +repo_url = https://github.com/ESMCI/ccs_config_cesm +local_path = ccs_config +required = True + +[cice5] +tag = cice5_20220204 protocol = git repo_url = https://github.com/ESCOMP/CESM_CICE5 -local_path = components/cice +local_path = components/cice5 required = True [cice6] -tag = cesm_cice6_2_0_5 +tag = cesm_cice6_2_0_21 protocol = git repo_url = https://github.com/ESCOMP/CESM_CICE -local_path = components/cice6 +local_path = components/cice externals = Externals.cfg -required = False +required = True [cmeps] -tag = cmeps0.13.40 +tag = cmeps0.13.62 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps required = True [cdeps] -tag = cdeps0.12.32 +tag = cdeps0.12.46 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps @@ -29,14 +36,14 @@ externals = Externals_CDEPS.cfg required = True [cpl7] -tag = cpl7.0.5 +tag = cpl7.0.12 protocol = git repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps local_path = components/cpl7 required = True [share] -tag = share1.0.8 +tag = share1.0.11 protocol = git repo_url = https://github.com/ESCOMP/CESM_share local_path = share @@ -50,21 +57,21 @@ local_path = libraries/mct required = True [parallelio] -tag = pio2_5_4 +tag = pio2_5_6 protocol = git repo_url = https://github.com/NCAR/ParallelIO local_path = libraries/parallelio required = True [cime] -branch = CAMDEN_ParamGen +branch = ParamGen protocol = git repo_url = https://github.com/nusbaume/cime local_path = cime required = True [cism] -tag = cismwrap_2_1_93 +tag = cismwrap_2_1_95 protocol = git repo_url = https://github.com/ESCOMP/CISM-wrapper local_path = components/cism @@ -72,7 +79,7 @@ externals = Externals_CISM.cfg required = True [clm] -tag = ctsm5.1.dev064 +tag = ctsm5.1.dev082 protocol = git repo_url = https://github.com/ESCOMP/CTSM local_path = components/clm @@ -88,7 +95,7 @@ externals = Externals_FMS.cfg required = True [mosart] -tag = mosart1_0_44 +tag = mosart1_0_45 protocol = git repo_url = https://github.com/ESCOMP/MOSART local_path = components/mosart diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 944ff495..8988d866 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -21,7 +21,7 @@ _CIME_CONF_DIR = os.path.abspath(os.path.dirname(__file__)) _CIME_ROOT = os.path.join(_CIME_CONF_DIR, os.pardir, "cime") -_PARAMGEN_ROOT = os.path.join(_CIME_ROOT, "scripts", "lib", "CIME", "ParamGen") +_PARAMGEN_ROOT = os.path.join(_CIME_ROOT, "CIME", "ParamGen") if not os.path.exists(_PARAMGEN_ROOT): _EMSG = f"ERROR: Cannot find '{_PARAMGEN_ROOT}' directory. Did you run checkout_externals?" 
diff --git a/cime_config/buildlib b/cime_config/buildlib index 603b2479..fdbd2b8f 100755 --- a/cime_config/buildlib +++ b/cime_config/buildlib @@ -23,12 +23,13 @@ sys.path.append(os.path.join(__CIMEROOT, "scripts", "lib")) # CIME imports from CIME.case import Case from CIME.utils import run_cmd, expect -from CIME.utils import check_minimum_python_version, stop_buffering_output +from CIME.utils import stop_buffering_output from CIME.buildlib import parse_input from CIME.build import get_standard_makefile_args +from CIME.Tools.standard_script_setup import check_minimum_python_version #pylint: enable=wrong-import-position -check_minimum_python_version(3, 6) +check_minimum_python_version(3, 7) #CAM requires version 3.7 or greater stop_buffering_output() _LOGGER = logging.getLogger(__name__) diff --git a/cime_config/buildnml b/cime_config/buildnml index bc1e1a41..370d9879 100755 --- a/cime_config/buildnml +++ b/cime_config/buildnml @@ -17,11 +17,11 @@ _LIBDIR = os.path.join(_CIMEROOT, "scripts", "Tools") sys.path.append(_LIBDIR) # pylint: disable=wildcard-import, wrong-import-position # pylint: disable=unused-wildcard-import -from standard_script_setup import * -from CIME.XML.standard_module_setup import * -from CIME.buildnml import create_namelist_infile, parse_input -from CIME.case import Case -from CIME.utils import expect +from CIME.Tools.standard_script_setup import * +from CIME.XML.standard_module_setup import * +from CIME.buildnml import create_namelist_infile, parse_input +from CIME.case import Case +from CIME.utils import expect # Save local (cime_config) directory path: _CIME_CONFIG_PATH = os.path.dirname(os.path.abspath(__file__)) diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index e87cc38b..c4497258 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -194,6 +194,7 @@ def __init__(self, case, case_log): emsg = "CAM requires python 3.7 or later, currently using python version" emsg += f" {sys.version_info[0]}.{sys.version_info[1]}" raise SystemError(emsg) + #End if # Read in needed case variables atm_grid = case.get_value("ATM_GRID") # Atmosphere (CAM) grid diff --git a/cime_config/create_readnl_files.py b/cime_config/create_readnl_files.py index 962ed520..f67bc5c2 100644 --- a/cime_config/create_readnl_files.py +++ b/cime_config/create_readnl_files.py @@ -23,9 +23,10 @@ _CURRDIR = os.path.abspath(os.path.dirname(__file__)) _CAMROOT = os.path.abspath(os.path.join(_CURRDIR, os.pardir)) _SPINSCRIPTS = os.path.join(_CAMROOT, "ccpp_framework", 'scripts') -_XML_SCHEMAS = os.path.join(_CAMROOT, "cime", "config", "xml_schemas") -_PG_SCHEMAS = os.path.join(_CAMROOT, "cime", "scripts", "lib", - "CIME", "ParamGen", "xml_schema") +_XML_SCHEMAS = os.path.join(_CAMROOT, "cime", "CIME", "data", "config", + "xml_schemas") +_PG_SCHEMAS = os.path.join(_CAMROOT, "cime", "CIME", "ParamGen", + "xml_schema") if _SPINSCRIPTS not in sys.path: sys.path.append(_SPINSCRIPTS) # end if From c9c73897924625c207d6948f5aec0eb3dcc3cef3 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Mon, 27 Jun 2022 13:31:31 -0600 Subject: [PATCH 28/33] Move CIME external to tag from ESMCI/CIME repo. 
--- Externals.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index b0bfd2b8..f352e7f7 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -64,9 +64,9 @@ local_path = libraries/parallelio required = True [cime] -branch = ParamGen +tag = cime6.0.38 protocol = git -repo_url = https://github.com/nusbaume/cime +repo_url = https://github.com/ESMCI/cime local_path = cime required = True From 35a18b7ae5768a3a63f2154693e6cd4234d10b19 Mon Sep 17 00:00:00 2001 From: Steve Goldhaber Date: Thu, 14 Jul 2022 22:35:17 -0600 Subject: [PATCH 29/33] Created new function (parse_dim_spec) to parse array sections Added more complex string tests for remove_user_nl_comment Note that test_user_nl_complex_array_dims fails because the file is written in a different order. This could be a problem if any value is written twice (e.g., slips through the duplicate value check or if that check is turned off). --- cime_config/atm_in_paramgen.py | 332 +++++++++--------- .../atm_in_files/test_cmplx_array_atm_in | 11 + .../atm_in_files/test_multi_xml_in | 1 + .../atm_in_files/test_third_nml_def.xml | 16 + test/unit/test_atm_in_paramgen.py | 118 +++---- 5 files changed, 254 insertions(+), 224 deletions(-) create mode 100644 test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 8988d866..b289e615 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -257,6 +257,24 @@ def remove_user_nl_comment(user_string, comment_delim="!"): 17. Check that a line with an alternative comment delimiter returns the proper string: >>> remove_user_nl_comment('bananas #and 13.0d0 5 .true. !@$#%*?', comment_delim='#') 'bananas ' + + 18. Check that some more unusual strings are handled correctly + >>> remove_user_nl_comment("'Isn''t it a nice day'") + "'Isn''t it a nice day'" + >>> remove_user_nl_comment("'Isn''t it a nice day' !comment") + "'Isn''t it a nice day' " + >>> remove_user_nl_comment("'Isn!''!t it a nice! day'") + "'Isn!''!t it a nice! day'" + >>> remove_user_nl_comment("'Isn!''!t it a nice! day' ! comment") + "'Isn!''!t it a nice! day' " + >>> remove_user_nl_comment('''"This is 'one' string"''') + '"This is \\'one\\' string"' + >>> remove_user_nl_comment('''"This is 'one' string" !comment''') + '"This is \\'one\\' string" ' + >>> remove_user_nl_comment("'This is \\"one\\" string'") + '\\'This is "one" string\\'' + >>> remove_user_nl_comment("'This is \\"one\\" string'! comment") + '\\'This is "one" string\\'' """ #Create empty set for comment-delimiting indices: @@ -418,6 +436,147 @@ def check_dim_index(var_name, index_val, dim_size): ##### +def parse_dim_spec(var_name, array_spec_text, dim_size): + """ + Given the text of a single array dimension specification, + return the range of values specified by the specification or + raise an Exception if an error is detected. + is the variable name and is used for error messages + is the text representation of the array spec + is the size of that rank in + + 1. Check that a single, legal index returns the correct single value + >>> parse_dim_spec('banana', '5', 10) + [5] + + 2. Check that a single, out-of-bounds index generates the proper error + >>> parse_dim_spec('banana', '15', 10) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... 
+ atm_in_paramgen.AtmInParamGenError: + Variable 'banana' has index 15 in 'user_nl_cam', which is greater than the max dimension size of 10 + >>> parse_dim_spec('banana', '0', 10) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: + Variable 'banana' has index 0 in 'user_nl_cam', which is less than one (1), the minimal index value allowed. + >>> parse_dim_spec('banana', '-2', 10) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: + Variable 'banana' has index -2 in 'user_nl_cam', which is less than one (1), the minimal index value allowed. + + 3. Check that a legal range returns the correct list of indices + >>> parse_dim_spec('banana', '5:9', 10) + [5, 6, 7, 8, 9] + >>> parse_dim_spec('banana', ':9', 10) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> parse_dim_spec('banana', ':', 10) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + >>> parse_dim_spec('banana', '6:', 10) + [6, 7, 8, 9, 10] + + 4. Check that an out-of-bounds range returns the correct list + >>> parse_dim_spec('banana', '0:2', 10) + [1, 2] + >>> parse_dim_spec('banana', '7:11', 10) + [7, 8, 9, 10] + >>> parse_dim_spec('banana', '-1:11', 10) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + 5. Check that an empty range returns an empty list + >>> parse_dim_spec('banana', '5:1', 10) + [] + + 6. Check that a legal range with a stride returns the correct list + >>> parse_dim_spec('banana', '5:9:2', 10) + [5, 7, 9] + >>> parse_dim_spec('banana', ':9:3', 10) + [1, 4, 7] + >>> parse_dim_spec('banana', '::3', 10) + [1, 4, 7, 10] + >>> parse_dim_spec('banana', '6:', 10) + [6, 7, 8, 9, 10] + >>> parse_dim_spec('banana', '9:1:-3', 10) + [9, 6, 3] + + 7. Check that a mismatched stride returns an empty list + >>> parse_dim_spec('banana', '9:5:2', 10) + [] + >>> parse_dim_spec('banana', '5:9:-2', 10) + [] + >>> parse_dim_spec('banana', ':9:-3', 10) + [] + >>> parse_dim_spec('banana', '::-2', 10) + [] + >>> parse_dim_spec('banana', '6::-1', 10) + [] + >>> parse_dim_spec('banana', '9:1:3', 10) + [] + + 8. Check that a missing stride value generates an error + >>> parse_dim_spec('banana', '2::', 10) # doctest: +ELLIPSIS + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: + Two colons were provided for variable 'banana' in 'user_nl_cam', \ + but no stride value was provided. + Please provide either a stride value, or remove the extra colon. + """ + array_dims = [x.strip() for x in array_spec_text.split(':')] + if len(array_dims) > 3: + #Not sure what to do with three or more colons, so die here: + emsg = f"Variable '{var_name}' has {len(array_dims) - 1} colons (:) " + emsg += "listed in its dimension indexing in 'user_nl_cam'." + emsg += " This is not a valid Fortran array section specification." + raise AtmInParamGenError(emsg) + #End if + # Defaults + arr_beg = 1 + arr_end = dim_size + arr_stride = 1 + # Override start index? + if array_dims[0]: + arr_beg = user_nl_str_to_int(array_dims[0], var_name) + # end if + # Override end index? + if len(array_dims) > 1: + if array_dims[1].strip(): + arr_end = user_nl_str_to_int(array_dims[1], var_name) + #End if (no else, blank means use default) + else: + # We only need to check this if it is only a single index + check_dim_index(var_name, arr_beg, dim_size) + # For a single index, the end is the same as the beginning + arr_end = arr_beg + #End if + # Override stride? 
+ if len(array_dims) > 2: + if array_dims[2]: + arr_stride = user_nl_str_to_int(array_dims[2], var_name) + if arr_stride == 0: + emsg = f"Variable '{var_name}' has a stride of zero " + emsg += "listed in its dimension indexing in 'user_nl_cam'." + emsg += " This is not a valid Fortran stride." + raise AtmInParamGenError(emsg) + #End if + else: + emsg = f"Two colons were provided for variable '{var_name}'" + emsg += " in 'user_nl_cam', but no stride value was provided." + emsg += "\nPlease provide either a stride value, or remove the " + emsg += "extra colon." + raise AtmInParamGenError(emsg) + #End if + #End if (no else, just use default stride) + # Now, create the set of entries + # We need to modify the end to make the range function compatible with + # how Fortran uses it + arr_end += int(arr_stride / abs(arr_stride)) + return [x for x in list(range(arr_beg, arr_end, arr_stride)) if + ((x >= 1) and (x <= dim_size))] + +##### + def _get_nml_value_str(var_name, var_type, var_val): """ @@ -883,167 +1042,20 @@ def check_user_nl_var(self, var_str): #Loop over dimensions: for dim_idx, array_index_text in enumerate(user_dim_text): #Create new array list entry: - arr_indxs.append([]) - - #check for colons: - array_idx_bnds = array_index_text.split(":") - - #Determine number of colons by number of list elements: - num_colons = len(array_idx_bnds) - 1 - - if num_colons == 0: - #No colons are present, so the text should only be a number: - index_val = user_nl_str_to_int(array_idx_bnds[0], var_name) - - #Check index value: - check_dim_index(var_name, index_val, max_dim_sizes[dim_idx]) - - #Add number to array index list: - arr_indxs[dim_idx].append(index_val) - - elif num_colons == 1: - #One colon is present, so now check if there are specified index bounds: - if all(array_idx_bnds): - - #Both array bounds are specified: - index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) - index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) - - #Check index values: - check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) - check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) - - #Make sure first value is smaller than the second: - if index_max_val < index_min_val: - emsg = f"Bad indexing, min index value '{index_min_val}'" - emsg += f" greater than max index value '{index_max_val}'" - emsg += f" for variable '{var_name}' in 'user_nl_cam'." - raise AtmInParamGenError(emsg) - #End if - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(index_min_val, index_max_val+1))) - elif array_idx_bnds[0]: - - #Only minimum array bound specified: - index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) - - #Check index value: - check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(index_min_val, max_dim_sizes[dim_idx]+1))) - - elif array_idx_bnds[1]: - - #Only maximum array bounds specified: - index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) - - #Check index value: - check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(1, index_max_val+1))) - - else: - - #Only a single colon provided. 
In this case provide a special index - #that indicates that specific indices can still be provided, but that the - #whole array dimension cannot be written again: - arr_indxs[dim_idx].append(-1) - - #End if (index bounds) - - elif num_colons == 2: - - #Two colons are present, which means a stride value should be present as - #the last numerical value. If one is not present, then throw an error: - if not array_idx_bnds[2]: - emsg = f"Two colons were provided for variable '{var_name}'" - emsg += " in 'user_nl_cam', but no stride value was provided." - emsg += "\nPlease provide either a stride value, or remove the" - emsg += "extra colon." - raise AtmInParamGenError(emsg) - #End if - - if all(array_idx_bnds): - - #A min/max/stride value has been provided: - index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) - index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) - index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) - - #Check index values: - check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) - check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) - check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) - - #Make sure first value is smaller than the second: - if index_max_val < index_min_val: - emsg = f"Bad indexing, min index value '{index_min_val}'" - emsg += f" greater than max index value '{index_max_val}'" - emsg += f" for variable '{var_name}' in 'user_nl_cam'." - raise AtmInParamGenError(emsg) - #End if - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(index_min_val, - index_max_val+1, - index_stride))) - - elif array_idx_bnds[0]: - - #Only minimum array bound specified: - index_min_val = user_nl_str_to_int(array_idx_bnds[0], var_name) - index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) - - #Check index value: - check_dim_index(var_name, index_min_val, max_dim_sizes[dim_idx]) - check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(index_min_val, - max_dim_sizes[dim_idx]+1, - index_stride))) - - elif array_idx_bnds[1]: - - #Only maximum array bounds specified: - index_max_val = user_nl_str_to_int(array_idx_bnds[1], var_name) - index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) - - #Check index value: - check_dim_index(var_name, index_max_val, max_dim_sizes[dim_idx]) - check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(1, index_max_val+1, index_stride))) - - else: - - #Only a stride provided, so cover the entire array dimension - #using the provided stride: - - #Extract and check stride values: - index_stride = user_nl_str_to_int(array_idx_bnds[2], var_name) - check_dim_index(var_name, index_stride, max_dim_sizes[dim_idx]) - - #Add index range to array index list: - arr_indxs[dim_idx].extend(list(range(1, - max_dim_sizes[dim_idx]+1, - index_stride))) - - #End if (index bounds) - + if array_index_text.strip() == ':': + #Only a single colon provided. In this case provide a special index + #that indicates that specific indices can still be provided, but that the + #whole array dimension cannot be written again: + arr_indxs.append([-1]) else: - - #Not sure what to do with three or more colons, so die here: - emsg = f"Variable '{var_name}' has {num_colons} colons (:) " - emsg += "listed in its dimension indexing in 'user_nl_cam'." - emsg += " Only up to two colons are supported." 
- raise AtmInParamGenError(emsg) - - #End if (number of colons) + indices = parse_dim_spec(var_name, array_index_text, + max_dim_sizes[dim_idx]) + if not indices: + ## Log a warning here if no values were returned? + pass + #End if + arr_indxs.append(indices) + #End if #End for (dimensions) #Return relevant variables: diff --git a/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in new file mode 100644 index 00000000..4d234346 --- /dev/null +++ b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in @@ -0,0 +1,11 @@ +&alien_invasions + body_snatchers = "looks human", "human", "still looks human", "alien" + swedish_chef = "bork", "bork" + swedish_chef(15:10:-2) = 'bork', 'bork', 'bork' + swedish_chef(7:9:2) = 'not bork', 'not bork' + swedish_chef(1) = '' +/ + +&space_stations + space_station = "skylab" +/ diff --git a/test/unit/sample_files/atm_in_files/test_multi_xml_in b/test/unit/sample_files/atm_in_files/test_multi_xml_in index 52d6a493..1c0e2508 100644 --- a/test/unit/sample_files/atm_in_files/test_multi_xml_in +++ b/test/unit/sample_files/atm_in_files/test_multi_xml_in @@ -1,5 +1,6 @@ &alien_invasions body_snatchers = "looks human", "human", "still looks human", "alien" + swedish_chef = "bork", "bork" / &bird_sounds_nl diff --git a/test/unit/sample_files/atm_in_files/test_third_nml_def.xml b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml index fda6522b..0ff51788 100644 --- a/test/unit/sample_files/atm_in_files/test_third_nml_def.xml +++ b/test/unit/sample_files/atm_in_files/test_third_nml_def.xml @@ -34,4 +34,20 @@ + + + char*16(16) + muppets + alien_invasions + + The best chef in puppet TV today. + Five time winner of "Pillow Chef" + + + + "bork", "bork" + + + + diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index de4078d1..bea7e310 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -886,67 +886,6 @@ def test_user_nl_too_few_array_dims(self): #Remove temporary user_nl_cam file os.remove("user_nl_tmp") - #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains - #a array namelist variable with specified array - #indices in the wrong (max:min) order fails - #correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ - - def test_user_nl_bad_index_order(self): - - """ - Check that a user_nl_cam file with a - namelist variable that is an array - with specified indexes that are out of - order (max:min instead of min:max) fails - with the appropriate error. - """ - - # Get XML file path: - xml_test_fil = os.path.join(_SAMPLES_DIR, "test_simple_nml_def.xml") - - # Create the ParamGen object: - pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) - - # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: - nl_file.write("marx_bros(4:1) = 'Gummo'") - # End with - - # Attempt to append user_nl_cam file with line that - # contains no equals ('=') sign: - with self.assertRaises(AtmInParamGenError) as cerr: - pg_test.append_user_nl_file("user_nl_tmp") - # End with - - # Check exception message: - emsg = f"Bad indexing, min index value '4'" - emsg += f" greater than max index value '1'" - emsg += f" for variable 'marx_bros' in 'user_nl_cam'." 
- self.assertEqual(emsg, str(cerr.exception)) - - #Remove temporary user_nl_cam file - os.remove("user_nl_tmp") - - #Try again with a stride index: - - # Create temporary user_nl_cam file: - with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: - nl_file.write("marx_bros(4:1:2) = 'Gummo'") - # End with - - # Attempt to append user_nl_cam file: - with self.assertRaises(AtmInParamGenError) as cerr: - pg_test.append_user_nl_file("user_nl_tmp") - # End with - - # Check exception message: - emsg = f"Bad indexing, min index value '4'" - emsg += f" greater than max index value '1'" - emsg += f" for variable 'marx_bros' in 'user_nl_cam'." - self.assertEqual(emsg, str(cerr.exception)) - #+++++++++++++++++++++++++++++++++++++++++++++++ #Check that a user_nl_cam file that contains an #array variable with two colons for a specific @@ -982,9 +921,9 @@ def test_user_nl_no_stride_val(self): # End with # Check exception message: - emsg = f"Two colons were provided for variable 'marx_bros'" + emsg = "Two colons were provided for variable 'marx_bros'" emsg += " in 'user_nl_cam', but no stride value was provided." - emsg += "\nPlease provide either a stride value, or remove the" + emsg += "\nPlease provide either a stride value, or remove the " emsg += "extra colon." self.assertEqual(emsg, str(cerr.exception)) @@ -1026,7 +965,7 @@ def test_user_nl_three_colons(self): # Check exception message: emsg = f"Variable 'marx_bros' has 3 colons (:) " emsg += "listed in its dimension indexing in 'user_nl_cam'." - emsg += " Only up to two colons are supported." + emsg += " This is not a valid Fortran array section specification." self.assertEqual(emsg, str(cerr.exception)) #Remove temporary user_nl_cam file @@ -1181,6 +1120,57 @@ def test_user_nl_arr_index_range_dupl(self): #Remove temporary user_nl_cam file os.remove("user_nl_tmp") + def test_user_nl_complex_array_dims(self): + + """ + Check that a user_nl_cam file with + a namelist variable that is an array, + using complex array syntax produces the correct atm_in file + """ + + # Create fake CIME case: + fcase = FakeCase() + + # Get expected atm_in file: + atm_in_output = os.path.join(_SAMPLES_DIR, "test_cmplx_array_atm_in") + + # Get XML file path: + xml_test_fil = os.path.join(_SAMPLES_DIR, "test_third_nml_def.xml") + + # Create the ParamGen object: + pg_test = AtmInParamGen.from_namelist_xml(xml_test_fil) + + # Create temporary user_nl_cam file: + with open("user_nl_tmp", "w", encoding='utf-8') as nl_file: + nl_file.write("swedish_chef(15:10:-2) = 'bork', 'bork', 'bork'\n") + nl_file.write("swedish_chef(7:9:2) = 'not bork', 'not bork'\n") + nl_file.write("swedish_chef(1) = ''\n") + # End with + + # Attempt to append user_nl_cam file: + pg_test.append_user_nl_file("user_nl_tmp") + + # Set all ParamGen namelist values: + pg_test.reduce_atm_in(fcase, {}) + + # Create test atm_in namelist file name: + test_output = os.path.join(_TMP_DIR, "test_cmplx_array_atm_in") + + # Create CAM namelist using CIME's nmlgen routine: + pg_test.write(test_output) + + # Check that output file was written: + amsg = f"{test_output} does not exist" + self.assertTrue(os.path.exists(test_output), msg=amsg) + + # Check that output file matches expected file: + amsg = f"{test_output} does not match {atm_in_output}" + self.assertTrue(filecmp.cmp(test_output, atm_in_output, shallow=False), \ + msg=amsg) + + #Remove temporary user_nl_cam file + os.remove("user_nl_tmp") + ################################################# #Run unit tests if this script is called directly 
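# Illustrative sketch (not part of the patch above): the heart of the new
# parse_dim_spec() routine is mapping a Fortran array-section spec
# "beg:end:stride" (1-based, end-inclusive, stride possibly negative) onto
# Python's half-open range().  A minimal standalone version of just that
# mapping, using the same end adjustment and bounds clipping as the patch,
# might look like this (the function name fortran_section is hypothetical):

def fortran_section(beg, end, stride, dim_size):
    """Return the in-bounds indices selected by beg:end:stride of a 1-based array."""
    # Fortran's end index is inclusive, so nudge it one step in the stride
    # direction before handing it to Python's exclusive-end range():
    end += stride // abs(stride)
    # Keep only indices that fall inside the declared dimension (1..dim_size):
    return [i for i in range(beg, end, stride) if 1 <= i <= dim_size]

# These match the parse_dim_spec() doctests shown earlier in this patch:
assert fortran_section(5, 9, 2, 10) == [5, 7, 9]
assert fortran_section(9, 1, -3, 10) == [9, 6, 3]
assert fortran_section(7, 11, 1, 10) == [7, 8, 9, 10]   # out-of-range end is clipped
assert fortran_section(9, 5, 2, 10) == []               # stride/range mismatch -> empty
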
################################################# From 614644409ebbd99e6edce65eea35790d1094ef9d Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 15 Jul 2022 11:26:39 -0600 Subject: [PATCH 30/33] Update comments and attempt to fix current CI test failures. --- cime_config/atm_in_paramgen.py | 15 ++++++++++----- cime_config/cam_autogen.py | 2 ++ cime_config/cam_build_cache.py | 2 ++ cime_config/cam_config.py | 2 ++ cime_config/cam_config_classes.py | 2 ++ cime_config/create_readnl_files.py | 2 +- .../atm_in_files/test_cmplx_array_atm_in | 7 ++++--- .../atm_in_files/test_nl_duplicate_atm_in | 4 ++-- 8 files changed, 25 insertions(+), 11 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index b289e615..836c3deb 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -3,6 +3,8 @@ CIME tool, and associated methods needed to generated the "atm_in" Fortran namelist file. + +To run doctests on this file: python -m doctest atm_in_paramgen.py """ #---------------------------------------- @@ -450,17 +452,17 @@ def parse_dim_spec(var_name, array_spec_text, dim_size): [5] 2. Check that a single, out-of-bounds index generates the proper error - >>> parse_dim_spec('banana', '15', 10) # doctest: +ELLIPSIS + >>> parse_dim_spec('banana', '15', 10) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE Traceback (most recent call last): ... atm_in_paramgen.AtmInParamGenError: Variable 'banana' has index 15 in 'user_nl_cam', which is greater than the max dimension size of 10 - >>> parse_dim_spec('banana', '0', 10) # doctest: +ELLIPSIS + >>> parse_dim_spec('banana', '0', 10) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE Traceback (most recent call last): ... atm_in_paramgen.AtmInParamGenError: Variable 'banana' has index 0 in 'user_nl_cam', which is less than one (1), the minimal index value allowed. - >>> parse_dim_spec('banana', '-2', 10) # doctest: +ELLIPSIS + >>> parse_dim_spec('banana', '-2', 10) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE Traceback (most recent call last): ... atm_in_paramgen.AtmInParamGenError: @@ -515,7 +517,7 @@ def parse_dim_spec(var_name, array_spec_text, dim_size): [] 8. Check that a missing stride value generates an error - >>> parse_dim_spec('banana', '2::', 10) # doctest: +ELLIPSIS + >>> parse_dim_spec('banana', '2::', 10) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE Traceback (most recent call last): ... atm_in_paramgen.AtmInParamGenError: @@ -1367,6 +1369,9 @@ def write(self, output_path): raise AtmInParamGenError(emsg) #End if + #Create function to properly sort variables with array indices: + var_sort_key = lambda var : var[:var.index("(")] if "(" in var else var + # Write Fortran namelist file: with open(os.path.join(output_path), 'w', encoding='utf-8') as atm_in_fil: #Loop through namelist groups in alphabetical order: @@ -1375,7 +1380,7 @@ def write(self, output_path): atm_in_fil.write("&"+nml_group+"\n") # Write all variables within that group (sorted alphabetically): - for var in sorted(self._data[nml_group]): + for var in sorted(self._data[nml_group], key=var_sort_key): #Extract variable value(s): val = self._data[nml_group][var]["values"].strip() diff --git a/cime_config/cam_autogen.py b/cime_config/cam_autogen.py index 7d7ecbaf..1ed647a3 100644 --- a/cime_config/cam_autogen.py +++ b/cime_config/cam_autogen.py @@ -3,6 +3,8 @@ which are used to autogenerate fortran source code based off of the registry and physics suites chosen by the user. 
+ +To run doctests on this file: python cam_autogen.py ''' ######################################## diff --git a/cime_config/cam_build_cache.py b/cime_config/cam_build_cache.py index 4fc8e78e..a7241492 100644 --- a/cime_config/cam_build_cache.py +++ b/cime_config/cam_build_cache.py @@ -2,6 +2,8 @@ Class, methods, and supporting functions to track whether either of CAM's specialized pre-processing file generation routines (CAM registry and CCPP) need to be run as part of a current build. + +To run doctests on this file: python cam_build_cache.py """ #---------------------------------------- diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index c4497258..d63585ed 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -4,6 +4,8 @@ "cime_config" scripts, and which stores all meta-data and descriptions associated with the CAM configuration of a CIME case. + +To run doctests on this file: python cam_config.py """ #---------------------------------------- diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py index 784fb566..b7eedb80 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -3,6 +3,8 @@ "ConfigCAM" class to generate, store, and pass-on any CAM configuration variables to other components of the build system. + +To run doctests on this file: python cam_config_classes.py """ #---------------------------------------- diff --git a/cime_config/create_readnl_files.py b/cime_config/create_readnl_files.py index f67bc5c2..6e367fe6 100644 --- a/cime_config/create_readnl_files.py +++ b/cime_config/create_readnl_files.py @@ -5,7 +5,7 @@ the appropriate read_namelist module and associated metadata. Also, create a master module to execute these namelist read functions. -To run doctest on this file: python -m doctest generate_registry_data.py +To run doctests on this file: python create_readnl_files.py """ # Python library imports diff --git a/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in index 4d234346..3024d10d 100644 --- a/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in +++ b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in @@ -1,11 +1,12 @@ &alien_invasions body_snatchers = "looks human", "human", "still looks human", "alien" swedish_chef = "bork", "bork" - swedish_chef(15:10:-2) = 'bork', 'bork', 'bork' - swedish_chef(7:9:2) = 'not bork', 'not bork' - swedish_chef(1) = '' + swedish_chef(15:10:-2) = "bork", "bork", "bork" + swedish_chef(7:9:2) = "not bork", "not bork" + swedish_chef(1) = "" / &space_stations space_station = "skylab" / + diff --git a/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in b/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in index b8b889ea..c70bbd7d 100644 --- a/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in +++ b/test/unit/sample_files/atm_in_files/test_nl_duplicate_atm_in @@ -30,11 +30,11 @@ 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0 + lets_ask_computer(1:) = 1 + lets_ask_computer(:50) = 2 lets_ask_computer(15:20) = 3 lets_ask_computer(16) = 4 - lets_ask_computer(1:) = 1 lets_ask_computer(30:36:2) = 5 - lets_ask_computer(:50) = 2 warp_drive = 0 / From fe3433d9e2612ac824f54a87f40f5103e6c024c3 Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Tue, 19 Jul 2022 13:25:39 -0600 Subject: [PATCH 31/33] Apply changes from another round of code review. 
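Illustrative sketch (not part of this commit): the var_sort_key added to write() in PATCH 30/33 above strips any array-index suffix before sorting, so indexed entries such as swedish_chef(15:10:-2) are written next to their base variable in atm_in. Because Python's sort is stable, entries sharing a base name keep their insertion order, which is why the plain swedish_chef default still precedes the user-specified index ranges in the updated test_cmplx_array_atm_in sample above. A standalone check of that behavior, reusing the same lambda with entry names taken from the test files:

    var_sort_key = lambda var : var[:var.index("(")] if "(" in var else var

    entries = ["swedish_chef(15:10:-2)", "body_snatchers",
               "swedish_chef", "swedish_chef(7:9:2)"]
    # body_snatchers sorts first; the swedish_chef entries keep insertion order:
    assert sorted(entries, key=var_sort_key) == [
        "body_snatchers", "swedish_chef(15:10:-2)",
        "swedish_chef", "swedish_chef(7:9:2)"]
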
--- cime_config/atm_in_paramgen.py | 155 +++++++-- cime_config/namelist_definition_cam.xml | 326 +++++++++--------- .../atm_in_files/test_cmplx_array_atm_in | 6 +- .../atm_in_files/test_extra_nml_def.xml | 2 +- test/unit/test_atm_in_paramgen.py | 108 +----- 5 files changed, 313 insertions(+), 284 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index 836c3deb..b5515038 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -34,6 +34,9 @@ from paramgen import ParamGen #pylint: enable=wrong-import-position +#Set of single and double quotes used by "_check_string_quotes" function: +_QUOTE_SET = {"'", '"'} + #Regular expression used by "remove_user_nl_comment" function: _QUOTE_REGEX = re.compile(r"\".*?\"|'.*?'") @@ -277,6 +280,8 @@ def remove_user_nl_comment(user_string, comment_delim="!"): '\\'This is "one" string\\'' >>> remove_user_nl_comment("'This is \\"one\\" string'! comment") '\\'This is "one" string\\'' + >>> remove_user_nl_comment("'This! is \\"!one\\"! string'! comment") + '\\'This! is "!one"! string\\'' """ #Create empty set for comment-delimiting indices: @@ -338,7 +343,7 @@ def user_nl_str_to_int(string, var_name): Checks if a string can be converted into an integer, and if not reports the relevant error. This function - is only used in the "check_user_nl_var" + is only used in the "get_user_nl_var_array_info" function below. ---------- string -> string to convert to integer. @@ -579,6 +584,90 @@ def parse_dim_spec(var_name, array_spec_text, dim_size): ##### +def _check_string_quotes(var_name, var_val): + + """ + Checks if a string is inside closed quotes, + i.e. has both a starting and ending quote + of the same type. This function also + raises an error if there are quotes but + they aren't closed: + + doctests: + + 1. Check that a string with single quotes returns "True": + >>> _check_string_quotes("Apple", "'Banana'") + True + + 2. Check that a string with double quotes returns "True": + >>> _check_string_quotes("Apple", '"Banana"') + True + + 3. Check that a string without quotes returns "False": + >>> _check_string_quotes("Apple", "Banana") + False + + 4. Check that a string with mis-matching quote types raises + the appropriate error: + >>> _check_string_quotes("Apple", ''' "Banana' ''') # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: Namelist entry 'Apple' is of type character but its input value: + "Banana' + has mis-matched quotes. Please fix. + + 5. Check that a string with a missing ending quote type raises + the appropriate error: + >>> _check_string_quotes("Apple", "'Banana") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: Namelist entry 'Apple' is of type character but its input value: + 'Banana + has mis-matched quotes. Please fix. + + 5. Check that a string with a missing starting quote type raises + the appropriate error: + >>> _check_string_quotes("Apple", 'Banana"') # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE + Traceback (most recent call last): + ... + atm_in_paramgen.AtmInParamGenError: Namelist entry 'Apple' is of type character but its input value: + Banana" + has mis-matched quotes. Please fix. 
+ + """ + + #Make sure variable has been stripped: + var_val_strip = var_val.strip() + + #Set error message (just in case): + emsg = f"Namelist entry '{var_name}' is of type character" + emsg += " but its input value:" + emsg += f"\n{var_val}\n" + emsg += "has mis-matched quotes. Please fix." + + #Check if starting and ending quotes exist and match: + if var_val_strip[0] in _QUOTE_SET: + if var_val_strip[0] == var_val_strip[-1]: + #String is inside closed quotes: + return True + #End if + + #Starting and ending quotes don't match, + #so raise an error: + raise AtmInParamGenError(emsg) + #End if + + #Check if there are ending quotes as well: + if var_val_strip[-1] in _QUOTE_SET: + #No starting quotes, raise an error: + raise AtmInParamGenError(emsg) + #End if + + #String is not inside quotes: + return False + +##### + def _get_nml_value_str(var_name, var_type, var_val): """ @@ -623,14 +712,19 @@ def _get_nml_value_str(var_name, var_type, var_val): 6. Check that a character variable with quotes outputs the correct value: >>> _get_nml_value_str("banana", "char*250", " 'apple' ") - '"apple"' + "'apple'" 7. Check that a character variable with double quotes outputs the correct value: >>> _get_nml_value_str("banana", "char*N", ' "apple" ') '"apple"' - 8. Check that a variable with an unknown type returns + 8. Check that a character variable with a quotation mark + innternal to the string outputs the correct value: + >>> _get_nml_value_str("banana", "char*31", ''' "app'le" ''') + '"app\\'le"' + + 9. Check that a variable with an unknown type returns the proper error: >>> _get_nml_value_str("banana", "apple", "true") # doctest: +ELLIPSIS Traceback (most recent call last): @@ -655,7 +749,7 @@ def _get_nml_value_str(var_name, var_type, var_val): if var_type in num_set: #Check if the variable value is an integer, but is being - #used for a real-type variaable: + #used for a real-type variable: if var_type == "real" and isinstance(var_val, int): return f"{var_val}.d0" #End if @@ -665,12 +759,19 @@ def _get_nml_value_str(var_name, var_type, var_val): #End if if "char*" in var_type: - #Remove all quotes in the string, as they are - #sometimes added by ParamGen during the "reduce" phase: - var_val = var_val.replace("'", "") - var_val = var_val.replace('"', "") - #Return with double quotes: - return f'"{var_val.strip()}"' + #Removee extra white space: + var_val_strip = var_val.strip() + + #Check if string is wrapped in quotes: + quoted_flag = _check_string_quotes(var_name, var_val_strip) + + #If not, then pass string with quotes: + if not quoted_flag: + return f'"{var_val_strip}"' + #End if + + #If so, then pass out original string as-is: + return var_val_strip #End if #If one makes it here, then this is an un-recognized type option, so raise an error: @@ -865,18 +966,23 @@ def append_atm_in_pg(self, atm_pg_obj): #Check that there are no matching namelist groups: #------------------------------------------------ + + #Initialize error message string: + emsg = "" + + #Loop over all namelist files and namelist group sets: for nml_file, nml_groups in self.__nml_def_groups.items(): #Determine if any namelist groups are the same #between the two objects: same_groups = nml_groups.intersection(input_groups) - #If so, then raise an error (as all namelist groups must be unique): + #If so, then add to error message (as all namelist groups must be unique): if same_groups: - emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" - emsg += " the following conflicting namelist groups:\n" + emsg += f"Cannot 
append:\n'{input_file}'\n" + emsg += " The following namelist groups conflict with those in" + emsg += f"\n'{nml_file} :'\n" emsg += ", ".join(same_groups) - raise AtmInParamGenError(emsg) #End if #End for @@ -890,18 +996,24 @@ def append_atm_in_pg(self, atm_pg_obj): #between the two objects: same_vars = nml_vars.intersection(input_vars) - #If so, then raise an error (as all namelist variable ids must be unique): + #If so, then add to error message (as all namelist variable ids must be unique): if same_vars: - emsg = f"Both\n'{nml_file}'\nand\n'{input_file}'\nhave" - emsg += " the following conflicting namelist variables:\n" + emsg += f"Cannot append:\n'{input_file}'\n" + emsg += " The following namelist variablesconflict with those in" + emsg += f"\n'{nml_file} :'\n" emsg += ", ".join(same_vars) - raise AtmInParamGenError(emsg) #End if #End for #------------------------------------------------ #End for (input files used to create input atm_pb object) + #Check if an error message was written. If so then raise the + #error(s) here: + if emsg: + raise AtmInParamGenError(emsg) + #Endd if + #Add input PG object dictionaries to this object's dicts: self.__nml_def_groups.update(atm_pg_obj.__nml_def_groups) self.__nml_def_vars.update(atm_pg_obj.__nml_def_vars) @@ -915,12 +1027,12 @@ def append_atm_in_pg(self, atm_pg_obj): #### - def check_user_nl_var(self, var_str): + def get_user_nl_var_array_info(self, var_str): """ Checks whether the variable string is for a specific set of array - indices: + indices. ---------- var_str -> variable name string. @@ -1263,7 +1375,8 @@ def append_user_nl_file(self, user_nl_file): #Check if this variable is an array, and if so, #then return what the variable name is, what indices (if any) #are being specified, and what namelist (data) group it belongs to: - is_array, var_name, arr_indxs, data_group = self.check_user_nl_var(var_str) + is_array, var_name, arr_indxs, data_group = \ + self.get_user_nl_var_array_info(var_str) #Are there array indices specified: if arr_indxs: diff --git a/cime_config/namelist_definition_cam.xml b/cime_config/namelist_definition_cam.xml index c39861c8..b37a5399 100644 --- a/cime_config/namelist_definition_cam.xml +++ b/cime_config/namelist_definition_cam.xml @@ -14,132 +14,132 @@ UNSET_PATH - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L26_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L30_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/cam_vcoords_L32_c180105.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.23x0.31_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.47x0.63_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-10-01_0.5x0.625_L26_c031204.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_1987-01-01_0.9x1.25_L26_c060703.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_0.9x1.25_L26_c051205.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_1.9x2.5_L26_c070408.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L26_c040809.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L26_c110309.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L26_c091007.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0001-01-01_4x5_L26_c060608.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.23x0.31_L30_c110527.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.47x0.63_L30_c100929.nc - 
$DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L30_c100618.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L30_c090306.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L30_c070109.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L30_c110309.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L30_c100831.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_4x5_L30_c090108.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_10x15_L30_c081013.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L32_c141031.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L32_c150407.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-mam4_0000-01-01_10x15_L32_c170914.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami_0000-01-01_0.47x0.63_L26_APE_c080227.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L26_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L26_c161230.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L30_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L30_c170103.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L32_c161020.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/aqua_0000-01-01_10x15_L32_c170103.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_0.9x1.25_L30_c080724.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L26_c080114.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L30_c080215.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L26_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L30_c081104.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_10x15_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_4x5_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_1.9x2.5_L30_c121015.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_4x5_L26_c060217.nc - $DIN_LOC_ROOT/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_10x15_L26_c060216.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-02-01_0.9x1.25_L66_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-07-01_1.9x2.5_L66_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/FWT2000_f09_spinup01.cam.i.0001-01-02-00000_c160315.nc - $DIN_LOC_ROOT/atm/waccm/ic/f2000.waccm-mam3_1.9x2.5_L70.cam2.i.0017-01-01.c120410.nc - $DIN_LOC_ROOT/atm/waccm/ic/f2000.waccm-mam3_10x15_L70.cam2.i.0017-01-01.c141016.nc - $DIN_LOC_ROOT/atm/waccm/ic/b1850.waccm-mam3_1.9x2.5_L70.cam2.i.0156-01-01.c120523.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-05-01_1.9x2.5_L103_c040928.nc - $DIN_LOC_ROOT/atm/waccm/ic/wa3_4x5_1950_spinup.cam2.i.1960-01-01-00000.nc - $DIN_LOC_ROOT/atm/waccm/ic/cami_2000-01-01_10x15_L66_c041121.nc - $DIN_LOC_ROOT/atm/waccm/ic/f40.2000.4deg.wcm.carma.sulf.004.cam2.i.0008-01-01-00000.nc - $DIN_LOC_ROOT/atm/waccm/ic/f40.2deg.wcm.carma.sulf.L66.cam2.i.2010-01-01.nc - $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_L81_c110906.nc - 
$DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_4x5_L81_c160630.nc - $DIN_LOC_ROOT/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_10x15_L81_c141027.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_aqua_4x5_L126_c170705.nc - $DIN_LOC_ROOT/atm/waccm/ic/fx2000_0.9x1.25_126lev_0002-01-01-00000_c181221.nc - $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_1.9x2.5_130lev_2000_c181115.nc - $DIN_LOC_ROOT/atm/waccm/ic/wcmx-cam6-phys_0.9x1.25_130lev_2000_c190122.nc - $DIN_LOC_ROOT/atm/waccm/ic/FC6X2000_f05_spinup01.cam.i.0002-01-01-00000_c190711.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_4x5_L130_c180803.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccmx_mam4_aqua_1.9x2.5_L130_c180803.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/T341clim01.cam2.i.0024-01-01-00000.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_256x512_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_128x256_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_128x256_L26_c040422.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_T42_L26_c031110.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L30_c090102.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_64x128_L32_c170510.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L26_c091218.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_48x96_L26_c040420.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_48x96_L30_c100426.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_32x64_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_32x64_L30_c090107.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L26_c030228.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_8x16_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-01-01_8x16_L30_c090102.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01_ne5np4_L30.140707.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne16np4_L32_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L26_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne30np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam6_ne30np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L30_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne60np4_L32_c171020.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L26_c171018.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L30_c171024.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_topo_cam4_ne120np4_L32_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_1850-01-01_ne240np4_L26_c110314.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami_0000-09-01_ne240np4_L26_c061106.nc - $DIN_LOC_ROOT/atm/cam/inic/homme/cami-mam3_0000-01-ne240np4_L30_c111004.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne5np4_L26_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne16np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne30np4_L26_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne60np4_L26_c171023.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne120np4_L26_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam4_ne240np4_L26_c170613.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne5np4_L30_c170517.nc - 
$DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne16np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne30np4_L30_c170417.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam5_ne120np4_L30_c170419.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne5np4_L32_c170517.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne16np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne30np4_L32_c170509.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne120np4_L32_c170908.nc - $DIN_LOC_ROOT/atm/cam/inic/se/ape_cam6_ne240np4_L32_c170908.nc - $DIN_LOC_ROOT/atm/cam/inic/se/f2000_conus_ne30x8_L32_c190712.nc - $DIN_LOC_ROOT/atm/waccm/ic/wa3_ne5np4_1950_spinup.cam2.i.1960-01-01-00000_c150810.nc - $DIN_LOC_ROOT/atm/waccm/ic/waccm5_1850_ne30np4_L70_0001-01-11-00000_c151217.nc - $DIN_LOC_ROOT/atm/waccm/ic/fw2000_ne30np4_L70_c181221.nc - $DIN_LOC_ROOT/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc + ${DIN_LOC_ROOT}/atm/cam/inic/cam_vcoords_L26_c180105.nc + ${DIN_LOC_ROOT}/atm/cam/inic/cam_vcoords_L30_c180105.nc + ${DIN_LOC_ROOT}/atm/cam/inic/cam_vcoords_L32_c180105.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_0.23x0.31_L26_c100513.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_0.23x0.31_L26_c061106.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_1980-01-01_0.47x0.63_L26_c071226.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_0.47x0.63_L26_c061106.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-10-01_0.5x0.625_L26_c031204.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_1987-01-01_0.9x1.25_L26_c060703.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_0.9x1.25_L26_c051205.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_1.9x2.5_L26_c070408.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L26_c040809.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L26_c110309.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L26_c091007.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0001-01-01_4x5_L26_c060608.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_10x15_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_0.23x0.31_L30_c110527.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_0.47x0.63_L30_c100929.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L30_c100618.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L30_c090306.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_1.9x2.5_L30_c070109.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_2.5x3.33_L30_c110309.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-09-01_2.5x3.33_L30_c100831.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_4x5_L30_c090108.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_10x15_L30_c081013.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_0.9x1.25_L32_c141031.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam3_0000-01-01_1.9x2.5_L32_c150407.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-mam4_0000-01-01_10x15_L32_c170914.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami_0000-01-01_0.47x0.63_L26_APE_c080227.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L26_c161020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L26_c161020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0000-01-01_10x15_L26_c161230.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L30_c161020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L30_c161020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0000-01-01_10x15_L30_c170103.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_0.9x1.25_L32_c161020.nc + 
${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0006-01-01_1.9x2.5_L32_c161020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/aqua_0000-01-01_10x15_L32_c170103.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-chem_1990-01-01_0.9x1.25_L30_c080724.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L26_c080114.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/cami-chem_1990-01-01_1.9x2.5_L30_c080215.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L26_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_10x15_L30_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L26_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_4x5_L30_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L26_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/camchemi_0012-01-01_1.9x2.5_L30_c081104.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_10x15_L30_c121015.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_4x5_L30_c121015.nc + ${DIN_LOC_ROOT}/atm/cam/inic/fv/trop_strat_mam3_chem_2000-01-01_1.9x2.5_L30_c121015.nc + ${DIN_LOC_ROOT}/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_4x5_L26_c060217.nc + ${DIN_LOC_ROOT}/atm/cam/chem/trop_mozart/ic/cami_0000-09-01_10x15_L26_c060216.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/cami_2000-02-01_0.9x1.25_L66_c040928.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/cami_2000-07-01_1.9x2.5_L66_c040928.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/FWT2000_f09_spinup01.cam.i.0001-01-02-00000_c160315.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/f2000.waccm-mam3_1.9x2.5_L70.cam2.i.0017-01-01.c120410.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/f2000.waccm-mam3_10x15_L70.cam2.i.0017-01-01.c141016.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/b1850.waccm-mam3_1.9x2.5_L70.cam2.i.0156-01-01.c120523.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/cami_2000-05-01_1.9x2.5_L103_c040928.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/wa3_4x5_1950_spinup.cam2.i.1960-01-01-00000.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/cami_2000-01-01_10x15_L66_c041121.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/f40.2000.4deg.wcm.carma.sulf.004.cam2.i.0008-01-01-00000.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/f40.2deg.wcm.carma.sulf.L66.cam2.i.2010-01-01.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_L81_c110906.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_4x5_L81_c160630.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/WAX3548T08CO_2003top_f2000.waccm_0017bottom_10x15_L81_c141027.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/waccmx_aqua_4x5_L126_c170705.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/fx2000_0.9x1.25_126lev_0002-01-01-00000_c181221.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/wcmx-cam6-phys_1.9x2.5_130lev_2000_c181115.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/wcmx-cam6-phys_0.9x1.25_130lev_2000_c190122.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/FC6X2000_f05_spinup01.cam.i.0002-01-01-00000_c190711.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/waccmx_mam4_aqua_4x5_L130_c180803.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/waccmx_mam4_aqua_1.9x2.5_L130_c180803.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/T341clim01.cam2.i.0024-01-01-00000.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_256x512_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_128x256_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_128x256_L26_c040422.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_64x128_T42_L26_c031110.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_64x128_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_64x128_L30_c090102.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc + 
${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_64x128_L32_c170510.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_48x96_L26_c091218.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_48x96_L26_c040420.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_48x96_L30_c100426.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_32x64_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_32x64_L30_c090107.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_8x16_L26_c030228.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_8x16_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-01-01_8x16_L30_c090102.nc + ${DIN_LOC_ROOT}/atm/cam/inic/homme/cami-mam3_0000-01_ne5np4_L30.140707.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne16np4_L26_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne16np4_L30_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne16np4_L32_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne30np4_L26_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne30np4_L30_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam6_ne30np4_L32_c171023.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne60np4_L26_c171018.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne60np4_L30_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne60np4_L32_c171020.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne120np4_L26_c171018.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne120np4_L30_c171024.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_topo_cam4_ne120np4_L32_c171023.nc + ${DIN_LOC_ROOT}/atm/cam/inic/homme/cami_1850-01-01_ne240np4_L26_c110314.nc + ${DIN_LOC_ROOT}/atm/cam/inic/homme/cami_0000-09-01_ne240np4_L26_c061106.nc + ${DIN_LOC_ROOT}/atm/cam/inic/homme/cami-mam3_0000-01-ne240np4_L30_c111004.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne5np4_L26_c170517.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne16np4_L26_c170417.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne30np4_L26_c170417.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne60np4_L26_c171023.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne120np4_L26_c170419.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam4_ne240np4_L26_c170613.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam5_ne5np4_L30_c170517.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam5_ne16np4_L30_c170417.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam5_ne30np4_L30_c170417.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam5_ne120np4_L30_c170419.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam6_ne5np4_L32_c170517.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam6_ne16np4_L32_c170509.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam6_ne30np4_L32_c170509.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam6_ne120np4_L32_c170908.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/ape_cam6_ne240np4_L32_c170908.nc + ${DIN_LOC_ROOT}/atm/cam/inic/se/f2000_conus_ne30x8_L32_c190712.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/wa3_ne5np4_1950_spinup.cam2.i.1960-01-01-00000_c150810.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/waccm5_1850_ne30np4_L70_0001-01-11-00000_c151217.nc + ${DIN_LOC_ROOT}/atm/waccm/ic/fw2000_ne30np4_L70_c181221.nc + ${DIN_LOC_ROOT}/atm/cam/inic/gaus/cami_0000-09-01_64x128_L30_c031210.nc @@ -191,43 +191,43 @@ UNSET_PATH - $DIN_LOC_ROOT/atm/cam/topo/topo-from-cami_0000-01-01_256x512_L26_c030918.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_128x256_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/T42_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20180111.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_48x96_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_32x64_c050520.nc 
- $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_8x16_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS_gtopo30_0.23x0.31_remap_c061107.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS_gtopo30_0.47x0.63_remap_c061106.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_0.47x0.63_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171023.nc - $DIN_LOC_ROOT/atm/cam/topo/topo-from-cami_0000-10-01_0.5x0.625_L26_c031204.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_0.9x1.25_nc3000_Nsw042_Nrs008_Co060_Fi001_ZR_sgh30_24km_GRNL_c170103.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_1.9x2.5_nc3000_Nsw084_Nrs016_Co120_Fi001_ZR_GRNL_c190405.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_2.5x3.33_remap_c100204.nc - $DIN_LOC_ROOT/atm/cam/topo/USGS-gtopo30_4x5_remap_c050520.nc - $DIN_LOC_ROOT/atm/cam/topo/fv_10x15_nc0540_Nsw042_Nrs008_Co060_Fi001_20171220.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5np4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170515.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne16np4_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30np4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171020.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60np4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120np4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171011.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240np4_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg2_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170706.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg2_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg2_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg2_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240pg2_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg3_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170516.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne16pg3_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg3_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg3_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg3_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne240pg3_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171015.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne5pg4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170707.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30pg4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne60pg4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171018.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne120pg4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc - $DIN_LOC_ROOT/atm/cam/topo/se/ne30x8_conus_nc3000_Co060_Fi001_MulG_PF_nullRR_Nsw042_20190710.nc + ${DIN_LOC_ROOT}/atm/cam/topo/topo-from-cami_0000-01-01_256x512_L26_c030918.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_128x256_c050520.nc + ${DIN_LOC_ROOT}/atm/cam/topo/T42_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20180111.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_48x96_c050520.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_32x64_c050520.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_8x16_c050520.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS_gtopo30_0.23x0.31_remap_c061107.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS_gtopo30_0.47x0.63_remap_c061106.nc + ${DIN_LOC_ROOT}/atm/cam/topo/fv_0.47x0.63_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171023.nc + ${DIN_LOC_ROOT}/atm/cam/topo/topo-from-cami_0000-10-01_0.5x0.625_L26_c031204.nc + 
${DIN_LOC_ROOT}/atm/cam/topo/fv_0.9x1.25_nc3000_Nsw042_Nrs008_Co060_Fi001_ZR_sgh30_24km_GRNL_c170103.nc + ${DIN_LOC_ROOT}/atm/cam/topo/fv_1.9x2.5_nc3000_Nsw084_Nrs016_Co120_Fi001_ZR_GRNL_c190405.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_2.5x3.33_remap_c100204.nc + ${DIN_LOC_ROOT}/atm/cam/topo/USGS-gtopo30_4x5_remap_c050520.nc + ${DIN_LOC_ROOT}/atm/cam/topo/fv_10x15_nc0540_Nsw042_Nrs008_Co060_Fi001_20171220.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne5np4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170515.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne16np4_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne30np4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171020.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne60np4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne120np4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171011.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne240np4_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne5pg2_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170706.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne30pg2_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne60pg2_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne120pg2_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171012.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne240pg2_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne5pg3_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw064_20170516.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne16pg3_nc3000_Co120_Fi001_PF_nullRR_Nsw084_20171012.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne30pg3_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne60pg3_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171012.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne120pg3_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne240pg3_nc3000_Co008_Fi001_PF_nullRR_Nsw005_20171015.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne5pg4_nc3000_Co360_Fi001_MulG_PF_nullRR_Nsw060_20170707.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne30pg4_nc3000_Co060_Fi001_PF_nullRR_Nsw042_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne60pg4_nc3000_Co030_Fi001_PF_nullRR_Nsw021_20171018.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne120pg4_nc3000_Co015_Fi001_PF_nullRR_Nsw010_20171014.nc + ${DIN_LOC_ROOT}/atm/cam/topo/se/ne30x8_conus_nc3000_Co060_Fi001_MulG_PF_nullRR_Nsw042_20190710.nc UNSET_PATH UNSET_PATH UNSET_PATH diff --git a/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in index 3024d10d..61d018f5 100644 --- a/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in +++ b/test/unit/sample_files/atm_in_files/test_cmplx_array_atm_in @@ -1,9 +1,9 @@ &alien_invasions body_snatchers = "looks human", "human", "still looks human", "alien" swedish_chef = "bork", "bork" - swedish_chef(15:10:-2) = "bork", "bork", "bork" - swedish_chef(7:9:2) = "not bork", "not bork" - swedish_chef(1) = "" + swedish_chef(15:10:-2) = 'bork', 'bork', 'bork' + swedish_chef(7:9:2) = 'not bork', 'not bork' + swedish_chef(1) = '' / &space_stations diff --git a/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml index c7579b69..e7fd87a3 100644 --- a/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml +++ b/test/unit/sample_files/atm_in_files/test_extra_nml_def.xml @@ -65,7 +65,7 @@ HAL, where is your hard drive? 
- $DIN_LOC_ROOT/I/CANT/LET/YOU/DO/THAT/DAVE + ${DIN_LOC_ROOT}/I/CANT/LET/YOU/DO/THAT/DAVE diff --git a/test/unit/test_atm_in_paramgen.py b/test/unit/test_atm_in_paramgen.py index bea7e310..eb052e3d 100644 --- a/test/unit/test_atm_in_paramgen.py +++ b/test/unit/test_atm_in_paramgen.py @@ -149,9 +149,6 @@ def setUpClass(cls): super().setUpClass() #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a namelist can be built with a proper - #XML namelist definition file: - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_from_xml(self): @@ -192,10 +189,6 @@ def test_namelist_from_xml(self): msg=amsg) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a namelist can be built with a proper - #XML namelist definition file and corresponding - #attributes/guards: - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_from_xml_using_attrs(self): @@ -240,10 +233,6 @@ def test_namelist_from_xml_using_attrs(self): msg=amsg) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a namelist can be built with a proper - #XML namelist definition file and multiple - #corresponding attributes/guards: - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_from_xml_using_multi_attrs(self): @@ -289,10 +278,6 @@ def test_namelist_from_xml_using_multi_attrs(self): msg=amsg) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a namelist with missing, required - #XML elements/tags fails with the correct error - #message - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_xml_missing_elems(self): @@ -320,10 +305,6 @@ def test_namelist_xml_missing_elems(self): self.assertEqual(emsg, str(cerr.exception)) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that ParamGen can properly concantenate - #multiple XML namelist defition files into a single - #atm_in namelist file - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_mutli_xml_namelist_defs(self): @@ -377,10 +358,6 @@ def test_mutli_xml_namelist_defs(self): msg=amsg) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that trying to combine multiple XML namelist - #defition files with the same namelist group - #fails with the appropriate error - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_mutli_xml_same_nl_group(self): @@ -404,16 +381,13 @@ def test_mutli_xml_same_nl_group(self): pg_test.append_atm_in_pg(pg_ext) # Check exception message: - emsg = f"Both\n'{xml_test_fil}'\nand\n'{extra_xml_fil}'\nhave" - emsg += " the following conflicting namelist groups:\n" + emsg = f"Cannot append:\n'{extra_xml_fil}'\n" + emsg += " The following namelist groups conflict with those in" + emsg += f"\n'{xml_test_fil} :'\n" emsg += "bird_sounds_nl" self.assertEqual(emsg, str(cerr.exception)) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that trying to combine multiple XML - #namelist defition files with the same namelist - #variable fails with the appropriate error - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_mutli_xml_same_nl_var(self): @@ -437,15 +411,13 @@ def test_mutli_xml_same_nl_var(self): pg_test.append_atm_in_pg(pg_ext) # Check exception message: - emsg = f"Both\n'{xml_test_fil}'\nand\n'{extra_xml_fil}'\nhave" - emsg += " the following conflicting namelist variables:\n" + emsg = f"Cannot append:\n'{extra_xml_fil}'\n" + emsg += " The following namelist variablesconflict with those in" + emsg += f"\n'{xml_test_fil} :'\n" emsg += "duck_quack" self.assertEqual(emsg, str(cerr.exception)) 
#++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file properly modifies - #an associated atm_in file - #++++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_mode_from_user_nl_cam(self): @@ -492,10 +464,6 @@ def test_namelist_mode_from_user_nl_cam(self): msg=amsg) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with an improperly - #formatted namelist entry fails with the - #appropriate error - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_user_nl_bad_format_entry(self): @@ -537,18 +505,13 @@ def test_namelist_user_nl_bad_format_entry(self): self.assertEqual(emsg, str(cerr.exception)) #++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with a namelist - #entry that is not currently present within the - #AtmInParamGen object fails with the appropriate - #error - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_user_nl_undefined_entry(self): """ Check that a user_nl_cam file with a namelist entry that has yet to be defined - in an namelist definition file throws an + in a namelist definition file throws an error and that the error message is correct. """ @@ -571,16 +534,12 @@ def test_namelist_user_nl_undefined_entry(self): self.assertEqual(emsg, str(cerr.exception)) #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with a repeated - #namelist entry fails with the appropriate - #error message. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_namelist_user_nl_double_entry(self): """ Check that a user_nl_cam file with a - namelist entry that has is included twice + namelist entry that has been included twice in the file throws an error and that the error message is correct. Also check that the "allow_dupl" flag works as expected @@ -643,9 +602,6 @@ def test_namelist_user_nl_double_entry(self): msg=amsg) #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a line starting with a comma - #for a non-array namelist entry fails correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_non_array_comma(self): @@ -682,10 +638,6 @@ def test_user_nl_non_array_comma(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with an array - #variable with an ending and starting comma - #fails correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_double_comma(self): @@ -726,10 +678,6 @@ def test_user_nl_double_comma(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file with an array - #variable with an ending comma followed by a new - #namelist variable fails correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_trailing_comma(self): @@ -767,10 +715,6 @@ def test_user_nl_trailing_comma(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains - #a non-array namelist variable with array - #dimensions specified fails correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_non_array_dims(self): @@ -807,17 +751,13 @@ def test_user_nl_non_array_dims(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains - #a array namelist variable with too many array - #dimensions specified fails correctly. 
- #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_too_many_array_dims(self): """ Check that a user_nl_cam file with a namelist variable that is an array, - but that is listed with too many dimension, + but that is listed with too many dimensions, fails with the appropriate error. """ @@ -847,17 +787,13 @@ def test_user_nl_too_many_array_dims(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains - #a array namelist variable with too few array - #dimensions specified fails correctly. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_too_few_array_dims(self): """ Check that a user_nl_cam file with a namelist variable that is an array, - but that is listed with too many dimension, + but that is listed with too few dimensions, fails with the appropriate error. """ @@ -887,11 +823,6 @@ def test_user_nl_too_few_array_dims(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains an - #array variable with two colons for a specific - #array dimension, but no stride value, fails - #with the appropriate error. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_no_stride_val(self): @@ -931,10 +862,6 @@ def test_user_nl_no_stride_val(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains an - #array variable with three colons for a specific - #array dimension fails with the appropriate error. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_three_colons(self): @@ -972,11 +899,6 @@ def test_user_nl_three_colons(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains an - #array variable with a colon dimension, and then - #that same array variable is listed again with - #a colon, fails with the appropriate error. - #+++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_colon_dupl(self): @@ -1015,10 +937,6 @@ def test_user_nl_colon_dupl(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains an - #array variable with the same exact array index - #referenced twice fails with the appropriate error - #+++++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_arr_index_dupl(self): @@ -1067,10 +985,6 @@ def test_user_nl_arr_index_dupl(self): os.remove("user_nl_tmp") #+++++++++++++++++++++++++++++++++++++++++++++++++ - #Check that a user_nl_cam file that contains an - #array variable with a specified array index range - #referenced twice fails with the appropriate error - #+++++++++++++++++++++++++++++++++++++++++++++++++ def test_user_nl_arr_index_range_dupl(self): @@ -1120,6 +1034,8 @@ def test_user_nl_arr_index_range_dupl(self): #Remove temporary user_nl_cam file os.remove("user_nl_tmp") + #+++++++++++++++++++++++++++++++++++++++++++++++++ + def test_user_nl_complex_array_dims(self): """ From b3c2bc9cc6b94e10b6f032df52dd7348f2aebb1b Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Mon, 25 Jul 2022 20:31:48 -0600 Subject: [PATCH 32/33] Apply code review suggestions for the 'ConfigCAM' class and related functions. 
--- cime_config/atm_in_paramgen.py | 2 +- cime_config/cam_config.py | 117 +++++++++++++++-------------- cime_config/cam_config_classes.py | 43 ++++++----- test/unit/cam_config_unit_tests.py | 47 ++++++++++-- 4 files changed, 128 insertions(+), 81 deletions(-) diff --git a/cime_config/atm_in_paramgen.py b/cime_config/atm_in_paramgen.py index b5515038..b492a64b 100644 --- a/cime_config/atm_in_paramgen.py +++ b/cime_config/atm_in_paramgen.py @@ -759,7 +759,7 @@ def _get_nml_value_str(var_name, var_type, var_val): #End if if "char*" in var_type: - #Removee extra white space: + #Remove extra white space: var_val_strip = var_val.strip() #Check if string is wrapped in quotes: diff --git a/cime_config/cam_config.py b/cime_config/cam_config.py index d63585ed..fbd4e2fc 100644 --- a/cime_config/cam_config.py +++ b/cime_config/cam_config.py @@ -61,70 +61,78 @@ def get_atm_hgrid(atm_grid_str): >>> get_atm_hgrid("ne5np4.pg2") ('se', re.compile('ne[0-9]+np[1-8](.*)(pg[1-9])?')) - 3. Check that an FV3 grid returns the correct results: + 3. Check that an SE variable resolution grid returns the correct results: + >>> get_atm_hgrid("ne0np4CONUS.ne30x8") + ('se', re.compile('ne[0-9]+np[1-8](.*)(pg[1-9])?')) + + 4. Check that an FV3 grid returns the correct results: >>> get_atm_hgrid("C96") ('fv3', re.compile('C[0-9]+')) - 4. Check that an MPAS grid returns the correct results: + 5. Check that an MPAS grid returns the correct results: >>> get_atm_hgrid("mpasa480") ('mpas', re.compile('mpasa[0-9]+')) - 5. Check that a null dycore returns the correct results: + 6. Check that an MPAS grid with a "decimal" returns the correct results: + >>> get_atm_hgrid("mpasa7p5") + ('mpas', re.compile('mpasa[0-9]+')) + + 7. Check that an MPAS grid with a variable resolution grid returns the correct results: + >>> get_atm_hgrid("mpasa15-3") + ('mpas', re.compile('mpasa[0-9]+')) + + 8. Check that a null dycore returns the correct results: >>> get_atm_hgrid("null") ('none', None) - 6. Check that a horizontal grid with with no matches fails - with the correct error message: + 9. Check that an Eulerian (EUL) grid returns the correct results: + >>> get_atm_hgrid("T42") + ('eul', re.compile('T[0-9]+')) + + 10. Check that a horizontal grid with with no matches fails + with the correct error message: >>> get_atm_hgrid("1.9xC96") # doctest: +ELLIPSIS Traceback (most recent call last): ... cam_config_classes.CamConfigValError: ERROR: The specified CAM horizontal grid, '1.9xC96', does not match any known format. """ - # Create regex expressions to search for the different dynamics grids - eul_grid_re = re.compile(r"T[0-9]+") # Eulerian dycore - fv_grid_re = re.compile(r"[0-9][0-9.]*x[0-9][0-9.]*") # FV dycore - se_grid_re = re.compile(r"ne[0-9]+np[1-8](.*)(pg[1-9])?") # SE dycore - fv3_grid_re = re.compile(r"C[0-9]+") # FV3 dycore - mpas_grid_re = re.compile(r"mpasa[0-9]+") # MPAS dycore (not totally sure about this pattern) - # Check if specified grid matches any of the pre-defined grid options. 
# If so, then add both the horizontal grid regex and dynamical core # to the configure object: - if fv_grid_re.match(atm_grid_str) is not None: - - #Finite Volume (FV) dycore: - return "fv", fv_grid_re + #Finite-Volume (FV) grid: + grid_regex = re.compile(r"[0-9][0-9.]*x[0-9][0-9.]*") + if grid_regex.match(atm_grid_str) is not None: + return "fv", grid_regex #End if - if se_grid_re.match(atm_grid_str) is not None: - - #Spectral Element (SE) dycore: - return "se", se_grid_re + #Spectral Element (SE) grid: + grid_regex = re.compile(r"ne[0-9]+np[1-8](.*)(pg[1-9])?") + if grid_regex.match(atm_grid_str) is not None: + return "se", grid_regex #End if - if fv3_grid_re.match(atm_grid_str) is not None: - - #Finite Volume Cubed-Sphere (FV3) dycore: - return "fv3", fv3_grid_re + #Finite-Volume Cubed-Sphere (FV3) grid: + grid_regex = re.compile(r"C[0-9]+") + if grid_regex.match(atm_grid_str) is not None: + return "fv3", grid_regex #End if - if mpas_grid_re.match(atm_grid_str) is not None: - - #Model for Prediction Across Scales (MPAS) dycore: - return "mpas", mpas_grid_re + #Model for Prediction Across Scales (MPAS) grid: + grid_regex = re.compile(r"mpasa[0-9]+") + if grid_regex.match(atm_grid_str) is not None: + return "mpas", grid_regex #End if - if eul_grid_re.match(atm_grid_str) is not None: - - #Eulerian Spectral (eul) dycore: - return "eul", eul_grid_re + #Eulerian Spectral (EUL) grid: + grid_regex = re.compile(r"T[0-9]+") + if grid_regex.match(atm_grid_str) is not None: + return "eul", grid_regex #End if + #Null dycore (no specified grid): if atm_grid_str == "null": - - #Null dycore: return "none", None #End if @@ -925,10 +933,11 @@ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict): #Determine current value of "physics_suite" namelist variable: phys_nl_val = phys_nl_pg_dict['physics_suite']['values'].strip() - #Check if only one physics suite is listed: - if len(phys_suites) == 1: - #Check if "physics_suite" has been set by the user: - if phys_nl_val != 'UNSET': + #Check if "physics_suite" has been set by the user: + if phys_nl_val != 'UNSET': + + #Next, check if only one physics suite is listed: + if len(phys_suites) == 1: #If so, then check that user-provided suite matches #suite in physics_suites config list: if phys_nl_val == phys_suites[0]: @@ -944,33 +953,31 @@ def ccpp_phys_set(self, cam_nml_attr_dict, phys_nl_pg_dict): #End if else: - #If not, then just set the attribute and nl value to phys_suites value: - phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0] - cam_nml_attr_dict["phys_suite"] = phys_suites[0] - #End if - - else: - #Check if "physics_suite" has been set by the user: - if phys_nl_val != 'UNSET': - #If so, then check if user-provided value is present in the - #physics_suites config list: + #If more than one suite is listed, then check if user-provided + #value is present in the physics_suites config list: if phys_nl_val in phys_suites: phys_nl_pg_dict['physics_suite']['values'] = phys_nl_val cam_nml_attr_dict["phys_suite"] = phys_nl_val else: + #If not, then throw an error: emsg = "physics_suite specified in user_nl_cam, '{}', doesn't match any suites\n" emsg += "listed in CAM_CONFIG_OPTS: '{}'" raise CamConfigValError(emsg.format(phys_nl_val, self.get_value('physics_suites'))) #End if - - else: - #If not, then throw an error, because one needs to be specified: - emsg = "No 'physics_suite' variable is present in user_nl_cam.\n" - emsg += "This is required because more than one suite is listed\n" - emsg += f"in CAM_CONFIG_OPTS: 
'{self.get_value('physics_suites')}'" - raise CamConfigValError(emsg) #End if + + elif len(phys_suites) == 1: + #Just set the attribute and nl value to phys_suites value: + phys_nl_pg_dict['physics_suite']['values'] = phys_suites[0] + cam_nml_attr_dict["phys_suite"] = phys_suites[0] + else: + #If more then one suite listed, then throw an error, + #because one needs to be specified by the user: + emsg = "No 'physics_suite' variable is present in user_nl_cam.\n" + emsg += "This is required because more than one suite is listed\n" + emsg += f"in CAM_CONFIG_OPTS: '{self.get_value('physics_suites')}'" + raise CamConfigValError(emsg) #End if #++++++++++++++++++++++++ diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py index b7eedb80..0a938c56 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -4,6 +4,9 @@ any CAM configuration variables to other components of the build system. +Please note that running or testing these routines +requires python 3.7 or later. + To run doctests on this file: python cam_config_classes.py """ @@ -12,7 +15,6 @@ #---------------------------------------- import re -from collections import OrderedDict ############################################################################### # Error-handling classes @@ -21,20 +23,12 @@ class CamConfigValError(ValueError): """Class used to handle CAM config value errors (e.g., log user errors without backtrace)""" - # pylint: disable=useless-super-delegation - def __init__(self, message): - super().__init__(message) - # pylint: enable=useless-super-delegation ############################################################################### class CamConfigTypeError(TypeError): """Class used to handle CAM config type errors (e.g., log user errors without backtrace)""" - # pylint: disable=useless-super-delegation - def __init__(self, message): - super().__init__(message) - # pylint: enable=useless-super-delegation ############################################################################### # Valid value-checking functions @@ -203,7 +197,7 @@ def _check_integer_val(name, val, valid_vals=None): # If valid_vals is a list, then just check that the given value # matches one of the valid values in the list - if not val in valid_vals: + if val not in valid_vals: emsg = "ERROR: Value, '{}', provided for variable, '{}', " emsg += "does not match any of the valid values: '{}'" return emsg.format(val, name, valid_vals) @@ -319,6 +313,9 @@ def _check_string_val(name, val, valid_vals=None): # Helper function to better generalize config value checking: _TYPE_CHECK_FUNCTIONS = {"int" : _check_integer_val, "str" : _check_string_val} +# Set of valid types (for faster checking): +_VALID_TYPE_SET = {"int", "str"} + ############################################################################### # Internal generic CAM configure class ############################################################################### @@ -715,6 +712,14 @@ class ConfigList(_ConfigGen): ERROR: Value, 'b', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' ERROR: Value, 'c', provided for variable, 'test', does not match any of the valid values: '['1', '2', '3']' + + 9. check that ConfigList with a list that does not mach the "valid_vals" range fails with the correct error: + >>> ConfigList("test", "test object description", [1, 2, 6], valid_type="int", valid_vals=(0,5)).value #doctest: +ELLIPSIS + Traceback (most recent call last): + ... 
+ CamConfigValError: The following errors were found for a list-type config variable: + ERROR: Value, '6', provided for variable, 'test', is outside valid value range, '(0, 5)' + """ def __init__(self, name, desc, val, valid_type=None, valid_vals=None): @@ -733,7 +738,7 @@ def __init__(self, name, desc, val, valid_type=None, valid_vals=None): # End if # Check that the valid values option is only being used with a valid type: - if valid_vals is not None and valid_type not in ["int", "str"]: + if valid_vals is not None and valid_type not in _VALID_TYPE_SET: # Currently valid values can only be used with strings or integers, # so throw an error: emsg = "ERROR: valid values can only be used if valid_type is 'int' or 'str', not '{}'." @@ -788,7 +793,7 @@ def __check_type(self, val): valid_type = self.valid_type # Create empty dictionary to store errors: - bad_val_types = OrderedDict() + bad_val_type_msgs = [] good_type = "??" if valid_type == "str": @@ -796,7 +801,7 @@ def __check_type(self, val): good_type = "string" for list_entry in val: if not isinstance(list_entry, str): - bad_val_types[str(list_entry)] = str(type(list_entry)) + bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'\n") # end if # end for elif valid_type == "int": @@ -804,7 +809,7 @@ def __check_type(self, val): good_type = "int" for list_entry in val: if not isinstance(list_entry, int): - bad_val_types[str(list_entry)] = str(type(list_entry)) + bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'\n") # end if # end for else: @@ -814,16 +819,16 @@ def __check_type(self, val): raise CamConfigValError(emsg.format(valid_type)) # End if #If bad values dictionary is non-empty, then raise error: - if bad_val_types: - if len(bad_val_types) > 1: + if bad_val_type_msgs: + if len(bad_val_type_msgs) > 1: emsg = "ERROR: The following list entries, provided for variable," emsg += f" '{self.name}', are not {good_type}s, but instead are:\n" else: emsg = "ERROR: The following list entry, provided for variable," emsg += f" '{self.name}', is not a {good_type}, but instead is: " # end if - for key_str, type_str in bad_val_types.items(): - emsg += f"'{key_str}': type='{type_str}'\n" + for err_msg in bad_val_type_msgs: + emsg += err_msg # end for raise CamConfigValError(emsg) # End if @@ -881,7 +886,7 @@ def set_value(self, list_vals): self.__value = list_vals ############################################################################### -#IGNORE EVERYTHING BELOW HERE UNLESS RUNNING TESTS ON CAM_CONFIG! +#End of config class definitions ############################################################################### #Call testing routine, if script is run directly diff --git a/test/unit/cam_config_unit_tests.py b/test/unit/cam_config_unit_tests.py index 89cf9f45..4e0788bc 100644 --- a/test/unit/cam_config_unit_tests.py +++ b/test/unit/cam_config_unit_tests.py @@ -1,7 +1,9 @@ """ Python unit testing collection for the various public Config_CAM methods, including their -error-handling processes. +error-handling processes. Please note that +these tests will only work with Python 3.7 +or later. 
To run these unit tests, simply type: @@ -57,7 +59,7 @@ def __init__(self): #Create dictionary (so get_value works properly): self.conf_opts = { - "ATM_GRID" : "f19_f19_mg17", + "ATM_GRID" : "mpasa480z32_mpasa480", "ATM_NX" : 180, "ATM_NY" : 90, "COMP_OCN" : "socn", @@ -127,6 +129,43 @@ def test_config_get_value_check(self): #Check that testval matches ATM_NY set in the "fake" case: self.assertEqual(testval, 16) + #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + #check that "get_value" method works properly for non-null dycores + #+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + def test_config_get_dycore_values_check(self): + + """ + Check that Config_CAM.get_value properly retrieves the dycore + name and horizontal grid for a non-null dycore. + """ + + #Create new "fake" case: + fcase_dyn = FakeCase() + + #Remove "none" dyn option: + fcase_dyn.conf_opts["CAM_CONFIG_OPTS"] = "--physics-suites mango;papaya" + + #Create python logger object: + logger = logging.getLogger("cam_config") + + #create CAM configure object: + test_config_dyn = ConfigCAM(fcase_dyn, logger) + + #Get dycore name: + test_dyn = test_config_dyn.get_value("dyn") + + #Check that dycore name matches what is specified by the grid + #in the "fake" CIME case: + self.assertEqual(test_dyn, "mpas") + + #Get dycore horizontal grid: + test_hgrid = test_config_dyn.get_value("hgrid") + + #Check that dycore grid matches what is specified by the grid + #in the "fake" CIME case: + self.assertEqual(test_hgrid, "mpasa480") + #++++++++++++++++++++++++++++++++++++++++++++ #check that "set_value" method works properly #++++++++++++++++++++++++++++++++++++++++++++ @@ -155,10 +194,6 @@ def test_config_print_config_check(self): """ Check that Config_CAM.print_config properly prints to log - - Please note that this check only works with python 3.4 - or greater, so if an earlier version is used this test - is skipped. """ #Create new logger for print_config test: From c161e61ebeb754401f8c282dd5a7141415cc9afa Mon Sep 17 00:00:00 2001 From: Jesse Nusbaumer Date: Fri, 29 Jul 2022 13:27:49 -0600 Subject: [PATCH 33/33] Simplify error message code for CAM config classes. --- cime_config/cam_config_classes.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/cime_config/cam_config_classes.py b/cime_config/cam_config_classes.py index 0a938c56..970742ca 100644 --- a/cime_config/cam_config_classes.py +++ b/cime_config/cam_config_classes.py @@ -692,7 +692,6 @@ class ConfigList(_ConfigGen): '1': type='' '2': type='' '3': type='' - 6. 
Check that ConfigList with "valid_vals" but no "valid_type" fails with the correct error: >>> ConfigList("test", "test object description", [1, 2, 3], valid_vals=[1,2,3,4,5]).value #doctest: +ELLIPSIS @@ -801,7 +800,7 @@ def __check_type(self, val): good_type = "string" for list_entry in val: if not isinstance(list_entry, str): - bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'\n") + bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'") # end if # end for elif valid_type == "int": @@ -809,7 +808,7 @@ def __check_type(self, val): good_type = "int" for list_entry in val: if not isinstance(list_entry, int): - bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'\n") + bad_val_type_msgs.append(f"'{list_entry}': type='{type(list_entry)}'") # end if # end for else: @@ -827,9 +826,7 @@ def __check_type(self, val): emsg = "ERROR: The following list entry, provided for variable," emsg += f" '{self.name}', is not a {good_type}, but instead is: " # end if - for err_msg in bad_val_type_msgs: - emsg += err_msg - # end for + emsg += '\n'.join(bad_val_type_msgs) raise CamConfigValError(emsg) # End if
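For reference, the single-join error-message pattern adopted in the final hunk above can be exercised on its own. The helper below is only an illustrative sketch of that pattern; its name, arguments, and sample values are hypothetical and are not part of CAM or of this patch:

def check_all_ints(name, values):
    """Return None if every list entry is an int, otherwise one multi-line error string."""
    # Collect one message per offending entry (analogous to bad_val_type_msgs above):
    bad_val_type_msgs = [f"'{entry}': type='{type(entry)}'"
                         for entry in values
                         if not isinstance(entry, int)]
    if not bad_val_type_msgs:
        return None
    # Join the collected messages once, rather than appending them in a loop:
    emsg = f"ERROR: The following list entries, provided for variable, '{name}', are not ints:\n"
    return emsg + "\n".join(bad_val_type_msgs)

print(check_all_ints("test", [1, "2", 3.0]))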