From 2c681a68df801f7b368c37cecdbdb4da996e5483 Mon Sep 17 00:00:00 2001 From: Lukas Date: Tue, 11 May 2021 09:54:50 +0200 Subject: [PATCH] feat: ROOT parameter name compatibility module (#1439) * Add new module `pyhf.compat` to aid in translating to and from ROOT names * Adds `is_scalar` and `name` attributes to paramsets * Deprecates and removes `utils.remove_prefix` --- .github/workflows/docs.yml | 1 + docs/api.rst | 1 + src/pyhf/__init__.py | 2 + src/pyhf/compat.py | 120 ++++++++++++++++++++++++++++++ src/pyhf/modifiers/histosys.py | 1 + src/pyhf/modifiers/lumi.py | 1 + src/pyhf/modifiers/normfactor.py | 1 + src/pyhf/modifiers/normsys.py | 1 + src/pyhf/modifiers/shapefactor.py | 1 + src/pyhf/modifiers/shapesys.py | 1 + src/pyhf/modifiers/staterror.py | 1 + src/pyhf/parameters/paramsets.py | 6 ++ src/pyhf/parameters/utils.py | 2 + src/pyhf/readxml.py | 19 ++--- src/pyhf/utils.py | 24 ------ tests/test_combined_modifiers.py | 71 ++++++++++++++++-- tests/test_compat.py | 76 +++++++++++++++++++ tests/test_constraints.py | 8 ++ tests/test_paramsets.py | 23 ++++++ tests/test_public_api_repr.py | 6 +- tests/test_utils.py | 6 -- 21 files changed, 323 insertions(+), 49 deletions(-) create mode 100644 src/pyhf/compat.py create mode 100644 tests/test_compat.py diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d7c1360412..eaafd57561 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -37,6 +37,7 @@ jobs: src/pyhf/workspace.py \ src/pyhf/probability.py \ src/pyhf/patchset.py \ + src/pyhf/compat.py \ src/pyhf/interpolators \ src/pyhf/infer \ src/pyhf/optimize \ diff --git a/docs/api.rst b/docs/api.rst index 77dd897723..6118f6095a 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -18,6 +18,7 @@ Top-Level set_backend readxml writexml + compat Probability Distribution Functions (PDFs) ----------------------------------------- diff --git a/src/pyhf/__init__.py b/src/pyhf/__init__.py index 7cbc9ff2b6..b53d363b4a 100644 --- a/src/pyhf/__init__.py +++ b/src/pyhf/__init__.py @@ -155,6 +155,7 @@ def set_backend(backend, custom_optimizer=None, precision=None): from .workspace import Workspace from . import simplemodels from . import infer +from . import compat from .patchset import PatchSet __all__ = [ @@ -162,6 +163,7 @@ def set_backend(backend, custom_optimizer=None, precision=None): "PatchSet", "Workspace", "__version__", + "compat", "exceptions", "get_backend", "infer", diff --git a/src/pyhf/compat.py b/src/pyhf/compat.py new file mode 100644 index 0000000000..95d57ba144 --- /dev/null +++ b/src/pyhf/compat.py @@ -0,0 +1,120 @@ +""" +Compatibility functions for translating between ROOT and pyhf +""" + +import re + +__all__ = ["interpret_rootname", "paramset_to_rootnames"] + + +def __dir__(): + return __all__ + + +def paramset_to_rootnames(paramset): + """ + Generates parameter names for parameters in the set as ROOT would do. + + Args: + paramset (:obj:`pyhf.paramsets.paramset`): The parameter set. + + Returns: + :obj:`List[str]` or :obj:`str`: The generated parameter names + (for the non-scalar/scalar case) respectively. + + Example: + + pyhf parameter names and then the converted names for ROOT: + + * ``"lumi"`` -> ``"Lumi"`` + * unconstrained scalar parameter ``"foo"`` -> ``"foo"`` + * constrained scalar parameter ``"foo"`` -> ``"alpha_foo"`` + * non-scalar parameters ``"foo"`` -> ``"gamma_foo_i"`` + + >>> import pyhf + >>> pyhf.set_backend("numpy") + >>> model = pyhf.simplemodels.uncorrelated_background( + ... 
signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0] + ... ) + >>> model.config.parameters + ['mu', 'uncorr_bkguncrt'] + >>> pyhf.compat.paramset_to_rootnames(model.config.param_set("mu")) + 'mu' + >>> pyhf.compat.paramset_to_rootnames(model.config.param_set("uncorr_bkguncrt")) + ['gamma_uncorr_bkguncrt_0', 'gamma_uncorr_bkguncrt_1'] + """ + + if paramset.name == 'lumi': + return 'Lumi' + if paramset.is_scalar: + if paramset.constrained: + return f'alpha_{paramset.name}' + return f'{paramset.name}' + return [f'gamma_{paramset.name}_{index}' for index in range(paramset.n_parameters)] + + +def interpret_rootname(rootname): + """ + Interprets a ROOT-generated name as best as possible. + + Possible properties of a ROOT parameter are: + + * ``"constrained"``: :obj:`bool` representing if parameter is a member of a + constrained paramset. + * ``"is_scalar"``: :obj:`bool` representing if parameter is a member of a + scalar paramset. + * ``"name"``: The name of the param set. + * ``"element"``: The index in a non-scalar param set. + + It is possible that some of the parameter names might not be determinable + and will then hold the string value ``"n/a"``. + + Args: + rootname (:obj:`str`): The ROOT-generated name of the parameter. + + Returns: + :obj:`dict`: The interpreted key-value pairs. + + Example: + + >>> import pyhf + >>> interpreted_name = pyhf.compat.interpret_rootname("gamma_foo_0") + >>> pyhf.compat.interpret_rootname("gamma_foo_0") + {'constrained': 'n/a', 'is_scalar': False, 'name': 'foo', 'element': 0} + >>> pyhf.compat.interpret_rootname("alpha_foo") + {'constrained': True, 'is_scalar': True, 'name': 'foo', 'element': 'n/a'} + >>> pyhf.compat.interpret_rootname("Lumi") + {'constrained': False, 'is_scalar': True, 'name': 'lumi', 'element': 'n/a'} + """ + + interpretation = { + 'constrained': 'n/a', + 'is_scalar': 'n/a', + 'name': 'n/a', + 'element': 'n/a', + } + if rootname.startswith('gamma_'): + interpretation['is_scalar'] = False + match = re.search(r'^gamma_(.+)_(\d+)$', rootname) + if not match: + raise ValueError(f'confusing rootname {rootname}. Please report as a bug.') + interpretation['name'] = match.group(1) + interpretation['element'] = int(match.group(2)) + else: + interpretation['is_scalar'] = True + + if rootname.startswith('alpha_'): + interpretation['constrained'] = True + match = re.search(r'^alpha_(.+)$', rootname) + if not match: + raise ValueError(f'confusing rootname {rootname}. 
Please report as a bug.') + interpretation['name'] = match.group(1) + + if not (rootname.startswith('alpha_') or rootname.startswith('gamma_')): + interpretation['constrained'] = False + interpretation['name'] = rootname + + if rootname == 'Lumi': + interpretation['name'] = 'lumi' + + return interpretation diff --git a/src/pyhf/modifiers/histosys.py b/src/pyhf/modifiers/histosys.py index ea6ffbbe5f..e4625114a6 100644 --- a/src/pyhf/modifiers/histosys.py +++ b/src/pyhf/modifiers/histosys.py @@ -17,6 +17,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': 1, 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': True, 'inits': (0.0,), 'bounds': ((-5.0, 5.0),), 'fixed': False, diff --git a/src/pyhf/modifiers/lumi.py b/src/pyhf/modifiers/lumi.py index 31199b6ee6..c5356d3b12 100644 --- a/src/pyhf/modifiers/lumi.py +++ b/src/pyhf/modifiers/lumi.py @@ -16,6 +16,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': 1, 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': True, 'op_code': cls.op_code, 'inits': None, # lumi 'bounds': None, # (0, 10*lumi) diff --git a/src/pyhf/modifiers/normfactor.py b/src/pyhf/modifiers/normfactor.py index 0157b5bfad..04901870d9 100644 --- a/src/pyhf/modifiers/normfactor.py +++ b/src/pyhf/modifiers/normfactor.py @@ -16,6 +16,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': 1, 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': True, 'inits': (1.0,), 'bounds': ((0, 10),), 'fixed': False, diff --git a/src/pyhf/modifiers/normsys.py b/src/pyhf/modifiers/normsys.py index 3f395792a6..c1ff83b2d5 100644 --- a/src/pyhf/modifiers/normsys.py +++ b/src/pyhf/modifiers/normsys.py @@ -17,6 +17,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': 1, 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': True, 'inits': (0.0,), 'bounds': ((-5.0, 5.0),), 'fixed': False, diff --git a/src/pyhf/modifiers/shapefactor.py b/src/pyhf/modifiers/shapefactor.py index f7b23bcf2b..f0cfeae4b4 100644 --- a/src/pyhf/modifiers/shapefactor.py +++ b/src/pyhf/modifiers/shapefactor.py @@ -16,6 +16,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': len(sample_data), 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': False, 'inits': (1.0,) * len(sample_data), 'bounds': ((0.0, 10.0),) * len(sample_data), 'fixed': False, diff --git a/src/pyhf/modifiers/shapesys.py b/src/pyhf/modifiers/shapesys.py index 34e762e4f7..e88a2a84a2 100644 --- a/src/pyhf/modifiers/shapesys.py +++ b/src/pyhf/modifiers/shapesys.py @@ -24,6 +24,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': n_parameters, 'is_constrained': cls.is_constrained, 'is_shared': False, + 'is_scalar': False, 'inits': (1.0,) * n_parameters, 'bounds': ((1e-10, 10.0),) * n_parameters, 'fixed': False, diff --git a/src/pyhf/modifiers/staterror.py b/src/pyhf/modifiers/staterror.py index afaf23eee4..1c4909bc91 100644 --- a/src/pyhf/modifiers/staterror.py +++ b/src/pyhf/modifiers/staterror.py @@ -16,6 +16,7 @@ def required_parset(cls, sample_data, modifier_data): 'n_parameters': len(sample_data), 'is_constrained': cls.is_constrained, 'is_shared': True, + 'is_scalar': False, 'inits': (1.0,) * len(sample_data), 'bounds': ((1e-10, 10.0),) * len(sample_data), 'fixed': False, diff --git a/src/pyhf/parameters/paramsets.py b/src/pyhf/parameters/paramsets.py index 504b63eb89..30fce98cc1 100644 --- a/src/pyhf/parameters/paramsets.py +++ 
b/src/pyhf/parameters/paramsets.py @@ -15,10 +15,16 @@ def __dir__(): class paramset: def __init__(self, **kwargs): + self.name = kwargs.pop('name') self.n_parameters = kwargs.pop('n_parameters') self.suggested_init = kwargs.pop('inits') self.suggested_bounds = kwargs.pop('bounds') self.suggested_fixed = kwargs.pop('fixed') + self.is_scalar = kwargs.pop('is_scalar') + if self.is_scalar and not (self.n_parameters == 1): + raise ValueError( + f'misconfigured parameter set {self.name}. Scalar but N>1 parameters.' + ) class unconstrained(paramset): diff --git a/src/pyhf/parameters/utils.py b/src/pyhf/parameters/utils.py index fa86a5da0d..7c57bd077c 100644 --- a/src/pyhf/parameters/utils.py +++ b/src/pyhf/parameters/utils.py @@ -13,6 +13,7 @@ def reduce_paramsets_requirements(paramsets_requirements, paramsets_user_configs paramset_keys = [ 'paramset_type', 'n_parameters', + 'is_scalar', 'inits', 'bounds', 'auxdata', @@ -63,6 +64,7 @@ def reduce_paramsets_requirements(paramsets_requirements, paramsets_user_configs combined_paramset[k] = v + combined_paramset['name'] = paramset_name reduced_paramsets_requirements[paramset_name] = combined_paramset return reduced_paramsets_requirements diff --git a/src/pyhf/readxml.py b/src/pyhf/readxml.py index 6efc218edc..a09a0ea9d6 100644 --- a/src/pyhf/readxml.py +++ b/src/pyhf/readxml.py @@ -1,4 +1,5 @@ from . import utils +from . import compat import logging @@ -7,7 +8,6 @@ import numpy as np import tqdm import uproot -import re log = logging.getLogger(__name__) @@ -290,26 +290,23 @@ def process_measurements(toplvl, other_parameter_configs=None): # might be specifying multiple parameters in the same ParamSetting if param.text: for param_name in param.text.strip().split(' '): - param_name = utils.remove_prefix(param_name, 'alpha_') - if param_name.startswith('gamma_') and re.search( - r'^gamma_.+_\d+$', param_name - ): + param_interpretation = compat.interpret_rootname(param_name) + if not param_interpretation['is_scalar']: raise ValueError( - f'pyhf does not support setting individual gamma parameters constant, such as for {param_name}.' + f'pyhf does not support setting non-scalar parameters ("gammas") constant, such as for {param_name}.' ) - param_name = utils.remove_prefix(param_name, 'gamma_') - # lumi will always be the first parameter - if param_name == 'Lumi': + if param_interpretation['name'] == 'lumi': result['config']['parameters'][0].update(overall_param_obj) else: # pop from parameter_configs_map because we don't want to duplicate param_obj = parameter_configs_map.pop( - param_name, {'name': param_name} + param_interpretation['name'], + {'name': param_interpretation['name']}, ) # ParamSetting will always take precedence param_obj.update(overall_param_obj) # add it back in to the parameter_configs_map - parameter_configs_map[param_name] = param_obj + parameter_configs_map[param_interpretation['name']] = param_obj result['config']['parameters'].extend(parameter_configs_map.values()) results.append(result) diff --git a/src/pyhf/utils.py b/src/pyhf/utils.py index 3da11f7271..69e172d0d9 100644 --- a/src/pyhf/utils.py +++ b/src/pyhf/utils.py @@ -18,7 +18,6 @@ "digest", "load_schema", "options_from_eqdelimstring", - "remove_prefix", "validate", ] @@ -125,29 +124,6 @@ def digest(obj, algorithm='sha256'): return hash_alg(stringified).hexdigest() -def remove_prefix(text, prefix): - """ - Remove a prefix from the beginning of the provided text. 
- - Example: - - >>> import pyhf - >>> pyhf.utils.remove_prefix("alpha_syst1", "alpha_") - 'syst1' - - Args: - text (:obj:`str`): A provided input to manipulate. - prefix (:obj:`str`): A prefix to remove from provided input, if it exists. - - Returns: - stripped_text (:obj:`str`): Text with the prefix removed. - """ - # NB: python3.9 can be `return text.removeprefix(prefix)` - if text.startswith(prefix): - return text[len(prefix) :] - return text - - def citation(oneline=False): """ Get the bibtex citation for pyhf diff --git a/tests/test_combined_modifiers.py b/tests/test_combined_modifiers.py index af5dd6bd6d..1e21d05388 100644 --- a/tests/test_combined_modifiers.py +++ b/tests/test_combined_modifiers.py @@ -48,6 +48,8 @@ def test_histosys(backend): par_map={ 'hello': { 'paramset': constrained_by_normal( + name='hello', + is_scalar=True, n_parameters=1, inits=[0], bounds=[[-5, 5]], @@ -58,6 +60,8 @@ def test_histosys(backend): }, 'world': { 'paramset': constrained_by_normal( + name='world', + is_scalar=True, n_parameters=1, inits=[0], bounds=[[-5, 5]], @@ -149,6 +153,8 @@ def test_normsys(backend): par_map={ 'hello': { 'paramset': constrained_by_normal( + name='hello', + is_scalar=True, n_parameters=1, inits=[0], bounds=[[-5, 5]], @@ -159,6 +165,8 @@ def test_normsys(backend): }, 'world': { 'paramset': constrained_by_normal( + name='world', + is_scalar=True, n_parameters=1, inits=[0], bounds=[[-5, 5]], @@ -252,6 +260,8 @@ def test_lumi(backend): par_map={ 'lumi': { 'paramset': constrained_by_normal( + name='lumi', + is_scalar=True, n_parameters=1, inits=[0], bounds=[[-5, 5]], @@ -309,6 +319,8 @@ def test_stat(backend): par_map={ 'staterror_chan1': { 'paramset': constrained_by_normal( + name='staterror_chan1', + is_scalar=False, n_parameters=1, inits=[1], bounds=[[0, 10]], @@ -319,6 +331,8 @@ def test_stat(backend): }, 'staterror_chan2': { 'paramset': constrained_by_normal( + name='staterror_chan2', + is_scalar=False, n_parameters=2, inits=[1, 1], bounds=[[0, 10], [0, 10]], @@ -396,12 +410,19 @@ def test_shapesys(backend): par_map={ 'dummy1': { 'paramset': paramset( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False + name='dummy1', + is_scalar=True, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, 'shapesys1': { 'paramset': constrained_by_poisson( + name='shapesys1', + is_scalar=False, n_parameters=1, inits=[0], bounds=[[0, 10]], @@ -413,7 +434,9 @@ def test_shapesys(backend): }, 'shapesys2': { 'paramset': constrained_by_poisson( + name='shapesys2', n_parameters=2, + is_scalar=False, inits=[0, 0], bounds=[[0, 10], [0, 10]], fixed=False, @@ -424,7 +447,12 @@ def test_shapesys(backend): }, 'dummy2': { 'paramset': paramset( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False + name='dummy2', + is_scalar=True, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(4, 5), }, @@ -495,13 +523,23 @@ def test_normfactor(backend): par_map={ 'mu1': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False + name='mu1', + is_scalar=True, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, 'mu2': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False + name='mu2', + is_scalar=True, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(1, 2), }, @@ -575,12 +613,19 @@ def test_shapesys_zero(backend): par_map={ 'SigXsecOverSM': { 'paramset': paramset( - n_parameters=1, inits=[0], 
bounds=[[0, 10]], fixed=False + name='SigXsecOverSM', + is_scalar=True, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, 'syst': { 'paramset': constrained_by_poisson( + name='syst', + is_scalar=False, n_parameters=5, inits=[0] * 5, bounds=[[0, 10]] * 5, @@ -592,6 +637,8 @@ def test_shapesys_zero(backend): }, 'syst_lowstats': { 'paramset': constrained_by_poisson( + name='syst_lowstats', + is_scalar=False, n_parameters=0, inits=[0] * 0, bounds=[[0, 10]] * 0, @@ -669,13 +716,23 @@ def test_shapefactor(backend): par_map={ 'shapefac1': { 'paramset': unconstrained( - n_parameters=1, inits=[0], bounds=[[0, 10]], fixed=False + name='shapefac1', + is_scalar=False, + n_parameters=1, + inits=[0], + bounds=[[0, 10]], + fixed=False, ), 'slice': slice(0, 1), }, 'shapefac2': { 'paramset': unconstrained( - n_parameters=2, inits=[0, 0], bounds=[[0, 10], [0, 10]], fixed=False + name='shapefac2', + is_scalar=False, + n_parameters=2, + inits=[0, 0], + bounds=[[0, 10], [0, 10]], + fixed=False, ), 'slice': slice(1, 3), }, diff --git a/tests/test_compat.py b/tests/test_compat.py new file mode 100644 index 0000000000..7eb4bcc22d --- /dev/null +++ b/tests/test_compat.py @@ -0,0 +1,76 @@ +import pytest + +import pyhf +import pyhf.compat +import pyhf.readxml + + +def test_interpretation(): + interp = pyhf.compat.interpret_rootname('gamma_foo_0') + assert interp['constrained'] == 'n/a' + assert not interp['is_scalar'] + assert interp['name'] == 'foo' + assert interp['element'] == 0 + + interp = pyhf.compat.interpret_rootname('alpha_foo') + assert interp['constrained'] + assert interp['is_scalar'] + assert interp['name'] == 'foo' + assert interp['element'] == 'n/a' + + interp = pyhf.compat.interpret_rootname('mu') + assert not interp['constrained'] + assert interp['is_scalar'] + assert interp['name'] == 'mu' + assert interp['element'] == 'n/a' + + interp = pyhf.compat.interpret_rootname('Lumi') + assert interp['name'] == 'lumi' + + interp = pyhf.compat.interpret_rootname('Lumi') + assert interp['name'] == 'lumi' + + with pytest.raises(ValueError): + pyhf.compat.interpret_rootname('gamma_foo') + + with pytest.raises(ValueError): + pyhf.compat.interpret_rootname('alpha_') + + +def test_torootname(): + model_1 = pyhf.simplemodels.correlated_background([5], [50], [52], [48]) + model_2 = pyhf.simplemodels.uncorrelated_background([5], [50], [7]) + model_3 = pyhf.simplemodels.uncorrelated_background([5, 6], [50, 50], [7, 8]) + + assert pyhf.compat.paramset_to_rootnames(model_1.config.param_set('mu')) == 'mu' + + assert ( + pyhf.compat.paramset_to_rootnames( + model_1.config.param_set('correlated_bkg_uncertainty') + ) + == 'alpha_correlated_bkg_uncertainty' + ) + + assert pyhf.compat.paramset_to_rootnames( + model_2.config.param_set('uncorr_bkguncrt') + ) == ['gamma_uncorr_bkguncrt_0'] + + assert pyhf.compat.paramset_to_rootnames( + model_3.config.param_set('uncorr_bkguncrt') + ) == ['gamma_uncorr_bkguncrt_0', 'gamma_uncorr_bkguncrt_1'] + + +def test_fromxml(): + parsed_xml = pyhf.readxml.parse( + 'validation/xmlimport_input3/config/examples/example_ShapeSys.xml', + 'validation/xmlimport_input3', + ) + + # build the spec, strictly checks properties included + spec = { + 'channels': parsed_xml['channels'], + 'parameters': parsed_xml['measurements'][0]['config']['parameters'], + } + model = pyhf.Model(spec, poi_name='SigXsecOverSM') + + assert pyhf.compat.paramset_to_rootnames(model.config.param_set('lumi')) == 'Lumi' diff --git a/tests/test_constraints.py b/tests/test_constraints.py 
index fe878938ea..fa39dcd7bd 100644 --- a/tests/test_constraints.py +++ b/tests/test_constraints.py @@ -136,6 +136,8 @@ def test_batched_constraints(backend): par_map={ 'pois1': { 'paramset': constrained_by_poisson( + name='pois1', + is_scalar=False, n_parameters=1, inits=[1.0], bounds=[[0, 10]], @@ -148,6 +150,8 @@ def test_batched_constraints(backend): }, 'pois2': { 'paramset': constrained_by_poisson( + name='pois2', + is_scalar=False, n_parameters=2, inits=[1.0] * 2, bounds=[[0, 10]] * 2, @@ -159,6 +163,8 @@ def test_batched_constraints(backend): }, 'norm1': { 'paramset': constrained_by_normal( + name='norm1', + is_scalar=False, n_parameters=2, inits=[0] * 2, bounds=[[0, 10]] * 2, @@ -170,6 +176,8 @@ def test_batched_constraints(backend): }, 'norm2': { 'paramset': constrained_by_normal( + name='norm2', + is_scalar=False, n_parameters=3, inits=[0] * 3, bounds=[[0, 10]] * 3, diff --git a/tests/test_paramsets.py b/tests/test_paramsets.py index 448d3c68e3..69abf7a10b 100644 --- a/tests/test_paramsets.py +++ b/tests/test_paramsets.py @@ -4,6 +4,8 @@ def test_paramset_unconstrained(): pset = paramsets.unconstrained( + name='foo', + is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], @@ -17,6 +19,8 @@ def test_paramset_unconstrained(): def test_paramset_constrained_custom_sigmas(): pset = paramsets.constrained_by_normal( + name='foo', + is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], @@ -33,6 +37,8 @@ def test_paramset_constrained_custom_sigmas(): def test_paramset_constrained_default_sigmas(): pset = paramsets.constrained_by_normal( + name='foo', + is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], @@ -48,6 +54,8 @@ def test_paramset_constrained_default_sigmas(): def test_paramset_constrained_custom_factors(): pset = paramsets.constrained_by_poisson( + name='foo', + is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], @@ -64,6 +72,8 @@ def test_paramset_constrained_custom_factors(): def test_paramset_constrained_missiing_factors(): pset = paramsets.constrained_by_poisson( + name='foo', + is_scalar=False, n_parameters=5, inits=[0, 1, 2, 3, 4], bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], @@ -73,3 +83,16 @@ def test_paramset_constrained_missiing_factors(): ) with pytest.raises(RuntimeError): pset.width() + + +def test_scalar_multiparam_failure(): + with pytest.raises(ValueError): + paramsets.paramset( + name='foo', + is_scalar=True, + n_parameters=5, + inits=[0, 1, 2, 3, 4], + bounds=[(-1, 1), (-2, 2), (-3, 3), (-4, 4)], + fixed=False, + auxdata=[0, 0, 0, 0, 0], + ) diff --git a/tests/test_public_api_repr.py b/tests/test_public_api_repr.py index a5745f994e..b3a6803dfb 100644 --- a/tests/test_public_api_repr.py +++ b/tests/test_public_api_repr.py @@ -12,6 +12,7 @@ def test_top_level_public_api(): "PatchSet", "Workspace", "__version__", + "compat", "exceptions", "get_backend", "infer", @@ -35,6 +36,10 @@ def test_cli_public_api(): assert dir(pyhf.cli) == ["cli", "complete", "contrib", "infer", "rootio", "spec"] +def test_compat_public_api(): + assert dir(pyhf.compat) == ["interpret_rootname", "paramset_to_rootnames"] + + def test_constraints_public_api(): assert dir(pyhf.constraints) == [ "gaussian_constraint_combined", @@ -233,7 +238,6 @@ def test_utils_public_api(): "digest", "load_schema", "options_from_eqdelimstring", - "remove_prefix", "validate", ] diff --git a/tests/test_utils.py 
b/tests/test_utils.py index 5df67c2937..cb376c4e58 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -60,12 +60,6 @@ def test_digest_bad_alg(): assert 'nonexistent_algorithm' in str(excinfo.value) -def test_remove_prefix(): - assert pyhf.utils.remove_prefix('abcDEF123', 'abc') == 'DEF123' - assert pyhf.utils.remove_prefix('abcDEF123', 'Abc') == 'abcDEF123' - assert pyhf.utils.remove_prefix('abcDEF123', '123') == 'abcDEF123' - - @pytest.mark.parametrize('oneline', [False, True]) def test_citation(oneline): citation = pyhf.utils.citation(oneline)
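
A minimal round-trip sketch of the two helpers added by this patch, `pyhf.compat.paramset_to_rootnames` and `pyhf.compat.interpret_rootname`, assuming the `pyhf.simplemodels.uncorrelated_background` signature and the example outputs shown in the `paramset_to_rootnames` docstring above:

import pyhf
import pyhf.compat

# Two-bin counting model, as in the paramset_to_rootnames docstring example.
model = pyhf.simplemodels.uncorrelated_background(
    signal=[12.0, 11.0], bkg=[50.0, 52.0], bkg_uncertainty=[3.0, 7.0]
)

# pyhf -> ROOT: a scalar paramset maps to a single string, a non-scalar
# paramset maps to one "gamma_<name>_<index>" entry per element.
for name in model.config.parameters:
    root_names = pyhf.compat.paramset_to_rootnames(model.config.param_set(name))
    print(f"{name} -> {root_names}")
# mu -> mu
# uncorr_bkguncrt -> ['gamma_uncorr_bkguncrt_0', 'gamma_uncorr_bkguncrt_1']

# ROOT -> pyhf: interpret_rootname recovers the pyhf-side name and, where it
# can be determined, whether the parameter is scalar and/or constrained.
print(pyhf.compat.interpret_rootname("gamma_uncorr_bkguncrt_1"))
# {'constrained': 'n/a', 'is_scalar': False, 'name': 'uncorr_bkguncrt', 'element': 1}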