From a5ab61230221c5407b85144e18bf52a6db34c9e9 Mon Sep 17 00:00:00 2001
From: Nils Wentzell
Date: Wed, 13 Sep 2023 14:50:44 -0400
Subject: [PATCH 01/30] Use python3 instead of python2 in replace_and_rename.py script

---
 share/replace_and_rename.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/share/replace_and_rename.py b/share/replace_and_rename.py
index b183fb4..995d745 100755
--- a/share/replace_and_rename.py
+++ b/share/replace_and_rename.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 
 import sys
 import os

From 057c239461c2e84bc8f3b0bd8d27a324f36194fd Mon Sep 17 00:00:00 2001
From: Nils Wentzell
Date: Wed, 13 Sep 2023 14:52:08 -0400
Subject: [PATCH 02/30] Skip image and binary files in replace_and_rename

---
 share/replace_and_rename.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/share/replace_and_rename.py b/share/replace_and_rename.py
index 995d745..a773d19 100755
--- a/share/replace_and_rename.py
+++ b/share/replace_and_rename.py
@@ -21,12 +21,18 @@
 # Find the root directory of app4triqs
 app4triqs_root = os.path.abspath(os.path.dirname(__file__) + "/..")
 
+# Blacklisted file-formats
+fmt_blacklist = ['.png', '.h5', '.jpg', '.ico']
+
 # Recurse over all subdirectories and files
 for root, dirs, files in os.walk(app4triqs_root):
 
     for fname in files:
         fpath = os.path.join(root, fname)
 
+        if os.path.splitext(fname)[1] in fmt_blacklist:
+            continue
+
         # Ignore certain files / directories
         if any(it in fpath for it in ignore_lst):
             continue

From 0ca04d26a7c1abc761603beae46febbbcd0e64bc Mon Sep 17 00:00:00 2001
From: Nils Wentzell
Date: Mon, 18 Sep 2023 17:25:48 -0400
Subject: [PATCH 03/30] Raise shm size for docker run commands to comply with mkl requirements

---
 Jenkinsfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Jenkinsfile b/Jenkinsfile
index 30fe979..503c893 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -45,7 +45,7 @@ for (int i = 0; i < dockerPlatforms.size(); i++) {
         args = '-DASAN=ON -DUBSAN=ON'
       def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_ID=${env.BUILD_TAG} --build-arg CMAKE_ARGS='${args}' .")
       catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') {
-        img.inside() {
+        img.inside("--shm-size=4gb") {
           sh "make -C \$BUILD/${projectName} test CTEST_OUTPUT_ON_FAILURE=1"
         }
       }

From dfe7c014fe774caf2b0d09e4fbd87e0307c2b2bc Mon Sep 17 00:00:00 2001
From: Alexander Hampel
Date: Tue, 26 Sep 2023 15:24:08 -0400
Subject: [PATCH 04/30] [build] automatically set version in packaging

---
 packaging/CMakeLists.txt                                   | 7 +++++++
 ....2.0-foss-2021b.eb => TRIQS-app4triqs-foss-2021b.eb.in} | 2 +-
 packaging/conda/{meta.yaml => meta.yaml.in}                | 2 +-
 3 files changed, 9 insertions(+), 2 deletions(-)
 create mode 100644 packaging/CMakeLists.txt
 rename packaging/{TRIQS-app4triqs-3.2.0-foss-2021b.eb => TRIQS-app4triqs-foss-2021b.eb.in} (97%)
 rename packaging/conda/{meta.yaml => meta.yaml.in} (96%)

diff --git a/packaging/CMakeLists.txt b/packaging/CMakeLists.txt
new file mode 100644
index 0000000..748dc4a
--- /dev/null
+++ b/packaging/CMakeLists.txt
@@ -0,0 +1,7 @@
+# Configure the version of packaging files
+configure_file(conda/meta.yaml.in conda/meta.yaml)
+configure_file(conda/build.sh conda/build.sh COPYONLY)
+configure_file(conda/conda_build_config.yaml conda/conda_build_config.yaml COPYONLY)
+configure_file(TRIQS-app4triqs-foss-2021b.eb.in TRIQS-app4triqs-foss-2021b.eb)
+
+
diff --git a/packaging/TRIQS-app4triqs-3.2.0-foss-2021b.eb 
b/packaging/TRIQS-app4triqs-foss-2021b.eb.in similarity index 97% rename from packaging/TRIQS-app4triqs-3.2.0-foss-2021b.eb rename to packaging/TRIQS-app4triqs-foss-2021b.eb.in index 113872b..c5c96f2 100644 --- a/packaging/TRIQS-app4triqs-3.2.0-foss-2021b.eb +++ b/packaging/TRIQS-app4triqs-foss-2021b.eb.in @@ -1,7 +1,7 @@ easyblock = 'CMakeMake' name = 'TRIQS-app4triqs' -version = '3.2.0' +version = '@PROJECT_VERSION@' homepage = 'https://triqs.github.io/app4triqs/' description = """ diff --git a/packaging/conda/meta.yaml b/packaging/conda/meta.yaml.in similarity index 96% rename from packaging/conda/meta.yaml rename to packaging/conda/meta.yaml.in index 7345f85..bad6e1c 100644 --- a/packaging/conda/meta.yaml +++ b/packaging/conda/meta.yaml.in @@ -1,4 +1,4 @@ -{% set version = "3.2.0" %} +{% set version = "@PROJECT_VERSION@" %} package: name: app4triqs From b4fb9624e307e4b3d0f7409748e344726feaad1b Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Wed, 20 Sep 2023 14:32:46 -0400 Subject: [PATCH 05/30] Remove numpydoc sources from doc/sphinxext --- doc/sphinxext/numpydoc/apigen.py | 427 -------- doc/sphinxext/numpydoc/docscrape.py | 497 ---------- doc/sphinxext/numpydoc/docscrape_sphinx.py | 136 --- doc/sphinxext/numpydoc/inheritance_diagram.py | 407 -------- .../numpydoc/ipython_console_highlighting.py | 114 --- doc/sphinxext/numpydoc/numpydoc.py | 116 --- doc/sphinxext/numpydoc/plot_directive.py | 933 ------------------ 7 files changed, 2630 deletions(-) delete mode 100644 doc/sphinxext/numpydoc/apigen.py delete mode 100644 doc/sphinxext/numpydoc/docscrape.py delete mode 100644 doc/sphinxext/numpydoc/docscrape_sphinx.py delete mode 100644 doc/sphinxext/numpydoc/inheritance_diagram.py delete mode 100644 doc/sphinxext/numpydoc/ipython_console_highlighting.py delete mode 100644 doc/sphinxext/numpydoc/numpydoc.py delete mode 100644 doc/sphinxext/numpydoc/plot_directive.py diff --git a/doc/sphinxext/numpydoc/apigen.py b/doc/sphinxext/numpydoc/apigen.py deleted file mode 100644 index 2619fbb..0000000 --- a/doc/sphinxext/numpydoc/apigen.py +++ /dev/null @@ -1,427 +0,0 @@ -"""Attempt to generate templates for module reference with Sphinx - -XXX - we exclude extension modules - -To include extension modules, first identify them as valid in the -``_uri2path`` method, then handle them in the ``_parse_module`` script. - -We get functions and classes by parsing the text of .py files. -Alternatively we could import the modules for discovery, and we'd have -to do that for extension modules. This would involve changing the -``_parse_module`` method to work via import and introspection, and -might involve changing ``discover_modules`` (which determines which -files are modules, and therefore which module URIs will be passed to -``_parse_module``). - -NOTE: this is a modified version of a script originally shipped with the -PyMVPA project, which we've adapted for NIPY use. PyMVPA is an MIT-licensed -project.""" - -# Stdlib imports -import os -import re - -# Functions and classes -class ApiDocWriter: - ''' Class for automatic detection and parsing of API docs - to Sphinx-parsable reST format''' - - # only separating first two levels - rst_section_levels = ['*', '=', '-', '~', '^'] - - def __init__(self, - package_name, - rst_extension='.rst', - package_skip_patterns=None, - module_skip_patterns=None, - ): - ''' Initialize package for parsing - - Parameters - ---------- - package_name : string - Name of the top-level package. 
*package_name* must be the - name of an importable package - rst_extension : string, optional - Extension for reST files, default '.rst' - package_skip_patterns : None or sequence of {strings, regexps} - Sequence of strings giving URIs of packages to be excluded - Operates on the package path, starting at (including) the - first dot in the package path, after *package_name* - so, - if *package_name* is ``sphinx``, then ``sphinx.util`` will - result in ``.util`` being passed for earching by these - regexps. If is None, gives default. Default is: - ['\.tests$'] - module_skip_patterns : None or sequence - Sequence of strings giving URIs of modules to be excluded - Operates on the module name including preceding URI path, - back to the first dot after *package_name*. For example - ``sphinx.util.console`` results in the string to search of - ``.util.console`` - If is None, gives default. Default is: - ['\.setup$', '\._'] - ''' - if package_skip_patterns is None: - package_skip_patterns = ['\\.tests$'] - if module_skip_patterns is None: - module_skip_patterns = ['\\.setup$', '\\._'] - self.package_name = package_name - self.rst_extension = rst_extension - self.package_skip_patterns = package_skip_patterns - self.module_skip_patterns = module_skip_patterns - - def get_package_name(self): - return self._package_name - - def set_package_name(self, package_name): - ''' Set package_name - - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> docwriter.root_path == sphinx.__path__[0] - True - >>> docwriter.package_name = 'docutils' - >>> import docutils - >>> docwriter.root_path == docutils.__path__[0] - True - ''' - # It's also possible to imagine caching the module parsing here - self._package_name = package_name - self.root_module = __import__(package_name) - self.root_path = self.root_module.__path__[0] - self.written_modules = None - - package_name = property(get_package_name, set_package_name, None, - 'get/set package_name') - - def _get_object_name(self, line): - ''' Get second token in line - >>> docwriter = ApiDocWriter('sphinx') - >>> docwriter._get_object_name(" def func(): ") - 'func' - >>> docwriter._get_object_name(" class Klass: ") - 'Klass' - >>> docwriter._get_object_name(" class Klass: ") - 'Klass' - ''' - name = line.split()[1].split('(')[0].strip() - # in case we have classes which are not derived from object - # ie. old style classes - return name.rstrip(':') - - def _uri2path(self, uri): - ''' Convert uri to absolute filepath - - Parameters - ---------- - uri : string - URI of python module to return path for - - Returns - ------- - path : None or string - Returns None if there is no valid path for this URI - Otherwise returns absolute file system path for URI - - Examples - -------- - >>> docwriter = ApiDocWriter('sphinx') - >>> import sphinx - >>> modpath = sphinx.__path__[0] - >>> res = docwriter._uri2path('sphinx.builder') - >>> res == os.path.join(modpath, 'builder.py') - True - >>> res = docwriter._uri2path('sphinx') - >>> res == os.path.join(modpath, '__init__.py') - True - >>> docwriter._uri2path('sphinx.does_not_exist') - - ''' - if uri == self.package_name: - return os.path.join(self.root_path, '__init__.py') - path = uri.replace('.', os.path.sep) - path = path.replace(self.package_name + os.path.sep, '') - path = os.path.join(self.root_path, path) - # XXX maybe check for extensions as well? 
- if os.path.exists(path + '.py'): # file - path += '.py' - elif os.path.exists(os.path.join(path, '__init__.py')): - path = os.path.join(path, '__init__.py') - else: - return None - return path - - def _path2uri(self, dirpath): - ''' Convert directory path to uri ''' - relpath = dirpath.replace(self.root_path, self.package_name) - if relpath.startswith(os.path.sep): - relpath = relpath[1:] - return relpath.replace(os.path.sep, '.') - - def _parse_module(self, uri): - ''' Parse module defined in *uri* ''' - filename = self._uri2path(uri) - if filename is None: - # nothing that we could handle here. - return ([],[]) - f = open(filename, 'rt') - functions, classes = self._parse_lines(f) - f.close() - return functions, classes - - def _parse_lines(self, linesource): - ''' Parse lines of text for functions and classes ''' - functions = [] - classes = [] - for line in linesource: - if line.startswith('def ') and line.count('('): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith('_'): - functions.append(name) - elif line.startswith('class '): - # exclude private stuff - name = self._get_object_name(line) - if not name.startswith('_'): - classes.append(name) - else: - pass - functions.sort() - classes.sort() - return functions, classes - - def generate_api_doc(self, uri): - '''Make autodoc documentation template string for a module - - Parameters - ---------- - uri : string - python location of module - e.g 'sphinx.builder' - - Returns - ------- - S : string - Contents of API doc - ''' - # get the names of all classes and functions - functions, classes = self._parse_module(uri) - if not len(functions) and not len(classes): - print('WARNING: Empty -',uri) # dbg - return '' - - # Make a shorter version of the uri that omits the package name for - # titles - uri_short = re.sub(r'^%s\.' % self.package_name,'',uri) - - ad = '.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n' - - chap_title = uri_short - ad += (chap_title+'\n'+ self.rst_section_levels[1] * len(chap_title) - + '\n\n') - - # Set the chapter title to read 'module' for all modules except for the - # main packages - if '.' in uri: - title = 'Module: :mod:`' + uri_short + '`' - else: - title = ':mod:`' + uri_short + '`' - ad += title + '\n' + self.rst_section_levels[2] * len(title) - - if len(classes): - ad += '\nInheritance diagram for ``%s``:\n\n' % uri - ad += '.. inheritance-diagram:: %s \n' % uri - ad += ' :parts: 3\n' - - ad += '\n.. automodule:: ' + uri + '\n' - ad += '\n.. currentmodule:: ' + uri + '\n' - multi_class = len(classes) > 1 - multi_fx = len(functions) > 1 - if multi_class: - ad += '\n' + 'Classes' + '\n' + \ - self.rst_section_levels[2] * 7 + '\n' - elif len(classes) and multi_fx: - ad += '\n' + 'Class' + '\n' + \ - self.rst_section_levels[2] * 5 + '\n' - for c in classes: - ad += '\n:class:`' + c + '`\n' \ - + self.rst_section_levels[multi_class + 2 ] * \ - (len(c)+9) + '\n\n' - ad += '\n.. autoclass:: ' + c + '\n' - # must NOT exclude from index to keep cross-refs working - ad += ' :members:\n' \ - ' :undoc-members:\n' \ - ' :show-inheritance:\n' \ - ' :inherited-members:\n' \ - '\n' \ - ' .. automethod:: __init__\n' - if multi_fx: - ad += '\n' + 'Functions' + '\n' + \ - self.rst_section_levels[2] * 9 + '\n\n' - elif len(functions) and multi_class: - ad += '\n' + 'Function' + '\n' + \ - self.rst_section_levels[2] * 8 + '\n\n' - for f in functions: - # must NOT exclude from index to keep cross-refs working - ad += '\n.. autofunction:: ' + uri + '.' 
+ f + '\n\n' - return ad - - def _survives_exclude(self, matchstr, match_type): - ''' Returns True if *matchstr* does not match patterns - - ``self.package_name`` removed from front of string if present - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> dw._survives_exclude('sphinx.okpkg', 'package') - True - >>> dw.package_skip_patterns.append('^\\.badpkg$') - >>> dw._survives_exclude('sphinx.badpkg', 'package') - False - >>> dw._survives_exclude('sphinx.badpkg', 'module') - True - >>> dw._survives_exclude('sphinx.badmod', 'module') - True - >>> dw.module_skip_patterns.append('^\\.badmod$') - >>> dw._survives_exclude('sphinx.badmod', 'module') - False - ''' - if match_type == 'module': - patterns = self.module_skip_patterns - elif match_type == 'package': - patterns = self.package_skip_patterns - else: - raise ValueError('Cannot interpret match type "%s"' - % match_type) - # Match to URI without package name - L = len(self.package_name) - if matchstr[:L] == self.package_name: - matchstr = matchstr[L:] - for pat in patterns: - try: - pat.search - except AttributeError: - pat = re.compile(pat) - if pat.search(matchstr): - return False - return True - - def discover_modules(self): - ''' Return module sequence discovered from ``self.package_name`` - - - Parameters - ---------- - None - - Returns - ------- - mods : sequence - Sequence of module names within ``self.package_name`` - - Examples - -------- - >>> dw = ApiDocWriter('sphinx') - >>> mods = dw.discover_modules() - >>> 'sphinx.util' in mods - True - >>> dw.package_skip_patterns.append('\.util$') - >>> 'sphinx.util' in dw.discover_modules() - False - >>> - ''' - modules = [self.package_name] - # raw directory parsing - for dirpath, dirnames, filenames in os.walk(self.root_path): - # Check directory names for packages - root_uri = self._path2uri(os.path.join(self.root_path, - dirpath)) - for dirname in dirnames[:]: # copy list - we modify inplace - package_uri = '.'.join((root_uri, dirname)) - if (self._uri2path(package_uri) and - self._survives_exclude(package_uri, 'package')): - modules.append(package_uri) - else: - dirnames.remove(dirname) - # Check filenames for modules - for filename in filenames: - module_name = filename[:-3] - module_uri = '.'.join((root_uri, module_name)) - if (self._uri2path(module_uri) and - self._survives_exclude(module_uri, 'module')): - modules.append(module_uri) - return sorted(modules) - - def write_modules_api(self, modules,outdir): - # write the list - written_modules = [] - for m in modules: - api_str = self.generate_api_doc(m) - if not api_str: - continue - # write out to file - outfile = os.path.join(outdir, - m + self.rst_extension) - fileobj = open(outfile, 'wt') - fileobj.write(api_str) - fileobj.close() - written_modules.append(m) - self.written_modules = written_modules - - def write_api_docs(self, outdir): - """Generate API reST files. 
- - Parameters - ---------- - outdir : string - Directory name in which to store files - We create automatic filenames for each module - - Returns - ------- - None - - Notes - ----- - Sets self.written_modules to list of written modules - """ - if not os.path.exists(outdir): - os.mkdir(outdir) - # compose list of modules - modules = self.discover_modules() - self.write_modules_api(modules,outdir) - - def write_index(self, outdir, froot='gen', relative_to=None): - """Make a reST API index file from written files - - Parameters - ---------- - path : string - Filename to write index to - outdir : string - Directory to which to write generated index file - froot : string, optional - root (filename without extension) of filename to write to - Defaults to 'gen'. We add ``self.rst_extension``. - relative_to : string - path to which written filenames are relative. This - component of the written file path will be removed from - outdir, in the generated index. Default is None, meaning, - leave path as it is. - """ - if self.written_modules is None: - raise ValueError('No modules written') - # Get full filename path - path = os.path.join(outdir, froot+self.rst_extension) - # Path written into index is relative to rootpath - if relative_to is not None: - relpath = outdir.replace(relative_to + os.path.sep, '') - else: - relpath = outdir - idx = open(path,'wt') - w = idx.write - w('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n') - w('.. toctree::\n\n') - for f in self.written_modules: - w(' %s\n' % os.path.join(relpath,f)) - idx.close() diff --git a/doc/sphinxext/numpydoc/docscrape.py b/doc/sphinxext/numpydoc/docscrape.py deleted file mode 100644 index 2548915..0000000 --- a/doc/sphinxext/numpydoc/docscrape.py +++ /dev/null @@ -1,497 +0,0 @@ -"""Extract reference documentation from the NumPy source tree. - -""" - -import inspect -import textwrap -import re -import pydoc -from io import StringIO -from warnings import warn -4 -class Reader: - """A line-based string reader. - - """ - def __init__(self, data): - """ - Parameters - ---------- - data : str - String with lines separated by '\n'. 
- - """ - if isinstance(data,list): - self._str = data - else: - self._str = data.split('\n') # store string as list of lines - - self.reset() - - def __getitem__(self, n): - return self._str[n] - - def reset(self): - self._l = 0 # current line nr - - def read(self): - if not self.eof(): - out = self[self._l] - self._l += 1 - return out - else: - return '' - - def seek_next_non_empty_line(self): - for l in self[self._l:]: - if l.strip(): - break - else: - self._l += 1 - - def eof(self): - return self._l >= len(self._str) - - def read_to_condition(self, condition_func): - start = self._l - for line in self[start:]: - if condition_func(line): - return self[start:self._l] - self._l += 1 - if self.eof(): - return self[start:self._l+1] - return [] - - def read_to_next_empty_line(self): - self.seek_next_non_empty_line() - def is_empty(line): - return not line.strip() - return self.read_to_condition(is_empty) - - def read_to_next_unindented_line(self): - def is_unindented(line): - return (line.strip() and (len(line.lstrip()) == len(line))) - return self.read_to_condition(is_unindented) - - def peek(self,n=0): - if self._l + n < len(self._str): - return self[self._l + n] - else: - return '' - - def is_empty(self): - return not ''.join(self._str).strip() - - -class NumpyDocString: - def __init__(self,docstring): - docstring = textwrap.dedent(docstring).split('\n') - - self._doc = Reader(docstring) - self._parsed_data = { - 'Signature': '', - 'Summary': [''], - 'Extended Summary': [], - 'Parameters': [], - 'Returns': [], - 'Raises': [], - 'Warns': [], - 'Other Parameters': [], - 'Attributes': [], - 'Methods': [], - 'See Also': [], - 'Notes': [], - 'Warnings': [], - 'References': '', - 'Examples': '', - 'index': {} - } - - self._parse() - - def __getitem__(self,key): - return self._parsed_data[key] - - def __setitem__(self,key,val): - if key not in self._parsed_data: - warn("Unknown section %s" % key) - else: - self._parsed_data[key] = val - - def _is_at_section(self): - self._doc.seek_next_non_empty_line() - - if self._doc.eof(): - return False - - l1 = self._doc.peek().strip() # e.g. Parameters - - if l1.startswith('.. 
index::'): - return True - - l2 = self._doc.peek(1).strip() # ---------- or ========== - return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) - - def _strip(self,doc): - i = 0 - j = 0 - for i,line in enumerate(doc): - if line.strip(): break - - for j,line in enumerate(doc[::-1]): - if line.strip(): break - - return doc[i:len(doc)-j] - - def _read_to_next_section(self): - section = self._doc.read_to_next_empty_line() - - while not self._is_at_section() and not self._doc.eof(): - if not self._doc.peek(-1).strip(): # previous line was empty - section += [''] - - section += self._doc.read_to_next_empty_line() - - return section - - def _read_sections(self): - while not self._doc.eof(): - data = self._read_to_next_section() - name = data[0].strip() - - if name.startswith('..'): # index section - yield name, data[1:] - elif len(data) < 2: - yield StopIteration - else: - yield name, self._strip(data[2:]) - - def _parse_param_list(self,content): - r = Reader(content) - params = [] - while not r.eof(): - header = r.read().strip() - if ' : ' in header: - arg_name, arg_type = header.split(' : ')[:2] - else: - arg_name, arg_type = header, '' - - desc = r.read_to_next_unindented_line() - desc = dedent_lines(desc) - - params.append((arg_name,arg_type,desc)) - - return params - - - _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" - r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) - def _parse_see_also(self, content): - """ - func_name : Descriptive text - continued text - another_func_name : Descriptive text - func_name1, func_name2, :meth:`func_name`, func_name3 - - """ - items = [] - - def parse_item_name(text): - """Match ':role:`name`' or 'name'""" - m = self._name_rgx.match(text) - if m: - g = m.groups() - if g[1] is None: - return g[3], None - else: - return g[2], g[1] - raise ValueError("%s is not a item name" % text) - - def push_item(name, rest): - if not name: - return - name, role = parse_item_name(name) - items.append((name, list(rest), role)) - del rest[:] - - current_func = None - rest = [] - - for line in content: - if not line.strip(): continue - - m = self._name_rgx.match(line) - if m and line[m.end():].strip().startswith(':'): - push_item(current_func, rest) - current_func, line = line[:m.end()], line[m.end():] - rest = [line.split(':', 1)[1].strip()] - if not rest[0]: - rest = [] - elif not line.startswith(' '): - push_item(current_func, rest) - current_func = None - if ',' in line: - for func in line.split(','): - push_item(func, []) - elif line.strip(): - current_func = line - elif current_func is not None: - rest.append(line.strip()) - push_item(current_func, rest) - return items - - def _parse_index(self, section, content): - """ - .. 
index: default - :refguide: something, else, and more - - """ - def strip_each_in(lst): - return [s.strip() for s in lst] - - out = {} - section = section.split('::') - if len(section) > 1: - out['default'] = strip_each_in(section[1].split(','))[0] - for line in content: - line = line.split(':') - if len(line) > 2: - out[line[1]] = strip_each_in(line[2].split(',')) - return out - - def _parse_summary(self): - """Grab signature (if given) and summary""" - if self._is_at_section(): - return - - summary = self._doc.read_to_next_empty_line() - summary_str = " ".join([s.strip() for s in summary]).strip() - if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): - self['Signature'] = summary_str - if not self._is_at_section(): - self['Summary'] = self._doc.read_to_next_empty_line() - else: - self['Summary'] = summary - - if not self._is_at_section(): - self['Extended Summary'] = self._read_to_next_section() - - def _parse(self): - self._doc.reset() - self._parse_summary() - - for (section,content) in self._read_sections(): - if not section.startswith('..'): - section = ' '.join([s.capitalize() for s in section.split(' ')]) - if section in ('Parameters', 'Attributes', 'Methods', - 'Returns', 'Raises', 'Warns'): - self[section] = self._parse_param_list(content) - elif section.startswith('.. index::'): - self['index'] = self._parse_index(section, content) - elif section == 'See Also': - self['See Also'] = self._parse_see_also(content) - else: - self[section] = content - - # string conversion routines - - def _str_header(self, name, symbol='-'): - return [name, len(name)*symbol] - - def _str_indent(self, doc, indent=4): - out = [] - for line in doc: - out += [' '*indent + line] - return out - - def _str_signature(self): - if self['Signature']: - return [self['Signature'].replace('*','\*')] + [''] - else: - return [''] - - def _str_summary(self): - if self['Summary']: - return self['Summary'] + [''] - else: - return [] - - def _str_extended_summary(self): - if self['Extended Summary']: - return self['Extended Summary'] + [''] - else: - return [] - - def _str_param_list(self, name): - out = [] - if self[name]: - out += self._str_header(name) - for param,param_type,desc in self[name]: - out += ['%s : %s' % (param, param_type)] - out += self._str_indent(desc) - out += [''] - return out - - def _str_section(self, name): - out = [] - if self[name]: - out += self._str_header(name) - out += self[name] - out += [''] - return out - - def _str_see_also(self, func_role): - if not self['See Also']: return [] - out = [] - out += self._str_header("See Also") - last_had_desc = True - for func, desc, role in self['See Also']: - if role: - link = ':%s:`%s`' % (role, func) - elif func_role: - link = ':%s:`%s`' % (func_role, func) - else: - link = "`%s`_" % func - if desc or last_had_desc: - out += [''] - out += [link] - else: - out[-1] += ", %s" % link - if desc: - out += self._str_indent([' '.join(desc)]) - last_had_desc = True - else: - last_had_desc = False - out += [''] - return out - - def _str_index(self): - idx = self['index'] - out = [] - out += ['.. 
index:: %s' % idx.get('default','')] - for section, references in idx.items(): - if section == 'default': - continue - out += [' :%s: %s' % (section, ', '.join(references))] - return out - - def __str__(self, func_role=''): - out = [] - out += self._str_signature() - out += self._str_summary() - out += self._str_extended_summary() - for param_list in ('Parameters','Returns','Raises'): - out += self._str_param_list(param_list) - out += self._str_section('Warnings') - out += self._str_see_also(func_role) - for s in ('Notes','References','Examples'): - out += self._str_section(s) - out += self._str_index() - return '\n'.join(out) - - -def indent(str,indent=4): - indent_str = ' '*indent - if str is None: - return indent_str - lines = str.split('\n') - return '\n'.join(indent_str + l for l in lines) - -def dedent_lines(lines): - """Deindent a list of lines maximally""" - return textwrap.dedent("\n".join(lines)).split("\n") - -def header(text, style='-'): - return text + '\n' + style*len(text) + '\n' - - -class FunctionDoc(NumpyDocString): - def __init__(self, func, role='func', doc=None): - self._f = func - self._role = role # e.g. "func" or "meth" - if doc is None: - doc = inspect.getdoc(func) or '' - try: - NumpyDocString.__init__(self, doc) - except ValueError as e: - print('*'*78) - print("ERROR: '%s' while parsing `%s`" % (e, self._f)) - print('*'*78) - #print "Docstring follows:" - #print doclines - #print '='*78 - - if not self['Signature']: - func, func_name = self.get_func() - try: - # try to read signature - argspec = inspect.getargspec(func) - argspec = inspect.formatargspec(*argspec) - argspec = argspec.replace('*','\*') - signature = '%s%s' % (func_name, argspec) - except TypeError as e: - signature = '%s()' % func_name - self['Signature'] = signature - - def get_func(self): - func_name = getattr(self._f, '__name__', self.__class__.__name__) - if inspect.isclass(self._f): - func = getattr(self._f, '__call__', self._f.__init__) - else: - func = self._f - return func, func_name - - def __str__(self): - out = '' - - func, func_name = self.get_func() - signature = self['Signature'].replace('*', '\*') - - roles = {'func': 'function', - 'meth': 'method'} - - if self._role: - if self._role not in roles: - print("Warning: invalid role %s" % self._role) - out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), - func_name) - - out += super(FunctionDoc, self).__str__(func_role=self._role) - return out - - -class ClassDoc(NumpyDocString): - def __init__(self,cls,modulename='',func_doc=FunctionDoc,doc=None): - if not inspect.isclass(cls): - raise ValueError("Initialise using a class. Got %r" % cls) - self._cls = cls - - if modulename and not modulename.endswith('.'): - modulename += '.' - self._mod = modulename - self._name = cls.__name__ - self._func_doc = func_doc - - if doc is None: - doc = pydoc.getdoc(cls) - - NumpyDocString.__init__(self, doc) - - @property - def methods(self): - return [name for name,func in inspect.getmembers(self._cls) - if not name.startswith('_') and callable(func)] - - def __str__(self): - out = '' - out += super(ClassDoc, self).__str__() - out += "\n\n" - - #for m in self.methods: - # print "Parsing `%s`" % m - # out += str(self._func_doc(getattr(self._cls,m), 'meth')) + '\n\n' - # out += '.. 
index::\n single: %s; %s\n\n' % (self._name, m) - - return out - - diff --git a/doc/sphinxext/numpydoc/docscrape_sphinx.py b/doc/sphinxext/numpydoc/docscrape_sphinx.py deleted file mode 100644 index 2660f14..0000000 --- a/doc/sphinxext/numpydoc/docscrape_sphinx.py +++ /dev/null @@ -1,136 +0,0 @@ -import re, inspect, textwrap, pydoc -from docscrape import NumpyDocString, FunctionDoc, ClassDoc - -class SphinxDocString(NumpyDocString): - # string conversion routines - def _str_header(self, name, symbol='`'): - return ['.. rubric:: ' + name, ''] - - def _str_field_list(self, name): - return [':' + name + ':'] - - def _str_indent(self, doc, indent=4): - out = [] - for line in doc: - out += [' '*indent + line] - return out - - def _str_signature(self): - return [''] - if self['Signature']: - return ['``%s``' % self['Signature']] + [''] - else: - return [''] - - def _str_summary(self): - return self['Summary'] + [''] - - def _str_extended_summary(self): - return self['Extended Summary'] + [''] - - def _str_param_list(self, name): - out = [] - if self[name]: - out += self._str_field_list(name) - out += [''] - for param,param_type,desc in self[name]: - out += self._str_indent(['**%s** : %s' % (param.strip(), - param_type)]) - out += [''] - out += self._str_indent(desc,8) - out += [''] - return out - - def _str_section(self, name): - out = [] - if self[name]: - out += self._str_header(name) - out += [''] - content = textwrap.dedent("\n".join(self[name])).split("\n") - out += content - out += [''] - return out - - def _str_see_also(self, func_role): - out = [] - if self['See Also']: - see_also = super(SphinxDocString, self)._str_see_also(func_role) - out = ['.. seealso::', ''] - out += self._str_indent(see_also[2:]) - return out - - def _str_warnings(self): - out = [] - if self['Warnings']: - out = ['.. warning::', ''] - out += self._str_indent(self['Warnings']) - return out - - def _str_index(self): - idx = self['index'] - out = [] - if len(idx) == 0: - return out - - out += ['.. 
index:: %s' % idx.get('default','')] - for section, references in idx.items(): - if section == 'default': - continue - elif section == 'refguide': - out += [' single: %s' % (', '.join(references))] - else: - out += [' %s: %s' % (section, ','.join(references))] - return out - - def _str_references(self): - out = [] - if self['References']: - out += self._str_header('References') - if isinstance(self['References'], str): - self['References'] = [self['References']] - out.extend(self['References']) - out += [''] - return out - - def __str__(self, indent=0, func_role="obj"): - out = [] - out += self._str_signature() - out += self._str_index() + [''] - out += self._str_summary() - out += self._str_extended_summary() - for param_list in ('Parameters', 'Attributes', 'Methods', - 'Returns','Raises'): - out += self._str_param_list(param_list) - out += self._str_warnings() - out += self._str_see_also(func_role) - out += self._str_section('Notes') - out += self._str_references() - out += self._str_section('Examples') - out = self._str_indent(out,indent) - return '\n'.join(out) - -class SphinxFunctionDoc(SphinxDocString, FunctionDoc): - pass - -class SphinxClassDoc(SphinxDocString, ClassDoc): - pass - -def get_doc_object(obj, what=None, doc=None): - if what is None: - if inspect.isclass(obj): - what = 'class' - elif inspect.ismodule(obj): - what = 'module' - elif callable(obj): - what = 'function' - else: - what = 'object' - if what == 'class': - return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc) - elif what in ('function', 'method'): - return SphinxFunctionDoc(obj, '', doc=doc) - else: - if doc is None: - doc = pydoc.getdoc(obj) - return SphinxDocString(doc) - diff --git a/doc/sphinxext/numpydoc/inheritance_diagram.py b/doc/sphinxext/numpydoc/inheritance_diagram.py deleted file mode 100644 index a1ba7e7..0000000 --- a/doc/sphinxext/numpydoc/inheritance_diagram.py +++ /dev/null @@ -1,407 +0,0 @@ -""" -Defines a docutils directive for inserting inheritance diagrams. - -Provide the directive with one or more classes or modules (separated -by whitespace). For modules, all of the classes in that module will -be used. - -Example:: - - Given the following classes: - - class A: pass - class B(A): pass - class C(A): pass - class D(B, C): pass - class E(B): pass - - .. inheritance-diagram: D E - - Produces a graph like the following: - - A - / \ - B C - / \ / - E D - -The graph is inserted as a PNG+image map into HTML and a PDF in -LaTeX. -""" - -import inspect -import os -import re -import subprocess -try: - from hashlib import md5 -except ImportError: - from md5 import md5 - -from docutils.nodes import Body, Element -from docutils.parsers.rst import directives -from sphinx.roles import xfileref_role - -def my_import(name): - """Module importer - taken from the python documentation. - - This function allows importing names with dots in them.""" - - mod = __import__(name) - components = name.split('.') - for comp in components[1:]: - mod = getattr(mod, comp) - return mod - -class DotException(Exception): - pass - -class InheritanceGraph: - """ - Given a list of classes, determines the set of classes that - they inherit from all the way to the root "object", and then - is able to generate a graphviz dot graph from them. - """ - def __init__(self, class_names, show_builtins=False): - """ - *class_names* is a list of child classes to show bases from. - - If *show_builtins* is True, then Python builtins will be shown - in the graph. 
- """ - self.class_names = class_names - self.classes = self._import_classes(class_names) - self.all_classes = self._all_classes(self.classes) - if len(self.all_classes) == 0: - raise ValueError("No classes found for inheritance diagram") - self.show_builtins = show_builtins - - py_sig_re = re.compile(r'''^([\w.]*\.)? # class names - (\w+) \s* $ # optionally arguments - ''', re.VERBOSE) - - def _import_class_or_module(self, name): - """ - Import a class using its fully-qualified *name*. - """ - try: - path, base = self.py_sig_re.match(name).groups() - except: - raise ValueError( - "Invalid class or module '%s' specified for inheritance diagram" % name) - fullname = (path or '') + base - path = (path and path.rstrip('.')) - if not path: - path = base - try: - module = __import__(path, None, None, []) - # We must do an import of the fully qualified name. Otherwise if a - # subpackage 'a.b' is requested where 'import a' does NOT provide - # 'a.b' automatically, then 'a.b' will not be found below. This - # second call will force the equivalent of 'import a.b' to happen - # after the top-level import above. - my_import(fullname) - - except ImportError: - raise ValueError( - "Could not import class or module '%s' specified for inheritance diagram" % name) - - try: - todoc = module - for comp in fullname.split('.')[1:]: - todoc = getattr(todoc, comp) - except AttributeError: - raise ValueError( - "Could not find class or module '%s' specified for inheritance diagram" % name) - - # If a class, just return it - if inspect.isclass(todoc): - return [todoc] - elif inspect.ismodule(todoc): - classes = [] - for cls in list(todoc.__dict__.values()): - if inspect.isclass(cls) and cls.__module__ == todoc.__name__: - classes.append(cls) - return classes - raise ValueError( - "'%s' does not resolve to a class or module" % name) - - def _import_classes(self, class_names): - """ - Import a list of classes. - """ - classes = [] - for name in class_names: - classes.extend(self._import_class_or_module(name)) - return classes - - def _all_classes(self, classes): - """ - Return a list of all classes that are ancestors of *classes*. - """ - all_classes = {} - - def recurse(cls): - all_classes[cls] = None - for c in cls.__bases__: - if c not in all_classes: - recurse(c) - - for cls in classes: - recurse(cls) - - return list(all_classes.keys()) - - def class_name(self, cls, parts=0): - """ - Given a class object, return a fully-qualified name. This - works for things I've tested in matplotlib so far, but may not - be completely general. - """ - module = cls.__module__ - if module == '__builtin__': - fullname = cls.__name__ - else: - fullname = "%s.%s" % (module, cls.__name__) - if parts == 0: - return fullname - name_parts = fullname.split('.') - return '.'.join(name_parts[-parts:]) - - def get_all_class_names(self): - """ - Get all of the class names involved in the graph. 
- """ - return [self.class_name(x) for x in self.all_classes] - - # These are the default options for graphviz - default_graph_options = { - "rankdir": "LR", - "size": '"8.0, 12.0"' - } - default_node_options = { - "shape": "box", - "fontsize": 10, - "height": 0.25, - "fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans", - "style": '"setlinewidth(0.5)"' - } - default_edge_options = { - "arrowsize": 0.5, - "style": '"setlinewidth(0.5)"' - } - - def _format_node_options(self, options): - return ','.join(["%s=%s" % x for x in list(options.items())]) - def _format_graph_options(self, options): - return ''.join(["%s=%s;\n" % x for x in list(options.items())]) - - def generate_dot(self, fd, name, parts=0, urls={}, - graph_options={}, node_options={}, - edge_options={}): - """ - Generate a graphviz dot graph from the classes that - were passed in to __init__. - - *fd* is a Python file-like object to write to. - - *name* is the name of the graph - - *urls* is a dictionary mapping class names to http urls - - *graph_options*, *node_options*, *edge_options* are - dictionaries containing key/value pairs to pass on as graphviz - properties. - """ - g_options = self.default_graph_options.copy() - g_options.update(graph_options) - n_options = self.default_node_options.copy() - n_options.update(node_options) - e_options = self.default_edge_options.copy() - e_options.update(edge_options) - - fd.write('digraph %s {\n' % name) - fd.write(self._format_graph_options(g_options)) - - for cls in self.all_classes: - if not self.show_builtins and cls in list(__builtins__.values()): - continue - - name = self.class_name(cls, parts) - - # Write the node - this_node_options = n_options.copy() - url = urls.get(self.class_name(cls)) - if url is not None: - this_node_options['URL'] = '"%s"' % url - fd.write(' "%s" [%s];\n' % - (name, self._format_node_options(this_node_options))) - - # Write the edges - for base in cls.__bases__: - if not self.show_builtins and base in list(__builtins__.values()): - continue - - base_name = self.class_name(base, parts) - fd.write(' "%s" -> "%s" [%s];\n' % - (base_name, name, - self._format_node_options(e_options))) - fd.write('}\n') - - def run_dot(self, args, name, parts=0, urls={}, - graph_options={}, node_options={}, edge_options={}): - """ - Run graphviz 'dot' over this graph, returning whatever 'dot' - writes to stdout. - - *args* will be passed along as commandline arguments. - - *name* is the name of the graph - - *urls* is a dictionary mapping class names to http urls - - Raises DotException for any of the many os and - installation-related errors that may occur. - """ - try: - dot = subprocess.Popen(['dot'] + list(args), - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - close_fds=True) - except OSError: - raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?") - except ValueError: - raise DotException("'dot' called with invalid arguments") - except: - raise DotException("Unexpected error calling 'dot'") - - self.generate_dot(dot.stdin, name, parts, urls, graph_options, - node_options, edge_options) - dot.stdin.close() - result = dot.stdout.read() - returncode = dot.wait() - if returncode != 0: - raise DotException("'dot' returned the errorcode %d" % returncode) - return result - -class inheritance_diagram(Body, Element): - """ - A docutils node to use as a placeholder for the inheritance - diagram. 
- """ - pass - -def inheritance_diagram_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, - state_machine): - """ - Run when the inheritance_diagram directive is first encountered. - """ - node = inheritance_diagram() - - class_names = arguments - - # Create a graph starting with the list of classes - graph = InheritanceGraph(class_names) - - # Create xref nodes for each target of the graph's image map and - # add them to the doc tree so that Sphinx can resolve the - # references to real URLs later. These nodes will eventually be - # removed from the doctree after we're done with them. - for name in graph.get_all_class_names(): - refnodes, x = xfileref_role( - 'class', ':class:`%s`' % name, name, 0, state) - node.extend(refnodes) - # Store the graph object so we can use it to generate the - # dot file later - node['graph'] = graph - # Store the original content for use as a hash - node['parts'] = options.get('parts', 0) - node['content'] = " ".join(class_names) - return [node] - -def get_graph_hash(node): - return md5(node['content'] + str(node['parts'])).hexdigest()[-10:] - -def html_output_graph(self, node): - """ - Output the graph for HTML. This will insert a PNG with clickable - image map. - """ - graph = node['graph'] - parts = node['parts'] - - graph_hash = get_graph_hash(node) - name = "inheritance%s" % graph_hash - path = '_images' - dest_path = os.path.join(setup.app.builder.outdir, path) - if not os.path.exists(dest_path): - os.makedirs(dest_path) - png_path = os.path.join(dest_path, name + ".png") - path = setup.app.builder.imgpath - - # Create a mapping from fully-qualified class names to URLs. - urls = {} - for child in node: - if child.get('refuri') is not None: - urls[child['reftitle']] = child.get('refuri') - elif child.get('refid') is not None: - urls[child['reftitle']] = '#' + child.get('refid') - - # These arguments to dot will save a PNG file to disk and write - # an HTML image map to stdout. - image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'], - name, parts, urls) - return ('%s' % - (path, name, name, image_map)) - -def latex_output_graph(self, node): - """ - Output the graph for LaTeX. This will insert a PDF. - """ - graph = node['graph'] - parts = node['parts'] - - graph_hash = get_graph_hash(node) - name = "inheritance%s" % graph_hash - dest_path = os.path.abspath(os.path.join(setup.app.builder.outdir, '_images')) - if not os.path.exists(dest_path): - os.makedirs(dest_path) - pdf_path = os.path.abspath(os.path.join(dest_path, name + ".pdf")) - - graph.run_dot(['-Tpdf', '-o%s' % pdf_path], - name, parts, graph_options={'size': '"6.0,6.0"'}) - return '\n\\includegraphics{%s}\n\n' % pdf_path - -def visit_inheritance_diagram(inner_func): - """ - This is just a wrapper around html/latex_output_graph to make it - easier to handle errors and insert warnings. 
- """ - def visitor(self, node): - try: - content = inner_func(self, node) - except DotException as e: - # Insert the exception as a warning in the document - warning = self.document.reporter.warning(str(e), line=node.line) - warning.parent = node - node.children = [warning] - else: - source = self.document.attributes['source'] - self.body.append(content) - node.children = [] - return visitor - -def do_nothing(self, node): - pass - -def setup(app): - setup.app = app - setup.confdir = app.confdir - - app.add_node( - inheritance_diagram, - latex=(visit_inheritance_diagram(latex_output_graph), do_nothing), - html=(visit_inheritance_diagram(html_output_graph), do_nothing)) - app.add_directive( - 'inheritance-diagram', inheritance_diagram_directive, - False, (1, 100, 0), parts = directives.nonnegative_int) diff --git a/doc/sphinxext/numpydoc/ipython_console_highlighting.py b/doc/sphinxext/numpydoc/ipython_console_highlighting.py deleted file mode 100644 index 217b779..0000000 --- a/doc/sphinxext/numpydoc/ipython_console_highlighting.py +++ /dev/null @@ -1,114 +0,0 @@ -"""reST directive for syntax-highlighting ipython interactive sessions. - -XXX - See what improvements can be made based on the new (as of Sept 2009) -'pycon' lexer for the python console. At the very least it will give better -highlighted tracebacks. -""" - -#----------------------------------------------------------------------------- -# Needed modules - -# Standard library -import re - -# Third party -from pygments.lexer import Lexer, do_insertions -from pygments.lexers.agile import (PythonConsoleLexer, PythonLexer, - PythonTracebackLexer) -from pygments.token import Comment, Generic - -from sphinx import highlighting - -#----------------------------------------------------------------------------- -# Global constants -line_re = re.compile('.*?\n') - -#----------------------------------------------------------------------------- -# Code begins - classes and functions - -class IPythonConsoleLexer(Lexer): - """ - For IPython console output or doctests, such as: - - .. sourcecode:: ipython - - In [1]: a = 'foo' - - In [2]: a - Out[2]: 'foo' - - In [3]: print a - foo - - In [4]: 1 / 0 - - Notes: - - - Tracebacks are not currently supported. - - - It assumes the default IPython prompts, not customized ones. - """ - - name = 'IPython console session' - aliases = ['ipython'] - mimetypes = ['text/x-ipython-console'] - input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)") - output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)") - continue_prompt = re.compile(" \.\.\.+:") - tb_start = re.compile("\-+") - - def get_tokens_unprocessed(self, text): - pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) - - curcode = '' - insertions = [] - for match in line_re.finditer(text): - line = match.group() - input_prompt = self.input_prompt.match(line) - continue_prompt = self.continue_prompt.match(line.rstrip()) - output_prompt = self.output_prompt.match(line) - if line.startswith("#"): - insertions.append((len(curcode), - [(0, Comment, line)])) - elif input_prompt is not None: - insertions.append((len(curcode), - [(0, Generic.Prompt, input_prompt.group())])) - curcode += line[input_prompt.end():] - elif continue_prompt is not None: - insertions.append((len(curcode), - [(0, Generic.Prompt, continue_prompt.group())])) - curcode += line[continue_prompt.end():] - elif output_prompt is not None: - # Use the 'error' token for output. 
We should probably make - # our own token, but error is typicaly in a bright color like - # red, so it works fine for our output prompts. - insertions.append((len(curcode), - [(0, Generic.Error, output_prompt.group())])) - curcode += line[output_prompt.end():] - else: - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - - -def setup(app): - """Setup as a sphinx extension.""" - - # This is only a lexer, so adding it below to pygments appears sufficient. - # But if somebody knows that the right API usage should be to do that via - # sphinx, by all means fix it here. At least having this setup.py - # suppresses the sphinx warning we'd get without it. - pass - -#----------------------------------------------------------------------------- -# Register the extension as a valid pygments lexer -highlighting.lexers['ipython'] = IPythonConsoleLexer() diff --git a/doc/sphinxext/numpydoc/numpydoc.py b/doc/sphinxext/numpydoc/numpydoc.py deleted file mode 100644 index eea482d..0000000 --- a/doc/sphinxext/numpydoc/numpydoc.py +++ /dev/null @@ -1,116 +0,0 @@ -""" -======== -numpydoc -======== - -Sphinx extension that handles docstrings in the Numpy standard format. [1] - -It will: - -- Convert Parameters etc. sections to field lists. -- Convert See Also section to a See also entry. -- Renumber references. -- Extract the signature from the docstring, if it can't be determined otherwise. - -.. [1] http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines#docstring-standard - -""" - -import os, re, pydoc -from docscrape_sphinx import get_doc_object, SphinxDocString -import inspect - -def mangle_docstrings(app, what, name, obj, options, lines, - reference_offset=[0]): - if what == 'module': - # Strip top title - title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*', - re.I|re.S) - lines[:] = title_re.sub('', "\n".join(lines)).split("\n") - else: - doc = get_doc_object(obj, what, "\n".join(lines)) - lines[:] = str(doc).split("\n") - - if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ - obj.__name__: - if hasattr(obj, '__module__'): - v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__)) - else: - v = dict(full_name=obj.__name__) - lines += ['', '.. htmlonly::', ''] - lines += [' %s' % x for x in - (app.config.numpydoc_edit_link % v).split("\n")] - - # replace reference numbers so that there are no duplicates - references = [] - for l in lines: - l = l.strip() - if l.startswith('.. ['): - try: - references.append(int(l[len('.. ['):l.index(']')])) - except ValueError: - print("WARNING: invalid reference in %s docstring" % name) - - # Start renaming from the biggest number, otherwise we may - # overwrite references. - references.sort() - if references: - for i, line in enumerate(lines): - for r in references: - new_r = reference_offset[0] + r - lines[i] = lines[i].replace('[%d]_' % r, - '[%d]_' % new_r) - lines[i] = lines[i].replace('.. [%d]' % r, - '.. 
[%d]' % new_r) - - reference_offset[0] += len(references) - -def mangle_signature(app, what, name, obj, options, sig, retann): - # Do not try to inspect classes that don't define `__init__` - if (inspect.isclass(obj) and - 'initializes x; see ' in pydoc.getdoc(obj.__init__)): - return '', '' - - if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return - if not hasattr(obj, '__doc__'): return - - doc = SphinxDocString(pydoc.getdoc(obj)) - if doc['Signature']: - sig = re.sub("^[^(]*", "", doc['Signature']) - return sig, '' - -def initialize(app): - try: - app.connect('autodoc-process-signature', mangle_signature) - except: - monkeypatch_sphinx_ext_autodoc() - -def setup(app, get_doc_object_=get_doc_object): - global get_doc_object - get_doc_object = get_doc_object_ - - app.connect('autodoc-process-docstring', mangle_docstrings) - app.connect('builder-inited', initialize) - app.add_config_value('numpydoc_edit_link', None, True) - -#------------------------------------------------------------------------------ -# Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5) -#------------------------------------------------------------------------------ - -def monkeypatch_sphinx_ext_autodoc(): - global _original_format_signature - import sphinx.ext.autodoc - - if sphinx.ext.autodoc.format_signature is our_format_signature: - return - - print("[numpydoc] Monkeypatching sphinx.ext.autodoc ...") - _original_format_signature = sphinx.ext.autodoc.format_signature - sphinx.ext.autodoc.format_signature = our_format_signature - -def our_format_signature(what, obj): - r = mangle_signature(None, what, None, obj, None, None, None) - if r is not None: - return r[0] - else: - return _original_format_signature(what, obj) diff --git a/doc/sphinxext/numpydoc/plot_directive.py b/doc/sphinxext/numpydoc/plot_directive.py deleted file mode 100644 index 45ca91f..0000000 --- a/doc/sphinxext/numpydoc/plot_directive.py +++ /dev/null @@ -1,933 +0,0 @@ -""" -A directive for including a Matplotlib plot in a Sphinx document -================================================================ - -This is a Sphinx extension providing a reStructuredText directive -``.. plot::`` for including a plot in a Sphinx document. - -In HTML output, ``.. plot::`` will include a .png file with a link -to a high-res .png and .pdf. In LaTeX output, it will include a .pdf. - -The plot content may be defined in one of three ways: - -1. **A path to a source file** as the argument to the directive:: - - .. plot:: path/to/plot.py - - When a path to a source file is given, the content of the - directive may optionally contain a caption for the plot:: - - .. plot:: path/to/plot.py - - The plot caption. - - Additionally, one may specify the name of a function to call (with - no arguments) immediately after importing the module:: - - .. plot:: path/to/plot.py plot_function1 - -2. Included as **inline content** to the directive:: - - .. plot:: - - import matplotlib.pyplot as plt - plt.plot([1, 2, 3], [4, 5, 6]) - plt.title("A plotting exammple") - -3. Using **doctest** syntax:: - - .. plot:: - - A plotting example: - >>> import matplotlib.pyplot as plt - >>> plt.plot([1, 2, 3], [4, 5, 6]) - -Options -------- - -The ``.. plot::`` directive supports the following options: - - ``:format:`` : {'python', 'doctest'} - The format of the input. If unset, the format is auto-detected. - - ``:include-source:`` : bool - Whether to display the source code. 
The default can be changed using - the ``plot_include_source`` variable in :file:`conf.py` (which itself - defaults to False). - - ``:show-source-link:`` : bool - Whether to show a link to the source in HTML. The default can be - changed using the ``plot_html_show_source_link`` variable in - :file:`conf.py` (which itself defaults to True). - - ``:context:`` : bool or str - If provided, the code will be run in the context of all previous plot - directives for which the ``:context:`` option was specified. This only - applies to inline code plot directives, not those run from files. If - the ``:context: reset`` option is specified, the context is reset - for this and future plots, and previous figures are closed prior to - running the code. ``:context: close-figs`` keeps the context but closes - previous figures before running the code. - - ``:nofigs:`` : bool - If specified, the code block will be run, but no figures will be - inserted. This is usually useful with the ``:context:`` option. - - ``:caption:`` : str - If specified, the option's argument will be used as a caption for the - figure. This overwrites the caption given in the content, when the plot - is generated from a file. - -Additionally, this directive supports all the options of the `image directive -`_, -except for ``:target:`` (since plot will add its own target). These include -``:alt:``, ``:height:``, ``:width:``, ``:scale:``, ``:align:`` and ``:class:``. - -Configuration options ---------------------- - -The plot directive has the following configuration options: - - plot_include_source - Default value for the include-source option (default: False). - - plot_html_show_source_link - Whether to show a link to the source in HTML (default: True). - - plot_pre_code - Code that should be executed before each plot. If None (the default), - it will default to a string containing:: - - import numpy as np - from matplotlib import pyplot as plt - - plot_basedir - Base directory, to which ``plot::`` file names are relative to. - If None or empty (the default), file names are relative to the - directory where the file containing the directive is. - - plot_formats - File formats to generate (default: ['png', 'hires.png', 'pdf']). - List of tuples or strings:: - - [(suffix, dpi), suffix, ...] - - that determine the file format and the DPI. For entries whose - DPI was omitted, sensible defaults are chosen. When passing from - the command line through sphinx_build the list should be passed as - suffix:dpi,suffix:dpi, ... - - plot_html_show_formats - Whether to show links to the files in HTML (default: True). - - plot_rcparams - A dictionary containing any non-standard rcParams that should - be applied before each plot (default: {}). - - plot_apply_rcparams - By default, rcParams are applied when ``:context:`` option is not used - in a plot directive. If set, this configuration option overrides this - behavior and applies rcParams before each plot. - - plot_working_directory - By default, the working directory will be changed to the directory of - the example, so the code can get at its data files, if any. Also its - path will be added to `sys.path` so it can import any helper modules - sitting beside it. This configuration option can be used to specify - a central directory (also added to `sys.path`) where data files and - helper modules for all code are located. - - plot_template - Provide a customized template for preparing restructured text. - - plot_srcset - Allow the srcset image option for responsive image resolutions. 
List of - strings with the multiplicative factors followed by an "x". - e.g. ["2.0x", "1.5x"]. "2.0x" will create a png with the default "png" - resolution from plot_formats, multiplied by 2. If plot_srcset is - specified, the plot directive uses the - :doc:`/api/sphinxext_figmpl_directive_api` (instead of the usual figure - directive) in the intermediary rst file that is generated. - The plot_srcset option is incompatible with *singlehtml* builds, and an - error will be raised. - -Notes on how it works ---------------------- - -The plot directive runs the code it is given, either in the source file or the -code under the directive. The figure created (if any) is saved in the sphinx -build directory under a subdirectory named ``plot_directive``. It then creates -an intermediate rst file that calls a ``.. figure:`` directive (or -``.. figmpl::`` directive if ``plot_srcset`` is being used) and has links to -the ``*.png`` files in the ``plot_directive`` directory. These translations can -be customized by changing the *plot_template*. See the source of -:doc:`/api/sphinxext_plot_directive_api` for the templates defined in *TEMPLATE* -and *TEMPLATE_SRCSET*. -""" - -import contextlib -import doctest -from io import StringIO -import itertools -import os -from os.path import relpath -from pathlib import Path -import re -import shutil -import sys -import textwrap -import traceback - -from docutils.parsers.rst import directives, Directive -from docutils.parsers.rst.directives.images import Image -import jinja2 # Sphinx dependency. - -from sphinx.errors import ExtensionError - -import matplotlib -from matplotlib.backend_bases import FigureManagerBase -import matplotlib.pyplot as plt -from matplotlib import _pylab_helpers, cbook - -matplotlib.use("agg") - -__version__ = 2 - - -# ----------------------------------------------------------------------------- -# Registration hook -# ----------------------------------------------------------------------------- - - -def _option_boolean(arg): - if not arg or not arg.strip(): - # no argument given, assume used as a flag - return True - elif arg.strip().lower() in ('no', '0', 'false'): - return False - elif arg.strip().lower() in ('yes', '1', 'true'): - return True - else: - raise ValueError(f'{arg!r} unknown boolean') - - -def _option_context(arg): - if arg in [None, 'reset', 'close-figs']: - return arg - raise ValueError("Argument should be None or 'reset' or 'close-figs'") - - -def _option_format(arg): - return directives.choice(arg, ('python', 'doctest')) - - -def mark_plot_labels(app, document): - """ - To make plots referenceable, we need to move the reference from the - "htmlonly" (or "latexonly") node to the actual figure node itself. - """ - for name, explicit in document.nametypes.items(): - if not explicit: - continue - labelid = document.nameids[name] - if labelid is None: - continue - node = document.ids[labelid] - if node.tagname in ('html_only', 'latex_only'): - for n in node: - if n.tagname == 'figure': - sectname = name - for c in n: - if c.tagname == 'caption': - sectname = c.astext() - break - - node['ids'].remove(labelid) - node['names'].remove(name) - n['ids'].append(labelid) - n['names'].append(name) - document.settings.env.labels[name] = \ - document.settings.env.docname, labelid, sectname - break - - -class PlotDirective(Directive): - """The ``.. 
plot::`` directive, as documented in the module's docstring.""" - - has_content = True - required_arguments = 0 - optional_arguments = 2 - final_argument_whitespace = False - option_spec = { - 'alt': directives.unchanged, - 'height': directives.length_or_unitless, - 'width': directives.length_or_percentage_or_unitless, - 'scale': directives.nonnegative_int, - 'align': Image.align, - 'class': directives.class_option, - 'include-source': _option_boolean, - 'show-source-link': _option_boolean, - 'format': _option_format, - 'context': _option_context, - 'nofigs': directives.flag, - 'caption': directives.unchanged, - } - - def run(self): - """Run the plot directive.""" - try: - return run(self.arguments, self.content, self.options, - self.state_machine, self.state, self.lineno) - except Exception as e: - raise self.error(str(e)) - - -def _copy_css_file(app, exc): - if exc is None and app.builder.format == 'html': - src = cbook._get_data_path('plot_directive/plot_directive.css') - dst = app.outdir / Path('_static') - dst.mkdir(exist_ok=True) - # Use copyfile because we do not want to copy src's permissions. - shutil.copyfile(src, dst / Path('plot_directive.css')) - - -def setup(app): - setup.app = app - setup.config = app.config - setup.confdir = app.confdir - app.add_directive('plot', PlotDirective) - app.add_config_value('plot_pre_code', None, True) - app.add_config_value('plot_include_source', False, True) - app.add_config_value('plot_html_show_source_link', True, True) - app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True) - app.add_config_value('plot_basedir', None, True) - app.add_config_value('plot_html_show_formats', True, True) - app.add_config_value('plot_rcparams', {}, True) - app.add_config_value('plot_apply_rcparams', False, True) - app.add_config_value('plot_working_directory', None, True) - app.add_config_value('plot_template', None, True) - app.add_config_value('plot_srcset', [], True) - app.connect('doctree-read', mark_plot_labels) - app.add_css_file('plot_directive.css') - app.connect('build-finished', _copy_css_file) - metadata = {'parallel_read_safe': True, 'parallel_write_safe': True, - 'version': matplotlib.__version__} - return metadata - - -# ----------------------------------------------------------------------------- -# Doctest handling -# ----------------------------------------------------------------------------- - - -def contains_doctest(text): - try: - # check if it's valid Python as-is - compile(text, '', 'exec') - return False - except SyntaxError: - pass - r = re.compile(r'^\s*>>>', re.M) - m = r.search(text) - return bool(m) - - -def _split_code_at_show(text, function_name): - """Split code at plt.show().""" - - is_doctest = contains_doctest(text) - if function_name is None: - parts = [] - part = [] - for line in text.split("\n"): - if ((not is_doctest and line.startswith('plt.show(')) or - (is_doctest and line.strip() == '>>> plt.show()')): - part.append(line) - parts.append("\n".join(part)) - part = [] - else: - part.append(line) - if "\n".join(part).strip(): - parts.append("\n".join(part)) - else: - parts = [text] - return is_doctest, parts - - -# ----------------------------------------------------------------------------- -# Template -# ----------------------------------------------------------------------------- - -_SOURCECODE = """ -{{ source_code }} - -.. 
only:: html - - {% if src_name or (html_show_formats and not multi_image) %} - ( - {%- if src_name -%} - :download:`Source code <{{ build_dir }}/{{ src_name }}>` - {%- endif -%} - {%- if html_show_formats and not multi_image -%} - {%- for img in images -%} - {%- for fmt in img.formats -%} - {%- if src_name or not loop.first -%}, {% endif -%} - :download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>` - {%- endfor -%} - {%- endfor -%} - {%- endif -%} - ) - {% endif %} -""" - -TEMPLATE_SRCSET = _SOURCECODE + """ - {% for img in images %} - .. figure-mpl:: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }} - {% for option in options -%} - {{ option }} - {% endfor %} - {%- if caption -%} - {{ caption }} {# appropriate leading whitespace added beforehand #} - {% endif -%} - {%- if srcset -%} - :srcset: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }} - {%- for sr in srcset -%} - , {{ build_dir }}/{{ img.basename }}.{{ sr }}.{{ default_fmt }} {{sr}} - {%- endfor -%} - {% endif %} - - {% if html_show_formats and multi_image %} - ( - {%- for fmt in img.formats -%} - {%- if not loop.first -%}, {% endif -%} - :download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>` - {%- endfor -%} - ) - {% endif %} - - - {% endfor %} - -.. only:: not html - - {% for img in images %} - .. figure-mpl:: {{ build_dir }}/{{ img.basename }}.* - {% for option in options -%} - {{ option }} - {% endfor -%} - - {{ caption }} {# appropriate leading whitespace added beforehand #} - {% endfor %} - -""" - -TEMPLATE = _SOURCECODE + """ - - {% for img in images %} - .. figure:: {{ build_dir }}/{{ img.basename }}.{{ default_fmt }} - {% for option in options -%} - {{ option }} - {% endfor %} - - {% if html_show_formats and multi_image -%} - ( - {%- for fmt in img.formats -%} - {%- if not loop.first -%}, {% endif -%} - :download:`{{ fmt }} <{{ build_dir }}/{{ img.basename }}.{{ fmt }}>` - {%- endfor -%} - ) - {%- endif -%} - - {{ caption }} {# appropriate leading whitespace added beforehand #} - {% endfor %} - -.. only:: not html - - {% for img in images %} - .. figure:: {{ build_dir }}/{{ img.basename }}.* - {% for option in options -%} - {{ option }} - {% endfor -%} - - {{ caption }} {# appropriate leading whitespace added beforehand #} - {% endfor %} - -""" - -exception_template = """ -.. only:: html - - [`source code <%(linkdir)s/%(basename)s.py>`__] - -Exception occurred rendering plot. - -""" - -# the context of the plot for all directives specified with the -# :context: option -plot_context = dict() - - -class ImageFile: - def __init__(self, basename, dirname): - self.basename = basename - self.dirname = dirname - self.formats = [] - - def filename(self, format): - return os.path.join(self.dirname, f"{self.basename}.{format}") - - def filenames(self): - return [self.filename(fmt) for fmt in self.formats] - - -def out_of_date(original, derived, includes=None): - """ - Return whether *derived* is out-of-date relative to *original* or any of - the RST files included in it using the RST include directive (*includes*). - *derived* and *original* are full paths, and *includes* is optionally a - list of full paths which may have been included in the *original*. 
- """ - if not os.path.exists(derived): - return True - - if includes is None: - includes = [] - files_to_check = [original, *includes] - - def out_of_date_one(original, derived_mtime): - return (os.path.exists(original) and - derived_mtime < os.stat(original).st_mtime) - - derived_mtime = os.stat(derived).st_mtime - return any(out_of_date_one(f, derived_mtime) for f in files_to_check) - - -class PlotError(RuntimeError): - pass - - -def _run_code(code, code_path, ns=None, function_name=None): - """ - Import a Python module from a path, and run the function given by - name, if function_name is not None. - """ - - # Change the working directory to the directory of the example, so - # it can get at its data files, if any. Add its path to sys.path - # so it can import any helper modules sitting beside it. - pwd = os.getcwd() - if setup.config.plot_working_directory is not None: - try: - os.chdir(setup.config.plot_working_directory) - except OSError as err: - raise OSError(f'{err}\n`plot_working_directory` option in ' - f'Sphinx configuration file must be a valid ' - f'directory path') from err - except TypeError as err: - raise TypeError(f'{err}\n`plot_working_directory` option in ' - f'Sphinx configuration file must be a string or ' - f'None') from err - elif code_path is not None: - dirname = os.path.abspath(os.path.dirname(code_path)) - os.chdir(dirname) - - with cbook._setattr_cm( - sys, argv=[code_path], path=[os.getcwd(), *sys.path]), \ - contextlib.redirect_stdout(StringIO()): - try: - if ns is None: - ns = {} - if not ns: - if setup.config.plot_pre_code is None: - exec('import numpy as np\n' - 'from matplotlib import pyplot as plt\n', ns) - else: - exec(str(setup.config.plot_pre_code), ns) - if "__main__" in code: - ns['__name__'] = '__main__' - - # Patch out non-interactive show() to avoid triggering a warning. - with cbook._setattr_cm(FigureManagerBase, show=lambda self: None): - exec(code, ns) - if function_name is not None: - exec(function_name + "()", ns) - - except (Exception, SystemExit) as err: - raise PlotError(traceback.format_exc()) from err - finally: - os.chdir(pwd) - return ns - - -def clear_state(plot_rcparams, close=True): - if close: - plt.close('all') - matplotlib.rc_file_defaults() - matplotlib.rcParams.update(plot_rcparams) - - -def get_plot_formats(config): - default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200} - formats = [] - plot_formats = config.plot_formats - for fmt in plot_formats: - if isinstance(fmt, str): - if ':' in fmt: - suffix, dpi = fmt.split(':') - formats.append((str(suffix), int(dpi))) - else: - formats.append((fmt, default_dpi.get(fmt, 80))) - elif isinstance(fmt, (tuple, list)) and len(fmt) == 2: - formats.append((str(fmt[0]), int(fmt[1]))) - else: - raise PlotError('invalid image format "%r" in plot_formats' % fmt) - return formats - - -def _parse_srcset(entries): - """ - Parse srcset for multiples... - """ - srcset = {} - for entry in entries: - entry = entry.strip() - if len(entry) >= 2: - mult = entry[:-1] - srcset[float(mult)] = entry - else: - raise ExtensionError(f'srcset argument {entry!r} is invalid.') - return srcset - - -def render_figures(code, code_path, output_dir, output_base, context, - function_name, config, context_reset=False, - close_figs=False, - code_includes=None): - """ - Run a pyplot script and save the images in *output_dir*. 
- - Save the images under *output_dir* with file names derived from - *output_base* - """ - - if function_name is not None: - output_base = f'{output_base}_{function_name}' - formats = get_plot_formats(config) - - # Try to determine if all images already exist - - is_doctest, code_pieces = _split_code_at_show(code, function_name) - # Look for single-figure output files first - img = ImageFile(output_base, output_dir) - for format, dpi in formats: - if context or out_of_date(code_path, img.filename(format), - includes=code_includes): - all_exists = False - break - img.formats.append(format) - else: - all_exists = True - - if all_exists: - return [(code, [img])] - - # Then look for multi-figure output files - results = [] - for i, code_piece in enumerate(code_pieces): - images = [] - for j in itertools.count(): - if len(code_pieces) > 1: - img = ImageFile('%s_%02d_%02d' % (output_base, i, j), - output_dir) - else: - img = ImageFile('%s_%02d' % (output_base, j), output_dir) - for fmt, dpi in formats: - if context or out_of_date(code_path, img.filename(fmt), - includes=code_includes): - all_exists = False - break - img.formats.append(fmt) - - # assume that if we have one, we have them all - if not all_exists: - all_exists = (j > 0) - break - images.append(img) - if not all_exists: - break - results.append((code_piece, images)) - else: - all_exists = True - - if all_exists: - return results - - # We didn't find the files, so build them - - results = [] - ns = plot_context if context else {} - - if context_reset: - clear_state(config.plot_rcparams) - plot_context.clear() - - close_figs = not context or close_figs - - for i, code_piece in enumerate(code_pieces): - - if not context or config.plot_apply_rcparams: - clear_state(config.plot_rcparams, close_figs) - elif close_figs: - plt.close('all') - - _run_code(doctest.script_from_examples(code_piece) if is_doctest - else code_piece, - code_path, ns, function_name) - - images = [] - fig_managers = _pylab_helpers.Gcf.get_all_fig_managers() - for j, figman in enumerate(fig_managers): - if len(fig_managers) == 1 and len(code_pieces) == 1: - img = ImageFile(output_base, output_dir) - elif len(code_pieces) == 1: - img = ImageFile("%s_%02d" % (output_base, j), output_dir) - else: - img = ImageFile("%s_%02d_%02d" % (output_base, i, j), - output_dir) - images.append(img) - - for fmt, dpi in formats: - try: - figman.canvas.figure.savefig(img.filename(fmt), dpi=dpi) - if fmt == formats[0][0] and config.plot_srcset: - # save a 2x, 3x etc version of the default... 
- srcset = _parse_srcset(config.plot_srcset) - for mult, suffix in srcset.items(): - fm = f'{suffix}.{fmt}' - img.formats.append(fm) - figman.canvas.figure.savefig(img.filename(fm), - dpi=int(dpi * mult)) - except Exception as err: - raise PlotError(traceback.format_exc()) from err - img.formats.append(fmt) - - results.append((code_piece, images)) - - if not context or config.plot_apply_rcparams: - clear_state(config.plot_rcparams, close=not context) - - return results - - -def run(arguments, content, options, state_machine, state, lineno): - document = state_machine.document - config = document.settings.env.config - nofigs = 'nofigs' in options - - if config.plot_srcset and setup.app.builder.name == 'singlehtml': - raise ExtensionError( - 'plot_srcset option not compatible with single HTML writer') - - formats = get_plot_formats(config) - default_fmt = formats[0][0] - - options.setdefault('include-source', config.plot_include_source) - options.setdefault('show-source-link', config.plot_html_show_source_link) - - if 'class' in options: - # classes are parsed into a list of string, and output by simply - # printing the list, abusing the fact that RST guarantees to strip - # non-conforming characters - options['class'] = ['plot-directive'] + options['class'] - else: - options.setdefault('class', ['plot-directive']) - keep_context = 'context' in options - context_opt = None if not keep_context else options['context'] - - rst_file = document.attributes['source'] - rst_dir = os.path.dirname(rst_file) - - if len(arguments): - if not config.plot_basedir: - source_file_name = os.path.join(setup.app.builder.srcdir, - directives.uri(arguments[0])) - else: - source_file_name = os.path.join(setup.confdir, config.plot_basedir, - directives.uri(arguments[0])) - # If there is content, it will be passed as a caption. - caption = '\n'.join(content) - - # Enforce unambiguous use of captions. - if "caption" in options: - if caption: - raise ValueError( - 'Caption specified in both content and options.' - ' Please remove ambiguity.' - ) - # Use caption option - caption = options["caption"] - - # If the optional function name is provided, use it - if len(arguments) == 2: - function_name = arguments[1] - else: - function_name = None - - code = Path(source_file_name).read_text(encoding='utf-8') - output_base = os.path.basename(source_file_name) - else: - source_file_name = rst_file - code = textwrap.dedent("\n".join(map(str, content))) - counter = document.attributes.get('_plot_counter', 0) + 1 - document.attributes['_plot_counter'] = counter - base, ext = os.path.splitext(os.path.basename(source_file_name)) - output_base = '%s-%d.py' % (base, counter) - function_name = None - caption = options.get('caption', '') - - base, source_ext = os.path.splitext(output_base) - if source_ext in ('.py', '.rst', '.txt'): - output_base = base - else: - source_ext = '' - - # ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames - output_base = output_base.replace('.', '-') - - # is it in doctest format? - is_doctest = contains_doctest(code) - if 'format' in options: - if options['format'] == 'python': - is_doctest = False - else: - is_doctest = True - - # determine output directory name fragment - source_rel_name = relpath(source_file_name, setup.confdir) - source_rel_dir = os.path.dirname(source_rel_name).lstrip(os.path.sep) - - # build_dir: where to place output files (temporarily) - build_dir = os.path.join(os.path.dirname(setup.app.doctreedir), - 'plot_directive', - source_rel_dir) - # get rid of .. 
in paths, also changes pathsep - # see note in Python docs for warning about symbolic links on Windows. - # need to compare source and dest paths at end - build_dir = os.path.normpath(build_dir) - os.makedirs(build_dir, exist_ok=True) - - # how to link to files from the RST file - try: - build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/') - except ValueError: - # on Windows, relpath raises ValueError when path and start are on - # different mounts/drives - build_dir_link = build_dir - - # get list of included rst files so that the output is updated when any - # plots in the included files change. These attributes are modified by the - # include directive (see the docutils.parsers.rst.directives.misc module). - try: - source_file_includes = [os.path.join(os.getcwd(), t[0]) - for t in state.document.include_log] - except AttributeError: - # the document.include_log attribute only exists in docutils >=0.17, - # before that we need to inspect the state machine - possible_sources = {os.path.join(setup.confdir, t[0]) - for t in state_machine.input_lines.items} - source_file_includes = [f for f in possible_sources - if os.path.isfile(f)] - # remove the source file itself from the includes - try: - source_file_includes.remove(source_file_name) - except ValueError: - pass - - # save script (if necessary) - if options['show-source-link']: - Path(build_dir, output_base + source_ext).write_text( - doctest.script_from_examples(code) - if source_file_name == rst_file and is_doctest - else code, - encoding='utf-8') - - # make figures - try: - results = render_figures(code=code, - code_path=source_file_name, - output_dir=build_dir, - output_base=output_base, - context=keep_context, - function_name=function_name, - config=config, - context_reset=context_opt == 'reset', - close_figs=context_opt == 'close-figs', - code_includes=source_file_includes) - errors = [] - except PlotError as err: - reporter = state.memo.reporter - sm = reporter.system_message( - 2, "Exception occurred in plotting {}\n from {}:\n{}".format( - output_base, source_file_name, err), - line=lineno) - results = [(code, [])] - errors = [sm] - - # Properly indent the caption - if caption and config.plot_srcset: - caption = f':caption: {caption}' - elif caption: - caption = '\n' + '\n'.join(' ' + line.strip() - for line in caption.split('\n')) - # generate output restructuredtext - total_lines = [] - for j, (code_piece, images) in enumerate(results): - if options['include-source']: - if is_doctest: - lines = ['', *code_piece.splitlines()] - else: - lines = ['.. 
code-block:: python', '', - *textwrap.indent(code_piece, ' ').splitlines()] - source_code = "\n".join(lines) - else: - source_code = "" - - if nofigs: - images = [] - - opts = [ - f':{key}: {val}' for key, val in options.items() - if key in ('alt', 'height', 'width', 'scale', 'align', 'class')] - - # Not-None src_name signals the need for a source download in the - # generated html - if j == 0 and options['show-source-link']: - src_name = output_base + source_ext - else: - src_name = None - if config.plot_srcset: - srcset = [*_parse_srcset(config.plot_srcset).values()] - template = TEMPLATE_SRCSET - else: - srcset = None - template = TEMPLATE - - result = jinja2.Template(config.plot_template or template).render( - default_fmt=default_fmt, - build_dir=build_dir_link, - src_name=src_name, - multi_image=len(images) > 1, - options=opts, - srcset=srcset, - images=images, - source_code=source_code, - html_show_formats=config.plot_html_show_formats and len(images), - caption=caption) - total_lines.extend(result.split("\n")) - total_lines.extend("\n") - - if total_lines: - state_machine.insert_input(total_lines, source=source_file_name) - - return errors From 93ee35d25e0e99cdb58bd5cdee11495023230a32 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Wed, 27 Sep 2023 11:12:07 -0400 Subject: [PATCH 06/30] [jenkins] For sanitized build use RelWithDebInfo build mode --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 503c893..c847ef6 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -42,7 +42,7 @@ for (int i = 0; i < dockerPlatforms.size(); i++) { if (platform == documentationPlatform) args = '-DBuild_Documentation=1' else if (platform == "sanitize") - args = '-DASAN=ON -DUBSAN=ON' + args = '-DASAN=ON -DUBSAN=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo' def img = docker.build("flatironinstitute/${dockerName}:${env.BRANCH_NAME}-${env.STAGE_NAME}", "--build-arg APPNAME=${projectName} --build-arg BUILD_ID=${env.BUILD_TAG} --build-arg CMAKE_ARGS='${args}' .") catchError(buildResult: 'UNSTABLE', stageResult: 'UNSTABLE') { img.inside("--shm-size=4gb") { From f4fb3278eef62dc4fe744447a242d3f0f6efa3df Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Wed, 27 Sep 2023 11:12:45 -0400 Subject: [PATCH 07/30] [cmake] Run Debug checks also in RelWithDebInfo build mode --- c++/app4triqs/CMakeLists.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/c++/app4triqs/CMakeLists.txt b/c++/app4triqs/CMakeLists.txt index 836263e..c40ef28 100644 --- a/c++/app4triqs/CMakeLists.txt +++ b/c++/app4triqs/CMakeLists.txt @@ -15,9 +15,9 @@ target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $:APP4TRIQS_DEBUG> - $<$:TRIQS_DEBUG> - $<$:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK> + $<$:APP4TRIQS_DEBUG> + $<$:TRIQS_DEBUG> + $<$:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK> ) # Install library and headers From ab04e46ebfe9a0889eab91ca0ea22fc71850a817 Mon Sep 17 00:00:00 2001 From: Alexander Hampel Date: Wed, 27 Sep 2023 13:44:47 -0400 Subject: [PATCH 08/30] [build] add packaging directory to cmake --- CMakeLists.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 2b317f4..1bd0174 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -148,6 +148,9 @@ endif() # Additional configuration files add_subdirectory(share) +# add packaging for automatic Versioning +add_subdirectory(packaging) + # ############# # Debian Package From 2e3e7d7feb70163f563a98dabcebac36bd12343e Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Fri, 13 Oct 2023 13:35:03 
+0200 Subject: [PATCH 09/30] Use ccache to speed up compilation --- .github/workflows/build.yml | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 01b2246..4768d1e 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -6,6 +6,17 @@ on: pull_request: branches: [ unstable ] +env: + CMAKE_C_COMPILER_LAUNCHER: ccache + CMAKE_CXX_COMPILER_LAUNCHER: ccache + CCACHE_COMPILERCHECK: content + CCACHE_BASEDIR: ${{ github.workspace }} + CCACHE_DIR: ${{ github.workspace }}/.ccache + CCACHE_MAXSIZE: 500M + CCACHE_SLOPPINESS: pch_defines,time_macros,include_file_mtime,include_file_ctime + CCACHE_COMPRESS: "1" + CCACHE_COMPRESSLEVEL: "1" + jobs: build: @@ -16,13 +27,20 @@ jobs: - {os: ubuntu-22.04, cc: gcc-12, cxx: g++-12} - {os: ubuntu-22.04, cc: clang-15, cxx: clang++-15} - {os: macos-12, cc: gcc-12, cxx: g++-12} - - {os: macos-12, cc: /usr/local/opt/llvm/bin/clang, cxx: /usr/local/opt/llvm/bin/clang++} + - {os: macos-12, cc: clang, cxx: clang++} runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v2 + - uses: actions/cache/restore@v3 + with: + path: ${{ env.CCACHE_DIR }} + key: ccache-${{ matrix.os }}-${{ matrix.cc }}-${{ github.run_id }} + restore-keys: + ccache-${{ matrix.os }}-${{ matrix.cc }}- + - name: Install ubuntu dependencies if: matrix.os == 'ubuntu-22.04' run: > @@ -30,6 +48,7 @@ jobs: sudo apt-get install lsb-release wget software-properties-common && wget -O /tmp/llvm.sh https://apt.llvm.org/llvm.sh && sudo chmod +x /tmp/llvm.sh && sudo /tmp/llvm.sh 15 && sudo apt-get install + ccache clang-15 g++-12 gfortran @@ -63,13 +82,14 @@ jobs: - name: Install homebrew dependencies if: matrix.os == 'macos-12' run: | - brew install gcc@12 llvm boost fftw hdf5 open-mpi openblas + brew install ccache gcc@12 llvm boost fftw hdf5 open-mpi openblas pip3 install mako numpy scipy mpi4py pip3 install -r requirements.txt - name: add clang cxxflags if: ${{ contains(matrix.cxx, 'clang') }} run: + echo "PATH=/usr/local/opt/llvm/bin:$PATH" >> $GITHUB_ENV echo "CXXFLAGS=-stdlib=libc++" >> $GITHUB_ENV - name: Build & Install TRIQS @@ -100,3 +120,13 @@ jobs: source $HOME/install/share/triqs/triqsvars.sh cd build ctest -j2 --output-on-failure + + - name: ccache statistics + if: always() + run: ccache -sv + + - uses: actions/cache/save@v3 + if: always() + with: + path: ${{ env.CCACHE_DIR }} + key: ccache-${{ matrix.os }}-${{ matrix.cc }}-${{ github.run_id }} From 2c70bdfada1ffb0e720ec9409ee035b822a0fbbe Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Mon, 16 Oct 2023 09:54:59 +0200 Subject: [PATCH 10/30] Prevent unintentional parallelization in OpenBLAS --- .github/workflows/build.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 4768d1e..d3ad868 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -116,6 +116,7 @@ jobs: - name: Test app4triqs env: DYLD_FALLBACK_LIBRARY_PATH: /usr/local/opt/llvm/lib + OPENBLAS_NUM_THREADS: "1" run: | source $HOME/install/share/triqs/triqsvars.sh cd build From 6b2d9b2bfbb86ab61c55e58e5ea0d7f0003a2642 Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Mon, 23 Oct 2023 16:36:00 +0200 Subject: [PATCH 11/30] Allow manual dispatch and triggering action from other workflow --- .github/workflows/build.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d3ad868..0b18a06 100644 --- 
a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -5,6 +5,8 @@ on: branches: [ unstable ] pull_request: branches: [ unstable ] + workflow_call: + workflow_dispatch: env: CMAKE_C_COMPILER_LAUNCHER: ccache From 99b65a348bf5d741ca2504c02e5cb9a8d1537c61 Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Tue, 31 Oct 2023 19:01:20 +0100 Subject: [PATCH 12/30] Fix settings environment variables --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 0b18a06..57b9a17 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -90,7 +90,7 @@ jobs: - name: add clang cxxflags if: ${{ contains(matrix.cxx, 'clang') }} - run: + run: | echo "PATH=/usr/local/opt/llvm/bin:$PATH" >> $GITHUB_ENV echo "CXXFLAGS=-stdlib=libc++" >> $GITHUB_ENV From 373d30ab61cae8defd113b944e036ea01ccb6ec6 Mon Sep 17 00:00:00 2001 From: Henri Menke Date: Tue, 31 Oct 2023 19:17:25 +0100 Subject: [PATCH 13/30] Bump actions/checkout from 2 to 4 Bumps [actions/checkout](https://github.com/actions/checkout) from 2 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v2...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: Henri Menke --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 57b9a17..6451ee9 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -34,7 +34,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions/cache/restore@v3 with: From 1a7a9af4e7bf9e2f0d07e5f6f03bd109b1e94050 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Tue, 5 Dec 2023 16:33:29 -0500 Subject: [PATCH 14/30] Fix typo in Jenkinsfile --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index c847ef6..0b0fed7 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -24,7 +24,7 @@ properties([ def platforms = [:] /****************** linux builds (in docker) */ -/* Each platform must have a cooresponding Dockerfile.PLATFORM in triqs/packaging */ +/* Each platform must have a corresponding Dockerfile.PLATFORM in triqs/packaging */ def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "sanitize"] /* .each is currently broken in jenkins */ for (int i = 0; i < dockerPlatforms.size(); i++) { From e3b947aee8cde6c63e0a4684dad198f076495e1a Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Wed, 6 Dec 2023 16:47:17 -0500 Subject: [PATCH 15/30] [cmake] Set policy CMP0144 to new --- CMakeLists.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1bd0174..45ee166 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -21,6 +21,9 @@ cmake_minimum_required(VERSION 3.20 FATAL_ERROR) cmake_policy(VERSION 3.20) +if(POLICY CMP0144) + cmake_policy(SET CMP0144 NEW) +endif() # ############ # Define Project From f6a2e9802a5e5b7720af2993302ec937ce8afbe5 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Tue, 30 Jan 2024 10:51:58 -0500 Subject: [PATCH 16/30] [cmake] Use GNUInstallDirs to obtain installation directories --- CMakeLists.txt | 3 +++ c++/app4triqs/CMakeLists.txt | 6 +++--- 2 files changed, 6 insertions(+), 
3 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 45ee166..98638f6 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -124,6 +124,9 @@ target_compile_options(${PROJECT_NAME}_warnings $<$:-Wno-tautological-constant-compare> ) +# Provide GNU Installation directories +include(GNUInstallDirs) + # ############# # Build Project diff --git a/c++/app4triqs/CMakeLists.txt b/c++/app4triqs/CMakeLists.txt index c40ef28..0986bcc 100644 --- a/c++/app4triqs/CMakeLists.txt +++ b/c++/app4triqs/CMakeLists.txt @@ -11,7 +11,7 @@ set_target_properties(${PROJECT_NAME}_c PROPERTIES VERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} ) target_include_directories(${PROJECT_NAME}_c PUBLIC $) -target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $) +target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $) target_compile_definitions(${PROJECT_NAME}_c PUBLIC APP4TRIQS_GIT_HASH=${PROJECT_GIT_HASH} TRIQS_GIT_HASH=${TRIQS_GIT_HASH} @@ -21,8 +21,8 @@ target_compile_definitions(${PROJECT_NAME}_c PUBLIC ) # Install library and headers -install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION lib) -install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h") +install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION ${CMAKE_INSTALL_LIBDIR}) +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h") # ========= Static Analyzer Checks ========== From a4565a1b34f715c11a29a194ac692fa90db077ec Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Tue, 30 Jan 2024 10:55:59 -0500 Subject: [PATCH 17/30] Set proper GNU install dirs also in env vars file --- share/app4triqsvars.sh.in | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/share/app4triqsvars.sh.in b/share/app4triqsvars.sh.in index 87618a1..487b8bc 100644 --- a/share/app4triqsvars.sh.in +++ b/share/app4triqsvars.sh.in @@ -2,9 +2,9 @@ export @PROJECT_NAME@_ROOT=@CMAKE_INSTALL_PREFIX@ -export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_PREFIX@/include:$CPLUS_INCLUDE_PATH -export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH -export LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LIBRARY_PATH -export LD_LIBRARY_PATH=@CMAKE_INSTALL_PREFIX@/lib:$LD_LIBRARY_PATH +export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_FULL_INCLUDEDIR@:$CPLUS_INCLUDE_PATH +export PATH=@CMAKE_INSTALL_FULL_BINDIR@:$PATH +export LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LIBRARY_PATH +export LD_LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LD_LIBRARY_PATH export CMAKE_PREFIX_PATH=@CMAKE_INSTALL_PREFIX@:$CMAKE_PREFIX_PATH @EXPORT_PYTHON_PATH@ From f6e53b6ef2bd9a0c0bda76890c363d85e7830a13 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Fri, 16 Feb 2024 11:56:00 -0500 Subject: [PATCH 18/30] [jenkins] Add ubuntu-intel build --- Jenkinsfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Jenkinsfile b/Jenkinsfile index 0b0fed7..4097b7b 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -25,7 +25,7 @@ def platforms = [:] /****************** linux builds (in docker) */ /* Each platform must have a corresponding Dockerfile.PLATFORM in triqs/packaging */ -def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "sanitize"] +def dockerPlatforms = ["ubuntu-clang", "ubuntu-gcc", "ubuntu-intel", "sanitize"] /* .each is currently broken in jenkins */ for (int i = 0; i < dockerPlatforms.size(); i++) { def platform = dockerPlatforms[i] From 3e1a77116ef07512ed8e7555531d1af6d6f03fea Mon Sep 17 00:00:00 2001 From: Nils Wentzell 
Date: Tue, 27 Feb 2024 12:06:45 -0500 Subject: [PATCH 19/30] [cmake] Consistently use GNUInstallDirs for install commands --- doc/CMakeLists.txt | 2 +- share/CMakeLists.txt | 2 +- share/cmake/CMakeLists.txt | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt index 40eb49c..77eb64e 100644 --- a/doc/CMakeLists.txt +++ b/doc/CMakeLists.txt @@ -74,7 +74,7 @@ endif() # --------------------------------- # Install # --------------------------------- -install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/${PROJECT_NAME} +install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/doc/${PROJECT_NAME} FILES_MATCHING REGEX "\\.(html|pdf|png|gif|jpg|svg|ico|js|xsl|css|py|txt|inv|bib|ttf|woff2|eot|sh)$" PATTERN "_*" diff --git a/share/CMakeLists.txt b/share/CMakeLists.txt index 8d258a5..4a76a47 100644 --- a/share/CMakeLists.txt +++ b/share/CMakeLists.txt @@ -18,7 +18,7 @@ if(NOT IS_SUBPROJECT FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.modulefile ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}vars.sh - DESTINATION share/${PROJECT_NAME} + DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/${PROJECT_NAME} ) message(STATUS "*********************************************************************************") diff --git a/share/cmake/CMakeLists.txt b/share/cmake/CMakeLists.txt index dad532b..c845b7b 100644 --- a/share/cmake/CMakeLists.txt +++ b/share/cmake/CMakeLists.txt @@ -4,7 +4,7 @@ install( FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config.cmake ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-config-version.cmake - DESTINATION lib/cmake/${PROJECT_NAME} + DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME} ) -install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION lib/cmake/${PROJECT_NAME}) +install(EXPORT ${PROJECT_NAME}-targets NAMESPACE ${PROJECT_NAME}:: DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}) From d202f698fef7beecede3a10c20d8d511999e2faa Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Tue, 27 Feb 2024 12:19:10 -0500 Subject: [PATCH 20/30] [cmake] Minor improvements in top-level CMakeLists.txt --- CMakeLists.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 98638f6..1909be8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -44,7 +44,7 @@ if(NOT ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} VERSION_EQUAL ${TRIQS_V message(FATAL_ERROR "The ${PROJECT_NAME} version ${PROJECT_VERSION} is not compatible with TRIQS version ${TRIQS_VERSION}.") endif() -# Default Install directory to TRIQS_ROOT if not given or invalid. +# Default Install directory to TRIQS_ROOT if not given or when provided as relative path. if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT OR (NOT IS_ABSOLUTE ${CMAKE_INSTALL_PREFIX})) message(STATUS "No install prefix given (or invalid). 
Defaulting to TRIQS_ROOT") set(CMAKE_INSTALL_PREFIX ${TRIQS_ROOT} CACHE PATH "default install path" FORCE) @@ -87,12 +87,12 @@ if(Build_Tests) enable_testing() endif() -# Build static libraries by default -option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF) - # ############ # Global Compilation Settings +# Build static libraries by default +option(BUILD_SHARED_LIBS "Enable compilation of shared libraries" OFF) + # Export the list of compile-commands into compile_commands.json set(CMAKE_EXPORT_COMPILE_COMMANDS ON) From 1ac5c875e40efe5e6028c210a01e039734073e67 Mon Sep 17 00:00:00 2001 From: Thomas Hahn Date: Wed, 28 Feb 2024 16:10:12 -0500 Subject: [PATCH 21/30] [cmake] Use unstable branch of cpp2py --- deps/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/CMakeLists.txt b/deps/CMakeLists.txt index f6a3b89..e26dfba 100644 --- a/deps/CMakeLists.txt +++ b/deps/CMakeLists.txt @@ -52,7 +52,7 @@ if(PythonSupport OR (NOT IS_SUBPROJECT AND Build_Documentation)) external_dependency(Cpp2Py GIT_REPO https://github.com/TRIQS/cpp2py VERSION 2.0 - GIT_TAG master + GIT_TAG unstable BUILD_ALWAYS EXCLUDE_FROM_ALL ) From e92057573d5c3db9dc1fce6f386e2ae57f4b124d Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Fri, 1 Mar 2024 10:37:51 -0500 Subject: [PATCH 22/30] [cmake] Fix target inclusion directory to use GNUInstallDirs --- share/cmake/app4triqs-config.cmake.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/cmake/app4triqs-config.cmake.in b/share/cmake/app4triqs-config.cmake.in index f5a8427..b349060 100644 --- a/share/cmake/app4triqs-config.cmake.in +++ b/share/cmake/app4triqs-config.cmake.in @@ -24,7 +24,7 @@ set(@PROJECT_NAME@_ROOT @CMAKE_INSTALL_PREFIX@ CACHE STRING "@PROJECT_NAME@ root #find_dep(depname 1.0) # Include the exported targets of this project -include(@CMAKE_INSTALL_PREFIX@/lib/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake) +include(@CMAKE_INSTALL_LIBDIR@/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake) message(STATUS "Found @PROJECT_NAME@-config.cmake with version @PROJECT_VERSION@, hash = @PROJECT_GIT_HASH@, root = @CMAKE_INSTALL_PREFIX@") From 8d850803cb12cf92662d21c7946901bb3abe0af8 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Fri, 1 Mar 2024 13:55:33 -0500 Subject: [PATCH 23/30] [cmake] Only use GNUInstallDirs for LIBDIR --- c++/app4triqs/CMakeLists.txt | 2 +- doc/CMakeLists.txt | 2 +- share/CMakeLists.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/c++/app4triqs/CMakeLists.txt b/c++/app4triqs/CMakeLists.txt index 0986bcc..4d12e77 100644 --- a/c++/app4triqs/CMakeLists.txt +++ b/c++/app4triqs/CMakeLists.txt @@ -22,7 +22,7 @@ target_compile_definitions(${PROJECT_NAME}_c PUBLIC # Install library and headers install(TARGETS ${PROJECT_NAME}_c EXPORT ${PROJECT_NAME}-targets DESTINATION ${CMAKE_INSTALL_LIBDIR}) -install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h") +install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DESTINATION include FILES_MATCHING PATTERN "*.hpp" PATTERN "*.h") # ========= Static Analyzer Checks ========== diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt index 77eb64e..40eb49c 100644 --- a/doc/CMakeLists.txt +++ b/doc/CMakeLists.txt @@ -74,7 +74,7 @@ endif() # --------------------------------- # Install # --------------------------------- -install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/doc/${PROJECT_NAME} 
+install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html/ COMPONENT documentation DESTINATION share/doc/${PROJECT_NAME} FILES_MATCHING REGEX "\\.(html|pdf|png|gif|jpg|svg|ico|js|xsl|css|py|txt|inv|bib|ttf|woff2|eot|sh)$" PATTERN "_*" diff --git a/share/CMakeLists.txt b/share/CMakeLists.txt index 4a76a47..8d258a5 100644 --- a/share/CMakeLists.txt +++ b/share/CMakeLists.txt @@ -18,7 +18,7 @@ if(NOT IS_SUBPROJECT FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.modulefile ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}vars.sh - DESTINATION ${CMAKE_INSTALL_DATAROOTDIR}/${PROJECT_NAME} + DESTINATION share/${PROJECT_NAME} ) message(STATUS "*********************************************************************************") From 7bac90fdcba8ab7f7a72a9b198873f1a0b16c564 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Fri, 1 Mar 2024 13:55:40 -0500 Subject: [PATCH 24/30] [cmake] Correct target file inclusion PATH to be absolute --- share/cmake/app4triqs-config.cmake.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/share/cmake/app4triqs-config.cmake.in b/share/cmake/app4triqs-config.cmake.in index b349060..b6bb5bb 100644 --- a/share/cmake/app4triqs-config.cmake.in +++ b/share/cmake/app4triqs-config.cmake.in @@ -24,7 +24,7 @@ set(@PROJECT_NAME@_ROOT @CMAKE_INSTALL_PREFIX@ CACHE STRING "@PROJECT_NAME@ root #find_dep(depname 1.0) # Include the exported targets of this project -include(@CMAKE_INSTALL_LIBDIR@/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake) +include(@CMAKE_INSTALL_FULL_LIBDIR@/cmake/@PROJECT_NAME@/@PROJECT_NAME@-targets.cmake) message(STATUS "Found @PROJECT_NAME@-config.cmake with version @PROJECT_VERSION@, hash = @PROJECT_GIT_HASH@, root = @CMAKE_INSTALL_PREFIX@") From 9c6917526c7a21e99cbabb8a88c4ca259ca6e6b9 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Mon, 4 Mar 2024 17:25:14 -0500 Subject: [PATCH 25/30] [cmake] Do not define DEBUG macros for RelWithDebInfo builds --- c++/app4triqs/CMakeLists.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/c++/app4triqs/CMakeLists.txt b/c++/app4triqs/CMakeLists.txt index 4d12e77..fda42ad 100644 --- a/c++/app4triqs/CMakeLists.txt +++ b/c++/app4triqs/CMakeLists.txt @@ -15,9 +15,9 @@ target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $:APP4TRIQS_DEBUG> - $<$:TRIQS_DEBUG> - $<$:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK> + $<$:APP4TRIQS_DEBUG> + $<$:TRIQS_DEBUG> + $<$:TRIQS_ARRAYS_ENFORCE_BOUNDCHECK> ) # Install library and headers From 44580aad298c14abe8267f6a0d60bdb9a24db023 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Mon, 4 Mar 2024 17:51:58 -0500 Subject: [PATCH 26/30] Additional corrections to 8d850803 --- c++/app4triqs/CMakeLists.txt | 2 +- share/app4triqsvars.sh.in | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/c++/app4triqs/CMakeLists.txt b/c++/app4triqs/CMakeLists.txt index fda42ad..5f51a38 100644 --- a/c++/app4triqs/CMakeLists.txt +++ b/c++/app4triqs/CMakeLists.txt @@ -11,7 +11,7 @@ set_target_properties(${PROJECT_NAME}_c PROPERTIES VERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR} ) target_include_directories(${PROJECT_NAME}_c PUBLIC $) -target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $) +target_include_directories(${PROJECT_NAME}_c SYSTEM INTERFACE $) target_compile_definitions(${PROJECT_NAME}_c PUBLIC APP4TRIQS_GIT_HASH=${PROJECT_GIT_HASH} TRIQS_GIT_HASH=${TRIQS_GIT_HASH} diff --git a/share/app4triqsvars.sh.in b/share/app4triqsvars.sh.in index 487b8bc..a499f10 100644 --- a/share/app4triqsvars.sh.in +++ b/share/app4triqsvars.sh.in @@ -2,8 
+2,8 @@ export @PROJECT_NAME@_ROOT=@CMAKE_INSTALL_PREFIX@ -export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_FULL_INCLUDEDIR@:$CPLUS_INCLUDE_PATH -export PATH=@CMAKE_INSTALL_FULL_BINDIR@:$PATH +export CPLUS_INCLUDE_PATH=@CMAKE_INSTALL_PREFIX@/include:$CPLUS_INCLUDE_PATH +export PATH=@CMAKE_INSTALL_PREFIX@/bin:$PATH export LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LIBRARY_PATH export LD_LIBRARY_PATH=@CMAKE_INSTALL_FULL_LIBDIR@:$LD_LIBRARY_PATH export CMAKE_PREFIX_PATH=@CMAKE_INSTALL_PREFIX@:$CMAKE_PREFIX_PATH From a3a49e514af824a6027316ba4f2f784f94da8379 Mon Sep 17 00:00:00 2001 From: Thomas Hahn Date: Tue, 5 Mar 2024 10:11:26 -0500 Subject: [PATCH 27/30] Update macos build instructions in Jenkinsfile and build.yml --- .github/workflows/build.yml | 9 +++++++-- Jenkinsfile | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6451ee9..15f352c 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -85,8 +85,13 @@ jobs: if: matrix.os == 'macos-12' run: | brew install ccache gcc@12 llvm boost fftw hdf5 open-mpi openblas - pip3 install mako numpy scipy mpi4py - pip3 install -r requirements.txt + mkdir $HOME/.venv + python3 -m venv $HOME/.venv/my_python + source $HOME/.venv/my_python/bin/activate + pip install mako numpy scipy mpi4py + pip install -r requirements.txt + echo "VIRTUAL_ENV=$VIRTUAL_ENV" >> $GITHUB_ENV + echo "PATH=$PATH" >> $GITHUB_ENV - name: add clang cxxflags if: ${{ contains(matrix.cxx, 'clang') }} diff --git a/Jenkinsfile b/Jenkinsfile index 4097b7b..ff81d63 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -89,6 +89,7 @@ for (int i = 0; i < osxPlatforms.size(); i++) { "LD_LIBRARY_PATH=$hdf5/lib", "PYTHONPATH=$installDir/lib/python3.9/site-packages", "CMAKE_PREFIX_PATH=$venv/lib/cmake/triqs", + "VIRTUAL_ENV=$venv", "OMP_NUM_THREADS=2"]) { deleteDir() /* note: this is installing into the parent (triqs) venv (install dir), which is thus shared among apps and so not be completely safe */ From c2fcb83ac1ec556dc1e46c424c85aa9de954d2e0 Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Wed, 6 Mar 2024 10:43:32 -0500 Subject: [PATCH 28/30] Disable notes about C++ ABI changes when using gcc --- CMakeLists.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 1909be8..0ab66c7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -112,6 +112,7 @@ target_compile_options(${PROJECT_NAME}_warnings -Wpedantic -Wno-sign-compare $<$:-Wno-comma-subscript> + $<$:-Wno-psabi> # Disable notes about ABI changes $<$:-Wshadow=local> $<$:-Wno-attributes> $<$:-Wno-deprecated-declarations> From 2fd6922983060ad8b47ff004bb8ea4324169fbaf Mon Sep 17 00:00:00 2001 From: Nils Wentzell Date: Mon, 11 Mar 2024 16:34:21 -0400 Subject: [PATCH 29/30] Update requirements.txt with dependencies for documentation builds --- requirements.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/requirements.txt b/requirements.txt index f1c7dff..5fa6fa3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,3 +2,10 @@ mako numpy scipy +# For documentation builds we additionaly require +#sphinx +#numpydoc +#nbsphinx +#sphinx_rtd_theme +#myst_parser +#linkify-it-py From 2ec749ad4b107f606d42d11f059cbdc7b2651383 Mon Sep 17 00:00:00 2001 From: Thomas Hahn Date: Fri, 22 Mar 2024 15:26:07 -0400 Subject: [PATCH 30/30] Remove pip3 install command from Dockerfile --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5502cd9..1ae1711 100644 --- 
a/Dockerfile +++ b/Dockerfile @@ -2,8 +2,8 @@ FROM flatironinstitute/triqs:unstable-ubuntu-clang ARG APPNAME=app4triqs -COPY requirements.txt /src/$APPNAME/requirements.txt -RUN pip3 install -r /src/$APPNAME/requirements.txt +# Install here missing dependencies, e.g. +# RUN apt-get install -y python3-skimage COPY --chown=build . $SRC/$APPNAME WORKDIR $BUILD/$APPNAME
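Note on the recurring CMake pattern in patches 07, 16, 17, 19, and 22-26 above: install destinations are taken from the GNUInstallDirs module instead of hard-coded "lib"/"include" paths, and debug-only macros are attached through configuration-dependent generator expressions. The sketch below is a minimal, self-contained illustration of that pattern, not the actual app4triqs CMakeLists.txt; the project, target, and macro names (demo, demo_c, DEMO_DEBUG) are placeholders introduced here for illustration only.

    # Minimal sketch of GNUInstallDirs-based installation and
    # configuration-gated compile definitions (placeholder names).
    cmake_minimum_required(VERSION 3.20)
    project(demo VERSION 1.0 LANGUAGES CXX)

    # Provides CMAKE_INSTALL_LIBDIR, CMAKE_INSTALL_INCLUDEDIR and their
    # absolute CMAKE_INSTALL_FULL_* counterparts (e.g. lib vs. lib64).
    include(GNUInstallDirs)

    add_library(demo_c src/demo.cpp)

    # Define DEMO_DEBUG only for the Debug configuration; listing several
    # configurations, e.g. $<CONFIG:Debug,RelWithDebInfo>, extends the
    # definition to RelWithDebInfo builds in the spirit of patch 07.
    target_compile_definitions(demo_c PUBLIC $<$<CONFIG:Debug>:DEMO_DEBUG>)

    # Install the library and its exported targets relative to the chosen
    # libdir, so downstream projects can include demo-targets.cmake from
    # ${CMAKE_INSTALL_FULL_LIBDIR}/cmake/demo, as in patches 16 and 19.
    install(TARGETS demo_c EXPORT demo-targets DESTINATION ${CMAKE_INSTALL_LIBDIR})
    install(EXPORT demo-targets NAMESPACE demo:: DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/demo)

Taking the destinations from GNUInstallDirs keeps the package relocatable across distributions that use lib64, while gating the debug macros on the configuration avoids paying for bounds checks in optimized builds; patch 25 narrows the gate back to Debug only for exactly that reason.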