Merge pull request #1325 from pllim/cubeviz-metadata
ENH: Cubeviz metadata viewer
pllim authored May 19, 2022
2 parents 806c94d + 07270be commit 12d97b8
Showing 22 changed files with 460 additions and 169 deletions.
2 changes: 2 additions & 0 deletions CHANGES.rst
@@ -14,6 +14,8 @@ Cubeviz

- Cubeviz image viewer now has coordinates info panel like Imviz. [#1315]

- New Metadata Viewer plugin. [#1325]

Imviz
^^^^^

7 changes: 7 additions & 0 deletions docs/cubeviz/plugins.rst
@@ -9,6 +9,13 @@ more detail under :ref:`Specviz: Data Analysis Plugins <specviz-plugins>`. All
are accessed via the :guilabel:`plugin` icon in the upper right corner
of the Cubeviz application.

.. _cubeviz-metadata-viewer:

Metadata Viewer
===============

This plugin allows viewing of any metadata associated with the selected data.

.. _cubeviz-export-plot:

Export Plot
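For orientation, here is a minimal usage sketch of the new plugin from a notebook, assuming the public jdaviz helper API and a placeholder file name; the plugin is registered in the tray as g-metadata-viewer (see cubeviz.yaml below):

from jdaviz import Cubeviz

cubeviz = Cubeviz()
cubeviz.load_data("my_cube.fits")  # placeholder file name
cubeviz.show()

# The plugin state is exposed through traitlets on the tray item.
mv = cubeviz.app.get_tray_item_from_name('g-metadata-viewer')
mv.dataset_selected = cubeviz.app.data_collection[0].label
print(mv.metadata)  # list of key/value(/comment) string entries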
1 change: 1 addition & 0 deletions jdaviz/configs/cubeviz/cubeviz.yaml
@@ -17,6 +17,7 @@ toolbar:
- g-subset-tools
- g-coords-info
tray:
- g-metadata-viewer
- g-plot-options
- g-subset-plugin
- cubeviz-slice
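The g-metadata-viewer entry added to the tray above maps to a plugin class registered through jdaviz's tray registry. A rough sketch of that registration pattern (the label text is an assumption, not copied from this diff):

from jdaviz.core.registries import tray_registry
from jdaviz.core.template_mixin import TemplateMixin, DatasetSelectMixin

@tray_registry('g-metadata-viewer', label="Metadata Viewer")  # assumed label
class MetadataViewer(TemplateMixin, DatasetSelectMixin):
    template_file = __file__, "metadata_viewer.vue"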
40 changes: 28 additions & 12 deletions jdaviz/configs/cubeviz/plugins/parsers.py
@@ -9,6 +9,7 @@
from specutils import Spectrum1D

from jdaviz.core.registries import data_parser_registry
from jdaviz.utils import standardize_metadata, PRIHDR_KEY

__all__ = ['parse_data']

@@ -45,7 +46,7 @@ def parse_data(app, file_obj, data_type=None, data_label=None):
# generic enough to work with other file types (e.g. ASDF). For now, this
# supports MaNGA and JWST data.
if isinstance(file_obj, fits.hdu.hdulist.HDUList):
-_parse_hdu(app, file_obj, file_name=data_label)
+_parse_hdulist(app, file_obj, file_name=data_label)
elif isinstance(file_obj, str) and os.path.exists(file_obj):
file_name = os.path.basename(file_obj)

@@ -68,7 +69,7 @@ def parse_data(app, file_obj, data_type=None, data_label=None):
_parse_esa_s3d(app, hdulist, data_label, ext=ext, viewer_name=viewer_name)

else:
-_parse_hdu(app, hdulist, file_name=data_label or file_name)
+_parse_hdulist(app, hdulist, file_name=data_label or file_name)

# If the data types are custom data objects, use explicit parsers. Note
# that this relies on the glue-astronomy machinery to turn the data object
@@ -82,7 +83,7 @@ def parse_data(app, file_obj, data_type=None, data_label=None):
raise NotImplementedError(f'Unsupported data format: {file_obj}')


-def _parse_hdu(app, hdulist, file_name=None):
+def _parse_hdulist(app, hdulist, file_name=None):
if file_name is None:
if hasattr(hdulist, 'file_name'):
file_name = hdulist.file_name
@@ -116,21 +117,25 @@ def _parse_hdu(app, hdulist, file_name=None):

flux = hdu.data << flux_unit

metadata = standardize_metadata(hdu.header)
if hdu.name != 'PRIMARY' and 'PRIMARY' in hdulist:
metadata[PRIHDR_KEY] = standardize_metadata(hdulist['PRIMARY'].header)

try:
-    sc = Spectrum1D(flux=flux, wcs=wcs)
+    sc = Spectrum1D(flux=flux, wcs=wcs, meta=metadata)
except Exception as e:
logging.warning(e)
continue

app.add_data(sc, data_label)

# If the data type is some kind of integer, assume it's the mask/dq
-if hdu.data.dtype in (int, np.uint, np.uint32) or \
-        any(x in hdu.name.lower() for x in EXT_TYPES['mask']):
+if (hdu.data.dtype in (int, np.uint, np.uint32) or
+        any(x in hdu.name.lower() for x in EXT_TYPES['mask'])):
app.add_data_to_viewer('mask-viewer', data_label)

-if 'errtype' in [x.lower() for x in hdu.header.keys()] or \
-        any(x in hdu.name.lower() for x in EXT_TYPES['uncert']):
+if ('errtype' in [x.lower() for x in hdu.header.keys()] or
+        any(x in hdu.name.lower() for x in EXT_TYPES['uncert'])):
app.add_data_to_viewer('uncert-viewer', data_label)

if any(x in hdu.name.lower() for x in EXT_TYPES['flux']):
@@ -161,11 +166,16 @@ def _parse_jwst_s3d(app, hdulist, data_label, ext='SCI', viewer_name='flux-viewe
unit = u.Unit(hdulist[ext].header.get('BUNIT', 'count'))
flux = hdulist[ext].data << unit
wcs = WCS(hdulist['SCI'].header, hdulist) # Everything uses SCI WCS
-data = Spectrum1D(flux, wcs=wcs)
+metadata = standardize_metadata(hdulist[ext].header)
+if hdulist[ext].name != 'PRIMARY' and 'PRIMARY' in hdulist:
+    metadata[PRIHDR_KEY] = standardize_metadata(hdulist['PRIMARY'].header)
+
+data = Spectrum1D(flux, wcs=wcs, meta=metadata)

# NOTE: Tried to only pass in sliced WCS but got error in Glue.
# sliced_wcs = wcs[:, 0, 0] # Only want wavelengths
-# data = Spectrum1D(flux, wcs=sliced_wcs)
+# data = Spectrum1D(flux, wcs=sliced_wcs, meta=metadata)

app.add_data(data, data_label)
app.add_data_to_viewer(viewer_name, data_label)
@@ -194,7 +204,13 @@ def _parse_esa_s3d(app, hdulist, data_label, ext='DATA', viewer_name='flux-viewe
wcs = WCS(wcs_dict)
flux = np.moveaxis(flux, 0, -1)
flux = np.swapaxes(flux, 0, 1)
-data = Spectrum1D(flux, wcs=wcs)
+metadata = standardize_metadata(hdulist[ext].header)
+metadata.update(wcs_dict)  # To be internally consistent
+if hdulist[ext].name != 'PRIMARY' and 'PRIMARY' in hdulist:
+    metadata[PRIHDR_KEY] = standardize_metadata(hdulist['PRIMARY'].header)
+
+data = Spectrum1D(flux, wcs=wcs, meta=metadata)

app.add_data(data, data_label)
app.add_data_to_viewer(viewer_name, data_label)
@@ -225,7 +241,7 @@ def _parse_spectrum1d_3d(app, file_obj, data_label=None):

flux = np.moveaxis(flux, 1, 0)

-s1d = Spectrum1D(flux=flux, wcs=file_obj.wcs)
+s1d = Spectrum1D(flux=flux, wcs=file_obj.wcs, meta=standardize_metadata(file_obj.meta))

cur_data_label = f"{data_label}[{attr.upper()}]"
app.add_data(s1d, cur_data_label)
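The parser changes above rely on two helpers imported from jdaviz.utils whose implementations are not shown in this diff: standardize_metadata, which normalizes a FITS header (or plain dict) into a regular dict, and PRIHDR_KEY, a reserved key under which the PRIMARY header is nested. A sketch of the assumed behavior (the constant values are placeholders):

from astropy.io import fits

PRIHDR_KEY = '_primary_header'          # placeholder value
COMMENTCARD_KEY = '_fits_comment_card'  # placeholder value

def standardize_metadata(metadata):
    """Return metadata as a plain dict, keeping FITS comment cards retrievable."""
    if isinstance(metadata, fits.Header):
        out = dict(metadata)
        out[COMMENTCARD_KEY] = metadata.comments  # per-keyword comments for the viewer
        return out
    if isinstance(metadata, dict):
        return dict(metadata)
    raise TypeError("metadata must be a dict or astropy.io.fits.Header")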
9 changes: 9 additions & 0 deletions jdaviz/configs/cubeviz/plugins/tests/test_parsers.py
@@ -5,6 +5,8 @@
from astropy.wcs import WCS
from specutils import Spectrum1D

from jdaviz.utils import PRIHDR_KEY


@pytest.fixture
def image_hdu_obj():
@@ -58,6 +60,12 @@ def test_fits_image_hdu_parse_from_file(tmpdir, image_hdu_obj, cubeviz_helper):

# This tests the same data as test_fits_image_hdu_parse above.

assert cubeviz_helper.app.data_collection[0].meta['EXTNAME'] == 'FLUX'
assert cubeviz_helper.app.data_collection[1].meta['EXTNAME'] == 'MASK'
assert cubeviz_helper.app.data_collection[2].meta['EXTNAME'] == 'ERR'
for i in range(3):
assert cubeviz_helper.app.data_collection[i].meta[PRIHDR_KEY]['BITPIX'] == 8

flux_viewer = cubeviz_helper.app.get_viewer('flux-viewer')
flux_viewer.on_mouse_or_key_event({'event': 'mousemove', 'domain': {'x': 0, 'y': 0}})
assert flux_viewer.label_mouseover.pixel == 'x=00.0 y=00.0'
@@ -114,6 +122,7 @@ def test_spectrum1d_parse(spectrum1d, cubeviz_helper):

assert len(cubeviz_helper.app.data_collection) == 1
assert cubeviz_helper.app.data_collection[0].label.endswith('[FLUX]')
assert cubeviz_helper.app.data_collection[0].meta['uncertainty_type'] == 'std'

# Coordinate display is only for spatial image, which is missing here.
flux_viewer = cubeviz_helper.app.get_viewer('flux-viewer')
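A standalone illustration of the nested layout these tests check: each non-PRIMARY extension keeps its own header cards plus the PRIMARY header under PRIHDR_KEY (the constant's value below is a placeholder; the real one lives in jdaviz.utils):

import numpy as np
from astropy.io import fits

PRIHDR_KEY = '_primary_header'  # placeholder value

prihdu = fits.PrimaryHDU()  # empty primary header; BITPIX defaults to 8
scihdu = fits.ImageHDU(np.zeros((2, 2, 2)), name='FLUX')
hdulist = fits.HDUList([prihdu, scihdu])

hdu = hdulist['FLUX']
metadata = dict(hdu.header)
if hdu.name != 'PRIMARY' and 'PRIMARY' in hdulist:
    metadata[PRIHDR_KEY] = dict(hdulist['PRIMARY'].header)

assert metadata['EXTNAME'] == 'FLUX'
assert metadata[PRIHDR_KEY]['BITPIX'] == 8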
86 changes: 71 additions & 15 deletions jdaviz/configs/default/plugins/metadata_viewer/metadata_viewer.py
@@ -1,8 +1,8 @@
-from astropy.io.fits import Header
from traitlets import Bool, List, observe

from jdaviz.core.registries import tray_registry
from jdaviz.core.template_mixin import TemplateMixin, DatasetSelectMixin
+from jdaviz.utils import PRIHDR_KEY, COMMENTCARD_KEY

__all__ = ['MetadataViewer']

@@ -11,36 +11,92 @@
class MetadataViewer(TemplateMixin, DatasetSelectMixin):
template_file = __file__, "metadata_viewer.vue"
has_metadata = Bool(False).tag(sync=True)
has_primary = Bool(False).tag(sync=True)
show_primary = Bool(False).tag(sync=True)
has_comments = Bool(False).tag(sync=True)
metadata = List([]).tag(sync=True)

def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# override the default filters on dataset entries to require metadata in entries
self.dataset.add_filter('not_from_plugin')

def reset(self):
self.has_metadata = False
self.has_primary = False
self.show_primary = False
self.has_comments = False
self.metadata = []

@observe("dataset_selected")
-def _show_metadata(self, event):
+def show_metadata(self, event):
data = self.dataset.selected_dc_item
-if data is None or not hasattr(data, 'meta') or not isinstance(data.meta, dict) or len(data.meta) < 1:  # noqa
-    self.has_metadata = False
-    self.metadata = []
+if (data is None or not hasattr(data, 'meta') or not isinstance(data.meta, dict)
+        or len(data.meta) < 1):
+    self.reset()
return

-if 'header' in data.meta and isinstance(data.meta['header'], (dict, Header)):
-    if isinstance(data.meta['header'], Header):  # Specviz
-        meta = dict(data.meta['header'])
-    else:
-        meta = data.meta['header']
+if PRIHDR_KEY in data.meta:
+    self.has_primary = True
else:
-    meta = data.meta
+    self.has_primary = False
+    self.show_primary = False

self.find_public_metadata(data.meta, primary_only=self.show_primary)

@observe("show_primary")
def handle_show_primary(self, event):
if not self.show_primary:
self.show_metadata(event)
return

data = self.dataset.selected_dc_item
if (data is None or not hasattr(data, 'meta') or not isinstance(data.meta, dict)
or len(data.meta) < 1):
self.reset()
return

self.find_public_metadata(data.meta, primary_only=True)

def find_public_metadata(self, meta, primary_only=False):
if primary_only:
if PRIHDR_KEY in meta:
meta = meta[PRIHDR_KEY]
else:
self.reset()
return

d = flatten_nested_dict(meta)
-for badkey in ('COMMENT', 'HISTORY', ''):
+# Some FITS keywords cause "# ipykernel cannot clean for JSON" messages.
+# Also, we want to hide internal metadata that starts with underscore.
+badkeys = ['COMMENT', 'HISTORY', ''] + [k for k in d if k.startswith('_')]
+for badkey in badkeys:
    if badkey in d:
-        del d[badkey]  # ipykernel cannot clean for JSON
+        del d[badkey]

if COMMENTCARD_KEY in meta:
has_comments = True

def get_comment(key):
if key in meta[COMMENTCARD_KEY]._header:
val = meta[COMMENTCARD_KEY][key]
else:
val = ''
return val
else:
has_comments = False

def get_comment(key):
return ''

# TODO: Option to not sort?
-self.metadata = sorted(zip(d.keys(), map(str, d.values())))
-self.has_metadata = True
+public_meta = sorted(zip(d.keys(), map(str, d.values()), map(get_comment, d.keys())))
+if len(public_meta) > 0:
+    self.metadata = public_meta
+    self.has_metadata = True
+    self.has_comments = has_comments
+else:
+    self.reset()


# TODO: If this is natively supported by asdf in the future, replace with native function.
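find_public_metadata above calls flatten_nested_dict, which is defined elsewhere in this module and not shown in the rendered hunk. A rough sketch of the assumed behavior, collapsing nested dicts (e.g. ASDF-style trees) into a single level of dotted keys:

def flatten_nested_dict(d, parent_key='', sep='.'):
    """Flatten nested dicts so the viewer can render one key/value table."""
    items = {}
    for key, value in d.items():
        new_key = f"{parent_key}{sep}{key}" if parent_key else str(key)
        if isinstance(value, dict):
            items.update(flatten_nested_dict(value, parent_key=new_key, sep=sep))
        else:
            items[new_key] = value
    return items

# e.g. {'wcs': {'CTYPE1': 'RA---TAN'}, 'EXTNAME': 'SCI'}
#   -> {'wcs.CTYPE1': 'RA---TAN', 'EXTNAME': 'SCI'}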
11 changes: 11 additions & 0 deletions jdaviz/configs/default/plugins/metadata_viewer/metadata_viewer.vue
@@ -14,18 +14,29 @@
hint="Select the data to see metadata."
/>

<v-row v-if="has_primary">
<v-switch
label="Show primary header"
hint="Show MEF primary header metadata instead."
v-model="show_primary"
persistent-hint>
</v-switch>
</v-row>

<j-plugin-section-header>Metadata</j-plugin-section-header>
<div v-if="has_metadata">
<v-row no-gutters>
<v-col cols=6><U>Key</U></v-col>
<v-col cols=6><U>Value</U></v-col>
<v-col v-if="has_comments" cols=6><U>Comment</U></v-col>
</v-row>
<v-row
v-for="item in metadata"
:key="item[0]"
no-gutters>
<v-col cols=6>{{ item[0] }}</v-col>
<v-col cols=6>{{ item[1] }}</v-col>
<v-col v-if="has_comments" cols=6>{{ item[2] }}</v-col>
</v-row>
</div>
<v-row v-else>
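Because the new v-switch is bound to the show_primary traitlet, the same toggle can be driven from Python (sketch; the file name is a placeholder and the helper API is assumed as in the earlier example):

from jdaviz import Cubeviz

cubeviz = Cubeviz()
cubeviz.load_data("my_cube.fits")  # placeholder file name

mv = cubeviz.app.get_tray_item_from_name('g-metadata-viewer')
mv.dataset_selected = cubeviz.app.data_collection[0].label
if mv.has_primary:
    mv.show_primary = True  # observed by handle_show_primary; the PRIMARY header is listed instead
    print(mv.metadata)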