diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index a7c5b6c9..5dfce732 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -15,7 +15,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-20.04, macos-latest]
         python-version: [3.5, 3.6, 3.7, 3.8]
 
     steps:
@@ -25,12 +25,12 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies for Linux
-      if: matrix.os == 'ubuntu-latest'
+      if: matrix.os == 'ubuntu-20.04'
       run: |
-        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev
+        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev octave
         sudo apt-get install libhdf5-dev libnetcdf-dev
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Install dependencies for MacOS
       if: matrix.os == 'macos-latest'
@@ -39,8 +39,9 @@ jobs:
         brew install geos
         brew install hdf5
         brew install netcdf
+        brew install octave
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Lint with flake8
       run: |
@@ -51,4 +52,6 @@ jobs:
     - name: Test with pytest
       run: |
         pip install --no-deps .
-        pytest
+        git clone ${{ secrets.TMD_MATLAB_TOOLBOX }}
+        pytest --username=${{ secrets.EARTHDATA_USERNAME }} \
+            --password=${{ secrets.EARTHDATA_PASSWORD }}
diff --git a/.github/workflows/python-request.yml b/.github/workflows/python-request.yml
index a1812299..f9603c84 100644
--- a/.github/workflows/python-request.yml
+++ b/.github/workflows/python-request.yml
@@ -12,7 +12,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-20.04, macos-latest]
         python-version: [3.5, 3.6, 3.7, 3.8]
 
     steps:
@@ -22,12 +22,12 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies for Linux
-      if: matrix.os == 'ubuntu-latest'
+      if: matrix.os == 'ubuntu-20.04'
      run: |
-        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev
+        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev octave
         sudo apt-get install libhdf5-dev libnetcdf-dev
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Install dependencies for MacOS
       if: matrix.os == 'macos-latest'
@@ -36,8 +36,9 @@ jobs:
         brew install geos
         brew install hdf5
         brew install netcdf
+        brew install octave
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Lint with flake8
       run: |
@@ -48,4 +49,6 @@ jobs:
     - name: Test with pytest
       run: |
         pip install --no-deps .
-        pytest
+        git clone ${{ secrets.TMD_MATLAB_TOOLBOX }}
+        pytest --username=${{ secrets.EARTHDATA_USERNAME }} \
+            --password=${{ secrets.EARTHDATA_PASSWORD }}
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 85ee5afb..1da1697d 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -22,7 +22,7 @@
 author = 'Tyler C. Sutterley'
 
 # The full version, including alpha/beta/rc tags
-release = '1.0.2.6'
+release = '1.0.2.7'
 
 
 # -- General configuration ---------------------------------------------------
diff --git a/pyTMD/bilinear_interp.py b/pyTMD/bilinear_interp.py
index 7e7e9c2e..f5af0190 100644
--- a/pyTMD/bilinear_interp.py
+++ b/pyTMD/bilinear_interp.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 u"""
-bilinear_interp.py (07/2020)
+bilinear_interp.py (08/2020)
 Bilinear interpolation of input data to output coordinates
 
 CALLING SEQUENCE:
@@ -25,6 +25,7 @@
         https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
 
 UPDATE HISTORY:
+    Updated 08/2020: check that output coordinates are within bounds
    Updated 07/2020: split into separate function
    Updated 06/2020: use argmin and argmax in bilinear interpolation
    Updated 09/2017: Rewritten in Python
@@ -57,6 +58,9 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
     #-- grid step size of tide model
     dlon = np.abs(ilon[1] - ilon[0])
     dlat = np.abs(ilat[1] - ilat[0])
+    #-- find valid points (within bounds)
+    valid, = np.nonzero((lon >= ilon.min()) & (lon <= ilon.max()) &
+        (lat > ilat.min()) & (lat < ilat.max()))
     #-- Convert input coordinates to radians
     phi = ilon*dtr
     th = (90.0 - ilat)*dtr
@@ -64,8 +68,12 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
     xphi = lon*dtr
     xth = (90.0 - lat)*dtr
     #-- interpolate gridded data values to data
-    data = np.zeros_like(lon,dtype=dtype)
-    for i,l in enumerate(lon):
+    npts = len(lon)
+    data = np.ma.zeros((npts),dtype=dtype)
+    data.mask = np.ones((npts),dtype=np.bool)
+    data.mask[valid] = False
+    #-- for each valid point
+    for i in valid:
         #-- calculating the indices for the original grid
         dx = (ilon - np.floor(lon[i]/dlon)*dlon)**2
         dy = (ilat - np.floor(lat[i]/dlat)*dlat)**2
@@ -73,13 +81,13 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
         ith = np.argmin(dy)
         #-- if on corner value: use exact
         if ((lat[i] == ilat[ith]) & (lon[i] == ilon[iph])):
-            data[i] = idata[ith,iph]
+            data.data[i] = idata[ith,iph]
         elif ((lat[i] == ilat[ith+1]) & (lon[i] == ilon[iph])):
-            data[i] = idata[ith+1,iph]
+            data.data[i] = idata[ith+1,iph]
         elif ((lat[i] == ilat[ith]) & (lon[i] == ilon[iph+1])):
-            data[i] = idata[ith,iph+1]
+            data.data[i] = idata[ith,iph+1]
         elif ((lat[i] == ilat[ith+1]) & (lon[i] == ilon[iph+1])):
-            data[i] = idata[ith+1,iph+1]
+            data.data[i] = idata[ith+1,iph+1]
         else:
             #-- corner weight values for i,j
             Wa = (xphi[i]-phi[iph])*(xth[i]-th[ith])
@@ -94,6 +102,6 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
             Ic = idata[ith+1,iph]#-- (0,1)
             Id = idata[ith+1,iph+1]#-- (1,1)
             #-- calculate interpolated value for i
-            data[i] = (Ia*Wa + Ib*Wb + Ic*Wc + Id*Wd)/W
+            data.data[i] = (Ia*Wa + Ib*Wb + Ic*Wc + Id*Wd)/W
     #-- return interpolated values
     return data
diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py
index 2878217e..e9b6ab2a 100644
--- a/pyTMD/read_tide_model.py
+++ b/pyTMD/read_tide_model.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 u"""
-read_tide_model.py (07/2020)
+read_tide_model.py (08/2020)
 Reads files for a tidal model and makes initial calculations to run tide program
 Includes functions to extract tidal harmonic constants from OTIS tide models for
     given locations
@@ -50,6 +50,7 @@
     bilinear_interp.py: bilinear interpolation of data to specified coordinates
 
 UPDATE HISTORY:
+    Updated 08/2020: check that interpolated points are within range of model
    Updated 07/2020: added function docstrings. separate bilinear interpolation
        update griddata interpolation. changed TYPE variable to keyword argument
    Updated 06/2020: output currents as numpy masked arrays
@@ -118,6 +119,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
     xi,yi,hz,mz,iob,dt = read_tide_grid(grid_file)
     #-- run wrapper function to convert coordinate systems of input lat/lon
     x,y = convert_ll_xy(ilon,ilat,EPSG,'F')
+    invalid = (x < xi.min()) | (x > xi.max()) | (y < yi.min()) | (y > yi.max())
     #-- grid step size of tide model
     dx = xi[1] - xi[0]
     dy = yi[1] - yi[0]
@@ -236,7 +238,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
             z[z==0] = np.nan
             #-- use quick bilinear to interpolate values
             z1 = np.ma.zeros((npts),dtype=z.dtype)
-            z1.data = bilinear_interp(xi,yi,z,x,y,dtype=np.complex128)
+            z1.data[:] = bilinear_interp(xi,yi,z,x,y,dtype=np.complex128)
             #-- replace nan values with fill_value
             z1.mask = (np.isnan(z1.data) | (~mz1.astype(np.bool)))
             z1.data[z1.mask] = z1.fill_value
@@ -257,7 +259,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
             z[z==0] = np.nan
             #-- use scipy griddata to interpolate values
             z1 = np.ma.zeros((npts),dtype=z.dtype)
-            z1.data=scipy.interpolate.griddata(interp_points,
+            z1.data[:] = scipy.interpolate.griddata(interp_points,
                 z.flatten(), np.c_[x,y], method=METHOD)
             #-- replace nan values with fill_value
             z1.mask = (np.isnan(z1.data) | (~mz1.astype(np.bool)))
@@ -283,7 +285,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
                 u[u==0] = np.nan
                 #-- use quick bilinear to interpolate values
                 u1 = np.ma.zeros((npts),dtype=u.dtype)
-                u1.data = bilinear_interp(xi,yi,u,x,y,dtype=np.complex128)
+                u1.data[:] = bilinear_interp(xi,yi,u,x,y,dtype=np.complex128)
                 #-- replace nan values with fill_value
                 u1.mask = (np.isnan(u1.data) | (~mu1.astype(np.bool)))
                 u1.data[u1.mask] = u1.fill_value
@@ -303,7 +305,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
                 u[u==0] = np.nan
                 #-- use scipy griddata to interpolate values
                 u1 = np.ma.zeros((npts),dtype=u.dtype)
-                u1.data=scipy.interpolate.griddata(interp_points,
+                u1.data[:] = scipy.interpolate.griddata(interp_points,
                     u.flatten(), np.c_[x,y], method=METHOD)
                 #-- replace nan values with fill_value
                 u1.mask = (np.isnan(u1.data) | (~mu1.astype(np.bool)))
@@ -363,6 +365,9 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         amplitude.mask[:,i] = np.copy(v1.mask)
         ph.data[:,i] = np.arctan2(-np.imag(v1),np.real(v1))
         ph.mask[:,i] = np.copy(v1.mask)
+        #-- update mask to invalidate points outside model domain
+        ph.mask[:,i] |= invalid
+        amplitude.mask[:,i] |= invalid
 
     #-- convert phase to degrees
     phase = ph*180.0/np.pi
diff --git a/pytest.ini b/pytest.ini
index 448d3d1d..73e72209 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -2,3 +2,5 @@
 minversion = 2.0
 norecursedirs = .git
 python_files = test*.py
+testpaths =
+    test
diff --git a/test/requirements.txt b/requirements-dev.txt
similarity index 53%
rename from test/requirements.txt
rename to requirements-dev.txt
index c58d200d..36025047 100644
--- a/test/requirements.txt
+++ b/requirements-dev.txt
@@ -1,2 +1,5 @@
+flake8
 pytest>=4.6
 pytest-cov
+numpy
+oct2py
diff --git a/setup.py b/setup.py
index 4ed926d5..e387830d 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@
 
 setup(
     name='pyTMD',
-    version='1.0.2.6',
+    version='1.0.2.7',
     description='Tide Model Driver to read OTIS, GOT and FES formatted tidal solutions and make tidal predictions',
     long_description=long_description,
     long_description_content_type="text/markdown",
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 00000000..be722345
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,15 @@
+import pytest
+
+def pytest_addoption(parser):
+    parser.addoption("--username", action="store", help="NASA Earthdata username")
+    parser.addoption("--password", action="store", help="NASA Earthdata password")
+
+@pytest.fixture
+def username(request):
+    """ Returns NASA Earthdata username """
+    return request.config.getoption("--username")
+
+@pytest.fixture
+def password(request):
+    """ Returns NASA Earthdata password """
+    return request.config.getoption("--password")
diff --git a/test/test_download_and_read.py b/test/test_download_and_read.py
index dc4151fd..0752c4f9 100644
--- a/test/test_download_and_read.py
+++ b/test/test_download_and_read.py
@@ -5,6 +5,21 @@
 Tests the read program to verify that constituents are being extracted
 Tests that interpolated results are comparable to Matlab TMD program
     https://github.com/EarthAndSpaceResearch/TMD_Matlab_Toolbox_v2.5
+
+PYTHON DEPENDENCIES:
+    numpy: Scientific Computing Tools For Python
+        https://numpy.org
+        https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
+    scipy: Scientific Tools for Python
+        https://docs.scipy.org/doc/
+    Oct2Py: Python to GNU Octave Bridge
+        https://oct2py.readthedocs.io/en/latest/
+
+UPDATE HISTORY:
+    Updated 08/2020: directly call Matlab program (octave) and compare outputs
+        will install octave and oct2py in development requirements
+    Updated 08/2020: Download Antarctic tide gauge database and compare with RMS
+    Written 08/2020
 """
 import os
 import pytest
@@ -18,6 +33,7 @@
 import pyTMD.read_tide_model
 import pyTMD.predict_tidal_ts
 import pyTMD.infer_minor_corrections
+from oct2py import octave
 
 #-- current file path
 filename = inspect.getframeinfo(inspect.currentframe()).filename
@@ -110,7 +126,7 @@ def test_compare_CATS2008():
         i = count + s*10
         stations[s] = file_contents[i + 1].strip()
         shortname[s] = file_contents[i + 3].strip()
-        lon,lat,aux1,aux2 = file_contents[i + 4].split()
+        lat,lon,aux1,aux2 = file_contents[i + 4].split()
         station_lon[s] = np.float(lon)
         station_lat[s] = np.float(lat)
         amp = file_contents[i + 7].split()
@@ -140,6 +156,15 @@
     model_z = model_amp*np.exp(-1j*model_ph*np.pi/180.0)
     #-- valid stations for all constituents
     valid = np.all((~station_z.mask) & (~model_z.mask), axis=1)
+    invalid_list = ['Ablation Lake','Amery','Bahia Esperanza','Beaver Lake',
+        'Cape Roberts','Casey','Doake Ice Rumples','EE4A','EE4B',
+        'Eklund Islands','Gerlache C','Groussac','Gurrachaga','Half Moon Is.',
+        'Heard Island','Hobbs Pool','Mawson','McMurdo','Mikkelsen','Palmer',
+        'Primavera','Rutford GL','Rutford GPS','Rothera','Scott Base',
+        'Seymour Is','Terra Nova Bay']
+    #-- remove coastal stations from the list
+    invalid_stations = [i for i,s in enumerate(shortname) if s in invalid_list]
+    valid[invalid_stations] = False
     nv = np.count_nonzero(valid)
     #-- compare with RMS values from King et al. (2011)
     #-- https://doi.org/10.1029/2011JC006949
@@ -148,53 +173,118 @@
     for i,c in enumerate(constituents):
         #-- calculate difference and rms
         difference = np.abs(station_z[valid,i] - model_z[valid,i])
-        rms[i] = np.sqrt(np.sum(difference**2))/(2.0*nv)
+        #-- round to precision of King et al. (2011)
+        rms[i] = np.round(np.sqrt(np.sum(difference**2)/(2.0*nv)),decimals=1)
     #-- test RMS differences
     assert np.all(rms <= RMS)
 
 
+#-- PURPOSE: calculate the matlab serial date from calendar date
+#-- http://scienceworld.wolfram.com/astronomy/JulianDate.html
+def convert_calendar_serial(year, month, day, hour=0.0, minute=0.0, second=0.0):
+    #-- return the date in days since serial epoch 0000-01-01T00:00:00
+    sd = 367.0*year - np.floor(7.0*(year + np.floor((month+9.0)/12.0))/4.0) - \
+        np.floor(3.0*(np.floor((year + (month - 9.0)/7.0)/100.0) + 1.0)/4.0) + \
+        np.floor(275.0*month/9.0) + day + hour/24.0 + minute/1440.0 + \
+        second/86400.0 - 30.0
+    return sd
+
 #-- PURPOSE: Tests that interpolated results are comparable to Matlab program
 def test_verify_CATS2008():
     #-- model parameters for CATS2008
     grid_file = os.path.join(filepath,'grid_CATS2008')
-    model_file = os.path.join(filepath,'hf.CATS2008.out')
+    elevation_file = os.path.join(filepath,'hf.CATS2008.out')
+    transport_file = os.path.join(filepath,'uv.CATS2008.out')
     GRID = 'OTIS'
     EPSG = 'CATS2008'
     TYPE = 'z'
-    #-- compare daily outputs at a single point
-    ilon = [5.0]
-    ilat = [-62.5]
-    #-- calculate daily results for an entire year
-    delta_time = np.arange(365)*86400.0
+    #-- open Antarctic Tide Gauge (AntTG) database
+    with open(os.path.join(filepath,'AntTG_ocean_height_v1.txt'),'r') as f:
+        file_contents = f.read().splitlines()
+    #-- counts the number of lines in the header
+    count = 0
+    HEADER = True
+    #-- Reading over header text
+    while HEADER:
+        #-- check if file line at count starts with matlab comment string
+        HEADER = file_contents[count].startswith('%')
+        #-- add 1 to counter
+        count += 1
+    #-- rewind 1 line
+    count -= 1
+    #-- iterate over number of stations
+    AntTG = {}
+    antarctic_stations = (len(file_contents) - count)//10
+    stations = [None]*antarctic_stations
+    shortname = [None]*antarctic_stations
+    station_type = [None]*antarctic_stations
+    station_lon = np.zeros((antarctic_stations))
+    station_lat = np.zeros((antarctic_stations))
+    for s in range(antarctic_stations):
+        i = count + s*10
+        stations[s] = file_contents[i + 1].strip()
+        shortname[s] = file_contents[i + 3].strip()
+        lat,lon,aux1,aux2 = file_contents[i + 4].split()
+        station_type[s] = file_contents[i + 6].strip()
+        station_lon[s] = np.float(lon)
+        station_lat[s] = np.float(lat)
+
+    #-- calculate daily results for a time period
     #-- convert time to days since 1992-01-01T00:00:00
-    tide_time = pyTMD.time.convert_delta_time(delta_time,
-        epoch1=(1998,1,1,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0)
-    #-- presently not converting times to dynamic times
+    tide_time = np.arange(pyTMD.time.convert_calendar_dates(2000,1,1),
+        pyTMD.time.convert_calendar_dates(2000,12,31)+1)
+    #-- serial dates for matlab program (days since 0000-01-01T00:00:00)
+    SDtime = np.arange(convert_calendar_serial(2000,1,1),
+        convert_calendar_serial(2000,12,31)+1)
+    #-- presently not converting times to dynamic times for model comparisons
     deltat = np.zeros_like(tide_time)
-    #-- extract amplitude and phase from tide model
-    amp,ph,D,c = pyTMD.read_tide_model.extract_tidal_constants(ilon, ilat,
-        grid_file, model_file, EPSG, TYPE=TYPE, METHOD='spline', GRID=GRID)
+    #-- number of days
+    ndays = len(tide_time)
+    #-- extract amplitude and phase from tide model
+    amp,ph,D,c = pyTMD.read_tide_model.extract_tidal_constants(station_lon,
+        station_lat, grid_file, elevation_file, EPSG, TYPE=TYPE,
+        METHOD='spline', GRID=GRID)
     #-- calculate complex phase in radians for Euler's
     cph = -1j*ph*np.pi/180.0
-    #-- calculate constituent oscillation
-    hc = amp*np.exp(cph)
-
-    #-- allocate for out tides at point
-    tide = np.ma.zeros((365))
-    tide.mask = np.zeros((365),dtype=np.bool)
-    #-- predict tidal elevations at time and infer minor corrections
-    tide.mask[:] = np.any(hc.mask)
-    tide.data[:] = pyTMD.predict_tidal_ts(tide_time, hc, c,
-        DELTAT=deltat, CORRECTIONS=GRID)
-    minor = pyTMD.infer_minor_corrections(tide_time, hc, c,
-        DELTAT=deltat, CORRECTIONS=GRID)
-    tide.data[:] += minor.data[:]
-
-    #-- read validation data from Matlab TMD program
-    #-- https://github.com/EarthAndSpaceResearch/TMD_Matlab_Toolbox_v2.5
-    validation = np.loadtxt(os.path.join(filepath,'tmd_cats2008_output.txt.gz'))
-    difference = tide.data - validation[:,3]
-    #-- verify differences are within float32 tolerance
-    eps = np.finfo(np.float32).eps
-    assert all(np.abs(difference) < eps)
+    #-- will verify differences between model outputs are within tolerance
+    eps = np.finfo(np.float16).eps
+
+    #-- compare daily outputs at each station point
+    invalid_list = ['Ablation Lake','Amery','Bahia Esperanza','Beaver Lake',
+        'Cape Roberts','Casey','Doake Ice Rumples','EE4A','EE4B',
+        'Eklund Islands','Gerlache C','Groussac','Gurrachaga','Half Moon Is.',
+        'Heard Island','Hobbs Pool','Mawson','McMurdo','Mikkelsen','Palmer',
+        'Primavera','Rutford GL','Rutford GPS','Rothera','Scott Base',
+        'Seymour Is','Terra Nova Bay']
+    #-- remove coastal stations from the list
+    valid_stations=[i for i,s in enumerate(shortname) if s not in invalid_list]
+    for i in valid_stations:
+        #-- calculate constituent oscillation for station
+        hc = amp[i,None,:]*np.exp(cph[i,None,:])
+        #-- allocate for out tides at point
+        tide = np.ma.zeros((ndays))
+        tide.mask = np.zeros((ndays),dtype=np.bool)
+        #-- predict tidal elevations at time and infer minor corrections
+        tide.mask[:] = np.any(hc.mask)
+        tide.data[:] = pyTMD.predict_tidal_ts(tide_time, hc, c,
+            DELTAT=deltat, CORRECTIONS=GRID)
+        minor = pyTMD.infer_minor_corrections(tide_time, hc, c,
+            DELTAT=deltat, CORRECTIONS=GRID)
+        tide.data[:] += minor.data[:]
+
+        #-- compute validation data from Matlab TMD program using octave
+        #-- https://github.com/EarthAndSpaceResearch/TMD_Matlab_Toolbox_v2.5
+        TMDpath = os.path.join(filepath,'..','TMD_Matlab_Toolbox_v2.5','TMD')
+        octave.addpath(octave.genpath(os.path.normpath(TMDpath)))
+        octave.addpath(filepath)
+        CFname = os.path.join(filepath,'Model_CATS2008')
+        validation,cons = octave.tmd_tide_pred(CFname,SDtime,
+            station_lat[i],station_lon[i],'z',nout=2)
+
+        #-- calculate differences between matlab and python version
+        difference = np.ma.zeros((ndays))
+        difference.data[:] = tide.data - validation.T
+        difference.mask = (tide.mask | np.isnan(validation))
+        if not np.all(difference.mask):
+            assert np.all(np.abs(difference) < eps)
diff --git a/test/tmd_cats2008_output.txt.gz b/test/tmd_cats2008_output.txt.gz
deleted file mode 100644
index 9ff7c577..00000000
Binary files a/test/tmd_cats2008_output.txt.gz and /dev/null differ
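Usage note: pytest resolves the username and password fixture arguments from the --username and --password options registered in test/conftest.py, which is how the updated workflows forward the NASA Earthdata secrets into the test session. A minimal sketch of a test consuming those fixtures (the file and test names here are hypothetical, not part of this changeset):

    # test/test_credentials.py -- illustrative sketch only, not included in this diff
    import pytest

    def test_earthdata_credentials(username, password):
        # 'username' and 'password' are the fixtures defined in test/conftest.py,
        # filled in by: pytest --username=<user> --password=<pass>
        if not username or not password:
            pytest.skip('NASA Earthdata credentials were not supplied')
        assert username and password

As a sanity check on the added convert_calendar_serial helper: convert_calendar_serial(2000,1,1) evaluates to 730486.0, which matches datenum(2000,1,1) in Matlab/Octave, so the SDtime array handed to octave.tmd_tide_pred spans the same calendar days as tide_time does in the pyTMD epoch.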