directly call matlab program in test suite
installs octave in the CI workflows and adds oct2py to the development requirements

development requirements include pytest/flake8/numpy

uses AntTG coordinates as reference points for comparison

updates both the python-package and python-request workflows

upgrades the Linux workflows to use ubuntu-20.04

updates the version number for release
tsutterley committed Aug 18, 2020
1 parent 66ec509 commit 186d44d
Showing 11 changed files with 191 additions and 62 deletions.
15 changes: 9 additions & 6 deletions .github/workflows/python-package.yml
@@ -15,7 +15,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-20.04, macos-latest]
         python-version: [3.5, 3.6, 3.7, 3.8]

     steps:
@@ -25,12 +25,12 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies for Linux
-      if: matrix.os == 'ubuntu-latest'
+      if: matrix.os == 'ubuntu-20.04'
       run: |
-        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev
+        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev octave
         sudo apt-get install libhdf5-dev libnetcdf-dev
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Install dependencies for MacOS
       if: matrix.os == 'macos-latest'
@@ -39,8 +39,9 @@ jobs:
         brew install geos
         brew install hdf5
         brew install netcdf
+        brew install octave
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Lint with flake8
       run: |
@@ -51,4 +52,6 @@ jobs:
     - name: Test with pytest
       run: |
         pip install --no-deps .
-        pytest
+        git clone ${{ secrets.TMD_MATLAB_TOOLBOX }}
+        pytest --username=${{ secrets.EARTHDATA_USERNAME }} \
+          --password=${{ secrets.EARTHDATA_PASSWORD }}
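The workflow now clones the TMD Matlab toolbox (its repository URL is held in the TMD_MATLAB_TOOLBOX secret) so the test suite can drive the reference Matlab routines through GNU Octave via oct2py. A minimal sketch of that calling pattern, with a placeholder toolbox directory and function name since neither is spelled out in this diff:

# minimal sketch only -- not the repository's test code
# calls a Matlab routine from the cloned toolbox through GNU Octave via oct2py
from oct2py import Oct2Py

octave = Oct2Py()
# 'TMD_Matlab_Toolbox' and 'tmd_ellipse' are hypothetical placeholder names
octave.addpath(octave.genpath('TMD_Matlab_Toolbox'))
# nout tells oct2py how many Matlab output arguments to collect
umajor, uminor, uincl, uphase = octave.tmd_ellipse(1.0, 2.0, nout=4)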
15 changes: 9 additions & 6 deletions .github/workflows/python-request.yml
@@ -12,7 +12,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-20.04, macos-latest]
         python-version: [3.5, 3.6, 3.7, 3.8]

     steps:
@@ -22,12 +22,12 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install dependencies for Linux
-      if: matrix.os == 'ubuntu-latest'
+      if: matrix.os == 'ubuntu-20.04'
       run: |
-        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev
+        sudo apt-get install libproj-dev proj-data proj-bin libgeos-dev octave
         sudo apt-get install libhdf5-dev libnetcdf-dev
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Install dependencies for MacOS
       if: matrix.os == 'macos-latest'
@@ -36,8 +36,9 @@ jobs:
         brew install geos
         brew install hdf5
         brew install netcdf
+        brew install octave
         pip install --upgrade pip
-        pip install flake8 pytest pytest-cov numpy
+        if [ -f requirements-dev.txt ]; then pip install -r requirements-dev.txt; fi
         if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
     - name: Lint with flake8
       run: |
@@ -48,4 +49,6 @@ jobs:
     - name: Test with pytest
       run: |
         pip install --no-deps .
-        pytest
+        git clone ${{ secrets.TMD_MATLAB_TOOLBOX }}
+        pytest --username=${{ secrets.EARTHDATA_USERNAME }} \
+          --password=${{ secrets.EARTHDATA_PASSWORD }}
2 changes: 1 addition & 1 deletion doc/source/conf.py
@@ -22,7 +22,7 @@
 author = 'Tyler C. Sutterley'

 # The full version, including alpha/beta/rc tags
-release = '1.0.2.6'
+release = '1.0.2.7'


 # -- General configuration ---------------------------------------------------
24 changes: 16 additions & 8 deletions pyTMD/bilinear_interp.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 u"""
-bilinear_interp.py (07/2020)
+bilinear_interp.py (08/2020)
 Bilinear interpolation of input data to output coordinates

 CALLING SEQUENCE:
@@ -25,6 +25,7 @@
     https://numpy.org/doc/stable/user/numpy-for-matlab-users.html

 UPDATE HISTORY:
+    Updated 08/2020: check that output coordinates are within bounds
     Updated 07/2020: split into separate function
     Updated 06/2020: use argmin and argmax in bilinear interpolation
     Updated 09/2017: Rewritten in Python
@@ -57,29 +58,36 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
     #-- grid step size of tide model
     dlon = np.abs(ilon[1] - ilon[0])
     dlat = np.abs(ilat[1] - ilat[0])
+    #-- find valid points (within bounds)
+    valid, = np.nonzero((lon >= ilon.min()) & (lon <= ilon.max()) &
+        (lat > ilat.min()) & (lat < ilat.max()))
     #-- Convert input coordinates to radians
     phi = ilon*dtr
     th = (90.0 - ilat)*dtr
     #-- Convert output data coordinates to radians
     xphi = lon*dtr
     xth = (90.0 - lat)*dtr
     #-- interpolate gridded data values to data
-    data = np.zeros_like(lon,dtype=dtype)
-    for i,l in enumerate(lon):
+    npts = len(lon)
+    data = np.ma.zeros((npts),dtype=dtype)
+    data.mask = np.ones((npts),dtype=np.bool)
+    data.mask[valid] = False
+    #-- for each valid point
+    for i in valid:
         #-- calculating the indices for the original grid
         dx = (ilon - np.floor(lon[i]/dlon)*dlon)**2
         dy = (ilat - np.floor(lat[i]/dlat)*dlat)**2
         iph = np.argmin(dx)
         ith = np.argmin(dy)
         #-- if on corner value: use exact
         if ((lat[i] == ilat[ith]) & (lon[i] == ilon[iph])):
-            data[i] = idata[ith,iph]
+            data.data[i] = idata[ith,iph]
         elif ((lat[i] == ilat[ith+1]) & (lon[i] == ilon[iph])):
-            data[i] = idata[ith+1,iph]
+            data.data[i] = idata[ith+1,iph]
         elif ((lat[i] == ilat[ith]) & (lon[i] == ilon[iph+1])):
-            data[i] = idata[ith,iph+1]
+            data.data[i] = idata[ith,iph+1]
         elif ((lat[i] == ilat[ith+1]) & (lon[i] == ilon[iph+1])):
-            data[i] = idata[ith+1,iph+1]
+            data.data[i] = idata[ith+1,iph+1]
         else:
             #-- corner weight values for i,j
             Wa = (xphi[i]-phi[iph])*(xth[i]-th[ith])
@@ -94,6 +102,6 @@ def bilinear_interp(ilon,ilat,idata,lon,lat,dtype=np.float):
         Ic = idata[ith+1,iph]#-- (0,1)
         Id = idata[ith+1,iph+1]#-- (1,1)
         #-- calculate interpolated value for i
-        data[i] = (Ia*Wa + Ib*Wb + Ic*Wc + Id*Wd)/W
+        data.data[i] = (Ia*Wa + Ib*Wb + Ic*Wc + Id*Wd)/W
     #-- return interpolated values
     return data
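With this change bilinear_interp returns a numpy masked array and only loops over points that fall inside the model grid; anything outside the grid bounds stays masked. A short usage sketch with made-up toy inputs (not values from the test suite):

# illustrative only: points outside the grid bounds come back masked
import numpy as np
from pyTMD.bilinear_interp import bilinear_interp

ilon = np.arange(0.0, 10.0, 1.0)    # model grid longitudes
ilat = np.arange(0.0, 10.0, 1.0)    # model grid latitudes
idata = np.outer(ilat, ilon)        # toy gridded field (lat x lon)
lon = np.array([2.5, 50.0])         # second point lies outside the grid
lat = np.array([3.5, 50.0])
data = bilinear_interp(ilon, ilat, idata, lon, lat)
print(data.mask)                    # expected: [False  True]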
15 changes: 10 additions & 5 deletions pyTMD/read_tide_model.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 u"""
-read_tide_model.py (07/2020)
+read_tide_model.py (08/2020)
 Reads files for a tidal model and makes initial calculations to run tide program
 Includes functions to extract tidal harmonic constants from OTIS tide models for
 given locations
@@ -50,6 +50,7 @@
     bilinear_interp.py: bilinear interpolation of data to specified coordinates

 UPDATE HISTORY:
+    Updated 08/2020: check that interpolated points are within range of model
     Updated 07/2020: added function docstrings. separate bilinear interpolation
         update griddata interpolation. changed TYPE variable to keyword argument
     Updated 06/2020: output currents as numpy masked arrays
@@ -118,6 +119,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
     xi,yi,hz,mz,iob,dt = read_tide_grid(grid_file)
     #-- run wrapper function to convert coordinate systems of input lat/lon
     x,y = convert_ll_xy(ilon,ilat,EPSG,'F')
+    invalid = (x < xi.min()) | (x > xi.max()) | (y < yi.min()) | (y > yi.max())
     #-- grid step size of tide model
     dx = xi[1] - xi[0]
     dy = yi[1] - yi[0]
@@ -236,7 +238,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         z[z==0] = np.nan
         #-- use quick bilinear to interpolate values
         z1 = np.ma.zeros((npts),dtype=z.dtype)
-        z1.data = bilinear_interp(xi,yi,z,x,y,dtype=np.complex128)
+        z1.data[:] = bilinear_interp(xi,yi,z,x,y,dtype=np.complex128)
         #-- replace nan values with fill_value
         z1.mask = (np.isnan(z1.data) | (~mz1.astype(np.bool)))
         z1.data[z1.mask] = z1.fill_value
@@ -257,7 +259,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         z[z==0] = np.nan
         #-- use scipy griddata to interpolate values
         z1 = np.ma.zeros((npts),dtype=z.dtype)
-        z1.data=scipy.interpolate.griddata(interp_points,
+        z1.data[:] = scipy.interpolate.griddata(interp_points,
             z.flatten(), np.c_[x,y], method=METHOD)
         #-- replace nan values with fill_value
         z1.mask = (np.isnan(z1.data) | (~mz1.astype(np.bool)))
@@ -283,7 +285,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         u[u==0] = np.nan
         #-- use quick bilinear to interpolate values
         u1 = np.ma.zeros((npts),dtype=u.dtype)
-        u1.data = bilinear_interp(xi,yi,u,x,y,dtype=np.complex128)
+        u1.data[:] = bilinear_interp(xi,yi,u,x,y,dtype=np.complex128)
         #-- replace nan values with fill_value
         u1.mask = (np.isnan(u1.data) | (~mu1.astype(np.bool)))
         u1.data[u1.mask] = u1.fill_value
@@ -303,7 +305,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         u[u==0] = np.nan
         #-- use scipy griddata to interpolate values
         u1 = np.ma.zeros((npts),dtype=u.dtype)
-        u1.data=scipy.interpolate.griddata(interp_points,
+        u1.data[:] = scipy.interpolate.griddata(interp_points,
             u.flatten(), np.c_[x,y], method=METHOD)
         #-- replace nan values with fill_value
         u1.mask = (np.isnan(u1.data) | (~mu1.astype(np.bool)))
@@ -363,6 +365,9 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z',
         amplitude.mask[:,i] = np.copy(v1.mask)
         ph.data[:,i] = np.arctan2(-np.imag(v1),np.real(v1))
         ph.mask[:,i] = np.copy(v1.mask)
+        #-- update mask to invalidate points outside model domain
+        ph.mask[:,i] |= invalid
+        amplitude.mask[:,i] |= invalid

     #-- convert phase to degrees
     phase = ph*180.0/np.pi
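The new invalid flag marks query points that fall outside the model's x/y extent, and that flag is OR-ed into the amplitude and phase masks for every constituent. A standalone numpy sketch of the same masking pattern, using made-up values rather than the library call:

# standalone sketch of the out-of-domain masking pattern (illustrative values)
import numpy as np

xi = np.arange(0.0, 100.0, 10.0)    # model grid x coordinates
yi = np.arange(0.0, 100.0, 10.0)    # model grid y coordinates
x = np.array([15.0, 250.0])         # second point lies outside the model domain
y = np.array([25.0, 250.0])
invalid = (x < xi.min()) | (x > xi.max()) | (y < yi.min()) | (y > yi.max())

amplitude = np.ma.zeros((len(x), 1))                # one constituent for the sketch
amplitude.mask = np.zeros((len(x), 1), dtype=bool)
amplitude.mask[:, 0] |= invalid                     # out-of-domain points become masked
print(amplitude.mask[:, 0])                         # expected: [False  True]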
2 changes: 2 additions & 0 deletions pytest.ini
@@ -2,3 +2,5 @@
 minversion = 2.0
 norecursedirs = .git
 python_files = test*.py
+testpaths =
+    test
3 changes: 3 additions & 0 deletions test/requirements.txt → requirements-dev.txt
@@ -1,2 +1,5 @@
flake8
pytest>=4.6
pytest-cov
numpy
oct2py
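Since oct2py (and an Octave installation) is only needed for the Matlab-comparison tests, a common guard, offered here only as a suggestion rather than something in this commit, is to skip those tests when the dependency is missing:

# sketch of an optional-dependency guard; not code from this repository
import pytest

# skips the whole test module when oct2py (and therefore Octave) is unavailable
oct2py = pytest.importorskip('oct2py')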
2 changes: 1 addition & 1 deletion setup.py
@@ -14,7 +14,7 @@

 setup(
     name='pyTMD',
-    version='1.0.2.6',
+    version='1.0.2.7',
     description='Tide Model Driver to read OTIS, GOT and FES formatted tidal solutions and make tidal predictions',
     long_description=long_description,
     long_description_content_type="text/markdown",
15 changes: 15 additions & 0 deletions test/conftest.py
@@ -0,0 +1,15 @@
+import pytest
+
+def pytest_addoption(parser):
+    parser.addoption("--username", action="store", help="NASA Earthdata username")
+    parser.addoption("--password", action="store", help="NASA Earthdata password")
+
+@pytest.fixture
+def username(request):
+    """ Returns NASA Earthdata username """
+    return request.config.getoption("--username")
+
+@pytest.fixture
+def password(request):
+    """ Returns NASA Earthdata password """
+    return request.config.getoption("--password")
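These fixtures simply hand back whatever was passed as --username and --password on the pytest command line, matching the invocation in the workflows above. A sketch of how a test might consume them; the test body is hypothetical, only the fixture wiring comes from this conftest.py:

# hypothetical test body; the username/password fixtures are defined in conftest.py
def test_earthdata_credentials(username, password):
    # fixtures resolve to the values passed via --username/--password
    assert username is not None
    assert password is not None
    # ... authenticate with NASA Earthdata and fetch model data here ...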