Commit

test should not cache

dhodcz2 committed Jan 23, 2024
1 parent 8133862 commit b2baa63
Showing 3 changed files with 3 additions and 29 deletions.
4 changes: 0 additions & 4 deletions .github/workflows/cache.yml
@@ -1,10 +1,6 @@
 name: Cache

 on:
-  # push:
-  #   branches: [main]
-  # pull_request:
-  #   branches: [main]
   workflow_dispatch:

 jobs:
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -18,8 +18,8 @@ jobs:
       with:
         python-version: '3.11'
         architecture: 'x64'
-        cache: 'pip'
-        cache-dependency-path: requirements-dev.txt
+        # cache: 'pip'
+        # cache-dependency-path: requirements-dev.txt

     - name: Install Dependencies
       run: |
24 changes: 1 addition & 23 deletions src/tile2net/tileseg/inference/__init__.py
@@ -719,25 +719,6 @@ def validate(self, *args, grid: Raster, **kwargs):


 class RemoteInference(Inference):
-    # def validate(
-    #     self,
-    #     val_loader: DataLoader,
-    #     net: torch.nn.parallel.DataParallel,
-    #     criterion: tile2net.tileseg.loss.utils.CrossEntropyLoss2d,
-    #     optim: torch.optim.sgd.SGD,
-    #     epoch: int,
-    #     calc_metrics=True,
-    #     dump_assets=False,
-    #     dump_all_images=False,
-    #     testing=None,
-    #     grid: Raster = None,
-    #     **kwargs
-    # ):
-    #     """
-    #     Run validation for one epoch
-    #     :val_loader: data loader for validation
-    #     """
-
     def inference(self, rasterfactory=None):
         from tile2net.raster.raster import Raster
         grid = Raster.from_info(cfg.CITY_INFO_PATH)
@@ -751,7 +732,6 @@ def inference(self, rasterfactory=None):
             if exists
         )

-
         gdfs = [
             polygons
             for polygons in it_polygons
@@ -761,8 +741,6 @@ def inference(self, rasterfactory=None):
         if not len(gdfs):
             logger.error('No polygons were dumped')

-        # todo: for now we concate from a list of all the polygons generated during the session;
-        #  eventually we will serialize all the files and then use dask for batching
         if not gdfs:
             poly_network = gpd.GeoDataFrame()
             logging.warning(
@@ -776,7 +754,7 @@ def inference(self, rasterfactory=None):
             polys = grid.ntw_poly
             net = PedNet(poly=polys, project=grid.project)
             net.convert_whole_poly2line()
-            logger.debug('{len(net.complete_net)=}')
+            logger.debug(f'{len(net.complete_net)=}')


 @commandline
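Two of the hunks above clean up the polygon-gathering step (a stray blank line and a stale todo comment around `gdfs`). As orientation only, here is a hedged sketch of the fallback pattern those lines surround, assuming the behavior the visible context suggests; `combine_polygons` and its signature are hypothetical names, not code from the repository:

```python
# Hypothetical sketch, not the repository's code: gather per-tile
# GeoDataFrames and fall back to an empty frame when none were produced,
# mirroring the `if not gdfs: poly_network = gpd.GeoDataFrame()` branch.
import geopandas as gpd
import pandas as pd

def combine_polygons(gdfs: list[gpd.GeoDataFrame]) -> gpd.GeoDataFrame:
    if not gdfs:
        # Matches the warning path in the diff: no polygons were dumped.
        return gpd.GeoDataFrame()
    # Concatenate all per-tile results into a single network frame.
    return gpd.GeoDataFrame(pd.concat(gdfs, ignore_index=True))
```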

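The commit's single addition is the `f` prefix on the `logger.debug` call in the last hunk: without it, the braces are never interpolated and the literal text `{len(net.complete_net)=}` is logged. A minimal illustration of Python 3.8+'s `=` debug specifier, using a stand-in variable:

```python
complete_net_len = 42  # stand-in for len(net.complete_net)

# Missing `f` prefix: the braces are not evaluated, so the literal
# placeholder text is emitted.
print('{complete_net_len=}')   # -> {complete_net_len=}

# With the `f` prefix, the `=` specifier renders both the expression
# and its value, which is what the fixed line intends.
print(f'{complete_net_len=}')  # -> complete_net_len=42
```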