Skip to content

Commit

Permalink
Output Directory Name in CLI (#10)
Browse files Browse the repository at this point in the history
  • Loading branch information
kozlov721 authored Jul 19, 2024
1 parent 7439926 commit 4bc7533
Show file tree
Hide file tree
Showing 7 changed files with 48 additions and 37 deletions.
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ shared_with_container/
│ └── <models will be downloaded here>
└── outputs/
└── <output_dir_name>
└── <output_dir>
├── resnet18.onnx
├── resnet18.dlc
├── logs.txt
Expand All @@ -122,7 +122,7 @@ While adhering to this structure is not mandatory as long as the files are visib

The converter first searches for files exactly at the provided path. If not found, it searches relative to `/app/shared_with_container/`.

The `output_dir_name` can be specified in the config file. If such a directory already exists, the `output_dir_name` will be appended with the current date and time. If not specified, the `output_dir_name` will be autogenerated in the following format: `<model_name>_to_<target>_<date>_<time>`.
The `output_dir` can be specified using the `--output-dir` CLI argument. If such a directory already exists, its previous contents will be removed. If not specified, the `output_dir` will be autogenerated in the following format: `<model_name>_to_<target>_<date>_<time>`.

### Usage

Expand Down Expand Up @@ -280,7 +280,7 @@ To run the inference, use:
```bash
modelconverter infer rvc4 \
--model_path <path_to_model.dlc> \
--dest <dest> \
--output-dir <output_dir_name> \
--input_path <input_path>
--path <path_to_config.yaml>
```
Expand Down
63 changes: 41 additions & 22 deletions modelconverter/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ class Format(str, Enum):
),
]
ModelPathOption: TypeAlias = Annotated[
str, typer.Option(help="Path to or url of the model file.")
str, typer.Option(help="A URL or a path to the model file.")
]

DockerOption: TypeAlias = Annotated[
Expand All @@ -112,16 +112,24 @@ class Format(str, Enum):
typer.Option(help="Use GPU for conversion. Only relevant for HAILO."),
]

# Shared `--output-dir`/`-o` CLI option used by the `convert` and `infer`
# commands: optional explicit name for the run's output directory
# (a name is autogenerated when omitted — see `get_output_dir_name`).
OutputDirOption: TypeAlias = Annotated[
    Optional[str],
    typer.Option(
        ..., "--output-dir", "-o", help="Name of the output directory."
    ),
]


def get_output_dir_name(
    target: Target, name: str, output_dir: Optional[str]
) -> Path:
    """Resolve the output directory for a conversion or inference run.

    Args:
        target: Target platform; its lowercased name is embedded in the
            autogenerated directory name.
        name: Model name, embedded in the autogenerated directory name.
        output_dir: Explicit output directory name from the
            ``--output-dir`` CLI option, or ``None`` to autogenerate one.

    Returns:
        Path under ``OUTPUTS_DIR``. When ``output_dir`` is given and
        already exists, the previous contents are removed so the run
        starts clean; the directory itself is not (re)created here.
    """
    date = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
    if output_dir is not None:
        # Explicit name takes precedence; wipe any previous run stored
        # under the same name so stale artifacts don't survive.
        if (OUTPUTS_DIR / output_dir).exists():
            shutil.rmtree(OUTPUTS_DIR / output_dir)
    else:
        # Autogenerate: <model_name>_to_<target>_<date>_<time>.
        output_dir = f"{name}_to_{target.name.lower()}_{date}"
    return OUTPUTS_DIR / output_dir


def get_configs(
Expand Down Expand Up @@ -205,19 +213,22 @@ def infer(
Path,
typer.Option(
...,
"--input-path",
"-i",
help="Path to the directory with data for inference."
"The directory must contain one subdirectory per input, named the same as the input."
"Inference data must be provided in the NPY format.",
),
],
path: PathOption,
dest: Annotated[
Path, typer.Option(..., help="Path to the output directory.")
],
output_dir: OutputDirOption = None,
stage: Annotated[
Optional[str],
typer.Option(
help="Name of the stage to run. Only needed for multistage configs."
...,
"--stage",
"-s",
help="Name of the stage to run. Only needed for multistage configs.",
),
] = None,
dev: DevOption = False,
Expand All @@ -240,8 +251,11 @@ def infer(
try:
mult_cfg, _, _ = get_configs(path, opts)
cfg = mult_cfg.get_stage_config(stage)
output_path = get_output_dir_name(
target, mult_cfg.name, output_dir
)
Inferer = get_inferer(target)
Inferer.from_config(model_path, input_path, dest, cfg).run()
Inferer.from_config(model_path, input_path, output_path, cfg).run()
except Exception:
logger.exception("Encountered an unexpected error!")
exit(2)
Expand All @@ -253,18 +267,20 @@ def infer(
str(model_path),
"--input-path",
str(input_path),
"--dest",
str(dest),
"--path",
str(path),
]
if output_dir is not None:
args.extend(["--output-dir", output_dir])
if opts is not None:
args.extend(opts)
docker_exec(target.value, *args, tag=tag, use_gpu=gpu)


@app.command()
def shell(target: TargetArgument, dev: DevOption = False, gpu: GPUOption = True):
def shell(
target: TargetArgument, dev: DevOption = False, gpu: GPUOption = True
):
"""Boots up a shell inside a docker container for the specified target platform."""
if dev:
docker_build(target.value, tag="dev")
Expand Down Expand Up @@ -338,6 +354,7 @@ def benchmark(
def convert(
target: TargetArgument,
path: PathOption = None,
output_dir: OutputDirOption = None,
dev: DevOption = False,
to: FormatOption = Format.NATIVE,
gpu: GPUOption = True,
Expand Down Expand Up @@ -389,24 +406,24 @@ def convert(
if archive_preprocess:
cfg, preprocessing = extract_preprocessing(cfg)

output_dir = get_output_dir_name(target, cfg)
output_dir.mkdir(parents=True, exist_ok=True)
output_path = get_output_dir_name(target, cfg.name, output_dir)
output_path.mkdir(parents=True, exist_ok=True)
reset_logging()
setup_logging(
file=str(output_dir / "modelconverter.log"), use_rich=True
file=str(output_path / "modelconverter.log"), use_rich=True
)
if is_multistage:
from modelconverter.packages.multistage_exporter import (
MultiStageExporter,
)

exporter = MultiStageExporter(
target=target, config=cfg, output_dir=output_dir
target=target, config=cfg, output_dir=output_path
)
else:
exporter = get_exporter(target)(
config=next(iter(cfg.stages.values())),
output_dir=output_dir,
output_dir=output_path,
)

out_models = exporter.run()
Expand All @@ -433,7 +450,7 @@ def convert(
executables_paths=[
str(out_model) for out_model in out_models
]
+ [str(output_dir / "buildinfo.json")],
+ [str(output_path / "buildinfo.json")],
)
out_models = [generator.make_archive()]
logger.info(f"Model exported to {out_models[0]}")
Expand Down Expand Up @@ -476,6 +493,8 @@ def convert(
]
if main_stage is not None:
args.extend(["--main-stage", main_stage])
if output_dir is not None:
args.extend(["--output-dir", output_dir])
if path is not None:
args.extend(["--path", path])
if opts is not None:
Expand Down
3 changes: 0 additions & 3 deletions modelconverter/utils/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -444,7 +444,6 @@ def _download_input_model(cls, value: Dict[str, Any]) -> Dict[str, Any]:
# TODO: Output remote url
class Config(LuxonisConfig):
stages: Annotated[Dict[str, SingleStageConfig], Field(min_length=1)]
output_dir_name: Optional[str] = None
name: str

def get_stage_config(self, stage: Optional[str]) -> SingleStageConfig:
Expand All @@ -466,11 +465,9 @@ def _validate_name(cls, data: Dict[str, Any]) -> Dict[str, Any]:
def _validate_stages(cls, data: Dict[str, Any]) -> Dict[str, Any]:
if "stages" not in data:
name = data.pop("name", "default_stage")
output_dir_name = data.pop("output_dir_name", None)
data = {
"stages": {name: data},
"name": name,
"output_dir_name": output_dir_name,
}
else:
extra = {}
Expand Down
4 changes: 0 additions & 4 deletions shared_with_container/configs/defaults.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,6 @@
# Name of the model. Will be the stem of the input model if undefined.
name: ~

# Override the output directory name. If not provided, the output directory
# will be named `<model_name>_to_<target>_<timestamp>`.
output_dir_name: ~

# List of stages for multistage models. Doesn't have to be provided
# for single-stage models.
stages:
Expand Down
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,10 +193,10 @@ def prepare(
result_convert = subprocess_run(
f"modelconverter convert {service.replace('_superblob', '').replace('_non_quant', '')} "
f"--path {config_url} "
f"--output-dir _{model_name}-test "
"--dev "
"--no-gpu "
f"input_model {file_url} "
f"output_dir_name _{model_name}-test "
"hailo.compression_level 0 "
"hailo.optimization_level 0 "
"hailo.early_stop True "
Expand Down
6 changes: 3 additions & 3 deletions tests/test_packages/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def mnist_infer(mnist_env):
result = subprocess_run(
f"modelconverter infer {service} "
f"--model-path {converted_model_path} "
f"--dest {dest} "
f"--output-dir {dest} "
f"--input-path {input_files_dir.parent} "
f"--path {config_url} "
"--dev "
Expand Down Expand Up @@ -83,7 +83,7 @@ def resnet18_infer(resnet18_env):
result = subprocess_run(
f"modelconverter infer {service} "
f"--model-path {converted_model_path} "
f"--dest {dest} "
f"--output-dir {dest} "
f"--input-path {input_files_dir.parent} "
f"--path {config_url} "
"--dev "
Expand Down Expand Up @@ -128,7 +128,7 @@ def yolov6n_infer(yolov6n_env):
result = subprocess_run(
f"modelconverter infer {service} "
f"--model-path {converted_model_path} "
f"--dest {dest} "
f"--output-dir {dest} "
f"--input-path {input_files_dir.parent} "
f"--path {config_url} "
"--dev "
Expand Down
1 change: 0 additions & 1 deletion tests/test_utils/test_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,6 @@ def load_and_compare(
expected = {
"name": name,
"stages": {name: expected},
"output_dir_name": None,
}
assert config == expected

Expand Down

0 comments on commit 4bc7533

Please sign in to comment.