
Commit

Black formatting
clearml committed Nov 11, 2024
1 parent 1494f3d commit b6e4a82
Showing 1 changed file with 15 additions and 44 deletions.
59 changes: 15 additions & 44 deletions clearml/storage/cache.py
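The hunks below are mechanical reformatting only; no behavior changes. As a minimal sketch of how such a diff is produced, assuming the project runs Black with an extended line length (the reflowed lines exceed Black's default of 88 characters, so a setting such as 120 is an assumption, not confirmed by this page):

import black  # pip install black

# Reformat one file in memory with an assumed line length of 120,
# then write the result back; this mirrors `black -l 120 <file>`.
path = "clearml/storage/cache.py"
with open(path) as f:
    source = f.read()
formatted = black.format_str(source, mode=black.Mode(line_length=120))
with open(path, "w") as f:
    f.write(formatted)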
@@ -20,9 +20,7 @@

 class CacheManager(object):
     __cache_managers = {}
-    _default_cache_file_limit = deferred_config(
-        "storage.cache.default_cache_manager_size", 100
-    )
+    _default_cache_file_limit = deferred_config("storage.cache.default_cache_manager_size", 100)
     _storage_manager_folder = "storage_manager"
     _default_context = "global"
     _local_to_remote_url_lookup = OrderedDict()
@@ -48,9 +46,7 @@ def set_cache_limit(self, cache_file_limit):
         self._file_limit = max(self._file_limit, int(cache_file_limit))
         return self._file_limit
 
-    def get_local_copy(
-        self, remote_url, force_download, skip_zero_size_check=False
-    ):
+    def get_local_copy(self, remote_url, force_download, skip_zero_size_check=False):
         # type: (str, bool, bool) -> Optional[str]
         helper = StorageHelper.get(remote_url)
 
@@ -64,9 +60,7 @@ def get_local_copy(
             # noinspection PyProtectedMember
             direct_access = helper.get_driver_direct_access(remote_url)
         except (OSError, ValueError):
-            LoggerRoot.get_base_logger().debug(
-                "Failed accessing local file: {}".format(remote_url)
-            )
+            LoggerRoot.get_base_logger().debug("Failed accessing local file: {}".format(remote_url))
             return None
 
         if direct_access:
@@ -131,12 +125,12 @@ def _conform_filename(self, file_name):
file_ext = "".join(Path(file_name).suffixes[-2:])
file_ext = file_ext.rstrip(" ")

file_basename = file_name[:-len(file_ext)]
file_basename = file_name[: -len(file_ext)]
file_basename = file_basename.strip()

# Omit characters from extensionss
if len(file_ext) > allowed_length:
file_ext = file_ext[-(allowed_length - 1):]
file_ext = file_ext[-(allowed_length - 1) :]
file_ext = "." + file_ext.lstrip(".")

# Updating maximum character length
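The slice changes in this hunk follow Black's reading of PEP 8: when a slice bound is anything more than a simple name or number, the colon is treated as a binary operator and gets a space on each side. A small illustrative snippet (the filename is hypothetical):

# Black's slice-spacing rule: complex bounds get spaces around the colon,
# simple bounds do not.
name = "model.tar.gz"  # hypothetical filename
ext = ".tar.gz"
base = name[: -len(ext)]  # complex bound -> spaced colon
head = name[:5]           # simple bound -> no spaces
assert base == "model" and head == "model"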
@@ -159,9 +153,7 @@ def get_cache_folder(self):
"""
:return: full path to current contexts cache folder
"""
folder = Path(
get_cache_dir() / CacheManager._storage_manager_folder / self._context
)
folder = Path(get_cache_dir() / CacheManager._storage_manager_folder / self._context)
return folder.as_posix()

def get_cache_file(self, remote_url=None, local_filename=None):
@@ -185,18 +177,12 @@ def sort_max_access_time(x):
             try:
                 if x.is_dir():
                     dir_files = list(x.iterdir())
-                    atime = (
-                        max(atime, max(safe_time(s) for s in dir_files))
-                        if dir_files
-                        else atime
-                    )
+                    atime = max(atime, max(safe_time(s) for s in dir_files)) if dir_files else atime
             except Exception:
                 pass
             return atime
 
-        folder = Path(
-            get_cache_dir() / CacheManager._storage_manager_folder / self._context
-        )
+        folder = Path(get_cache_dir() / CacheManager._storage_manager_folder / self._context)
         folder.mkdir(parents=True, exist_ok=True)
         local_filename = local_filename or self.get_hashed_url_file(remote_url)
         local_filename = self._conform_filename(local_filename)
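The first change in this hunk collapses the conditional expression that computes a directory's effective access time: a directory counts as recently used if any of its direct children does. A standalone sketch of that key function, assuming plain stat access times (safe_time in the real code wraps stat with error handling):

from pathlib import Path

def max_access_time(p: Path) -> float:
    # A directory's effective atime is the newest atime among itself
    # and its direct children; plain files just use their own atime.
    atime = p.stat().st_atime
    if p.is_dir():
        children = list(p.iterdir())
        if children:
            atime = max(atime, max(c.stat().st_atime for c in children))
    return atime

# Newest-first ordering, as in the diff ("some_cache_dir" is hypothetical):
# entries = sorted(Path("some_cache_dir").iterdir(), reverse=True, key=max_access_time)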
@@ -223,15 +209,9 @@ def sort_max_access_time(x):
         lock_files = dict()
         files = []
         for f in sorted(folder_files, reverse=True, key=sort_max_access_time):
-            if f.name.startswith(CacheManager._lockfile_prefix) and f.name.endswith(
-                CacheManager._lockfile_suffix
-            ):
+            if f.name.startswith(CacheManager._lockfile_prefix) and f.name.endswith(CacheManager._lockfile_suffix):
                 # parse the lock filename
-                name = f.name[
-                    len(CacheManager._lockfile_prefix):-len(
-                        CacheManager._lockfile_suffix
-                    )
-                ]
+                name = f.name[len(CacheManager._lockfile_prefix) : -len(CacheManager._lockfile_suffix)]
                 num, _, name = name.partition(".")
                 lock_files[name] = lock_files.get(name, []) + [f.as_posix()]
             else:
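For readers following the lock-file branch above, here is a sketch of the parse it performs. The actual values of CacheManager._lockfile_prefix and CacheManager._lockfile_suffix are defined elsewhere in cache.py, so the ones below are assumptions for illustration:

# Assumed lock-file naming: "<prefix><num>.<name><suffix>".
prefix, suffix = ".lock.", ".clearml"  # assumption, not shown in this diff
fname = ".lock.000.my_cached_artifact.clearml"
name = fname[len(prefix) : -len(suffix)]  # "000.my_cached_artifact"
num, _, name = name.partition(".")        # num="000", name="my_cached_artifact"
assert (num, name) == ("000", "my_cached_artifact")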
@@ -242,7 +222,7 @@ def sort_max_access_time(x):
                 lock_files.pop(f.name, None)
 
         # delete old files
-        files = files[self._file_limit:]
+        files = files[self._file_limit :]
         for f in files:
             # check if the file is in the lock folder list:
             folder_lock = self._folder_locks.get(f.absolute().as_posix())
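The eviction in this hunk keeps only the newest self._file_limit entries: the list was sorted newest-first by access time, so everything past the limit is scheduled for deletion. A toy illustration with hypothetical names:

# Files sorted newest -> oldest; keep the first `file_limit`, delete the rest.
file_limit = 3
files = ["f6", "f5", "f4", "f3", "f2", "f1"]  # hypothetical, newest first
to_delete = files[file_limit:]
assert to_delete == ["f3", "f2", "f1"]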
@@ -279,9 +259,7 @@ def sort_max_access_time(x):
                 shutil.rmtree(f.as_posix(), ignore_errors=False)
             except Exception as e:
                 # failed deleting folder
-                LoggerRoot.get_base_logger().debug(
-                    "Exception {}\nFailed deleting folder {}".format(e, f)
-                )
+                LoggerRoot.get_base_logger().debug("Exception {}\nFailed deleting folder {}".format(e, f))
 
         # cleanup old lock files
         for lock_files in lock_files.values():
@@ -382,9 +360,7 @@ def get_remote_url(local_copy_path):
         except Exception:
             return local_copy_path
 
-        return CacheManager._local_to_remote_url_lookup.get(
-            hash(conform_local_copy_path), local_copy_path
-        )
+        return CacheManager._local_to_remote_url_lookup.get(hash(conform_local_copy_path), local_copy_path)
 
     @staticmethod
     def _add_remote_url(remote_url, local_copy_path):
@@ -411,10 +387,7 @@ def _add_remote_url(remote_url, local_copy_path):
             pass
         CacheManager._local_to_remote_url_lookup[hash(local_copy_path)] = remote_url
         # protect against overuse, so we do not blow up the memory
-        if (
-            len(CacheManager._local_to_remote_url_lookup)
-            > CacheManager.__local_to_remote_url_lookup_max_size
-        ):
+        if len(CacheManager._local_to_remote_url_lookup) > CacheManager.__local_to_remote_url_lookup_max_size:
             # pop the first item (FIFO)
             CacheManager._local_to_remote_url_lookup.popitem(last=False)
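The guard above caps _local_to_remote_url_lookup as a FIFO: once the OrderedDict outgrows its limit, popitem(last=False) discards the oldest insertion. A self-contained sketch of the same pattern (the limit of 3 is arbitrary):

from collections import OrderedDict

max_size = 3  # arbitrary cap for illustration
lookup = OrderedDict()
for key, value in [("a", 1), ("b", 2), ("c", 3), ("d", 4)]:
    lookup[key] = value
    if len(lookup) > max_size:
        lookup.popitem(last=False)  # evict the first-inserted (oldest) item
assert list(lookup) == ["b", "c", "d"]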

@@ -429,6 +402,4 @@ def get_context_folder_lookup(cls, context):
         # type: (Optional[str]) -> str
         if not context:
             return cls._default_context_folder_template
-        return cls._context_to_folder_lookup.get(
-            str(context), cls._default_context_folder_template
-        )
+        return cls._context_to_folder_lookup.get(str(context), cls._default_context_folder_template)
