diff --git a/.circleci/config.yml b/.circleci/config.yml
index 9d92254..a28c6ad 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -38,7 +38,7 @@ install_dep: &install_dep
   - run:
       name: Install Dependencies
       command: |
-        pip install --progress-bar off torch shapely flake8 flake8-bugbear flake8-comprehensions isort 'black @ git+https://github.com/psf/black@673327449f86fce558adde153bb6cbe54bfebad2'
+        pip install --progress-bar off torch shapely
 
 install_fvcore: &install_fvcore
   - run:
@@ -83,19 +83,6 @@ jobs:
 
       - <<: *install_fvcore
 
-      - run:
-          name: isort
-          command: |
-            isort -c -sp .
-      - run:
-          name: black
-          command: |
-            black --check .
-      - run:
-          name: flake8
-          command: |
-            flake8 .
-
       - <<: *run_unittests
 
       - store_artifacts:
diff --git a/fvcore/common/file_io.py b/fvcore/common/file_io.py
index 1c9ba54..70034e3 100644
--- a/fvcore/common/file_io.py
+++ b/fvcore/common/file_io.py
@@ -6,12 +6,14 @@
 import os
 import shutil
 import traceback
+import types
 from collections import OrderedDict
 from typing import IO, Any, Callable, Dict, List, MutableMapping, Optional, Union
 from urllib.parse import urlparse
 
 import portalocker  # type: ignore
 from fvcore.common.download import download
+from google.cloud import storage
 
 
 __all__ = ["LazyPath", "PathManager", "get_cache_dir", "file_lock"]
@@ -588,6 +590,248 @@ def _get_local_path(self, path: str, **kwargs: Any) -> str:
         return PathManager.get_local_path(os.fspath(direct_url), **kwargs)
 
 
+# Override for close() on files to write to google cloud
+def close_and_upload(self):
+    name = self.name
+    self._close()
+    # Blob.upload_from_file expects a binary-mode handle, so re-open as "rb"
+    # whether the file was written as "w" or "wb".
+    with open(name, "rb") as file_to_upload:
+        self._gc_blob.upload_from_file(file_to_upload)
+
+
+class GoogleCloudHandler(PathHandler):
+    """
+    Support for Google Cloud Storage file system
+    """
+
+    def _get_supported_prefixes(self) -> List[str]:
+        """
+        Returns:
+            List[str]: the list of URI prefixes this PathHandler can support
+        """
+        return ["gs://"]
+
+    def _get_local_path(self, path: str, **kwargs: Any) -> str:
+        """
+        Get a filepath which is compatible with native Python I/O such as `open`
+        and `os.path`.
+        If URI points to a remote resource, this function may download and cache
+        the resource to local disk. In this case, the cache stays on filesystem
+        (under `file_io.get_cache_dir()`) and will be used by a different run.
+        Therefore this function is meant to be used with read-only resources.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            local_path (str): a file path which exists on the local file system
+        """
+        self._cache_remote_file(path)
+        return self._get_local_cache_path(path)
+
+    def _copy_from_local(
+        self, local_path: str, dst_path: str, overwrite: bool = False, **kwargs: Any
+    ) -> bool:
+        """
+        Copies a local file to the specified URI.
+        If the URI is another local path, this should be functionally identical
+        to copy.
+        Args:
+            local_path (str): a file path which exists on the local file system
+            dst_path (str): A URI supported by this PathHandler
+            overwrite (bool): Bool flag for forcing overwrite of existing URI
+        Returns:
+            status (bool): True on success
+        """
+        return self._upload_file(dst_path, local_path)
+
+    def _open(
+        self, path: str, mode: str = "r", buffering: int = -1, **kwargs: Any
+    ) -> Union[IO[str], IO[bytes]]:
+        """
+        Open a stream to a URI, similar to the built-in `open`.
+        Args:
+            path (str): A URI supported by this PathHandler
+            mode (str): Specifies the mode in which the file is opened. It defaults
+                to 'r'.
+            buffering (int): An optional integer used to set the buffering policy.
+                Pass 0 to switch buffering off and an integer >= 1 to indicate the
+                size in bytes of a fixed-size chunk buffer. When no buffering
+                argument is given, the default buffering policy depends on the
+                underlying I/O implementation.
+        Returns:
+            file: a file-like object.
+        """
+        self._cache_remote_file(path)
+        return self._open_local_copy(path, mode)
+
+    def _copy(
+        self, src_path: str, dst_path: str, overwrite: bool = False, **kwargs: Any
+    ) -> bool:
+        """
+        Copies a source path to a destination path.
+        Args:
+            src_path (str): A URI supported by this PathHandler
+            dst_path (str): A URI supported by this PathHandler
+            overwrite (bool): Bool flag for forcing overwrite of existing file
+        Returns:
+            status (bool): True on success
+        """
+
+        if not self._cache_remote_file(src_path):
+            return False
+        local_path = self._get_local_cache_path(src_path)
+        return self._copy_from_local(local_path, dst_path)
+
+    def _exists(self, path: str, **kwargs: Any) -> bool:
+        """
+        Checks if there is a resource at the given URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path exists
+        """
+        return self._get_blob(path).exists()
+
+    def _isfile(self, path: str, **kwargs: Any) -> bool:
+        """
+        Checks if the resource at the given URI is a file.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path is a file
+        """
+
+        return "." in path.split("/")[-1]
+
+    def _isdir(self, path: str, **kwargs: Any) -> bool:
+        """
+        Checks if the resource at the given URI is a directory.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            bool: true if the path is a directory
+        """
+        return "/" == path[-1]
+
+    def _ls(self, path: str, **kwargs: Any) -> List[str]:
+        """
+        List the contents of the directory at the provided URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        Returns:
+            List[str]: list of contents in given path
+        """
+        raise NotImplementedError()
+
+    def _mkdirs(self, path: str, **kwargs: Any) -> None:
+        """
+        Recursive directory creation function. Like mkdir(), but makes all
+        intermediate-level directories needed to contain the leaf directory.
+        Similar to the native `os.makedirs`.
+        Args:
+            path (str): A URI supported by this PathHandler
+        """
+        # GCS does this automatically
+        pass
+
+    def _rm(self, path: str, **kwargs: Any) -> None:
+        """
+        Remove the file (not directory) at the provided URI.
+        Args:
+            path (str): A URI supported by this PathHandler
+        """
+        if not self._exists(path):
+            return
+        if self._isdir(path):
+            return
+        self._delete_remote_resource(path)
+
+    def _get_gc_bucket(self, path: str) -> storage.Bucket:
+        if not hasattr(self, "_gc_client"):
+            self._create_gc_client(path)
+        gc_bucket_name = self._extract_gc_bucket_name(path)
+        return self._gc_client.get_bucket(gc_bucket_name)
+
+    def _create_gc_client(self, path: str):
+        namespace = self._extract_gc_namespace(path)
+        gc_client = storage.Client(project=namespace)
+        self._gc_client = gc_client
+
+    def _get_blob(self, path: str) -> storage.Blob:
+        gc_bucket = self._get_gc_bucket(path)
+        return gc_bucket.blob(self._extract_blob_path(path))
+
+    def _cache_blob(self, local_path: str, gc_blob: storage.Blob) -> bool:
+        if not gc_blob.exists():
+            return False
+        with open(local_path, "wb") as file:
+            gc_blob.download_to_file(file)
+        return True
+
+    def _upload_file(self, destination_path: str, local_path: str) -> bool:
+        gc_blob = self._get_blob(destination_path)
+        # Guard on the local file; the destination blob may not exist yet.
+        if not os.path.exists(local_path):
+            return False
+        # upload_from_file requires a binary-mode handle.
+        with open(local_path, "rb") as file:
+            gc_blob.upload_from_file(file)
+        return True
+
+    def _cache_remote_file(self, remote_path: str):
+        local_path = self._get_local_cache_path(remote_path)
+        local_directory = self._get_local_cache_directory(remote_path)
+        self._maybe_make_directory(local_directory)
+        gc_blob = self._get_blob(remote_path)
+        return self._cache_blob(local_path, gc_blob)
+
+    def _open_local_copy(self, path: str, mode: str) -> Union[IO[str], IO[bytes]]:
+        local_path = self._get_local_cache_path(path)
+        gc_blob = self._get_blob(path)
+        file = open(local_path, mode)
+        if "w" in mode:
+            self._decorate_file_with_gc_methods(file, gc_blob)
+        return file
+
+    def _delete_remote_resource(self, path):
+        self._get_blob(path).delete()
+
+    def _decorate_file_with_gc_methods(
+        self, file: Union[IO[str], IO[bytes]], gc_blob: storage.Blob
+    ):
+        file._gc_blob = gc_blob
+        file._close = file.close
+        file.close = types.MethodType(close_and_upload, file)
+
+    def _maybe_make_directory(self, path: str) -> bool:
+        is_made = False
+        with file_lock(path):
+            if not os.path.exists(path):
+                os.makedirs(path)
+                is_made = True
+        return is_made
+
+    def _extract_gc_namespace(self, path: str) -> str:
+        return self._extract_gc_bucket_name(path).replace("-data", "")
+
+    def _extract_gc_bucket_name(self, path: str) -> str:
+        return self._remove_file_system(path).split("/")[0]
+
+    def _remove_file_system(self, path: str) -> str:
+        return path.replace("gs://", "")
+
+    def _remove_bucket_name(self, path: str) -> str:
+        return path.replace(self._extract_gc_bucket_name(path) + "/", "")
+
+    def _extract_blob_path(self, path: str) -> str:
+        return self._remove_file_system(self._remove_bucket_name(path))
+
+    def _get_local_cache_path(self, path: str) -> str:
+        path = self._extract_blob_path(path)
+        return "/".join([".", "tmp", path])
+
+    def _get_local_cache_directory(self, path: str) -> str:
+        path = self._get_local_cache_path(path)
+        return path.replace(path.split("/")[-1], "")
+
+
 # NOTE: this class should be renamed back to PathManager when it is moved to a new library
 class PathManagerBase:
     """
@@ -896,3 +1140,4 @@ def set_strict_kwargs_checking(self, enable: bool) -> None:
 
 PathManager.register_handler(HTTPURLHandler())
 PathManager.register_handler(OneDrivePathHandler())
+PathManager.register_handler(GoogleCloudHandler())
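A minimal usage sketch of the handler registered above (not part of the patch): it assumes a reachable bucket named "project-name-data" following the handler's "<project>-data" naming convention, and only goes through the PathManager front end. For writes, calling close() on the returned local handle is what triggers the upload via close_and_upload().

    from fvcore.common.file_io import PathManager

    # Reading: the handler caches the blob under ./tmp/ and opens the local copy.
    with PathManager.open("gs://project-name-data/test/path/demo.txt", "r") as f:
        print(f.read())

    # Writing: the local handle's close() is patched, so the file is pushed
    # back to the bucket when it is closed.
    f = PathManager.open("gs://project-name-data/test/path/new.txt", "w")
    f.write("uploaded when close() runs")
    f.close()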
diff --git a/fvcore/common/registry.py b/fvcore/common/registry.py
index 45a9c30..da384b0 100644
--- a/fvcore/common/registry.py
+++ b/fvcore/common/registry.py
@@ -2,6 +2,8 @@
 
 from typing import Dict, Optional
 
+from tabulate import tabulate
+
 
 class Registry(object):
     """
@@ -73,3 +75,13 @@ def get(self, name: str) -> object:
 
     def __contains__(self, name: str) -> bool:
         return name in self._obj_map
+
+    def __repr__(self) -> str:
+        table_headers = ["Names", "Objects"]
+        table = tabulate(
+            self._obj_map.items(), headers=table_headers, tablefmt="fancy_grid"
+        )
+        return "Registry of {}:\n".format(self._name) + table
+
+    # pyre-fixme[4]: Attribute must be annotated.
+    __str__ = __repr__
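A small sketch of what the new __repr__ yields (not part of the patch; the registry name and class below are placeholders). tabulate's "fancy_grid" format draws a boxed two-column table of registered names and objects:

    from fvcore.common.registry import Registry

    BACKBONES = Registry("BACKBONE")

    @BACKBONES.register()
    class ResNetStub:
        pass

    # Prints something like:
    # Registry of BACKBONE:
    # ╒════════════╤════════════════════════════════╕
    # │ Names      │ Objects                        │
    # ╞════════════╪════════════════════════════════╡
    # │ ResNetStub │ <class '__main__.ResNetStub'>  │
    # ╘════════════╧════════════════════════════════╛
    print(BACKBONES)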
diff --git a/packaging/fvcore/meta.yaml b/packaging/fvcore/meta.yaml
index 6d7e3a3..51138fb 100644
--- a/packaging/fvcore/meta.yaml
+++ b/packaging/fvcore/meta.yaml
@@ -20,6 +20,7 @@ requirements:
     - termcolor
     - pillow
     - tabulate
+    - google-cloud-storage
 
 build:
   string: py{{py}}
diff --git a/setup.py b/setup.py
old mode 100755
new mode 100644
index 3f70bfc..c9417f0
--- a/setup.py
+++ b/setup.py
@@ -51,6 +51,7 @@ def get_version():
         "termcolor>=1.1",
         "Pillow",
         "tabulate",
+        "google-cloud-storage",
     ],
     extras_require={"all": ["shapely"]},
     packages=find_packages(exclude=("tests",)),
diff --git a/tests/bm_main.py b/tests/bm_main.py
old mode 100755
new mode 100644
index 6edd65d..8ff46aa
--- a/tests/bm_main.py
+++ b/tests/bm_main.py
@@ -3,9 +3,8 @@
 
 import glob
 import importlib
-
-# pyre-fixme[21]: Could not find name `sys` in `os.path`.
-from os.path import basename, dirname, isfile, join, sys
+import sys
+from os.path import basename, dirname, isfile, join
 
 
 if __name__ == "__main__":
diff --git a/tests/test_file_io.py b/tests/test_file_io.py
index fd18a99..c0113a1 100644
--- a/tests/test_file_io.py
+++ b/tests/test_file_io.py
@@ -1,22 +1,27 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
 
+import inspect
+import io
 import os
 import shutil
 import tempfile
 import unittest
 import uuid
 from contextlib import contextmanager
-from typing import Generator, Optional
+from typing import IO, Generator, Optional, Union
 from unittest.mock import MagicMock, patch
 
 from fvcore.common import file_io
 from fvcore.common.file_io import (
+    GoogleCloudHandler,
     HTTPURLHandler,
     LazyPath,
     PathManager,
     PathManagerBase,
+    close_and_upload,
     get_cache_dir,
 )
+from google.cloud import storage
 
 
 class TestNativeIO(unittest.TestCase):
@@ -327,3 +332,297 @@ def test_one_drive_download(self) -> None:
             + "XM4VkNaX0NfMzNnUWJKc1VQVElqM3JRdTk5/root/content"
         )
         self.assertEquals(_direct_url, _gt_url)
+
+
+class TestCloudUtils(unittest.TestCase):
+    gc_auth = False
+    skip_gc_auth_required_tests_message = (
+        "Provide a GC project and bucket you are "
+        + "authorised against, then set the gc_auth flag to True"
+    )
+
+    @classmethod
+    def setUpClass(cls):
+        cls.gc_project_name = "project-name"
+        cls.gc_bucket_name = "project-name-data"
+        cls.gc_default_path = "/".join(["gs:/", cls.gc_bucket_name, "test"])
+        cls.gc_pathhandler = GoogleCloudHandler()
+
+    @classmethod
+    def tearDownClass(cls, _gc_auth=gc_auth):
+        shutil.rmtree("tmp/")
+        if not _gc_auth:
+            return
+        remote_file_path = "/".join([cls.gc_default_path, "path/test.txt"])
+        cls.gc_pathhandler._delete_remote_resource(remote_file_path)
+        remote_file_path = "/".join([cls.gc_default_path, "path/uploaded.txt"])
+        cls.gc_pathhandler._delete_remote_resource(remote_file_path)
+
+    def test_supported_prefixes(self):
+        supported_prefixes = self.gc_pathhandler._get_supported_prefixes()
+        self.assertEqual(supported_prefixes, ["gs://"])
+
+    def test_remove_file_system_from_remote_path(self):
+        path = self.gc_pathhandler._remove_file_system(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(path, "/".join([self.gc_bucket_name, "test/path/file.txt"]))
+
+    def test_remove_bucket_name_from_remote_path(self):
+        path = self.gc_pathhandler._remove_bucket_name(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(path, "gs://test/path/file.txt")
+
+    def test_extract_namespace_from_remote_path(self):
+        namespace = self.gc_pathhandler._extract_gc_namespace(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(namespace, self.gc_project_name)
+
+    def test_extract_bucket_from_remote_path(self):
+        bucket_name = self.gc_pathhandler._extract_gc_bucket_name(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(bucket_name, self.gc_bucket_name)
+
+    def test_extract_blob_path(self):
+        blob_path = self.gc_pathhandler._extract_blob_path(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(blob_path, "test/path/file.txt")
+
+    def test_get_local_cache_path(self):
+        tmp_path = self.gc_pathhandler._get_local_cache_path(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(tmp_path, "./tmp/test/path/file.txt")
+
+    def test_get_local_cache_directory(self):
+        tmp_path = self.gc_pathhandler._get_local_cache_directory(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertEqual(tmp_path, "./tmp/test/path/")
+
+    def _add_gc_methods_to_file(self, file: Union[IO[str], IO[bytes]]):
+        gc_blob = storage.Blob("test", storage.Bucket("test"))
+        self.gc_pathhandler._decorate_file_with_gc_methods(file, gc_blob)
+        self.assertTrue(isinstance(file._gc_blob, storage.Blob))
+        self.assertEqual(
+            inspect.getsource(file.close), inspect.getsource(close_and_upload)
+        )
+        file._close()
+        self.assertRaises(ValueError, file.readline)
+
+    def test_maybe_make_directory_doesnt_exist(self):
+        self.assertTrue(
+            self.gc_pathhandler._maybe_make_directory("./tmp/test/path/test.txt")
+        )
+
+    def test_maybe_make_directory_exists(self):
+        self.assertFalse(
+            self.gc_pathhandler._maybe_make_directory("./tmp/test/path/test.txt")
+        )
+
+    def test_add_gc_methods_to_text_file(self):
+        file = open("/tmp/test.txt", "w")
+        self._add_gc_methods_to_file(file)
+
+    def test_add_gc_methods_to_binary_file(self):
+        file = open("/tmp/test.txt", "wb")
+        self._add_gc_methods_to_file(file)
+
+    def test_is_file_when_path_is_a_file(self):
+        remote_path = "/".join([self.gc_default_path, "path/test.txt"])
+        is_file = self.gc_pathhandler._isfile(remote_path)
+        self.assertTrue(is_file)
+
+    def test_is_file_when_path_is_directory(self):
+        remote_path = "/".join([self.gc_default_path, "path/"])
+        is_file = self.gc_pathhandler._isfile(remote_path)
+        self.assertFalse(is_file)
+
+    def test_is_dir_when_path_is_a_directory(self):
+        remote_path = "/".join([self.gc_default_path, "path/"])
+        is_directory = self.gc_pathhandler._isdir(remote_path)
+        self.assertTrue(is_directory)
+
+    def test_is_dir_when_path_is_a_file(self):
+        remote_path = "/".join([self.gc_default_path, "path/test.txt"])
+        is_directory = self.gc_pathhandler._isdir(remote_path)
+        self.assertFalse(is_directory)
+
+    # Require GCS Authentication ====>
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_add_client_to_handler(self):
+        self.gc_pathhandler._create_gc_client(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertTrue(isinstance(self.gc_pathhandler._gc_client, storage.Client))
+        self.assertEqual(self.gc_pathhandler._gc_client.project, self.gc_project_name)
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_get_requested_gc_bucket(self):
+        gc_bucket = self.gc_pathhandler._get_gc_bucket(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertTrue(isinstance(gc_bucket, storage.Bucket))
+        self.assertEqual(gc_bucket.name, self.gc_bucket_name)
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_get_blob(self):
+        gc_blob = self.gc_pathhandler._get_blob(
+            "/".join([self.gc_default_path, "path/file.txt"])
+        )
+        self.assertTrue(isinstance(gc_blob, storage.Blob))
+        self.assertEqual(gc_blob.name, "test/path/file.txt")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_exist_when_blob_exists(self):
+        self.assertTrue(
+            self.gc_pathhandler._exists("/".join([self.gc_default_path, ""]))
+        )
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_exist_when_blob_doesnt_exist(self):
+        self.assertFalse(
+            self.gc_pathhandler._exists(
+                "/".join([self.gc_default_path, "doesnt/exist.txt"])
+            )
+        )
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def _gc_local_file_write_and_upload(
+        self, file: Union[IO[str], IO[bytes]], message: str
+    ):
+        gc_blob = self.gc_pathhandler._get_blob(
+            "/".join([self.gc_default_path, "path/test.txt"])
+        )
+        self.gc_pathhandler._decorate_file_with_gc_methods(file, gc_blob)
+        file.write(message)
+        file.close()
+        self.assertTrue(gc_blob.exists())
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_gc_local_file_binary_write_and_upload(self):
+        file = open("/tmp/text_binary.txt", "wb")
+        self._gc_local_file_write_and_upload(file, b"{\x03\xff\x00d")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_gc_local_file_text_write_and_upload(self):
+        file = open("/tmp/test.txt", "w")
+        self._gc_local_file_write_and_upload(file, "This is a google cloud file test\n")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_open_read_text_file(self):
+        file = self.gc_pathhandler._open(
+            "/".join([self.gc_default_path, "path/test2.txt"])
+        )
+        self.assertTrue(isinstance(file, io.TextIOWrapper))
+        self.assertEqual(file.read(), "Retrieved from GC")
+        file.close()
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def write_message_with_open(self, path: str, message: str, mode: str):
+        file = self.gc_pathhandler._open(path, mode)
+        file.write(message)
+        file.close()
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def read_remote_file(self, path: str, mode: str) -> str:
+        with self.gc_pathhandler._open(path, mode) as file:
+            read = file.read()
+        return read
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_open_write_new_text_file(self):
+        remote_path = "/".join([self.gc_default_path, "path/test_open_write.txt"])
+        message = "File created locally and uploaded with _open"
+        self.write_message_with_open(remote_path, message, "w")
+        read = self.read_remote_file(remote_path, "r")
+        self.assertEqual(read, "File created locally and uploaded with _open")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_open_write_existing_text_file(self):
+        remote_path = "/".join([self.gc_default_path, "path/test_open_write.txt"])
+        message = "Written to existing upload"
+        self.write_message_with_open(remote_path, message, "w")
+        read = self.read_remote_file(remote_path, "r")
+        self.assertEqual(read, "Written to existing upload")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_copy_from_local_file_exists(self):
+        self.gc_pathhandler._maybe_make_directory("./tmp/")
+        remote_path = "/".join([self.gc_default_path, "path/uploaded.txt"])
+        local_path = "./tmp/test_upload.txt"
+        with open(local_path, "w") as file:
+            file.write("Local file to test uploading")
+        isUploaded = self.gc_pathhandler._copy_from_local(local_path, remote_path)
+        self.assertTrue(isUploaded)
+        read = self.read_remote_file(remote_path, "r")
+        self.assertEqual(read, "Local file to test uploading")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_copy_from_local_file_doesnt_exist(self):
+        local_path = "/file/that/doesnt/exist.txt"
+        remote_path = "/".join([self.gc_default_path, "doesnt/exist.txt"])
+        isUploaded = self.gc_pathhandler._copy_from_local(local_path, remote_path)
+        self.assertFalse(isUploaded)
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_copy_remote_file_exists(self):
+        remote_source = "/".join([self.gc_default_path, "path/uploaded.txt"])
+        remote_destination = "/".join([self.gc_default_path, "path/uploaded-copy.txt"])
+        isCopied = self.gc_pathhandler._copy(remote_source, remote_destination)
+        self.assertTrue(isCopied)
+        self.assertTrue(self.gc_pathhandler._exists(remote_destination))
+        read = self.read_remote_file(remote_destination, "r")
+        self.assertEqual(read, "Local file to test uploading")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_copy_remote_file_doesnt_exist(self):
+        remote_source = "/".join([self.gc_default_path, "doesnt/exist.txt"])
+        remote_destination = "/".join([self.gc_default_path, "doesnt/exist-copy.txt"])
+        isCopied = self.gc_pathhandler._copy(remote_source, remote_destination)
+        self.assertFalse(isCopied)
+        self.assertFalse(self.gc_pathhandler._exists(remote_destination))
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_get_local_path_remote_file_exists(self):
+        remote_path = "/".join([self.gc_default_path, "path/uploaded.txt"])
+        cache_path = self.gc_pathhandler._get_local_path(remote_path)
+        self.assertEqual(cache_path, "./tmp/test/path/uploaded.txt")
+        with open(cache_path) as file:
+            read = file.read()
+        self.assertEqual(read, "Local file to test uploading")
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_get_local_path_remote_file_doesnt_exists(self):
+        remote_path = "/".join([self.gc_default_path, "will/exist.txt"])
+        cache_path = self.gc_pathhandler._get_local_path(remote_path)
+        self.assertEqual(cache_path, "./tmp/test/will/exist.txt")
+        self.assertTrue(
+            os.path.exists(self.gc_pathhandler._get_local_cache_directory(remote_path))
+        )
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_rm_when_remote_file_exists(self):
+        remote_path = "/".join([self.gc_default_path, "path/uploaded-copy.txt"])
+        self.assertTrue(self.gc_pathhandler._exists(remote_path))
+        self.gc_pathhandler._rm(remote_path)
+        self.assertFalse(self.gc_pathhandler._exists(remote_path))
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_rm_when_remote_file_doesnt_exist(self):
+        remote_path = "/".join([self.gc_default_path, "doesnt/exist.txt"])
+        self.assertFalse(self.gc_pathhandler._exists(remote_path))
+        self.gc_pathhandler._rm(remote_path)
+
+    @unittest.skipIf(not gc_auth, skip_gc_auth_required_tests_message)
+    def test_rm_when_remote_path_is_directory(self):
+        remote_path = "/".join([self.gc_default_path, ""])
+        self.assertTrue(self.gc_pathhandler._exists(remote_path))
+        self.gc_pathhandler._rm(remote_path)
+        self.assertTrue(self.gc_pathhandler._exists(remote_path))
+
+    # ====>
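The close()-patching that several of the tests above assert on, reduced to a self-contained sketch (hypothetical names, no GCS involved): the original close is stashed as _close and close is rebound per instance, which is what _decorate_file_with_gc_methods does with close_and_upload.

    import types

    def close_and_report(self):
        # Stand-in for close_and_upload: run the real close, then act on the file.
        self._close()
        print("would upload", self.name)

    f = open("/tmp/example.txt", "w")
    f._close = f.close  # keep a handle on the original close
    f.close = types.MethodType(close_and_report, f)  # rebind close on this instance
    f.write("hello")
    f.close()  # closes the file, then the stand-in "uploads" it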