From 36db39c75179a0a491c69a4491f7ae7e4615e66f Mon Sep 17 00:00:00 2001
From: Lysandre Debut
Date: Tue, 8 Feb 2022 14:56:39 -0500
Subject: [PATCH] Upgrade black to version ~=22.0 (#3691)

* Upgrade black to version ~=22.0

* Last fixes
---
 setup.py                       | 2 +-
 src/datasets/arrow_reader.py   | 2 +-
 src/datasets/utils/py_utils.py | 3 +--
 tests/test_info_utils.py       | 4 ++--
 tests/test_load.py             | 2 +-
 5 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/setup.py b/setup.py
index 1544a6719b0..32675574a86 100644
--- a/setup.py
+++ b/setup.py
@@ -188,7 +188,7 @@
     ]
 )
 
-QUALITY_REQUIRE = ["black==21.4b0", "flake8>=3.8.3", "isort>=5.0.0", "pyyaml>=5.3.1"]
+QUALITY_REQUIRE = ["black~=22.0", "flake8>=3.8.3", "isort>=5.0.0", "pyyaml>=5.3.1"]
 
 EXTRAS_REQUIRE = {
diff --git a/src/datasets/arrow_reader.py b/src/datasets/arrow_reader.py
index b3e7da385bd..0d6f6fbd4fd 100644
--- a/src/datasets/arrow_reader.py
+++ b/src/datasets/arrow_reader.py
@@ -43,7 +43,7 @@
 HF_GCP_BASE_URL = "https://storage.googleapis.com/huggingface-nlp/cache/datasets"
 
 _SUB_SPEC_RE = re.compile(
-    fr"""
+    rf"""
 ^
 (?P<split>{_split_re[1:-1]})
 (\[
diff --git a/src/datasets/utils/py_utils.py b/src/datasets/utils/py_utils.py
index d46389b7318..0afc5a48bf6 100644
--- a/src/datasets/utils/py_utils.py
+++ b/src/datasets/utils/py_utils.py
@@ -71,7 +71,7 @@ def size_str(size_in_bytes):
     if not size_in_bytes:
         return "Unknown size"
 
-    _NAME_LIST = [("PiB", 2 ** 50), ("TiB", 2 ** 40), ("GiB", 2 ** 30), ("MiB", 2 ** 20), ("KiB", 2 ** 10)]
+    _NAME_LIST = [("PiB", 2**50), ("TiB", 2**40), ("GiB", 2**30), ("MiB", 2**20), ("KiB", 2**10)]
 
     size_in_bytes = float(size_in_bytes)
     for (name, size_bytes) in _NAME_LIST:
@@ -634,6 +634,5 @@ def _save_regex(pickler, obj):
         dill._dill.log.info("# Re")
         return
 
-
 except ImportError:
     pass
diff --git a/tests/test_info_utils.py b/tests/test_info_utils.py
index b47b8bfa9fe..3e71c78eef9 100644
--- a/tests/test_info_utils.py
+++ b/tests/test_info_utils.py
@@ -4,8 +4,8 @@
 from datasets.utils.info_utils import is_small_dataset
 
 
-@pytest.mark.parametrize("dataset_size", [None, 400 * 2 ** 20, 600 * 2 ** 20])
-@pytest.mark.parametrize("input_in_memory_max_size", ["default", 0, 100 * 2 ** 20, 900 * 2 ** 20])
+@pytest.mark.parametrize("dataset_size", [None, 400 * 2**20, 600 * 2**20])
+@pytest.mark.parametrize("input_in_memory_max_size", ["default", 0, 100 * 2**20, 900 * 2**20])
 def test_is_small_dataset(dataset_size, input_in_memory_max_size, monkeypatch):
     if input_in_memory_max_size != "default":
         monkeypatch.setattr(datasets.config, "IN_MEMORY_MAX_SIZE", input_in_memory_max_size)
diff --git a/tests/test_load.py b/tests/test_load.py
index 2a63186f0a3..96f9727bc44 100644
--- a/tests/test_load.py
+++ b/tests/test_load.py
@@ -489,7 +489,7 @@ def test_load_dataset_local(dataset_loading_script_dir, data_dir, keep_in_memory
     with pytest.raises(FileNotFoundError) as exc_info:
         datasets.load_dataset(SAMPLE_DATASET_NAME_THAT_DOESNT_EXIST)
     m_combined_path = re.search(
-        fr"http\S*{re.escape(SAMPLE_DATASET_NAME_THAT_DOESNT_EXIST + '/' + SAMPLE_DATASET_NAME_THAT_DOESNT_EXIST + '.py')}\b",
+        rf"http\S*{re.escape(SAMPLE_DATASET_NAME_THAT_DOESNT_EXIST + '/' + SAMPLE_DATASET_NAME_THAT_DOESNT_EXIST + '.py')}\b",
         str(exc_info.value),
     )
     assert m_combined_path is not None and is_remote_url(m_combined_path.group())