
Commit 2c58907

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 5289f9b commit 2c58907

8 files changed: +19 −28 lines


colour_datasets/loaders/__init__.py

Lines changed: 1 addition & 3 deletions
@@ -160,9 +160,7 @@ def load(dataset: int | str) -> Any:
     --------
     >>> len(load("3245883").keys())  # doctest: +SKIP
     28
-    >>> len(
-    ...     load("Camera Spectral Sensitivity Database - " "Jiang et al. (2013)").keys()
-    ... )
+    >>> len(load("Camera Spectral Sensitivity Database - Jiang et al. (2013)").keys())
     ... # doctest: +SKIP
     28
     """

colour_datasets/loaders/hung1995.py

Lines changed: 2 additions & 4 deletions
@@ -140,10 +140,8 @@ def load(
             "Table II.csv": "Intra- and interobserver variances for each "
             "reference hue expressed in circumferential "
             "hue-angle difference.",
-            "Table III.csv": "Weight-averaged constant hue loci for the CL "
-            "experiment.",
-            "Table IV.csv": "Weight-averaged constant hue loci for the VL "
-            "experiment.",
+            "Table III.csv": "Weight-averaged constant hue loci for the CL experiment.",
+            "Table IV.csv": "Weight-averaged constant hue loci for the VL experiment.",
         }

         for filename in filenames:

colour_datasets/loaders/kuopio.py

Lines changed: 1 addition & 1 deletion
@@ -256,7 +256,7 @@ class instances.
     prefix = re.sub("\\.|\\(|\\)|/|\\s", "", title)
     class_attribute = f"DatasetLoader_{prefix}"
     dataset_loader_class = cast(
-        DatasetLoader_KuopioUniversity,
+        "DatasetLoader_KuopioUniversity",
         type(
             str(class_attribute),
             (DatasetLoader_KuopioUniversity,),
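Quoting the target type passed to typing.cast turns it into a forward reference that only static type checkers evaluate; at runtime, cast simply returns its second argument unchanged, so behaviour is identical (luo1999.py below applies the same quoting to "float"). A minimal sketch using a stand-in class rather than the actual loader:

    from typing import cast

    class DatasetLoaderStub:  # stand-in for DatasetLoader_KuopioUniversity
        pass

    dynamic_class = type("DatasetLoader_Example", (DatasetLoaderStub,), {})

    # cast() is a no-op at runtime: both forms return `dynamic_class` untouched,
    # the quoted form merely defers evaluation of the type name.
    assert cast(DatasetLoaderStub, dynamic_class) is dynamic_class
    assert cast("DatasetLoaderStub", dynamic_class) is dynamic_class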

colour_datasets/loaders/luo1999.py

Lines changed: 2 additions & 2 deletions
@@ -471,8 +471,8 @@ def load(self) -> Dict[str, CorrespondingColourDataset_Luo1999]:
                 as_float_array(XYZ_t) / 100,
                 as_float_array(XYZ_cr) / 100,
                 as_float_array(XYZ_ct) / 100,
-                cast(float, Y_r) * np.pi,
-                cast(float, Y_t) * np.pi,
+                cast("float", Y_r) * np.pi,
+                cast("float", Y_t) * np.pi,
                 B_r,
                 B_t,
                 dataset_metadata,

colour_datasets/loaders/solomotav2023.py

Lines changed: 1 addition & 2 deletions
@@ -100,8 +100,7 @@ def load(self) -> Dict[str, Dict[str, RGB_CameraSensitivities]]:
             ("Ground Truth", "ground-truths"),
         ]:
             csv_files = glob.glob(
-                f'{os.path.join(self.record.repository, "dataset", path, path)}/'
-                f"*.csv"
+                f"{os.path.join(self.record.repository, 'dataset', path, path)}/*.csv"
             )
             for csv_file in csv_files:
                 camera_name = os.path.splitext(os.path.basename(csv_file))[0].replace(
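Swapping the outer and inner quote style of the f-string and folding the trailing "*.csv" literal into it yields the exact same glob pattern; only the source formatting changes. A small sketch, using a hypothetical repository path and sub-directory name:

    import os

    repository = "/tmp/solomotav2023"  # hypothetical record path
    path = "measured"                  # hypothetical sub-directory

    old_pattern = f'{os.path.join(repository, "dataset", path, path)}/' f"*.csv"
    new_pattern = f"{os.path.join(repository, 'dataset', path, path)}/*.csv"

    # Both spellings build the identical glob pattern string.
    assert old_pattern == new_pattern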

colour_datasets/records/zenodo.py

Lines changed: 10 additions & 13 deletions
@@ -200,22 +200,22 @@ def strip_html(text: str) -> str:

         files = "\n".join(
             [
-                f'- {file_data["key"]} : {file_data["links"]["self"]}'
+                f"- {file_data['key']} : {file_data['links']['self']}"
                 for file_data in sorted(files, key=lambda x: x["key"])
             ]
         )

         return "\n".join(
             [
-                f'{metadata["title"]} - {metadata["version"]}',
-                f'{"=" * (len(self.title) + 3 + len(metadata["version"]))}',
+                f"{metadata['title']} - {metadata['version']}",
+                f"{'=' * (len(self.title) + 3 + len(metadata['version']))}",
                 "",
                 f"Record ID : {self.id}",
                 f"Authors : {authors}",
-                f'License : {metadata["license"]["id"]}',
-                f'DOI : {metadata["doi"]}',
-                f'Publication Date : {metadata["publication_date"]}',
-                f'URL : {self._data["links"]["self_html"]}\n',
+                f"License : {metadata['license']['id']}",
+                f"DOI : {metadata['doi']}",
+                f"Publication Date : {metadata['publication_date']}",
+                f"URL : {self._data['links']['self_html']}\n",
                 "Description",
                 "-----------",
                 "",

@@ -628,10 +628,7 @@ def __str__(self) -> str:

         datasets = "\n".join(
             [
-                (
-                    f"[{'x' if dataset.synced() else ' '}] "
-                    f"{dataset.id} : {dataset.title}"
-                )
+                (f"[{'x' if dataset.synced() else ' '}] {dataset.id} : {dataset.title}")
                 for dataset in sorted(self.values(), key=lambda x: x.title)
             ]
         )

@@ -641,11 +638,11 @@ def __str__(self) -> str:
         return "\n".join(
             [
                 f"{self._configuration.community}",
-                f'{"=" * len(self._configuration.community)}',
+                f"{'=' * len(self._configuration.community)}",
                 "",
                 f"Datasets : {len(self)}",
                 f"Synced : {synced}",
-                f'URL : {self._data["community"]["links"]["self_html"]}',
+                f"URL : {self._data['community']['links']['self_html']}",
                 "",
                 "Datasets",
                 "--------",

colour_datasets/utilities/common.py

Lines changed: 1 addition & 2 deletions
@@ -179,8 +179,7 @@ def url_download(

         if md5 is not None and md5.lower() != hash_md5(filename):
             msg = (
-                f'"MD5" hash of "{filename}" file does not match the '
-                f"expected hash!"
+                f'"MD5" hash of "{filename}" file does not match the expected hash!'
             )
             raise ValueError(  # noqa: TRY301
                 msg
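For context, the guard around the reworded message compares a caller-supplied MD5 digest with the digest of the downloaded file and raises on mismatch. A rough sketch of that logic using hashlib directly, rather than the package's hash_md5 helper:

    import hashlib

    def check_md5(filename: str, md5: str | None) -> None:
        """Raise if `md5` is given and does not match the file's digest (sketch)."""
        if md5 is None:
            return

        with open(filename, "rb") as file_handle:
            digest = hashlib.md5(file_handle.read()).hexdigest()

        if md5.lower() != digest:
            msg = f'"MD5" hash of "{filename}" file does not match the expected hash!'
            raise ValueError(msg)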

utilities/export_todo.py

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ def extract_todo_items(root_directory: str) -> dict:

             if in_todo and line.startswith("#"):
                 todo_item.append(line.replace("#", "").strip())
-            elif len(todo_item):
+            elif todo_item:
                 key = filename.replace("../", "")
                 if not todo_items.get(key):
                     todo_items[key] = []
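For a list, `elif todo_item:` and `elif len(todo_item):` are equivalent: an empty list is falsy and a non-empty one is truthy, and the direct truth test is the idiomatic form the linter enforces. A quick sketch:

    todo_item: list[str] = []
    assert bool(todo_item) == bool(len(todo_item))  # both False while empty

    todo_item.append("Implement support for additional datasets.")
    assert bool(todo_item) == bool(len(todo_item))  # both True once populated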

0 commit comments
