diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml
index 6724b2764..03d60e521 100644
--- a/.github/workflows/pre-commit.yml
+++ b/.github/workflows/pre-commit.yml
@@ -12,5 +12,5 @@ jobs:
     - uses: actions/checkout@v2
     - uses: actions/setup-python@v2
       with:
-        python-version: '3.8'
-    - uses: pre-commit/action@v2.0.3
+        python-version: '3.12'
+    - uses: pre-commit/action@v3.0.1
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 483db5ecc..01d9da4bc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v2.5.0 # Use the ref you want to point at
+    rev: v5.0.0 # Use the ref you want to point at
     hooks:
       - id: trailing-whitespace
         types: [file, text]
@@ -21,7 +21,7 @@ repos:
         types: [python]
         additional_dependencies: ['click==8.0.4']
   - repo: https://github.com/PyCQA/flake8
-    rev: 3.7.9
+    rev: 7.0.0
     hooks:
       - id: flake8
         types: [python]
diff --git a/benchmarks/CL_MASR/analyze_logs.py b/benchmarks/CL_MASR/analyze_logs.py
index 78061c49a..088933b17 100644
--- a/benchmarks/CL_MASR/analyze_logs.py
+++ b/benchmarks/CL_MASR/analyze_logs.py
@@ -373,7 +373,7 @@ def plot_wer(
     plt.xticks(range(len(locales)), locales, rotation=90)
     if xlabel is not None:
         plt.xlabel(xlabel)
-    plt.ylabel("WER (\%)" if usetex else "WER (%)")  # noqa: W605
+    plt.ylabel("WER (\\%)" if usetex else "WER (%)")  # noqa: W605
     fig.tight_layout()
     plt.savefig(output_image, bbox_inches="tight")
     plt.close()
@@ -851,7 +851,7 @@ def hex_to_rgb(hex_color: "str") -> "Tuple":
                 f"{name.lower().replace(' ', '_')}.{args.format}",
             ),
             xlabel=None,
-            ylabel=f"{name} (\%)"
+            ylabel=f"{name} (\\%)"
             if args.usetex
             else f"{name} (%)",  # noqa: W605
             xticks=["base"] + [f"L{i}" for i in range(1, 1 + len(new_locales))],
diff --git a/benchmarks/CL_MASR/common_voice_prepare.py b/benchmarks/CL_MASR/common_voice_prepare.py
index e6f6fdb10..f882da633 100644
--- a/benchmarks/CL_MASR/common_voice_prepare.py
+++ b/benchmarks/CL_MASR/common_voice_prepare.py
@@ -111,7 +111,7 @@ def prepare_common_voice(
     _LOGGER.info(
         "----------------------------------------------------------------------",
     )
-    _LOGGER.info(f"Merging TSV files...")
+    _LOGGER.info("Merging TSV files...")
     for split, max_duration in zip(_SPLITS, max_durations):
         tsv_files = [
             os.path.join(data_folder, locale, f"{split}_with_duration.tsv")
@@ -126,7 +126,7 @@ def prepare_common_voice(
     _LOGGER.info(
         "----------------------------------------------------------------------",
     )
-    _LOGGER.info(f"Creating data manifest CSV files...")
+    _LOGGER.info("Creating data manifest CSV files...")
     for split in _SPLITS:
         preprocess_tsv_file(
             os.path.join(data_folder, f"{split}_with_duration.tsv"),
diff --git a/benchmarks/CL_MASR/wavlm/pretrain.py b/benchmarks/CL_MASR/wavlm/pretrain.py
index b65be5573..868f9836c 100644
--- a/benchmarks/CL_MASR/wavlm/pretrain.py
+++ b/benchmarks/CL_MASR/wavlm/pretrain.py
@@ -341,7 +341,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["locales"], f"wer_test.txt",
+        hparams, run_opts, hparams["locales"], "wer_test.txt",
     )

diff --git a/benchmarks/CL_MASR/wavlm/train_agem.py b/benchmarks/CL_MASR/wavlm/train_agem.py
index 0d96eabc1..3ad1c4389 100644
--- a/benchmarks/CL_MASR/wavlm/train_agem.py
+++ b/benchmarks/CL_MASR/wavlm/train_agem.py
@@ -431,7 +431,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_der.py b/benchmarks/CL_MASR/wavlm/train_der.py
index 987e94b8a..47f0219eb 100644
--- a/benchmarks/CL_MASR/wavlm/train_der.py
+++ b/benchmarks/CL_MASR/wavlm/train_der.py
@@ -363,7 +363,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     replay_buffer = []
diff --git a/benchmarks/CL_MASR/wavlm/train_er.py b/benchmarks/CL_MASR/wavlm/train_er.py
index 6fe01ae43..ac1462e41 100644
--- a/benchmarks/CL_MASR/wavlm/train_er.py
+++ b/benchmarks/CL_MASR/wavlm/train_er.py
@@ -306,7 +306,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_ewc.py b/benchmarks/CL_MASR/wavlm/train_ewc.py
index 23e546bef..79e7f16e4 100644
--- a/benchmarks/CL_MASR/wavlm/train_ewc.py
+++ b/benchmarks/CL_MASR/wavlm/train_ewc.py
@@ -417,7 +417,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_ft.py b/benchmarks/CL_MASR/wavlm/train_ft.py
index 3f8f7aaf4..325053267 100644
--- a/benchmarks/CL_MASR/wavlm/train_ft.py
+++ b/benchmarks/CL_MASR/wavlm/train_ft.py
@@ -305,7 +305,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_joint.py b/benchmarks/CL_MASR/wavlm/train_joint.py
index 52d4b9c94..471e4b761 100644
--- a/benchmarks/CL_MASR/wavlm/train_joint.py
+++ b/benchmarks/CL_MASR/wavlm/train_joint.py
@@ -305,7 +305,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
@@ -358,7 +358,7 @@ def train(hparams, run_opts):
         hparams,
         run_opts,
         hparams["base_locales"] + hparams["new_locales"],
-        f"wer_test_after.txt",
+        "wer_test_after.txt",
     )

diff --git a/benchmarks/CL_MASR/wavlm/train_l2p.py b/benchmarks/CL_MASR/wavlm/train_l2p.py
index 49114dcb2..0f36f3425 100644
--- a/benchmarks/CL_MASR/wavlm/train_l2p.py
+++ b/benchmarks/CL_MASR/wavlm/train_l2p.py
@@ -374,7 +374,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_lwf.py b/benchmarks/CL_MASR/wavlm/train_lwf.py
index fd29e613d..95cae6329 100644
--- a/benchmarks/CL_MASR/wavlm/train_lwf.py
+++ b/benchmarks/CL_MASR/wavlm/train_lwf.py
@@ -335,7 +335,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_mas.py b/benchmarks/CL_MASR/wavlm/train_mas.py
index 6e8431334..cfdb4d320 100644
--- a/benchmarks/CL_MASR/wavlm/train_mas.py
+++ b/benchmarks/CL_MASR/wavlm/train_mas.py
@@ -421,7 +421,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
    )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_pb.py b/benchmarks/CL_MASR/wavlm/train_pb.py
index 052d70b73..a098748ac 100644
--- a/benchmarks/CL_MASR/wavlm/train_pb.py
+++ b/benchmarks/CL_MASR/wavlm/train_pb.py
@@ -398,7 +398,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/wavlm/train_pnn.py b/benchmarks/CL_MASR/wavlm/train_pnn.py
index 3641fa54b..90d89e672 100644
--- a/benchmarks/CL_MASR/wavlm/train_pnn.py
+++ b/benchmarks/CL_MASR/wavlm/train_pnn.py
@@ -309,7 +309,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/model.py b/benchmarks/CL_MASR/whisper/model.py
index 8c7dc26e6..39554a163 100644
--- a/benchmarks/CL_MASR/whisper/model.py
+++ b/benchmarks/CL_MASR/whisper/model.py
@@ -277,7 +277,7 @@ def generate(
         if forced_decoder_locale is None:
             # Compute most likely language token IDs
             all_lang_tokens = [
-                f"<|{l}|>" for l in self.tokenizer.supported_languages
+                f"<|{lang}|>" for lang in self.tokenizer.supported_languages
             ]
             all_lang_tokens_ids = self.tokenizer.convert_tokens_to_ids(
                 all_lang_tokens
@@ -383,8 +383,8 @@ def _greedy_search(
             alive_mask_unchanged = gen_token_ids != endoftext_id
             if not alive_mask_unchanged.all():
                 alive_mask[
-                    alive_mask == True
-                ] = alive_mask_unchanged  # noqa: E712
+                    alive_mask == True  # noqa: E712
+                ] = alive_mask_unchanged
                 if not alive_mask.any():
                     break
             # B* x S x F
@@ -567,8 +567,8 @@ def _beam_search(
             alive_mask_unchanged = end_idxes < beam_size
             if not alive_mask_unchanged.all():
                 alive_mask[
-                    alive_mask == True
-                ] = alive_mask_unchanged  # noqa: E712
+                    alive_mask == True  # noqa: E712
+                ] = alive_mask_unchanged
                 if not alive_mask.any():
                     break
             # N x B* x S x F
diff --git a/benchmarks/CL_MASR/whisper/train_agem.py b/benchmarks/CL_MASR/whisper/train_agem.py
index c6b01fd70..4774bdfed 100644
--- a/benchmarks/CL_MASR/whisper/train_agem.py
+++ b/benchmarks/CL_MASR/whisper/train_agem.py
@@ -452,7 +452,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_der.py b/benchmarks/CL_MASR/whisper/train_der.py
index 86ab58048..42294b916 100644
--- a/benchmarks/CL_MASR/whisper/train_der.py
+++ b/benchmarks/CL_MASR/whisper/train_der.py
@@ -409,7 +409,7 @@ def train(hparams, run_opts):
     """
     # Testing
    test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     replay_buffer = []
diff --git a/benchmarks/CL_MASR/whisper/train_er.py b/benchmarks/CL_MASR/whisper/train_er.py
index 2783c1f85..bf31c3ec7 100644
--- a/benchmarks/CL_MASR/whisper/train_er.py
+++ b/benchmarks/CL_MASR/whisper/train_er.py
@@ -332,7 +332,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_ewc.py b/benchmarks/CL_MASR/whisper/train_ewc.py
index bf98e5508..848646ee4 100644
--- a/benchmarks/CL_MASR/whisper/train_ewc.py
+++ b/benchmarks/CL_MASR/whisper/train_ewc.py
@@ -454,7 +454,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_ft.py b/benchmarks/CL_MASR/whisper/train_ft.py
index cf404404d..13c9ec9d9 100644
--- a/benchmarks/CL_MASR/whisper/train_ft.py
+++ b/benchmarks/CL_MASR/whisper/train_ft.py
@@ -331,7 +331,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_joint.py b/benchmarks/CL_MASR/whisper/train_joint.py
index ea0cb2743..3c67c6cf6 100644
--- a/benchmarks/CL_MASR/whisper/train_joint.py
+++ b/benchmarks/CL_MASR/whisper/train_joint.py
@@ -332,7 +332,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
@@ -413,7 +413,7 @@ def train(hparams, run_opts):
         hparams,
         run_opts,
         hparams["base_locales"] + hparams["new_locales"],
-        f"wer_test_after.txt",
+        "wer_test_after.txt",
     )

diff --git a/benchmarks/CL_MASR/whisper/train_l2p.py b/benchmarks/CL_MASR/whisper/train_l2p.py
index d2ce451d0..c392e8bd4 100644
--- a/benchmarks/CL_MASR/whisper/train_l2p.py
+++ b/benchmarks/CL_MASR/whisper/train_l2p.py
@@ -403,7 +403,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_lwf.py b/benchmarks/CL_MASR/whisper/train_lwf.py
index d69d4ab3a..0851886f3 100644
--- a/benchmarks/CL_MASR/whisper/train_lwf.py
+++ b/benchmarks/CL_MASR/whisper/train_lwf.py
@@ -368,7 +368,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_mas.py b/benchmarks/CL_MASR/whisper/train_mas.py
index 3fd37a444..dcb1c6fbe 100644
--- a/benchmarks/CL_MASR/whisper/train_mas.py
+++ b/benchmarks/CL_MASR/whisper/train_mas.py
@@ -456,7 +456,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_pb.py b/benchmarks/CL_MASR/whisper/train_pb.py
index b032fb6cd..f5a2945e1 100644
--- a/benchmarks/CL_MASR/whisper/train_pb.py
+++ b/benchmarks/CL_MASR/whisper/train_pb.py
@@ -422,7 +422,7 @@ def train(hparams, run_opts):

     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/CL_MASR/whisper/train_pnn.py b/benchmarks/CL_MASR/whisper/train_pnn.py
index c610935e8..290a279fe 100644
--- a/benchmarks/CL_MASR/whisper/train_pnn.py
+++ b/benchmarks/CL_MASR/whisper/train_pnn.py
@@ -334,7 +334,7 @@ def train(hparams, run_opts):
     """
     # Testing
     test(
-        hparams, run_opts, hparams["base_locales"], f"wer_test_before.txt",
+        hparams, run_opts, hparams["base_locales"], "wer_test_before.txt",
     )

     # Train on new locales
diff --git a/benchmarks/DASB/IEMOCAP/iemocap_prepare.py b/benchmarks/DASB/IEMOCAP/iemocap_prepare.py
index d42fcff19..0a6c469d0 100644
--- a/benchmarks/DASB/IEMOCAP/iemocap_prepare.py
+++ b/benchmarks/DASB/IEMOCAP/iemocap_prepare.py
@@ -271,7 +271,7 @@ def load_utterInfo(inputFile):
     # [START_TIME - END_TIME] TURN_NAME EMOTION [V, A, D]
     # [V, A, D] means [Valence, Arousal, Dominance]
     pattern = re.compile(
-        "[\[]*[0-9]*[.][0-9]*[ -]*[0-9]*[.][0-9]*[\]][\t][a-z0-9_]*[\t][a-z]{3}[\t][\[][0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[\]]",
+        "[\[]*[0-9]*[.][0-9]*[ -]*[0-9]*[.][0-9]*[\]][\t][a-z0-9_]*[\t][a-z]{3}[\t][\[][0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[\]]",  # noqa
         re.IGNORECASE,
     )  # noqa
     with open(inputFile, "r") as myfile:
diff --git a/benchmarks/DASB/Libri2Mix/separation/conformer/train_continuous_ssl.py b/benchmarks/DASB/Libri2Mix/separation/conformer/train_continuous_ssl.py
index a6765f483..280f92c74 100644
--- a/benchmarks/DASB/Libri2Mix/separation/conformer/train_continuous_ssl.py
+++ b/benchmarks/DASB/Libri2Mix/separation/conformer/train_continuous_ssl.py
@@ -372,7 +372,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
+            "SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/conformer/train_dac.py b/benchmarks/DASB/Libri2Mix/separation/conformer/train_dac.py
index eed798594..0b9eb26eb 100644
--- a/benchmarks/DASB/Libri2Mix/separation/conformer/train_dac.py
+++ b/benchmarks/DASB/Libri2Mix/separation/conformer/train_dac.py
@@ -128,7 +128,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/conformer/train_discrete_ssl.py b/benchmarks/DASB/Libri2Mix/separation/conformer/train_discrete_ssl.py
index a288450c3..0f4e085f5 100644
--- a/benchmarks/DASB/Libri2Mix/separation/conformer/train_discrete_ssl.py
+++ b/benchmarks/DASB/Libri2Mix/separation/conformer/train_discrete_ssl.py
@@ -155,7 +155,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/conformer/train_encodec.py b/benchmarks/DASB/Libri2Mix/separation/conformer/train_encodec.py
index b4e9f2a1e..8621d8a6d 100644
--- a/benchmarks/DASB/Libri2Mix/separation/conformer/train_encodec.py
+++ b/benchmarks/DASB/Libri2Mix/separation/conformer/train_encodec.py
@@ -376,7 +376,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/conformer/train_speech_tokenizer.py b/benchmarks/DASB/Libri2Mix/separation/conformer/train_speech_tokenizer.py
index 926376b56..7691a247c 100644
--- a/benchmarks/DASB/Libri2Mix/separation/conformer/train_speech_tokenizer.py
+++ b/benchmarks/DASB/Libri2Mix/separation/conformer/train_speech_tokenizer.py
@@ -121,7 +121,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_continuous_ssl.py b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_continuous_ssl.py
index 469280af0..7c80c71ec 100644
--- a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_continuous_ssl.py
+++ b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_continuous_ssl.py
@@ -372,7 +372,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
+            "SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_dac.py b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_dac.py
index eed798594..0b9eb26eb 100644
--- a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_dac.py
+++ b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_dac.py
@@ -128,7 +128,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_discrete_ssl.py b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_discrete_ssl.py
index a288450c3..0f4e085f5 100644
--- a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_discrete_ssl.py
+++ b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_discrete_ssl.py
@@ -155,7 +155,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_encodec.py b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_encodec.py
index b2f2fed53..fa28c948a 100644
--- a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_encodec.py
+++ b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_encodec.py
@@ -376,7 +376,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_speech_tokenizer.py b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_speech_tokenizer.py
index 926376b56..7691a247c 100644
--- a/benchmarks/DASB/Libri2Mix/separation/crdnn/train_speech_tokenizer.py
+++ b/benchmarks/DASB/Libri2Mix/separation/crdnn/train_speech_tokenizer.py
@@ -121,7 +121,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_continuous_ssl.py b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_continuous_ssl.py
index 72ba7f6ce..b0f90ba60 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_continuous_ssl.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_continuous_ssl.py
@@ -313,7 +313,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
+            "SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_dac.py b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_dac.py
index 2fe919e6a..3d263b0a0 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_dac.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_dac.py
@@ -126,7 +126,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_discrete_ssl.py b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_discrete_ssl.py
index f6811b5fa..8120a4b82 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_discrete_ssl.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_discrete_ssl.py
@@ -153,7 +153,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_encodec.py b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_encodec.py
index e9a0a7649..2ab16d698 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_encodec.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_encodec.py
@@ -305,7 +305,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_speech_tokenizer.py b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_speech_tokenizer.py
index c25d78e26..140850a76 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/conformer/train_speech_tokenizer.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/conformer/train_speech_tokenizer.py
@@ -119,7 +119,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_continuous_ssl.py b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_continuous_ssl.py
index a4c55687c..bd038ad8f 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_continuous_ssl.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_continuous_ssl.py
@@ -313,7 +313,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
+            "SSL parameters/buffers (M)": f"{ssl_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_dac.py b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_dac.py
index 2fe919e6a..3d263b0a0 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_dac.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_dac.py
@@ -126,7 +126,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_discrete_ssl.py b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_discrete_ssl.py
index f6811b5fa..8120a4b82 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_discrete_ssl.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_discrete_ssl.py
@@ -153,7 +153,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_encodec.py b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_encodec.py
index 82a5470f9..bbf2ce370 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_encodec.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_encodec.py
@@ -305,7 +305,7 @@ def on_stage_end(self, stage, stage_loss, epoch=None):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_speech_tokenizer.py b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_speech_tokenizer.py
index c25d78e26..140850a76 100644
--- a/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_speech_tokenizer.py
+++ b/benchmarks/DASB/VoiceBank/enhancement/crdnn/train_speech_tokenizer.py
@@ -119,7 +119,7 @@ def toks_to_sig(self, toks):
     )
     hparams["train_logger"].log_stats(
         stats_meta={
-            f"Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
+            "Codec parameters/buffers (M)": f"{codec_params / 1e6:.2f}",
             "Model parameters/buffers (M)": f"{model_params / 1e6:.2f}",
         },
     )
diff --git a/benchmarks/MOABB/utils/parse_results.py b/benchmarks/MOABB/utils/parse_results.py
index 5c22445b3..3bbd470f8 100644
--- a/benchmarks/MOABB/utils/parse_results.py
+++ b/benchmarks/MOABB/utils/parse_results.py
@@ -68,7 +68,7 @@ def visualize_results(paradigm: str, results: dict, vis_metrics: list) -> None:
     """
     print("\n----", paradigm.name, "----")
     for key in results:
-        if type(results[key]) == dict:
+        if isinstance(results[key], dict):
             for m in vis_metrics:
                 print(
                     key,
diff --git a/benchmarks/MOABB/utils/prepare.py b/benchmarks/MOABB/utils/prepare.py
index 9f6371be4..e2fa37c55 100644
--- a/benchmarks/MOABB/utils/prepare.py
+++ b/benchmarks/MOABB/utils/prepare.py
@@ -83,11 +83,11 @@ def get_output_dict(
     )

     if verbose == 1:
-        for l in np.unique(labels):
+        for label in np.unique(labels):
             print(
                 print(
                     "Number of label {0} examples: {1}".format(
-                        l, np.where(labels == l)[0].shape[0]
+                        label, np.where(labels == label)[0].shape[0]
                     )
                 )
             )
diff --git a/benchmarks/MP3S/Buckeye/LSTM/buckeye_prepare.py b/benchmarks/MP3S/Buckeye/LSTM/buckeye_prepare.py
index 4ea352128..b6ec8e730 100755
--- a/benchmarks/MP3S/Buckeye/LSTM/buckeye_prepare.py
+++ b/benchmarks/MP3S/Buckeye/LSTM/buckeye_prepare.py
@@ -192,7 +192,7 @@ def unzip_buckeye(buckeye_dir):
     for zip_fil in files:
         if ".py" not in zip_fil:
             os.system(
-                f"unzip -q {os.path.join(buckeye_dir,zip_fil)} -d {buckeye_dir}"
+                f"unzip -q {os.path.join(buckeye_dir, zip_fil)} -d {buckeye_dir}"
             )
     files = os.listdir(buckeye_dir)
     for under_dir in files:
diff --git a/benchmarks/MP3S/IEMOCAP/ecapa_tdnn/iemocap_prepare.py b/benchmarks/MP3S/IEMOCAP/ecapa_tdnn/iemocap_prepare.py
index 48feed81f..4236092bb 100644
--- a/benchmarks/MP3S/IEMOCAP/ecapa_tdnn/iemocap_prepare.py
+++ b/benchmarks/MP3S/IEMOCAP/ecapa_tdnn/iemocap_prepare.py
@@ -273,7 +273,7 @@ def load_utterInfo(inputFile):
     # [START_TIME - END_TIME] TURN_NAME EMOTION [V, A, D]
     # [V, A, D] means [Valence, Arousal, Dominance]
     pattern = re.compile(
-        "[\[]*[0-9]*[.][0-9]*[ -]*[0-9]*[.][0-9]*[\]][\t][a-z0-9_]*[\t][a-z]{3}[\t][\[][0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[\]]",
+        "[\[]*[0-9]*[.][0-9]*[ -]*[0-9]*[.][0-9]*[\]][\t][a-z0-9_]*[\t][a-z]{3}[\t][\[][0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[, ]+[0-9]*[.][0-9]*[\]]",  # noqa
         re.IGNORECASE,
     )  # noqa
    with open(inputFile, "r") as myfile:
diff --git a/tests/utils/recipe_tests.py b/tests/utils/recipe_tests.py
index 747ac6f1d..d6e826f14 100644
--- a/tests/utils/recipe_tests.py
+++ b/tests/utils/recipe_tests.py
@@ -41,11 +41,11 @@ def check_row_for_test(row, filters_fields, filters, test_field):
     test = True
     for i, field in enumerate(filters_fields):
         field_values = filters[i]
-        if type(field_values) == str:
+        if isinstance(field_values, str):
             # ... AND ... filter
             if not (field_values == row[field]):
                 test = False
-        elif type(field_values) == list:  # type(field) == list
+        elif isinstance(field_values, list):  # type(field) == list
             # ... AND (... OR ...) ... filter; at least one entry of the list matches
             test_flag = False
             for filt in field_values:
@@ -138,7 +138,7 @@ def prepare_test(
     ) as csvf:
         reader = csv.DictReader(csvf, delimiter=",", skipinitialspace=True)
         for row_id, row in enumerate(reader):
-            recipe_id = f"{recipe_csvfile[:-4]}_row_{row_id+2:02d}"
+            recipe_id = f"{recipe_csvfile[:-4]}_row_{row_id + 2:02d}"
             if not (
                 check_row_for_test(row, filters_fields, filters, test_field)
             ):
diff --git a/tests/utils/refactoring_checks.py b/tests/utils/refactoring_checks.py
index c9f125cee..053c22941 100644
--- a/tests/utils/refactoring_checks.py
+++ b/tests/utils/refactoring_checks.py
@@ -299,7 +299,7 @@ def gather_refactoring_results(
         with open(yaml_path, "w") as yaml_out:
             yaml.dump(results, yaml_out, default_flow_style=None)

-        print(f"\tsame: {results[repo]['same'] }")
+        print(f"\tsame: {results[repo]['same']}")


 def test_performance(