From 350babe21143cff85ae4a5bb4fb234311c6f0146 Mon Sep 17 00:00:00 2001
From: Mengwei Liu
Date: Thu, 17 Jul 2025 15:24:03 -0700
Subject: [PATCH] Fix lint issues

---
 test/TARGETS              | 1 +
 test/test_hf_tokenizer.py | 8 +++-----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/test/TARGETS b/test/TARGETS
index eebd6cc..d55abea 100644
--- a/test/TARGETS
+++ b/test/TARGETS
@@ -4,6 +4,7 @@
 load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")
 load(":targets.bzl", "define_common_targets")
 
+# @noautodeps
 oncall("ai_infra_mobile_platform")
 
 define_common_targets()

diff --git a/test/test_hf_tokenizer.py b/test/test_hf_tokenizer.py
index 304da49..6162dc1 100644
--- a/test/test_hf_tokenizer.py
+++ b/test/test_hf_tokenizer.py
@@ -10,18 +10,16 @@
 """
 
 import unittest
+from tempfile import TemporaryDirectory
+
 import pytest
 
 from pytorch_tokenizers import CppHFTokenizer
 from transformers import AutoTokenizer
-from tempfile import TemporaryDirectory
 
 PROMPT = "What is the capital of France?"
 
-@pytest.mark.parametrize("model_id", [
-    "HuggingFaceTB/SmolLM3-3B",
-    "Qwen/Qwen2.5-0.5B"
-])
+@pytest.mark.parametrize("model_id", ["HuggingFaceTB/SmolLM3-3B", "Qwen/Qwen2.5-0.5B"])
 def test_models(model_id: str) -> None:
     with TemporaryDirectory() as temp_dir:
         tokenizer = AutoTokenizer.from_pretrained(model_id)
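
Note (not part of the patch): the diff context ends inside test_models, so the rest of the test body is not shown. The following is only a hedged sketch of what a round-trip comparison between the Hugging Face tokenizer and CppHFTokenizer might look like; the save_pretrained/load/encode sequence, the tokenizer.json path, and the CppHFTokenizer method names are assumptions for illustration, not taken from this diff.

# Hypothetical sketch only: the real test body lies outside the diff context above,
# and CppHFTokenizer.load()/encode() are assumed bindings, not confirmed by this patch.
import os
from tempfile import TemporaryDirectory

from pytorch_tokenizers import CppHFTokenizer
from transformers import AutoTokenizer

PROMPT = "What is the capital of France?"


def roundtrip(model_id: str) -> None:
    with TemporaryDirectory() as temp_dir:
        # Save the Hugging Face tokenizer so the C++ implementation can load tokenizer.json.
        hf_tokenizer = AutoTokenizer.from_pretrained(model_id)
        hf_tokenizer.save_pretrained(temp_dir)

        cpp_tokenizer = CppHFTokenizer()
        cpp_tokenizer.load(os.path.join(temp_dir, "tokenizer.json"))

        # Both tokenizers should produce identical token IDs for the same prompt.
        assert cpp_tokenizer.encode(PROMPT) == hf_tokenizer.encode(PROMPT)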