diff --git a/backend/app.py b/backend/app.py
index 1cbac8a1..6d79a6a9 100644
--- a/backend/app.py
+++ b/backend/app.py
@@ -163,18 +163,17 @@ def _get_gpu_status() -> str:
     elif backend_type == "mlx":
         return "Metal (Apple Silicon via MLX)"
 
-    # Intel XPU (Arc / Data Center) via IPEX
-    try:
-        import intel_extension_for_pytorch  # noqa: F401
-
-        if hasattr(torch, "xpu") and torch.xpu.is_available():
-            try:
-                xpu_name = torch.xpu.get_device_name(0)
-            except Exception:
-                xpu_name = "Intel GPU"
-            return f"XPU ({xpu_name})"
-    except ImportError:
-        pass
+    # Intel XPU (Arc / Data Center) — native PyTorch 2.4+ support; IPEX optional
+    if hasattr(torch, "xpu") and torch.xpu.is_available():
+        try:
+            import intel_extension_for_pytorch  # noqa: F401 -- enhances XPU perf if available
+        except Exception:
+            pass
+        try:
+            xpu_name = torch.xpu.get_device_name(0)
+        except Exception:
+            xpu_name = "Intel GPU"
+        return f"XPU ({xpu_name})"
 
     return "None (CPU only)"
 
diff --git a/backend/backends/base.py b/backend/backends/base.py
index c566af10..d774645e 100644
--- a/backend/backends/base.py
+++ b/backend/backends/base.py
@@ -102,13 +102,12 @@ def get_torch_device(
         return "cuda"
 
     if allow_xpu:
-        try:
-            import intel_extension_for_pytorch  # noqa: F401
-
-            if hasattr(torch, "xpu") and torch.xpu.is_available():
-                return "xpu"
-        except ImportError:
-            pass
+        if hasattr(torch, "xpu") and torch.xpu.is_available():
+            try:
+                import intel_extension_for_pytorch  # noqa: F401 -- enhances XPU perf if available
+            except Exception:
+                pass
+            return "xpu"
 
     if allow_directml:
         try:
diff --git a/backend/routes/health.py b/backend/routes/health.py
index 79c513f5..0bb454f9 100644
--- a/backend/routes/health.py
+++ b/backend/routes/health.py
@@ -67,17 +67,17 @@ async def health():
     has_xpu = False
     xpu_name = None
 
-    try:
-        import intel_extension_for_pytorch as ipex  # noqa: F401 -- side-effect import enables XPU
-
-        if hasattr(torch, "xpu") and torch.xpu.is_available():
-            has_xpu = True
-            try:
-                xpu_name = torch.xpu.get_device_name(0)
-            except Exception:
-                xpu_name = "Intel GPU"
-    except ImportError:
-        pass
+    # Native XPU support in PyTorch 2.4+; IPEX optional for enhanced performance
+    if hasattr(torch, "xpu") and torch.xpu.is_available():
+        has_xpu = True
+        try:
+            import intel_extension_for_pytorch  # noqa: F401 -- enhances XPU perf if available
+        except Exception:
+            pass
+        try:
+            xpu_name = torch.xpu.get_device_name(0)
+        except Exception:
+            xpu_name = "Intel GPU"
 
     has_directml = False
     directml_name = None