/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

//! Build utilities shared across monarch *-sys crates
//!
//! This module provides common functionality for Python environment discovery
//! and CUDA installation detection used by various build scripts.
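//!
//! # Example
//!
//! A minimal sketch of how a consuming build script might use these helpers;
//! it assumes the module is reachable as `build_utils` in that crate, and the
//! emitted directives should be adjusted to the actual link requirements.
//!
//! ```ignore
//! // Hypothetical build.rs usage; real scripts will want richer error handling.
//! fn main() {
//!     if let Some(cuda_home) = build_utils::find_cuda_home() {
//!         println!("cargo::rustc-link-search=native={}/lib64", cuda_home);
//!     }
//!     if let Ok(python) = build_utils::python_env_dirs() {
//!         if let Some(lib_dir) = python.lib_dir {
//!             println!("cargo::rustc-link-search=native={}", lib_dir);
//!         }
//!     }
//! }
//! ```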

use std::env;
use std::path::Path;
use std::path::PathBuf;

use glob::glob;
use which::which;

/// Python script to extract Python paths from sysconfig
pub const PYTHON_PRINT_DIRS: &str = r"
import sysconfig
print('PYTHON_INCLUDE_DIR:', sysconfig.get_config_var('INCLUDEDIR'))
print('PYTHON_LIB_DIR:', sysconfig.get_config_var('LIBDIR'))
";

/// Python script to extract PyTorch details from torch.utils.cpp_extension
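///
/// A sketch of how a build script might run this snippet and consume the
/// `LIBTORCH_LIB:` lines; invoking a plain `python` binary is an assumption:
///
/// ```ignore
/// let output = std::process::Command::new("python")
///     .arg("-c")
///     .arg(PYTHON_PRINT_PYTORCH_DETAILS)
///     .output()
///     .expect("failed to run python");
/// for line in String::from_utf8_lossy(&output.stdout).lines() {
///     if let Some(path) = line.strip_prefix("LIBTORCH_LIB: ") {
///         println!("cargo::rustc-link-search=native={}", path);
///     }
/// }
/// ```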
pub const PYTHON_PRINT_PYTORCH_DETAILS: &str = r"
import torch
from torch.utils import cpp_extension
print('LIBTORCH_CXX11:', torch._C._GLIBCXX_USE_CXX11_ABI)
for include_path in cpp_extension.include_paths():
    print('LIBTORCH_INCLUDE:', include_path)
for library_path in cpp_extension.library_paths():
    print('LIBTORCH_LIB:', library_path)
";

/// Python script to extract PyTorch details including CUDA info
pub const PYTHON_PRINT_CUDA_DETAILS: &str = r"
import torch
from torch.utils import cpp_extension
print('CUDA_HOME:', cpp_extension.CUDA_HOME)
for include_path in cpp_extension.include_paths():
    print('LIBTORCH_INCLUDE:', include_path)
for library_path in cpp_extension.library_paths():
    print('LIBTORCH_LIB:', library_path)
print('LIBTORCH_CXX11:', torch._C._GLIBCXX_USE_CXX11_ABI)
";

/// Python script to extract Python include and library paths
pub const PYTHON_PRINT_INCLUDE_PATH: &str = r"
import sysconfig
print('PYTHON_INCLUDE:', sysconfig.get_path('include'))
print('PYTHON_INCLUDE_DIR:', sysconfig.get_config_var('INCLUDEDIR'))
print('PYTHON_LIB_DIR:', sysconfig.get_config_var('LIBDIR'))
";

/// Configuration for a discovered CUDA environment
#[derive(Debug, Clone, Default)]
pub struct CudaConfig {
    /// Root of the CUDA installation, if one was found
    pub cuda_home: Option<PathBuf>,
    /// Header search directories (typically `<cuda_home>/include`)
    pub include_dirs: Vec<PathBuf>,
    /// Library search directories (e.g. `<cuda_home>/lib64` or `<cuda_home>/lib`)
    pub lib_dirs: Vec<PathBuf>,
}

/// Result of Python environment discovery
#[derive(Debug, Clone)]
pub struct PythonConfig {
    /// Directory reported by sysconfig's `INCLUDEDIR`, if available
    pub include_dir: Option<String>,
    /// Directory reported by sysconfig's `LIBDIR`, if available
    pub lib_dir: Option<String>,
}

/// Error type for build utilities
#[derive(Debug)]
pub enum BuildError {
    CudaNotFound,
    PythonNotFound,
    CommandFailed(String),
    PathNotFound(String),
}

impl std::fmt::Display for BuildError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            BuildError::CudaNotFound => write!(f, "CUDA installation not found"),
            BuildError::PythonNotFound => write!(f, "Python interpreter not found"),
            BuildError::CommandFailed(cmd) => write!(f, "Command failed: {}", cmd),
            BuildError::PathNotFound(path) => write!(f, "Path not found: {}", path),
        }
    }
}

impl std::error::Error for BuildError {}

/// Get environment variable with cargo rerun notification
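///
/// A small usage sketch (`CUDA_LIB_DIR` is one of the variables this module
/// already consults elsewhere):
///
/// ```ignore
/// // Re-runs the build script whenever CUDA_LIB_DIR changes.
/// let cuda_lib_dir = get_env_var_with_rerun("CUDA_LIB_DIR").ok();
/// ```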
pub fn get_env_var_with_rerun(name: &str) -> Result<String, std::env::VarError> {
    println!("cargo::rerun-if-env-changed={}", name);
    env::var(name)
}

/// Find the CUDA home directory using various heuristics
///
/// This function attempts to locate the CUDA installation through:
/// 1. The CUDA_HOME environment variable
/// 2. The CUDA_PATH environment variable
/// 3. Finding nvcc in PATH and deriving the CUDA home from its location
/// 4. Platform-specific default locations
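///
/// A sketch of typical build-script usage; the fallback path here is only an
/// illustrative assumption:
///
/// ```ignore
/// let cuda_home = find_cuda_home().unwrap_or_else(|| "/usr/local/cuda".to_string());
/// println!("cargo::rustc-link-search=native={}/lib64", cuda_home);
/// ```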
pub fn find_cuda_home() -> Option<String> {
    // Guess #1: Environment variables
    let mut cuda_home = env::var("CUDA_HOME")
        .ok()
        .or_else(|| env::var("CUDA_PATH").ok());

    if cuda_home.is_none() {
        // Guess #2: Find nvcc in PATH
        if let Ok(nvcc_path) = which("nvcc") {
            // Get parent directory twice (nvcc is in CUDA_HOME/bin)
            if let Some(cuda_dir) = nvcc_path.parent().and_then(|p| p.parent()) {
                cuda_home = Some(cuda_dir.to_string_lossy().into_owned());
            }
        } else {
            // Guess #3: Platform-specific defaults
            if cfg!(windows) {
                let pattern = r"C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v*.*";
                let cuda_homes: Vec<_> = glob(pattern).unwrap().filter_map(Result::ok).collect();
                if !cuda_homes.is_empty() {
                    cuda_home = Some(cuda_homes[0].to_string_lossy().into_owned());
                }
            } else {
                // Unix-like systems
                let cuda_candidate = "/usr/local/cuda";
                if Path::new(cuda_candidate).exists() {
                    cuda_home = Some(cuda_candidate.to_string());
                }
            }
        }
    }

    cuda_home
}

/// Discover CUDA configuration including home, include dirs, and lib dirs
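///
/// A sketch of how a build script might consume the discovered directories
/// (using the same `cargo::` directive style as the rest of this module):
///
/// ```ignore
/// let cuda = discover_cuda_config().expect("CUDA not found");
/// for lib_dir in &cuda.lib_dirs {
///     println!("cargo::rustc-link-search=native={}", lib_dir.display());
/// }
/// // `cuda.include_dirs` would typically be forwarded to a C/C++ compile step.
/// ```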
pub fn discover_cuda_config() -> Result<CudaConfig, BuildError> {
    let cuda_home = find_cuda_home().ok_or(BuildError::CudaNotFound)?;
    let cuda_home_path = PathBuf::from(&cuda_home);

    let mut config = CudaConfig {
        cuda_home: Some(cuda_home_path.clone()),
        include_dirs: Vec::new(),
        lib_dirs: Vec::new(),
    };

    // Add standard include directory
    let include_dir = cuda_home_path.join("include");
    if include_dir.exists() {
        config.include_dirs.push(include_dir);
    }

    // Add standard library directories
    for lib_subdir in &["lib64", "lib", "lib/x64"] {
        let lib_dir = cuda_home_path.join(lib_subdir);
        if lib_dir.exists() {
            config.lib_dirs.push(lib_dir);
            break; // Use first found
        }
    }

    Ok(config)
}

/// Validate that the CUDA installation exists and is complete
pub fn validate_cuda_installation() -> Result<String, BuildError> {
    let cuda_config = discover_cuda_config()?;
    let cuda_home = cuda_config.cuda_home.ok_or(BuildError::CudaNotFound)?;
    let cuda_home_str = cuda_home.to_string_lossy().to_string();

    // Verify CUDA include directory exists
    let cuda_include_path = cuda_home.join("include");
    if !cuda_include_path.exists() {
        return Err(BuildError::PathNotFound(format!(
            "CUDA include directory at {}",
            cuda_include_path.display()
        )));
    }

    Ok(cuda_home_str)
}

/// Get CUDA library directory
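///
/// The result is typically forwarded straight to rustc, e.g.:
///
/// ```ignore
/// let cuda_lib_dir = get_cuda_lib_dir().expect("CUDA library directory not found");
/// println!("cargo::rustc-link-search=native={}", cuda_lib_dir);
/// ```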
pub fn get_cuda_lib_dir() -> Result<String, BuildError> {
    // Check if user explicitly set CUDA_LIB_DIR
    if let Ok(cuda_lib_dir) = env::var("CUDA_LIB_DIR") {
        return Ok(cuda_lib_dir);
    }

    // Try to deduce from CUDA configuration
    let cuda_config = discover_cuda_config()?;
    if let Some(cuda_home) = cuda_config.cuda_home {
        let lib64_path = cuda_home.join("lib64");
        if lib64_path.exists() {
            return Ok(lib64_path.to_string_lossy().to_string());
        }
        let lib_path = cuda_home.join("lib");
        if lib_path.exists() {
            return Ok(lib_path.to_string_lossy().to_string());
        }
    }

    Err(BuildError::PathNotFound(
        "CUDA library directory".to_string(),
    ))
}

/// Discover Python environment directories using sysconfig
///
/// Returns a [`PythonConfig`] with optional include and library directories
pub fn python_env_dirs() -> Result<PythonConfig, BuildError> {
    python_env_dirs_with_interpreter("python")
}

/// Discover Python environment directories with a specific interpreter
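///
/// A sketch with an explicit interpreter name; `python3` being on PATH at
/// build time is an assumption:
///
/// ```ignore
/// let config = python_env_dirs_with_interpreter("python3").expect("python discovery failed");
/// if let Some(lib_dir) = config.lib_dir {
///     println!("cargo::rustc-link-search=native={}", lib_dir);
/// }
/// ```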
pub fn python_env_dirs_with_interpreter(interpreter: &str) -> Result<PythonConfig, BuildError> {
    let output = std::process::Command::new(interpreter)
        .arg("-c")
        .arg(PYTHON_PRINT_DIRS)
        .output()
        .map_err(|_| BuildError::CommandFailed(format!("running {}", interpreter)))?;

    if !output.status.success() {
        return Err(BuildError::CommandFailed(format!(
            "{} exited with error",
            interpreter
        )));
    }

    let mut include_dir = None;
    let mut lib_dir = None;

    for line in String::from_utf8_lossy(&output.stdout).lines() {
        if let Some(path) = line.strip_prefix("PYTHON_INCLUDE_DIR: ") {
            include_dir = Some(path.to_string());
        }
        if let Some(path) = line.strip_prefix("PYTHON_LIB_DIR: ") {
            lib_dir = Some(path.to_string());
        }
    }

    Ok(PythonConfig {
        include_dir,
        lib_dir,
    })
}

/// Print helpful error message for CUDA not found
pub fn print_cuda_error_help() {
    eprintln!("Error: CUDA installation not found!");
    eprintln!("Please ensure CUDA is installed and one of the following is true:");
    eprintln!("  1. Set CUDA_HOME environment variable to your CUDA installation directory");
    eprintln!("  2. Set CUDA_PATH environment variable to your CUDA installation directory");
    eprintln!("  3. Ensure 'nvcc' is in your PATH");
    eprintln!("  4. Install CUDA to the default location (/usr/local/cuda on Linux)");
    eprintln!();
    eprintln!("Example: export CUDA_HOME=/usr/local/cuda-12.0");
}

/// Print helpful error message for CUDA lib dir not found
pub fn print_cuda_lib_error_help() {
    eprintln!("Error: CUDA library directory not found!");
    eprintln!("Please set CUDA_LIB_DIR environment variable to your CUDA library directory.");
    eprintln!();
    eprintln!("Example: export CUDA_LIB_DIR=/usr/local/cuda-12.0/lib64");
    eprintln!("Or: export CUDA_LIB_DIR=/usr/lib64");
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_find_cuda_home_env_var() {
        env::set_var("CUDA_HOME", "/test/cuda");
        let result = find_cuda_home();
        env::remove_var("CUDA_HOME");
        assert_eq!(result, Some("/test/cuda".to_string()));
    }

    #[test]
    fn test_python_scripts_constants() {
        assert!(PYTHON_PRINT_DIRS.contains("sysconfig"));
        assert!(PYTHON_PRINT_PYTORCH_DETAILS.contains("torch"));
        assert!(PYTHON_PRINT_CUDA_DETAILS.contains("CUDA_HOME"));
    }
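
    // Exercises the Display impl for BuildError defined above.
    #[test]
    fn test_build_error_display() {
        assert_eq!(
            BuildError::CudaNotFound.to_string(),
            "CUDA installation not found"
        );
        assert_eq!(
            BuildError::CommandFailed("python -c ...".to_string()).to_string(),
            "Command failed: python -c ..."
        );
    }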
}