Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,4 @@ package-lock.json
CLAUDE.md
.prettierrc
off
AGENTS.md
25 changes: 23 additions & 2 deletions crates/fhir-gen/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,19 @@ fn main() {
return;
}

println!("cargo:warning=Downloading R6 definitions from HL7 build server");

let resources_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("resources/R6");

// Create the resources directory if it doesn't exist
fs::create_dir_all(&resources_dir).expect("Failed to create resources directory");

// Skip download when the repository already vendors the extracted fixtures.
if has_existing_payload(&resources_dir) {
println!("cargo:warning=Using existing R6 definitions; skipping download");
return;
}

println!("cargo:warning=Downloading R6 definitions from HL7 build server");

let url = "https://build.fhir.org/definitions.json.zip";

let output_path = resources_dir.join("definitions.json.zip");
Expand Down Expand Up @@ -111,6 +117,21 @@ fn main() {
println!("FHIR definitions downloaded successfully");
}

/// Returns `true` when `resources_dir` already contains at least one regular
/// file, meaning a previously vendored/extracted payload is present and the
/// download step can be skipped.
///
/// If the directory cannot be read (missing, permissions, etc.) a cargo
/// build-script warning is emitted and `false` is returned so the caller
/// falls back to downloading.
fn has_existing_payload(resources_dir: &Path) -> bool {
    let entries = match resources_dir.read_dir() {
        Ok(entries) => entries,
        Err(err) => {
            println!(
                "cargo:warning=Unable to inspect {:?} for cached payloads: {}",
                resources_dir, err
            );
            return false;
        }
    };

    // Only regular files count as a cached payload; subdirectories or
    // unreadable entries do not.
    for entry in entries.flatten() {
        if matches!(entry.file_type(), Ok(kind) if kind.is_file()) {
            return true;
        }
    }
    false
}

fn download_with_retry(
client: &reqwest::blocking::Client,
url: &str,
Expand Down
139 changes: 48 additions & 91 deletions crates/fhir-gen/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ fn process_single_version(version: &FhirVersion, output_path: impl AsRef<Path>)
let mut all_resources = Vec::new();
let mut all_complex_types = Vec::new();

// First pass: collect all bundles and extract global information
// First pass: parse all JSON files and collect all StructureDefinitions
let bundles: Vec<_> = visit_dirs(&version_dir)?
.into_iter()
.filter_map(|file_path| match parse_structure_definitions(&file_path) {
Expand All @@ -342,20 +342,58 @@ fn process_single_version(version: &FhirVersion, output_path: impl AsRef<Path>)
})
.collect();

// Extract global information from all bundles
// Collect and extract all elements for cycle detection
let mut all_elements = Vec::new();
let mut all_struct_defs = Vec::new();

for bundle in &bundles {
if let Some(entries) = bundle.entry.as_ref() {
for entry in entries {
if let Some(resource) = &entry.resource {
if let Resource::StructureDefinition(def) = resource {
if is_valid_structure_definition(def) {
all_struct_defs.push(def);
if let Some(snapshot) = &def.snapshot {
if let Some(elements) = &snapshot.element {
all_elements.extend(elements.iter());
}
}
}
}
}
}
}

// Extract global information
if let Some((hierarchy, resources, complex_types)) = extract_bundle_info(bundle) {
global_type_hierarchy.extend(hierarchy);
all_resources.extend(resources);
all_complex_types.extend(complex_types);
}
}

// Second pass: generate code for each bundle
for bundle in bundles {
generate_code(bundle, &version_path, false)?; // false = don't generate global constructs
// Sort StructureDefinitions by name for deterministic output
all_struct_defs.sort_by(|a, b| a.name.cmp(&b.name));

// Detect cycles across all elements
let cycles = detect_struct_cycles(&all_elements);

// Generate code for each StructureDefinition in sorted order
for def in all_struct_defs {
let content = structure_definition_to_rust(def, &cycles);
let mut file = std::fs::OpenOptions::new()
.create(true)
.append(true)
.open(&version_path)?;
write!(file, "{}", content)?;
}

// Sort for deterministic output
all_resources.sort();
all_resources.dedup();
all_complex_types.sort();
all_complex_types.dedup();

// Generate global constructs once at the end
generate_global_constructs(
&version_path,
Expand Down Expand Up @@ -676,90 +714,6 @@ fn generate_global_constructs(
Ok(())
}

/// Generates Rust code from a Bundle of FHIR StructureDefinitions.
///
/// This is the main code generation function that processes all
/// StructureDefinitions in a Bundle and appends the corresponding Rust code
/// to a file.
///
/// # Arguments
///
/// * `bundle` - FHIR Bundle containing StructureDefinitions and other resources
/// * `output_path` - Path to the output Rust file (created if absent, opened
///   in append mode)
/// * `_generate_globals` - Currently unused; retained for interface
///   compatibility with callers
///
/// # Returns
///
/// Returns `Ok(())` on success, or an `io::Error` if file operations fail.
///
/// # Process Overview
///
/// 1. **First Pass**: collects every ElementDefinition from valid
///    StructureDefinition snapshots so cycle detection sees the whole bundle
/// 2. **Second Pass**: generates Rust code for each valid StructureDefinition
///
/// SearchParameter and OperationDefinition resources are recognized but not
/// yet translated into code (TODO); all other resource types are skipped.
fn generate_code(
    bundle: Bundle,
    output_path: impl AsRef<Path>,
    _generate_globals: bool,
) -> io::Result<()> {
    // A bundle with no entries produces no output.
    let entries = match bundle.entry.as_ref() {
        Some(entries) => entries,
        None => return Ok(()),
    };

    // Pass 1: gather every ElementDefinition from valid StructureDefinition
    // snapshots so that circular struct references are detected globally.
    let mut collected: Vec<&ElementDefinition> = Vec::new();
    for entry in entries {
        let def = match &entry.resource {
            Some(Resource::StructureDefinition(def)) => def,
            _ => continue,
        };
        if !is_valid_structure_definition(def) {
            continue;
        }
        if let Some(elements) = def.snapshot.as_ref().and_then(|s| s.element.as_ref()) {
            collected.extend(elements.iter());
        }
    }

    let cycles = detect_struct_cycles(&collected);

    // Pass 2: emit code for each valid StructureDefinition, appending to the
    // output file.
    for entry in entries {
        match &entry.resource {
            Some(Resource::StructureDefinition(def)) if is_valid_structure_definition(def) => {
                let rendered = structure_definition_to_rust(def, &cycles);
                let mut out = std::fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(output_path.as_ref())?;
                write!(out, "{}", rendered)?;
            }
            Some(Resource::SearchParameter(_)) => {
                // TODO: Generate code for search parameter
            }
            Some(Resource::OperationDefinition(_)) => {
                // TODO: Generate code for operation definition
            }
            _ => {} // Skip other resource types (and invalid definitions) for now
        }
    }

    Ok(())
}

/// Generates a Rust enum containing all FHIR resource types.
///
/// This function creates a single enum that can represent any FHIR resource,
Expand Down Expand Up @@ -2179,8 +2133,11 @@ fn process_elements(
}
}

// Process each group
for (path, group) in element_groups {
// Process each group in sorted order for deterministic output
let mut sorted_groups: Vec<_> = element_groups.into_iter().collect();
sorted_groups.sort_by(|a, b| a.0.cmp(&b.0));

for (path, group) in sorted_groups {
let type_name = generate_type_name(&path);

// Skip if we've already processed this type
Expand Down
27 changes: 12 additions & 15 deletions crates/fhir-macro/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -812,9 +812,15 @@ fn generate_serialize_impl(data: &Data, name: &Ident) -> proc_macro2::TokenStrea

// Import SerializeMap trait if we have flattened fields
let import_serialize_map = if has_flattened_fields {
quote! { use serde::ser::SerializeMap; }
quote! {
use serde::ser::SerializeMap;
use crate::serde_helpers;
}
} else {
quote! { use serde::ser::SerializeStruct; }
quote! {
use serde::ser::SerializeStruct;
use crate::serde_helpers;
}
};

let mut field_serializers = Vec::new();
Expand Down Expand Up @@ -1131,19 +1137,15 @@ fn generate_serialize_impl(data: &Data, name: &Ident) -> proc_macro2::TokenStrea
if has_flattened_fields {
// For SerializeMap
quote! {
// Use serde_json to check if the field serializes to null or empty object
let json_value = serde_json::to_value(&#field_access).map_err(|_| serde::ser::Error::custom("serialization failed"))?;
if !json_value.is_null() && !(json_value.is_object() && json_value.as_object().unwrap().is_empty()) {
if !#field_access.is_empty() {
// Use serialize_entry for SerializeMap
state.serialize_entry(&#effective_field_name_str, &#field_access)?;
}
}
} else {
// For SerializeStruct
quote! {
// Use serde_json to check if the field serializes to null or empty object
let json_value = serde_json::to_value(&#field_access).map_err(|_| serde::ser::Error::custom("serialization failed"))?;
if !json_value.is_null() && !(json_value.is_object() && json_value.as_object().unwrap().is_empty()) {
if !#field_access.is_empty() {
// Use serialize_field for SerializeStruct
state.serialize_field(&#effective_field_name_str, &#field_access)?;
}
Expand All @@ -1154,19 +1156,15 @@ fn generate_serialize_impl(data: &Data, name: &Ident) -> proc_macro2::TokenStrea
if has_flattened_fields {
// For SerializeMap
quote! {
// Use serde_json to check if the field serializes to null or empty object
let json_value = serde_json::to_value(&#field_access).map_err(|_| serde::ser::Error::custom("serialization failed"))?;
if !json_value.is_null() && !(json_value.is_object() && json_value.as_object().unwrap().is_empty()) {
if crate::serde_helpers::has_non_empty_content(&#field_access) {
// Use serialize_entry for SerializeMap
state.serialize_entry(&#effective_field_name_str, &#field_access)?;
}
}
} else {
// For SerializeStruct
quote! {
// Use serde_json to check if the field serializes to null or empty object
let json_value = serde_json::to_value(&#field_access).map_err(|_| serde::ser::Error::custom("serialization failed"))?;
if !json_value.is_null() && !(json_value.is_object() && json_value.as_object().unwrap().is_empty()) {
if crate::serde_helpers::has_non_empty_content(&#field_access) {
// Use serialize_field for SerializeStruct
state.serialize_field(&#effective_field_name_str, &#field_access)?;
}
Expand All @@ -1177,7 +1175,6 @@ fn generate_serialize_impl(data: &Data, name: &Ident) -> proc_macro2::TokenStrea
field_counts.push(field_counting_code);
field_serializers.push(field_serializing_code);
}
// Use the has_flattened_fields variable defined at the top of the function
if has_flattened_fields {
// If we have flattened fields, use serialize_map instead of serialize_struct
quote! {
Expand Down
45 changes: 43 additions & 2 deletions crates/fhir/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,54 @@ fn main() {
return;
}

println!("cargo:warning=Downloading R6 test data from HL7 build server");

let resources_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests/data/R6");

// Create the resources directory if it doesn't exist
fs::create_dir_all(&resources_dir).expect("Failed to create resources directory");

// Skip download when the repository already contains the extracted fixtures.
// This allows offline builds/tests (and prevents repeated downloads) while
// still letting the fallback pull fresh fixtures when the directory is empty.
match resources_dir.read_dir() {
Ok(dir_entries) => {
let mut has_files = false;
for entry in dir_entries {
match entry {
Ok(entry) => {
if entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
has_files = true;
break;
}
}
Err(err) => {
println!(
"cargo:warning=Failed to inspect {:?}: {}",
resources_dir, err
);
}
}
}

if has_files {
println!("cargo:warning=Using existing R6 test data; skipping download");
return;
} else {
println!(
"cargo:warning=No files found in {:?}; downloading test data",
resources_dir
);
}
}
Err(err) => {
println!(
"cargo:warning=Unable to list {:?}: {}; continuing with download",
resources_dir, err
);
}
}

println!("cargo:warning=Downloading R6 test data from HL7 build server");

let url = "https://build.fhir.org/examples-json.zip";

let output_path = resources_dir.join("examples.json.zip");
Expand Down
2 changes: 1 addition & 1 deletion crates/fhir/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1432,7 +1432,7 @@ pub mod parameters;
// Re-export commonly used types from parameters module
pub use parameters::{ParameterValueAccessor, VersionIndependentParameters};

// Removed the FhirSerde trait definition
mod serde_helpers;

/// Multi-version FHIR resource container supporting version-agnostic operations.
///
Expand Down
Loading
Loading