diff --git a/Makefile b/Makefile index 680455313371..89a9d0cdc37a 100644 --- a/Makefile +++ b/Makefile @@ -30,10 +30,10 @@ SPACE := $() $() NEVER_PYUPGRADE_PATHS := .venv/ .tox/ lib/galaxy/schema/bco/ \ lib/galaxy/schema/drs/ lib/tool_shed_client/schema/trs \ scripts/check_python.py tools/ test/functional/tools/cwl_tools/ -PY37_PYUPGRADE_PATHS := lib/galaxy/exceptions/ lib/galaxy/job_metrics/ \ - lib/galaxy/objectstore/ lib/galaxy/tool_util/ lib/galaxy/util/ \ - test/unit/job_metrics/ test/unit/objectstore/ test/unit/tool_util/ \ - test/unit/util/ +PY38_PYUPGRADE_PATHS := lib/galaxy/exceptions/ lib/galaxy/job_metrics/ \ + lib/galaxy/objectstore/ lib/galaxy/tool_util/ lib/galaxy/tool_util_models/ \ + lib/galaxy/util/ test/unit/job_metrics/ test/unit/objectstore/ \ + test/unit/tool_util/ test/unit/tool_util_models/ test/unit/util/ all: help @echo "This makefile is used for building Galaxy's JS client, documentation, and drive the release process. A sensible all target is not implemented." @@ -62,10 +62,10 @@ format: ## Format Python code base remove-unused-imports: ## Remove unused imports in Python code base $(IN_VENV) autoflake --in-place --remove-all-unused-imports --recursive --verbose lib/ test/ -pyupgrade: ## Convert older code patterns to Python 3.7/3.9 idiomatic ones - ack --type=python -f | grep -v '^$(subst $(SPACE),\|^,$(NEVER_PYUPGRADE_PATHS) $(PY37_PYUPGRADE_PATHS))' | xargs pyupgrade --py39-plus - ack --type=python -f | grep -v '^$(subst $(SPACE),\|^,$(NEVER_PYUPGRADE_PATHS) $(PY37_PYUPGRADE_PATHS))' | xargs auto-walrus - ack --type=python -f $(PY37_PYUPGRADE_PATHS) | xargs pyupgrade --py37-plus +pyupgrade: ## Convert older code patterns to Python 3.8/3.9 idiomatic ones + ack --type=python -f | grep -v '^$(subst $(SPACE),\|^,$(NEVER_PYUPGRADE_PATHS) $(PY38_PYUPGRADE_PATHS))' | xargs pyupgrade --py39-plus + ack --type=python -f | grep -v '^$(subst $(SPACE),\|^,$(NEVER_PYUPGRADE_PATHS) $(PY38_PYUPGRADE_PATHS))' | xargs auto-walrus + ack --type=python -f $(PY38_PYUPGRADE_PATHS) | xargs pyupgrade --py38-plus docs-slides-ready: test -f plantuml.jar || wget http://jaist.dl.sourceforge.net/project/plantuml/plantuml.jar diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 0d342a85890c..583ef4521de6 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -18044,8 +18044,8 @@ export interface components { */ url: string; }; - /** RootModel[Dict[str, int]] */ - RootModel_Dict_str__int__: { + /** RootModel[dict[str, int]] */ + RootModel_dict_str__int__: { [key: string]: number; }; /** RulesParameterModel */ @@ -39456,7 +39456,7 @@ export interface operations { [name: string]: unknown; }; content: { - "application/json": components["schemas"]["RootModel_Dict_str__int__"]; + "application/json": components["schemas"]["RootModel_dict_str__int__"]; }; }; /** @description Request Error */ diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index d1b9aa8d46ec..86f0fc1dc31a 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -9,10 +9,7 @@ from typing import ( Any, Callable, - Dict, - List, Optional, - Tuple, ) from beaker.cache import CacheManager @@ -188,7 +185,7 @@ class HaltableContainer(Container): - haltables: List[Tuple[str, Callable]] + haltables: list[tuple[str, Callable]] def __init__(self) -> None: super().__init__() @@ -593,7 +590,7 @@ def __init__(self, configure_logging=True, use_converters=True, use_display_appl self.job_config = self._register_singleton(jobs.JobConfiguration) # Setup infrastructure for short term 
storage manager. - short_term_storage_config_kwds: Dict[str, Any] = {} + short_term_storage_config_kwds: dict[str, Any] = {} short_term_storage_config_kwds["short_term_storage_directory"] = self.config.short_term_storage_dir short_term_storage_default_duration = self.config.short_term_storage_default_duration short_term_storage_maximum_duration = self.config.short_term_storage_maximum_duration diff --git a/lib/galaxy/authnz/custos_authnz.py b/lib/galaxy/authnz/custos_authnz.py index 6a9a89373235..45337f0f2910 100644 --- a/lib/galaxy/authnz/custos_authnz.py +++ b/lib/galaxy/authnz/custos_authnz.py @@ -10,7 +10,6 @@ timedelta, ) from typing import ( - List, Optional, ) from urllib.parse import quote @@ -59,9 +58,9 @@ class CustosAuthnzConfiguration: redirect_uri: str ca_bundle: Optional[str] pkce_support: bool - accepted_audiences: List[str] + accepted_audiences: list[str] extra_params: Optional[dict] - extra_scopes: List[str] + extra_scopes: list[str] authorization_endpoint: Optional[str] token_endpoint: Optional[str] end_session_endpoint: Optional[str] @@ -581,7 +580,7 @@ class _CustosAuthBasedProviderCacheItem: oidc_backend_config: dict idphint: str - _CustosAuthBasedProvidersCache: List[_CustosAuthBasedProviderCacheItem] = [] + _CustosAuthBasedProvidersCache: list[_CustosAuthBasedProviderCacheItem] = [] @staticmethod def GetCustosBasedAuthProvider(provider, oidc_config, oidc_backend_config, idphint=None): diff --git a/lib/galaxy/celery/__init__.py b/lib/galaxy/celery/__init__.py index 7b313d55f3e9..5fc03abe7f70 100644 --- a/lib/galaxy/celery/__init__.py +++ b/lib/galaxy/celery/__init__.py @@ -9,7 +9,6 @@ from typing import ( Any, Callable, - Dict, ) import pebble @@ -201,7 +200,7 @@ def wrapper(*args, **kwds): def init_celery_app(): - celery_app_kwd: Dict[str, Any] = { + celery_app_kwd: dict[str, Any] = { "include": TASKS_MODULES, "task_default_queue": DEFAULT_TASK_QUEUE, "task_create_missing_queues": True, @@ -235,7 +234,7 @@ def schedule_task(task, interval): "schedule": interval, } - beat_schedule: Dict[str, Dict[str, Any]] = {} + beat_schedule: dict[str, dict[str, Any]] = {} schedule_task("prune_history_audit_table", config.history_audit_table_prune_interval) schedule_task("cleanup_short_term_storage", config.short_term_storage_cleanup_interval) diff --git a/lib/galaxy/celery/tasks.py b/lib/galaxy/celery/tasks.py index 79ade5ccefb9..07024badba43 100644 --- a/lib/galaxy/celery/tasks.py +++ b/lib/galaxy/celery/tasks.py @@ -503,8 +503,7 @@ def send_notification_to_recipients_async( @galaxy_task(action="dispatch pending notifications") def dispatch_pending_notifications(notification_manager: NotificationManager): """Dispatch pending notifications.""" - count = notification_manager.dispatch_pending_notifications_via_channels() - if count: + if count := notification_manager.dispatch_pending_notifications_via_channels(): log.info(f"Successfully dispatched {count} notifications.") diff --git a/lib/galaxy/config/__init__.py b/lib/galaxy/config/__init__.py index 724f3297aec1..2a5e1afed9c3 100644 --- a/lib/galaxy/config/__init__.py +++ b/lib/galaxy/config/__init__.py @@ -21,10 +21,7 @@ Any, Callable, cast, - Dict, - List, Optional, - Set, SupportsInt, TYPE_CHECKING, TypeVar, @@ -74,7 +71,7 @@ GALAXY_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "config_schema.yml" REPORTS_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "reports_config_schema.yml" TOOL_SHED_CONFIG_SCHEMA_PATH = GALAXY_SCHEMAS_PATH / "tool_shed_config_schema.yml" -LOGGING_CONFIG_DEFAULT: Dict[str, Any] = { +LOGGING_CONFIG_DEFAULT: 
dict[str, Any] = { "disable_existing_loggers": False, "version": 1, "root": { @@ -151,7 +148,7 @@ } """Default value for logging configuration, passed to :func:`logging.config.dictConfig`""" -DEPENDENT_CONFIG_DEFAULTS: Dict[str, str] = { +DEPENDENT_CONFIG_DEFAULTS: dict[str, str] = { "mulled_resolution_cache_url": "database_connection", "citation_cache_url": "database_connection", "biotools_service_cache_url": "database_connection", @@ -241,13 +238,13 @@ def expand_pretty_datetime_format(value): class BaseAppConfiguration(HasDynamicProperties): # Override in subclasses (optional): {KEY: config option, VALUE: deprecated directory name} # If VALUE == first directory in a user-supplied path that resolves to KEY, it will be stripped from that path - renamed_options: Optional[Dict[str, str]] = None - deprecated_dirs: Dict[str, str] = {} - paths_to_check_against_root: Set[str] = ( + renamed_options: Optional[dict[str, str]] = None + deprecated_dirs: dict[str, str] = {} + paths_to_check_against_root: set[str] = ( set() ) # backward compatibility: if resolved path doesn't exist, try resolving w.r.t root - add_sample_file_to_defaults: Set[str] = set() # for these options, add sample config files to their defaults - listify_options: Set[str] = set() # values for these options are processed as lists of values + add_sample_file_to_defaults: set[str] = set() # for these options, add sample config files to their defaults + listify_options: set[str] = set() # values for these options are processed as lists of values object_store_store_by: str shed_tools_dir: str @@ -441,7 +438,7 @@ def _set_alt_paths(self, option, *alt_paths): return path def _update_raw_config_from_kwargs(self, kwargs): - type_converters: Dict[str, Callable[[Any], Union[bool, int, float, str]]] = { + type_converters: dict[str, Callable[[Any], Union[bool, int, float, str]]] = { "bool": string_as_bool, "int": int, "float": float, @@ -603,7 +600,7 @@ class CommonConfigurationMixin: """Shared configuration settings code for Galaxy and ToolShed.""" sentry_dsn: str - config_dict: Dict[str, str] + config_dict: dict[str, str] @property def admin_users(self): @@ -712,7 +709,7 @@ class GalaxyAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin): "tool_config_file", } - allowed_origin_hostnames: List[str] + allowed_origin_hostnames: list[str] builds_file_path: str container_resolvers_config_file: str database_connection: str @@ -729,7 +726,7 @@ class GalaxyAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin): len_file_path: str manage_dependency_relationships: bool monitor_thread_join_timeout: int - mulled_channels: List[str] + mulled_channels: list[str] new_file_path: str nginx_upload_store: str password_expiration_period: timedelta @@ -737,24 +734,24 @@ class GalaxyAppConfiguration(BaseAppConfiguration, CommonConfigurationMixin): pretty_datetime_format: str sanitize_allowlist_file: str shed_tool_data_path: str - themes: Dict[str, Dict[str, str]] - themes_by_host: Dict[str, Dict[str, Dict[str, str]]] + themes: dict[str, dict[str, str]] + themes_by_host: dict[str, dict[str, dict[str, str]]] tool_data_path: str tool_dependency_dir: Optional[str] - tool_filters: List[str] - tool_label_filters: List[str] + tool_filters: list[str] + tool_label_filters: list[str] tool_path: str - tool_section_filters: List[str] - toolbox_filter_base_modules: List[str] + tool_section_filters: list[str] + toolbox_filter_base_modules: list[str] track_jobs_in_database: bool trust_jupyter_notebook_conversion: bool tus_upload_store: str 
use_remote_user: bool user_library_import_dir_auto_creation: bool - user_library_import_symlink_allowlist: List[str] - user_tool_filters: List[str] - user_tool_label_filters: List[str] - user_tool_section_filters: List[str] + user_library_import_symlink_allowlist: list[str] + user_tool_filters: list[str] + user_tool_label_filters: list[str] + user_tool_section_filters: list[str] visualization_plugins_directory: str workflow_resource_params_mapper: str @@ -788,7 +785,7 @@ def config_value_for_host(self, config_option, host): val = getattr(self, config_option) if config_option in self.schema.per_host_options: per_host_option = f"{config_option}_by_host" - per_host: Dict[str, Any] = {} + per_host: dict[str, Any] = {} if per_host_option in self.config_dict: per_host = self.config_dict[per_host_option] or {} else: @@ -800,7 +797,7 @@ def config_value_for_host(self, config_option, host): return val - def _process_config(self, kwargs: Dict[str, Any]) -> None: + def _process_config(self, kwargs: dict[str, Any]) -> None: self._check_database_connection_strings() # Backwards compatibility for names used in too many places to fix self.datatypes_config = self.datatypes_config_file @@ -856,7 +853,7 @@ def _process_config(self, kwargs: Dict[str, Any]) -> None: self.tool_data_path = self._in_data_dir(self.schema.defaults["tool_data_path"]) self.builds_file_path = os.path.join(self.tool_data_path, self.builds_file_path) self.len_file_path = os.path.join(self.tool_data_path, self.len_file_path) - self.oidc: Dict[str, Dict] = {} + self.oidc: dict[str, dict] = {} self.fixed_delegated_auth: bool = False self.integrated_tool_panel_config = self._in_managed_config_dir(self.integrated_tool_panel_config) integrated_tool_panel_tracking_directory = kwargs.get("integrated_tool_panel_tracking_directory") @@ -1469,7 +1466,7 @@ def get_database_engine_options(kwargs, model_prefix=""): Allow options for the SQLAlchemy database engine to be passed by using the prefix "database_engine_option". 
""" - conversions: Dict[str, Callable[[Any], Union[bool, int]]] = { + conversions: dict[str, Callable[[Any], Union[bool, int]]] = { "convert_unicode": string_as_bool, "pool_timeout": int, "echo": string_as_bool, diff --git a/lib/galaxy/config/config_manage.py b/lib/galaxy/config/config_manage.py index 8e7b2e8aa9e1..144cb38a1990 100644 --- a/lib/galaxy/config/config_manage.py +++ b/lib/galaxy/config/config_manage.py @@ -11,11 +11,8 @@ from typing import ( Any, Callable, - Dict, - List, NamedTuple, Optional, - Tuple, ) import yaml @@ -70,9 +67,9 @@ class App(NamedTuple): - config_paths: List[str] + config_paths: list[str] default_port: str - expected_app_factories: List[str] + expected_app_factories: list[str] destination: str schema_path: Traversable @@ -90,7 +87,7 @@ def schema(self) -> AppSchema: class _OptionAction: - def converted(self, args: Namespace, app_desc: App, key: str, value: Any) -> Tuple[str, Any]: + def converted(self, args: Namespace, app_desc: App, key: str, value: Any) -> tuple[str, Any]: raise NotImplementedError() def lint(self, args: Namespace, app_desc: App, key: str, value: Any) -> None: @@ -157,7 +154,7 @@ def lint(self, args, app_desc, key, value) -> None: ) -OPTION_ACTIONS: Dict[str, _OptionAction] = { +OPTION_ACTIONS: dict[str, _OptionAction] = { "use_beaker_session": _DeprecatedAndDroppedAction(), "use_interactive": _DeprecatedAndDroppedAction(), "session_type": _DeprecatedAndDroppedAction(), @@ -213,7 +210,7 @@ def lint(self, args, app_desc, key, value) -> None: class OptionValue(NamedTuple): name: str value: Any - option: Dict[str, Any] + option: dict[str, Any] GALAXY_APP = App( @@ -240,7 +237,7 @@ class OptionValue(NamedTuple): APPS = {"galaxy": GALAXY_APP, "tool_shed": SHED_APP, "reports": REPORTS_APP} -def main(argv: Optional[List[str]] = None) -> None: +def main(argv: Optional[list[str]] = None) -> None: """Entry point for conversion process.""" if argv is None: argv = sys.argv[1:] @@ -316,7 +313,7 @@ def _find_config(args: Namespace, app_desc: App) -> str: return path -def _find_app_options(app_desc: App, path: str) -> Dict[str, Any]: +def _find_app_options(app_desc: App, path: str) -> dict[str, Any]: """Load app (as opposed to server) options from specified path. Supplied ``path`` may be either YAML or ini file. 
@@ -330,7 +327,7 @@ def _find_app_options(app_desc: App, path: str) -> Dict[str, Any]: return app_items -def _find_app_options_from_config_parser(p: NicerConfigParser) -> Dict[str, Any]: +def _find_app_options_from_config_parser(p: NicerConfigParser) -> dict[str, Any]: if not p.has_section("app:main"): _warn(NO_APP_MAIN_MESSAGE) app_items = {} @@ -363,7 +360,7 @@ def _validate(args: Namespace, app_desc: App) -> None: with tempfile.NamedTemporaryFile("w", delete=False, suffix=".yml") as config_p: ordered_dump(raw_config, config_p) - def _clean(p: Tuple[str, ...], k: str, v: Any) -> bool: + def _clean(p: tuple[str, ...], k: str, v: Any) -> bool: return k not in ["reloadable", "path_resolves_to", "per_host", "deprecated_alias", "resolves_to"] clean_schema = remap(app_desc.schema.raw_schema, _clean) @@ -395,7 +392,7 @@ def _run_conversion(args: Namespace, app_desc: App) -> None: p = nice_config_parser(ini_config) app_items = _find_app_options_from_config_parser(p) - app_dict: Dict[str, OptionValue] = {} + app_dict: dict[str, OptionValue] = {} schema = app_desc.schema for key, value in app_items.items(): if key in ["__file__", "here"]: @@ -464,7 +461,7 @@ def _write_to_file(args: Namespace, f: StringIO, path: str) -> None: to_f.write(contents) -def _order_load_path(path: str) -> Dict[str, Any]: +def _order_load_path(path: str) -> dict[str, Any]: """Load (with ``_ordered_load``) on specified path (a YAML file).""" with open(path) as f: # Allow empty mapping (not allowed by pykwalify) @@ -482,7 +479,7 @@ def _write_sample_section(args: Namespace, f: StringIO, section_header: str, sch _write_option(args, f, key, option_value, as_comment=True) -def _write_section(args: Namespace, f: StringIO, section_header: str, section_dict: Dict[str, OptionValue]) -> None: +def _write_section(args: Namespace, f: StringIO, section_header: str, section_dict: dict[str, OptionValue]) -> None: _write_header(f, section_header) for key, option_value in section_dict.items(): _write_option(args, f, key, option_value) @@ -509,7 +506,7 @@ def _write_option(args: Namespace, f: StringIO, key: str, option_value: OptionVa f.write(f"{lines_indented}\n\n") -def _parse_option_value(option_value: OptionValue) -> Tuple[Dict[str, Any], Any]: +def _parse_option_value(option_value: OptionValue) -> tuple[dict[str, Any], Any]: option = option_value.option value = option_value.value # Hack to get nicer YAML values during conversion @@ -526,7 +523,7 @@ def _warn(message: str) -> None: print(f"WARNING: {message}") -def _get_option_desc(option: Dict[str, Any]) -> str: +def _get_option_desc(option: dict[str, Any]) -> str: desc = option["desc"] if parent_dir := option.get("path_resolves_to"): path_resolves = f"The value of this option will be resolved with respect to <{parent_dir}>." 
@@ -534,7 +531,7 @@ def _get_option_desc(option: Dict[str, Any]) -> str: return desc -ACTIONS: Dict[str, Callable] = { +ACTIONS: dict[str, Callable] = { "convert": _run_conversion, "build_sample_yaml": _build_sample_yaml, "validate": _validate, diff --git a/lib/galaxy/datatypes/_schema.py b/lib/galaxy/datatypes/_schema.py index 680e32d3bea1..39c61737439a 100644 --- a/lib/galaxy/datatypes/_schema.py +++ b/lib/galaxy/datatypes/_schema.py @@ -1,6 +1,4 @@ from typing import ( - Dict, - List, Optional, ) @@ -58,7 +56,7 @@ class DatatypeDetails(BaseModel): title="Display in upload", description="If True, the associated file extension will be displayed in the `File Format` select list in the `Upload File from your computer` tool in the `Get Data` tool section of the tool panel", ) - composite_files: Optional[List[CompositeFileInfo]] = Field( + composite_files: Optional[list[CompositeFileInfo]] = Field( default=None, title="Composite files", description="A collection of files composing this data type" ) upload_warning: Optional[str] = Field( @@ -74,12 +72,12 @@ class DatatypeDetails(BaseModel): class DatatypesMap(BaseModel): - ext_to_class_name: Dict[str, str] = Field( + ext_to_class_name: dict[str, str] = Field( ..., # Mark this field as required title="Extension Map", description="Dictionary mapping datatype's extensions with implementation classes", ) - class_to_classes: Dict[str, Dict[str, bool]] = Field( + class_to_classes: dict[str, dict[str, bool]] = Field( ..., # Mark this field as required title="Classes Map", description="Dictionary mapping datatype's classes with their base classes", @@ -87,7 +85,7 @@ class DatatypesMap(BaseModel): class DatatypesCombinedMap(BaseModel): - datatypes: List[str] = Field( + datatypes: list[str] = Field( ..., # Mark this field as required title="Datatypes", description="List of datatypes extensions", @@ -121,7 +119,7 @@ class DatatypeConverter(BaseModel): class DatatypeConverterList(RootModel): - root: List[DatatypeConverter] = Field(title="List of data type converters", default=[]) + root: list[DatatypeConverter] = Field(title="List of data type converters", default=[]) class DatatypeEDAMDetails(BaseModel): @@ -144,7 +142,7 @@ class DatatypeEDAMDetails(BaseModel): class DatatypesEDAMDetailsDict(RootModel): - root: Dict[str, DatatypeEDAMDetails] = Field( + root: dict[str, DatatypeEDAMDetails] = Field( title="Dict of EDAM details for formats", default={}, ) @@ -166,7 +164,7 @@ class DatatypeVisualizationMapping(BaseModel): class DatatypeVisualizationMappingsList(RootModel): - root: List[DatatypeVisualizationMapping] = Field( + root: list[DatatypeVisualizationMapping] = Field( title="List of datatype visualization mappings", default=[], ) diff --git a/lib/galaxy/datatypes/binary.py b/lib/galaxy/datatypes/binary.py index 146447cb0e5c..481692421b81 100644 --- a/lib/galaxy/datatypes/binary.py +++ b/lib/galaxy/datatypes/binary.py @@ -13,14 +13,11 @@ import tarfile import tempfile import zipfile +from collections.abc import Iterable from json import dumps from typing import ( Any, - Dict, - Iterable, - List, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -375,7 +372,7 @@ class DynamicCompressedArchive(CompressedArchive): compressed_format: str uncompressed_datatype_instance: Data - def matches_any(self, target_datatypes: List[Any]) -> bool: + def matches_any(self, target_datatypes: list[Any]) -> bool: """Treat two aspects of compressed datatypes separately.""" compressed_target_datatypes = [] uncompressed_target_datatypes = [] @@ -661,7 +658,7 @@ def set_meta(self, 
dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N _BamOrSam().set_meta(dataset, overwrite=overwrite, **kwd) @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Merges BAM files @@ -1116,7 +1113,7 @@ def set_meta( if self.set_index_file(dataset, index_file): dataset.metadata.cram_index = index_file - def get_cram_version(self, filename: str) -> Tuple[int, int]: + def get_cram_version(self, filename: str) -> tuple[int, int]: try: with open(filename, "rb") as fh: header = bytearray(fh.read(6)) @@ -2210,7 +2207,7 @@ def display_data( to_ext = to_ext or dataset.extension return self._serve_raw(dataset, to_ext, headers, **kwd) - out_dict: Dict = {} + out_dict: dict = {} try: with h5py.File(dataset.get_file_name(), "r", locking=False) as handle: out_dict["Attributes"] = {} diff --git a/lib/galaxy/datatypes/blast.py b/lib/galaxy/datatypes/blast.py index c19244acb28f..1d8a879dd3a3 100644 --- a/lib/galaxy/datatypes/blast.py +++ b/lib/galaxy/datatypes/blast.py @@ -35,8 +35,6 @@ from time import sleep from typing import ( Callable, - Dict, - List, Optional, ) @@ -106,7 +104,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: return True @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """Merging multiple XML files is non-trivial and must be done in subclasses.""" if len(split_files) == 1: # For one file only, use base class method (move/copy) @@ -256,12 +254,12 @@ def display_data( return smart_str(f"<html><head><title>{title}</title></head><body><pre>{msg}</pre></body></html>"), headers @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """Merge BLAST databases (not implemented for now).""" raise NotImplementedError("Merging BLAST databases is non-trivial (do this via makeblastdb?)") @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Split a BLAST database (not implemented for now).""" if split_params is None: return None diff --git a/lib/galaxy/datatypes/constructive_solid_geometry.py b/lib/galaxy/datatypes/constructive_solid_geometry.py index d41d7e781201..fbc6e5822a29 100644 --- a/lib/galaxy/datatypes/constructive_solid_geometry.py +++ b/lib/galaxy/datatypes/constructive_solid_geometry.py @@ -8,9 +8,7 @@ import logging import re from typing import ( - List, Optional, - Tuple, TYPE_CHECKING, ) @@ -345,7 +343,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N # FIELD FieldData 2 processing_field_section = True num_fields = int(items[-1]) - fields_processed: List[str] = [] + fields_processed: list[str] = [] elif processing_field_section: if len(fields_processed) == num_fields: processing_field_section = False @@ -396,7 +394,7 @@ def set_initial_metadata(self, i: int, line: str, dataset: DatasetProtocol) -> D def set_structure_metadata( self, line: str, dataset: DatasetProtocol, dataset_type: Optional[str] - ) -> Tuple[DatasetProtocol, Optional[str]]: + ) -> tuple[DatasetProtocol, Optional[str]]: """ The fourth part of legacy VTK files is the dataset structure. The geometry part describes the geometry and topology of the dataset. diff --git a/lib/galaxy/datatypes/data.py b/lib/galaxy/datatypes/data.py index c044f00c0336..157979239442 100644 --- a/lib/galaxy/datatypes/data.py +++ b/lib/galaxy/datatypes/data.py @@ -5,17 +5,16 @@ import shutil import string import tempfile +from collections.abc import ( + Generator, + Iterable, +) from inspect import isclass from typing import ( Any, Callable, - Dict, - Generator, IO, - Iterable, - List, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -86,7 +85,7 @@ DOWNLOAD_FILENAME_PATTERN_COLLECTION_ELEMENT = "Galaxy${hdca_hid}-[${hdca_name}__${element_identifier}].${ext}" DEFAULT_MAX_PEEK_SIZE = 1000000 # 1 MB -Headers = Dict[str, Any] +Headers = dict[str, Any] class DatatypeConverterNotFoundException(Exception): @@ -123,8 +122,8 @@ def validate(dataset_instance: DatasetProtocol) -> DatatypeValidation: def get_params_and_input_name( - converter, deps: Optional[Dict], target_context: Optional[Dict] = None -) -> Tuple[Dict, str]: + converter, deps: Optional[dict], target_context: Optional[dict] = None +) -> tuple[dict, str]: # Generate parameter dictionary params = {} # determine input parameter name and add to params @@ -226,13 +225,13 @@ class Data(metaclass=DataMeta): no_value="?", ) # Stores the set of display applications, and viewing methods, supported by this datatype - supported_display_apps: Dict[str, Any] = {} + supported_display_apps: dict[str, Any] = {} # The dataset contains binary data --> do not space_to_tab or convert newlines, etc. # Allow binary file uploads of this type when True.
is_binary: Union[bool, Literal["maybe"]] = True # Composite datatypes composite_type: Optional[str] = None - composite_files: Dict[str, Any] = {} + composite_files: dict[str, Any] = {} primary_file_name = "index" # Allow user to change between this datatype and others. If left to None, # datatype change is allowed if the datatype is not composite. @@ -248,15 +247,15 @@ class Data(metaclass=DataMeta): track_type: Optional[str] = None # Data sources. - data_sources: Dict[str, str] = {} + data_sources: dict[str, str] = {} - dataproviders: Dict[str, Any] + dataproviders: dict[str, Any] def __init__(self, **kwd): """Initialize the datatype""" self.supported_display_apps = self.supported_display_apps.copy() self.composite_files = self.composite_files.copy() - self.display_applications: Dict[str, DisplayApplication] = {} + self.display_applications: dict[str, DisplayApplication] = {} @classmethod def is_datatype_change_allowed(cls) -> bool: @@ -305,7 +304,7 @@ def init_meta(self, dataset: HasMetadata, copy_from: Optional[HasMetadata] = Non def set_meta(self, dataset: DatasetProtocol, *, overwrite: bool = True, **kwd) -> None: """Unimplemented method, allows guessing of metadata from contents of file""" - def missing_meta(self, dataset: HasMetadata, check: Optional[List] = None, skip: Optional[List] = None) -> bool: + def missing_meta(self, dataset: HasMetadata, check: Optional[list] = None, skip: Optional[list] = None) -> bool: """ Checks for empty metadata values. Returns False if no non-optional metadata is missing and the missing metadata key otherwise. @@ -378,7 +377,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: def _archive_main_file( self, archive: ZipstreamWrapper, display_name: str, data_filename: str - ) -> Tuple[bool, str, str]: + ) -> tuple[bool, str, str]: """Called from _archive_composite_dataset to add central file to archive. 
Unless subclassed, this will add the main dataset file (argument data_filename) @@ -400,7 +399,7 @@ def _archive_main_file( def _archive_composite_dataset( self, trans, data: DatasetHasHidProtocol, headers: Headers, do_action: str = "zip" - ) -> Tuple[Union[ZipstreamWrapper, str], Headers]: + ) -> tuple[Union[ZipstreamWrapper, str], Headers]: # save a composite object into a compressed archive for downloading outfname = data.name[0:150] outfname = "".join(c in FILENAME_VALID_CHARS and c or "_" for c in outfname) @@ -436,7 +435,7 @@ def _archive_composite_dataset( return archive, headers return trans.show_error_message(msg), headers - def __archive_extra_files_path(self, extra_files_path: str) -> Generator[Tuple[str, str], None, None]: + def __archive_extra_files_path(self, extra_files_path: str) -> Generator[tuple[str, str], None, None]: """Yield filepaths and relative filepaths for files in extra_files_path""" for root, _, files in os.walk(extra_files_path): for fname in files: @@ -446,7 +445,7 @@ def __archive_extra_files_path(self, extra_files_path: str) -> Generator[Tuple[s def _serve_raw( self, dataset: DatasetHasHidProtocol, to_ext: Optional[str], headers: Headers, **kwd - ) -> Tuple[IO, Headers]: + ) -> tuple[IO, Headers]: headers["Content-Length"] = str(os.stat(dataset.get_file_name()).st_size) headers["content-type"] = ( "application/octet-stream" # force octet-stream so Safari doesn't append mime extensions to filename @@ -773,7 +772,7 @@ def get_display_application( ) -> Union["DisplayApplication", None]: return self.display_applications.get(key, default) - def get_display_applications_by_dataset(self, dataset: DatasetProtocol, trans) -> Dict[str, "DisplayApplication"]: + def get_display_applications_by_dataset(self, dataset: DatasetProtocol, trans) -> dict[str, "DisplayApplication"]: rval = {} for key, value in self.display_applications.items(): value = value.filter_by_dataset(dataset, trans) @@ -781,7 +780,7 @@ def get_display_applications_by_dataset(self, dataset: DatasetProtocol, trans) - rval[key] = value return rval - def get_display_types(self) -> List[str]: + def get_display_types(self) -> list[str]: """Returns display types available""" return list(self.supported_display_apps.keys()) @@ -829,13 +828,13 @@ def get_display_links( ) return target_frame, [] - def get_converter_types(self, original_dataset: HasExt, datatypes_registry: "Registry") -> Dict[str, Dict]: + def get_converter_types(self, original_dataset: HasExt, datatypes_registry: "Registry") -> dict[str, dict]: """Returns available converters by type for this dataset""" return datatypes_registry.get_converters_by_datatype(original_dataset.ext) def find_conversion_destination( - self, dataset: DatasetProtocol, accepted_formats: List[str], datatypes_registry, **kwd - ) -> Tuple[bool, Optional[str], Any]: + self, dataset: DatasetProtocol, accepted_formats: list[str], datatypes_registry, **kwd + ) -> tuple[bool, Optional[str], Any]: """Returns ( direct_match, converted_ext, existing converted dataset )""" return datatypes_registry.find_conversion_destination_for_dataset_by_extensions( dataset, accepted_formats, **kwd @@ -848,8 +847,8 @@ def convert_dataset( target_type: str, return_output: bool = False, visible: bool = True, - deps: Optional[Dict] = None, - target_context: Optional[Dict] = None, + deps: Optional[dict] = None, + target_context: Optional[dict] = None, history=None, ): """This function adds a job to the queue to convert a dataset to another type. 
Returns a message about success/failure.""" @@ -933,7 +932,7 @@ def writable_files(self): files[key] = value return files - def get_writable_files_for_dataset(self, dataset: Optional[HasMetadata]) -> Dict: + def get_writable_files_for_dataset(self, dataset: Optional[HasMetadata]) -> dict: files = {} if self.composite_type != "auto_primary_file": files[self.primary_file_name] = self.__new_composite_file(self.primary_file_name) @@ -963,7 +962,7 @@ def generate_primary_file(self, dataset: HasExtraFilesAndMetadata) -> str: def has_resolution(self): return False - def matches_any(self, target_datatypes: List[Any]) -> bool: + def matches_any(self, target_datatypes: list[Any]) -> bool: """ Check if this datatype is of any of the target_datatypes or is a subtype thereof. @@ -972,7 +971,7 @@ def matches_any(self, target_datatypes: List[Any]) -> bool: return isinstance(self, datatype_classes) @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Merge files with copy.copyfileobj() will not hit the max argument limitation of cat. gz and bz2 files are also working. @@ -1031,7 +1030,7 @@ def _clean_and_set_mime_type(self, trans, mime: str, headers: Headers) -> None: def handle_dataset_as_image(self, hda: DatasetProtocol) -> str: raise Exception("Unimplemented Method") - def __getstate__(self) -> Dict[str, Any]: + def __getstate__(self) -> dict[str, Any]: state = self.__dict__.copy() state.pop("display_applications", None) return state @@ -1142,7 +1141,7 @@ def set_peek(self, dataset: DatasetProtocol, **kwd) -> None: dataset.blurb = "file purged from disk" @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ Split the input files by line. """ @@ -1241,7 +1240,7 @@ class Directory(Data): def _archive_main_file( self, archive: ZipstreamWrapper, display_name: str, data_filename: str - ) -> Tuple[bool, str, str]: + ) -> tuple[bool, str, str]: """Overwrites the method to not do anything. No main file gets added to a directory archive. 
@@ -1340,10 +1339,9 @@ def _find_store_root_folder_name(self, dataset: DatasetProtocol) -> Optional[str return sub_folder_name # The store is in a subfolder of the extra files folder return None # The directory structure does not look like Zarr format - def _load_zarr_metadata_file(self, store_root_path: str) -> Optional[Dict[str, Any]]: + def _load_zarr_metadata_file(self, store_root_path: str) -> Optional[dict[str, Any]]: """Returns the path to the metadata file in the Zarr store.""" - meta_file = self._find_zarr_metadata_file(store_root_path) - if meta_file: + if meta_file := self._find_zarr_metadata_file(store_root_path): with open(meta_file) as f: return json.load(f) return None @@ -1368,8 +1366,7 @@ def _find_zarr_metadata_file(self, store_root_path: str) -> Optional[str]: def _get_format_version(self, store_root_path: str) -> Optional[str]: """Returns the Zarr format version from the metadata file in the Zarr store.""" - metadata_file = self._load_zarr_metadata_file(store_root_path) - if metadata_file: + if metadata_file := self._load_zarr_metadata_file(store_root_path): return metadata_file.get("zarr_format") return None diff --git a/lib/galaxy/datatypes/dataproviders/base.py b/lib/galaxy/datatypes/dataproviders/base.py index a271437f184e..a6e49d209a0d 100644 --- a/lib/galaxy/datatypes/dataproviders/base.py +++ b/lib/galaxy/datatypes/dataproviders/base.py @@ -10,7 +10,6 @@ import logging from collections import deque -from typing import Dict from . import exceptions @@ -78,7 +77,7 @@ class DataProvider(metaclass=HasSettings): # a definition of expected types for keyword arguments sent to __init__ # useful for controlling how query string dictionaries can be parsed into correct types for __init__ # empty in this base class - settings: Dict[str, str] = {} + settings: dict[str, str] = {} def __init__(self, source, **kwargs): """Sets up a data provider, validates supplied source. 
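Annotation (not part of the patch): besides the typing rewrites, the two `data.py` hunks a little above (`_load_zarr_metadata_file` and `_get_format_version`) and the earlier `dispatch_pending_notifications` hunk in `lib/galaxy/celery/tasks.py` come from the `auto-walrus` pass wired into the Makefile's `pyupgrade` target: an assignment immediately followed by a truthiness test of the same name collapses into an assignment expression (PEP 572, Python 3.8+). A minimal sketch of the rewrite, with a hypothetical `find_metadata` helper standing in for the real calls:

```python
from typing import Optional

def find_metadata(path: str) -> Optional[str]:
    # Hypothetical stand-in for _find_zarr_metadata_file(): a hit or None.
    return ".zattrs" if path.endswith(".zarr") else None

# Before auto-walrus: bind the name, then test it.
meta_file = find_metadata("store.zarr")
if meta_file:
    print(f"found {meta_file}")

# After auto-walrus: the walrus operator binds and tests in one expression,
# dropping the standalone assignment statement (requires Python >= 3.8).
if meta_file := find_metadata("store.zarr"):
    print(f"found {meta_file}")
```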
diff --git a/lib/galaxy/datatypes/genetics.py b/lib/galaxy/datatypes/genetics.py index 04614e3bb547..e2539d2b41d6 100644 --- a/lib/galaxy/datatypes/genetics.py +++ b/lib/galaxy/datatypes/genetics.py @@ -17,9 +17,7 @@ import re import sys from typing import ( - Dict, IO, - List, Optional, Union, ) @@ -92,7 +90,7 @@ def as_ucsc_display_file(self, dataset: DatasetProtocol, **kwd) -> Union[FileObj """ return open(dataset.get_file_name(), "rb") - def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: """ from the ever-helpful angie hinrichs angie@soe.ucsc.edu a genome graphs call looks like this @@ -653,7 +651,7 @@ def get_mime(self) -> str: """Returns the mime type of the datatype""" return "text/html" - def get_phecols(self, phenolist: List, maxConc: int = 20) -> List: + def get_phecols(self, phenolist: list, maxConc: int = 20) -> list: """ sept 2009: cannot use whitespace to split - make a more complex structure here and adjust the methods that rely on this structure @@ -674,7 +672,7 @@ def get_phecols(self, phenolist: List, maxConc: int = 20) -> List: if nrows == 0: # set up from header head = row totcols = len(row) - concordance: List[Dict] = [{} for x in head] + concordance: list[dict] = [{} for x in head] else: for col, code in enumerate(row): # keep column order correct if col >= totcols: diff --git a/lib/galaxy/datatypes/goldenpath.py b/lib/galaxy/datatypes/goldenpath.py index 9ce878714a69..6b8c975c5b4a 100644 --- a/lib/galaxy/datatypes/goldenpath.py +++ b/lib/galaxy/datatypes/goldenpath.py @@ -2,7 +2,6 @@ import logging import os from typing import ( - Set, Union, ) @@ -312,7 +311,7 @@ class AGPLine(metaclass=abc.ABCMeta): checks that involve multiple lines should not be considered. """ - allowed_comp_types: Set[str] = set() + allowed_comp_types: set[str] = set() def __init__(self, fname, line_number, obj, obj_beg, obj_end, pid, comp_type): self.is_gap = None diff --git a/lib/galaxy/datatypes/graph.py b/lib/galaxy/datatypes/graph.py index b7f8488383bd..e622669801e7 100644 --- a/lib/galaxy/datatypes/graph.py +++ b/lib/galaxy/datatypes/graph.py @@ -3,7 +3,6 @@ """ import logging -from typing import List from galaxy.datatypes.dataproviders.column import ColumnarDataProvider from galaxy.datatypes.dataproviders.dataset import DatasetDataProvider @@ -47,7 +46,7 @@ def sniff(self, filename: str) -> bool: return False @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Merging multiple XML files is non-trivial and must be done in subclasses. 
""" @@ -95,7 +94,7 @@ def sniff(self, filename: str) -> bool: return False @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: data.Text.merge(split_files, output_file) @dataproviders.decorators.dataprovider_factory("node-edge", ColumnarDataProvider.settings) diff --git a/lib/galaxy/datatypes/images.py b/lib/galaxy/datatypes/images.py index 2f851b0f00f5..0cc47c59a95b 100644 --- a/lib/galaxy/datatypes/images.py +++ b/lib/galaxy/datatypes/images.py @@ -6,13 +6,10 @@ import json import logging import struct +from collections.abc import Iterator from typing import ( Any, - Dict, - Iterator, - List, Optional, - Tuple, Union, ) @@ -212,7 +209,7 @@ def set_meta( reader = png.Reader(filename=dataset.get_file_name()) width, height, pixels, metadata = reader.asDirect() - unique_values: List[Any] = [] + unique_values: list[Any] = [] for row in pixels: values = np.array(row, dtype="uint8") unique_values = list(np.unique(unique_values + list(values))) @@ -253,7 +250,7 @@ def set_meta( offsets = [page.offset for page in tif.pages] # Aggregate a list of values for each metadata field (one value for each page of the TIFF file) - metadata: Dict[str, List[Any]] = { + metadata: dict[str, list[Any]] = { key: [] for key in [ "axes", @@ -312,7 +309,7 @@ def set_meta( pass @staticmethod - def _get_axis_size(shape: Tuple[int, ...], axes: str, axis: str) -> int: + def _get_axis_size(shape: tuple[int, ...], axes: str, axis: str) -> int: idx = axes.find(axis) return shape[idx] if idx >= 0 else 0 @@ -321,7 +318,7 @@ def _get_num_unique_values(series: tifffile.TiffPageSeries) -> Optional[int]: """ Determines the number of unique values in a TIFF series of pages. """ - unique_values: List[Any] = [] + unique_values: list[Any] = [] try: for page in series.pages: diff --git a/lib/galaxy/datatypes/interval.py b/lib/galaxy/datatypes/interval.py index 0a03cb6d8e9d..9aef90ac6a6e 100644 --- a/lib/galaxy/datatypes/interval.py +++ b/lib/galaxy/datatypes/interval.py @@ -6,9 +6,7 @@ import sys import tempfile from typing import ( - List, Optional, - Tuple, Union, ) from urllib.parse import quote_plus @@ -207,7 +205,7 @@ def get_estimated_display_viewport( chrom_col: Optional[int] = None, start_col: Optional[int] = None, end_col: Optional[int] = None, - ) -> Tuple[Optional[str], Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str], Optional[str]]: """Return a chrom, start, stop tuple for viewing a file.""" viewport_feature_count = 100 # viewport should check at least 100 features; excludes comment lines max_line_count = max(viewport_feature_count, 500) # maximum number of lines to check; includes comment lines @@ -312,7 +310,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: }, ) - def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: """ Generate links to UCSC genome browser sites based on the dbkey and content of dataset. @@ -440,7 +438,7 @@ def get_estimated_display_viewport( chrom_col: Optional[int] = 0, start_col: Optional[int] = 1, end_col: Optional[int] = 2, - ) -> Tuple[Optional[str], Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str], Optional[str]]: """ Set viewport based on dataset's first 100 lines. 
""" @@ -879,7 +877,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: def get_estimated_display_viewport( self, dataset: DatasetProtocol - ) -> Tuple[Optional[str], Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str], Optional[str]]: """ Return a chrom, start, stop tuple for viewing a file. There are slight differences between gff 2 and gff 3 formats. This function should correctly handle both... @@ -954,7 +952,7 @@ def get_estimated_display_viewport( log.exception("Unexpected error") return (None, None, None) # could not determine viewport - def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: ret_val = [] seqid, start, stop = self.get_estimated_display_viewport(dataset) if seqid is not None: @@ -967,7 +965,7 @@ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> ret_val.append((site_name, link)) return ret_val - def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: ret_val = [] seqid, start, stop = self.get_estimated_display_viewport(dataset) if seqid is not None: @@ -1291,7 +1289,7 @@ def __init__(self, **kwd): def get_estimated_display_viewport( self, dataset: DatasetProtocol - ) -> Tuple[Optional[str], Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str], Optional[str]]: """Return a chrom, start, stop tuple for viewing a file.""" viewport_feature_count = 100 # viewport should check at least 100 features; excludes comment lines max_line_count = max(viewport_feature_count, 500) # maximum number of lines to check; includes comment lines @@ -1356,7 +1354,7 @@ def get_estimated_display_viewport( log.exception("Unexpected error") return (None, None, None) # could not determine viewport - def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: ret_val = [] chrom, start, stop = self.get_estimated_display_viewport(dataset) if chrom is not None: @@ -1369,7 +1367,7 @@ def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) ret_val.append((site_name, link)) return ret_val - def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: ret_val = [] chrom, start, stop = self.get_estimated_display_viewport(dataset) if chrom is not None: @@ -1480,7 +1478,7 @@ def get_estimated_display_viewport( chrom_col: Optional[int] = None, start_col: Optional[int] = None, end_col: Optional[int] = None, - ) -> Tuple[Optional[str], Optional[str], Optional[str]]: + ) -> tuple[Optional[str], Optional[str], Optional[str]]: """Return a chrom, start, stop tuple for viewing a file.""" # FIXME: only BED and WIG custom tracks are currently supported # As per previously existing behavior, viewport will only be over the first intervals @@ -1538,7 +1536,7 @@ def get_estimated_display_viewport( log.exception("Unexpected error") return (None, None, None) # could not determine viewport - def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List: + def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> list: ret_val = [] chrom, start, stop = self.get_estimated_display_viewport(dataset) if 
chrom is not None: diff --git a/lib/galaxy/datatypes/isa.py b/lib/galaxy/datatypes/isa.py index 603c709b318e..4c12aa86fc20 100644 --- a/lib/galaxy/datatypes/isa.py +++ b/lib/galaxy/datatypes/isa.py @@ -12,7 +12,6 @@ import shutil import tempfile from typing import ( - List, Optional, TYPE_CHECKING, ) @@ -95,7 +94,7 @@ def _get_investigation(self, dataset: HasExtraFilesPath) -> "Investigation": main_file = self._get_main_file(dataset) return self._make_investigation_instance(main_file) - def _find_main_file_in_archive(self, files_list: List) -> str: + def _find_main_file_in_archive(self, files_list: list) -> str: """Find the main file inside the ISA archive.""" found_file = None @@ -121,7 +120,7 @@ def set_peek(self, dataset: DatasetProtocol, **kwd) -> None: # Read first lines of main file with open(main_file, encoding="utf-8") as f: - data: List = [] + data: list = [] for line in f: if len(data) < _MAX_LINES_HISTORY_PEEK: data.append(line) diff --git a/lib/galaxy/datatypes/larch.py b/lib/galaxy/datatypes/larch.py index 160e9eadea32..37fa09b4f4f6 100644 --- a/lib/galaxy/datatypes/larch.py +++ b/lib/galaxy/datatypes/larch.py @@ -1,5 +1,3 @@ -from typing import List - from galaxy.datatypes.data import ( get_file_peek, Text, @@ -81,7 +79,7 @@ def set_meta(self, dataset: DatasetProtocol, *, overwrite: bool = True, **kwd) - Extract metadata from @args """ - def extract_arg(args: List[str], arg_name: str): + def extract_arg(args: list[str], arg_name: str): try: index = args.index(f"'{arg_name}'") setattr(dataset.metadata, arg_name, args[index + 1].replace("'", "")) diff --git a/lib/galaxy/datatypes/media.py b/lib/galaxy/datatypes/media.py index 0b15ae37d5c8..cdaecffe23ac 100644 --- a/lib/galaxy/datatypes/media.py +++ b/lib/galaxy/datatypes/media.py @@ -6,8 +6,6 @@ from functools import lru_cache from typing import ( cast, - List, - Tuple, ) from galaxy.datatypes.binary import Binary @@ -117,11 +115,11 @@ def _get_file_format_from_magic_number(filename: str, file_ext: str): if "string" in magic_number[file_ext]: string_check = any( head.startswith(string_code.encode("iso-8859-1")) - for string_code in cast(List[str], magic_number[file_ext]["string"]) + for string_code in cast(list[str], magic_number[file_ext]["string"]) ) if "hex" in magic_number[file_ext]: hex_check = any( - head.startswith(bytes.fromhex(hex_code)) for hex_code in cast(List[str], magic_number[file_ext]["hex"]) + head.startswith(bytes.fromhex(hex_code)) for hex_code in cast(list[str], magic_number[file_ext]["hex"]) ) return string_check or hex_check @@ -241,7 +239,7 @@ class Video(Binary): no_value=0, ) - def _get_resolution(self, streams: List) -> Tuple[int, int, float]: + def _get_resolution(self, streams: list) -> tuple[int, int, float]: for stream in streams: if stream["codec_type"] == "video": w = stream["width"] diff --git a/lib/galaxy/datatypes/molecules.py b/lib/galaxy/datatypes/molecules.py index 5c11451d254a..610d4421e25a 100644 --- a/lib/galaxy/datatypes/molecules.py +++ b/lib/galaxy/datatypes/molecules.py @@ -3,8 +3,6 @@ import re from typing import ( Callable, - Dict, - List, Optional, ) @@ -389,7 +387,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N dataset.metadata.number_of_molecules = count_special_lines(r"^\$\$\$\$$", dataset.get_file_name()) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> 
None: """ Split the input files by molecule records. """ @@ -472,7 +470,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N dataset.metadata.number_of_molecules = count_special_lines("@MOLECULE", dataset.get_file_name()) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ Split the input files by molecule records. """ @@ -558,7 +556,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N dataset.metadata.number_of_molecules = count_special_lines("^#", dataset.get_file_name(), invert=True) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ Split the input files by fingerprint records. """ @@ -605,7 +603,7 @@ def _write_part_fingerprint_file(accumulated_lines): raise @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Merging fps files requires merging the header manually. We take the header from the first file. @@ -677,12 +675,12 @@ def get_mime(self) -> str: return "text/plain" @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """Merging Fastsearch indices is not supported.""" raise NotImplementedError("Merging Fastsearch indices is not supported.") @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Splitting Fastsearch indices is not supported.""" if split_params is None: return None @@ -1071,7 +1069,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: except (TypeError, ValueError, IndexError): return False - def read_blocks(self, lines: List) -> List: + def read_blocks(self, lines: list) -> list: """ Parses and returns a list of dictionaries representing XYZ structure blocks (aka frames). @@ -1149,7 +1147,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: # insufficient lines return False - def read_blocks(self, lines: List) -> List: + def read_blocks(self, lines: list) -> list: """ Parses and returns a list of XYZ structure blocks (aka frames). @@ -1412,7 +1410,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: return True @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ Split the input files by molecule records. """ @@ -1472,7 +1470,7 @@ def _write_part_cml_file(accumulated_lines): raise @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Merging CML files. 
""" diff --git a/lib/galaxy/datatypes/mothur.py b/lib/galaxy/datatypes/mothur.py index ab030577d233..a183199822a4 100644 --- a/lib/galaxy/datatypes/mothur.py +++ b/lib/galaxy/datatypes/mothur.py @@ -5,7 +5,6 @@ import logging import re from typing import ( - List, Optional, ) @@ -1073,7 +1072,7 @@ def set_meta( except Exception as e: log.warning(f"SffFlow set_meta {e}") - def make_html_table(self, dataset: DatasetProtocol, skipchars: Optional[List] = None, **kwargs) -> str: + def make_html_table(self, dataset: DatasetProtocol, skipchars: Optional[list] = None, **kwargs) -> str: """Create HTML table, used for displaying peek""" skipchars = skipchars or [] try: diff --git a/lib/galaxy/datatypes/msa.py b/lib/galaxy/datatypes/msa.py index 326213dfbaee..18c0f2655b6e 100644 --- a/lib/galaxy/datatypes/msa.py +++ b/lib/galaxy/datatypes/msa.py @@ -4,8 +4,6 @@ import re from typing import ( Callable, - Dict, - List, Optional, ) @@ -206,7 +204,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N ) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ Split the input files by model records. diff --git a/lib/galaxy/datatypes/proteomics.py b/lib/galaxy/datatypes/proteomics.py index f9b30ca3bcfd..36d33e926bcc 100644 --- a/lib/galaxy/datatypes/proteomics.py +++ b/lib/galaxy/datatypes/proteomics.py @@ -6,7 +6,6 @@ import re from typing import ( IO, - List, Optional, ) @@ -436,7 +435,7 @@ class Dta(TabularData): def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> None: column_types = [] - data_row: List = [] + data_row: list = [] data_lines = 0 if dataset.has_data(): with open(dataset.get_file_name()) as dtafile: @@ -482,7 +481,7 @@ class Dta2d(TabularData): file_ext = "dta2d" comment_lines = 0 - def _parse_header(self, line: List) -> Optional[List]: + def _parse_header(self, line: list) -> Optional[list]: if len(line) != 3 or len(line[0]) < 3 or not line[0].startswith("#"): return None line[0] = line[0].lstrip("#") @@ -498,7 +497,7 @@ def _parse_delimiter(self, line: str) -> Optional[str]: return "\t" return None - def _parse_dataline(self, line: List) -> bool: + def _parse_dataline(self, line: list) -> bool: try: line = [float(_) for _ in line] except ValueError: @@ -593,7 +592,7 @@ def _parse_delimiter(self, line: str) -> Optional[str]: return "\t" return None - def _parse_type(self, line: List) -> Optional[int]: + def _parse_type(self, line: list) -> Optional[int]: """ parse the type from the header line types 1-3 as in the class docs, 0: type 1 wo/wrong header @@ -613,7 +612,7 @@ def _parse_type(self, line: List) -> Optional[int]: else: return 3 - def _parse_dataline(self, line: List, tpe: Optional[int]) -> bool: + def _parse_dataline(self, line: list, tpe: Optional[int]) -> bool: if tpe == 2 or tpe == 3: idx = 4 else: @@ -626,7 +625,7 @@ def _parse_dataline(self, line: List, tpe: Optional[int]) -> bool: return False return True - def _clean_header(self, line: List) -> List: + def _clean_header(self, line: list) -> list: for idx, el in enumerate(line): el = el.lower() if el.startswith("rt"): diff --git a/lib/galaxy/datatypes/qiime2.py b/lib/galaxy/datatypes/qiime2.py index c9aa533ae8c2..ed1281eace57 100644 --- a/lib/galaxy/datatypes/qiime2.py +++ b/lib/galaxy/datatypes/qiime2.py @@ -4,8 +4,6 @@ import uuid as _uuid import zipfile from typing import ( - 
Dict, - List, Optional, ) @@ -63,7 +61,7 @@ def make_row(pair): return "".join(table) - def _peek(self, dataset: HasMetadata, simple: bool = False) -> List: + def _peek(self, dataset: HasMetadata, simple: bool = False) -> list: peek = [("Type", dataset.metadata.semantic_type), ("UUID", dataset.metadata.uuid)] if not simple: if dataset.metadata.semantic_type != "Visualization": @@ -71,7 +69,7 @@ def _peek(self, dataset: HasMetadata, simple: bool = False) -> List: peek.append(("Version", dataset.metadata.version)) return peek - def _sniff(self, filename: str) -> Optional[Dict]: + def _sniff(self, filename: str) -> Optional[dict]: """Helper method for use in inherited datatypes""" try: if not zipfile.is_zipfile(filename): @@ -116,7 +114,7 @@ class QIIME2Metadata(Tabular): _TYPES_DIRECTIVE = "#q2:types" _search_lines = 2 - def get_column_names(self, first_line: str) -> Optional[List[str]]: + def get_column_names(self, first_line: str) -> Optional[list[str]]: return first_line.strip().split("\t") def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> None: diff --git a/lib/galaxy/datatypes/registry.py b/lib/galaxy/datatypes/registry.py index 1b58ff237b69..df9901e9f7b0 100644 --- a/lib/galaxy/datatypes/registry.py +++ b/lib/galaxy/datatypes/registry.py @@ -6,16 +6,12 @@ import logging import os import pkgutil +from collections.abc import Iterable from string import Template from typing import ( Any, cast, - Dict, - Iterable, - List, Optional, - Tuple, - Type, TYPE_CHECKING, Union, ) @@ -76,7 +72,7 @@ def __init__(self, config=None): self.config = config self.edam = edam - self.datatypes_by_extension: Dict[str, Data] = {} + self.datatypes_by_extension: dict[str, Data] = {} self.datatypes_by_suffix_inferences = {} self.mimetypes_by_extension = {} self.datatype_converters = {} @@ -86,12 +82,12 @@ def __init__(self, config=None): self.converter_deps = {} self.available_tracks = [] self.set_external_metadata_tool = None - self.sniff_order: List[Data] = [] + self.sniff_order: list[Data] = [] self.upload_file_formats = [] # Datatype elements defined in local datatypes_conf.xml that contain display applications. self.display_app_containers = [] # Map a display application id to a display application - self.display_applications: Dict[str, DisplayApplication] = {} + self.display_applications: dict[str, DisplayApplication] = {} # The following 2 attributes are used in the to_xml_file() # method to persist the current state into an xml file. 
self.display_path_attr = None @@ -103,13 +99,13 @@ def __init__(self, config=None): self.inherit_display_application_by_class = [] self.datatype_elems = [] self.datatype_info_dicts = [] - self.sniffer_elems: List[Element] = [] + self.sniffer_elems: list[Element] = [] self._registry_xml_string = None self._edam_formats_mapping = None self._edam_data_mapping = None self._converters_by_datatype = {} # Datatype visualization mappings - self.visualization_mappings: Dict[str, Dict[str, Any]] = {} + self.visualization_mappings: dict[str, dict[str, Any]] = {} # Build sites self.build_sites = {} self.display_sites = {} @@ -140,7 +136,7 @@ def __import_module(full_path: str, datatype_module: str): return module if root_dir and config: - compressed_sniffers: Dict[Type[Data], List[Data]] = {} + compressed_sniffers: dict[type[Data], list[Data]] = {} if isinstance(config, (str, os.PathLike)): # Parse datatypes_conf.xml tree = galaxy.util.parse_xml(config) @@ -212,7 +208,7 @@ def __import_module(full_path: str, datatype_module: str): if override or extension not in self.datatypes_by_extension: can_process_datatype = True if can_process_datatype: - datatype_class: Optional[Type[Data]] = None + datatype_class: Optional[type[Data]] = None if dtype is not None: ok = True try: @@ -368,9 +364,9 @@ def __import_module(full_path: str, datatype_module: str): compressed_extension = f"{extension}.{auto_compressed_type}" upper_compressed_type = auto_compressed_type[0].upper() + auto_compressed_type[1:] auto_compressed_type_name = datatype_class_name + upper_compressed_type - attributes: Dict[str, Any] = {} + attributes: dict[str, Any] = {} if auto_compressed_type == "gz": - dynamic_parent: Type[binary.DynamicCompressedArchive] = ( + dynamic_parent: type[binary.DynamicCompressedArchive] = ( binary.GzDynamicCompressedArchive ) elif auto_compressed_type == "bz2": @@ -379,7 +375,7 @@ def __import_module(full_path: str, datatype_module: str): raise ConfigurationError(f"Unknown auto compression type [{auto_compressed_type}]") attributes["file_ext"] = compressed_extension attributes["uncompressed_datatype_instance"] = datatype_instance - compressed_datatype_class: Type[Data] = type( + compressed_datatype_class: type[Data] = type( auto_compressed_type_name, ( datatype_class, @@ -535,7 +531,7 @@ def load_datatype_sniffers( self, root: Element, override: bool = False, - compressed_sniffers: Optional[Dict[Type["Data"], List["Data"]]] = None, + compressed_sniffers: Optional[dict[type["Data"], list["Data"]]] = None, ) -> None: """ Process the sniffers element from a parsed a datatypes XML file located at root_dir/config (if processing the Galaxy @@ -902,7 +898,7 @@ def find_conversion_destination_for_dataset_by_extensions( dataset_or_ext: Union[str, DatasetProtocol], accepted_formats: Iterable[Union[str, "Data"]], converter_safe: bool = True, - ) -> Tuple[bool, Optional[str], Optional[DatasetProtocol]]: + ) -> tuple[bool, Optional[str], Optional[DatasetProtocol]]: """ returns (direct_match, converted_ext, converted_dataset) - direct match is True iff no the data set already has an accepted format @@ -915,7 +911,7 @@ def find_conversion_destination_for_dataset_by_extensions( ext = dataset_or_ext dataset = None - accepted_datatypes: List[Data] = [] + accepted_datatypes: list[Data] = [] for accepted_format in accepted_formats: if isinstance(accepted_format, str): accepted_datatype = self.get_datatype_by_extension(accepted_format) diff --git a/lib/galaxy/datatypes/sequence.py b/lib/galaxy/datatypes/sequence.py index 
853e05d1e035..ee6f52b1e6ff 100644 --- a/lib/galaxy/datatypes/sequence.py +++ b/lib/galaxy/datatypes/sequence.py @@ -9,13 +9,11 @@ import re import string import subprocess +from collections.abc import Iterable from itertools import islice from typing import ( Any, Callable, - Dict, - Iterable, - List, Optional, ) @@ -139,7 +137,7 @@ def set_peek(self, dataset: DatasetProtocol, **kwd) -> None: dataset.blurb = "file purged from disk" @staticmethod - def get_sequences_per_file(total_sequences: int, split_params: Dict) -> List: + def get_sequences_per_file(total_sequences: int, split_params: dict) -> list: if split_params["split_mode"] == "number_of_parts": # legacy basic mode - split into a specified number of parts parts = int(split_params["split_size"]) @@ -218,7 +216,7 @@ def get_subdir(idx): return directories @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Split a generic sequence file (not sensible or possible, see subclasses).""" if split_params is None: return None @@ -227,7 +225,7 @@ def split(cls, input_datasets: List, subdir_generator_function: Callable, split_ @staticmethod def get_split_commands_with_toc( input_name: str, output_name: str, toc_file: Any, start_sequence: int, sequence_count: int - ) -> List: + ) -> list: """ Uses a Table of Contents dict, parsed from an FQTOC file, to come up with a set of shell commands that will extract the parts necessary @@ -299,7 +297,7 @@ def get_split_commands_with_toc( @staticmethod def get_split_commands_sequential( is_compressed: bool, input_name: str, output_name: str, start_sequence: int, sequence_count: int - ) -> List: + ) -> list: """ Does a brain-dead sequential scan & extract of certain sequences >>> Sequence.get_split_commands_sequential(True, './input.gz', './output.gz', start_sequence=0, sequence_count=10) @@ -357,7 +355,7 @@ class Alignment(data.Text): ) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Split a generic alignment file (not sensible or possible, see subclasses).""" if split_params is None: return None @@ -445,7 +443,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: return False @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Split a FASTA file sequence by sequence. 
Note that even if split_mode="number_of_parts", the actual number of @@ -793,7 +791,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: return self.check_first_block(file_prefix) @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """ FASTQ files are split on cluster boundaries, in increments of 4 lines """ @@ -818,7 +816,7 @@ def split(cls, input_datasets: List, subdir_generator_function: Callable, split_ return cls.do_slow_split(input_datasets, subdir_generator_function, split_params) @staticmethod - def process_split_file(data: Dict) -> bool: + def process_split_file(data: dict) -> bool: """ This is called in the context of an external process launched by a Task (possibly not on the Galaxy machine) to create the input files for the Task. The parameters: @@ -855,7 +853,7 @@ def check_first_block(cls, file_prefix: FilePrefix): return cls.check_block(block) @classmethod - def check_block(cls, block: List) -> bool: + def check_block(cls, block: list) -> bool: if ( len(block) == 4 and block[0][0] @@ -1065,7 +1063,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str: """Returns formated html of peek""" return self.make_html_table(dataset) - def make_html_table(self, dataset: DatasetProtocol, skipchars: Optional[List] = None) -> str: + def make_html_table(self, dataset: DatasetProtocol, skipchars: Optional[list] = None) -> str: """Create HTML table, used for displaying peek""" skipchars = skipchars or [] try: diff --git a/lib/galaxy/datatypes/sniff.py b/lib/galaxy/datatypes/sniff.py index 7a36ec764ccb..6fb86d99624f 100644 --- a/lib/galaxy/datatypes/sniff.py +++ b/lib/galaxy/datatypes/sniff.py @@ -12,12 +12,11 @@ import struct import tempfile import zipfile +from collections.abc import Iterable from functools import partial from typing import ( Callable, - Dict, IO, - Iterable, NamedTuple, Optional, TYPE_CHECKING, @@ -957,7 +956,7 @@ def handle_uploaded_dataset_file_internal( AUTO_DETECT_EXTENSIONS = ["auto"] # should 'data' also cause auto detect? 
-DECOMPRESSION_FUNCTIONS: Dict[str, Callable] = dict(gzip=gzip.GzipFile, bz2=bz2.BZ2File, zip=zip_single_fileobj) +DECOMPRESSION_FUNCTIONS: dict[str, Callable] = dict(gzip=gzip.GzipFile, bz2=bz2.BZ2File, zip=zip_single_fileobj) class InappropriateDatasetContentError(Exception): diff --git a/lib/galaxy/datatypes/spaln.py b/lib/galaxy/datatypes/spaln.py index 8e27fde546bf..e5d6aeea703a 100644 --- a/lib/galaxy/datatypes/spaln.py +++ b/lib/galaxy/datatypes/spaln.py @@ -6,8 +6,6 @@ import os.path from typing import ( Callable, - Dict, - List, Optional, ) @@ -166,12 +164,12 @@ def display_data( ) @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """Merge spaln databases (not implemented).""" raise NotImplementedError("Merging spaln databases is not possible") @classmethod - def split(cls, input_datasets: List, subdir_generator_function: Callable, split_params: Optional[Dict]) -> None: + def split(cls, input_datasets: list, subdir_generator_function: Callable, split_params: Optional[dict]) -> None: """Split a spaln database (not implemented).""" if split_params is None: return None diff --git a/lib/galaxy/datatypes/tabular.py b/lib/galaxy/datatypes/tabular.py index 4de85bcf5f49..2787598b63a9 100644 --- a/lib/galaxy/datatypes/tabular.py +++ b/lib/galaxy/datatypes/tabular.py @@ -14,8 +14,6 @@ from json import dumps from typing import ( cast, - Dict, - List, Optional, Union, ) @@ -253,10 +251,10 @@ def make_html_table(self, dataset: DatasetProtocol, **kwargs) -> str: def make_html_peek_header( self, dataset: DatasetProtocol, - skipchars: Optional[List] = None, - column_names: Optional[List] = None, + skipchars: Optional[list] = None, + column_names: Optional[list] = None, column_number_format: str = "%s", - column_parameter_alias: Optional[Dict] = None, + column_parameter_alias: Optional[dict] = None, **kwargs, ) -> str: if skipchars is None: @@ -306,7 +304,7 @@ def make_html_peek_header( raise Exception(f"Can't create peek header: {util.unicodify(exc)}") return "".join(out) - def make_html_peek_rows(self, dataset: DatasetProtocol, skipchars: Optional[List] = None, **kwargs) -> str: + def make_html_peek_rows(self, dataset: DatasetProtocol, skipchars: Optional[list] = None, **kwargs) -> str: if skipchars is None: skipchars = [] out = [] @@ -411,7 +409,7 @@ class Tabular(TabularData): file_ext = "tabular" - def get_column_names(self, first_line: str) -> Optional[List[str]]: + def get_column_names(self, first_line: str) -> Optional[list[str]]: return None def set_meta( @@ -514,7 +512,7 @@ def guess_column_type(column_text): data_lines = 0 comment_lines = 0 column_names = None - column_types: List = [] + column_types: list = [] first_line_column_types = [] if dataset.has_data(): # NOTE: if skip > num_check_lines, we won't detect any metadata, and will use default @@ -602,7 +600,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N super().set_meta(dataset, overwrite=overwrite, **kwd) dataset.metadata.comment_lines = 1 - def get_column_names(self, first_line: str) -> Optional[List[str]]: + def get_column_names(self, first_line: str) -> Optional[list[str]]: return first_line.strip().split("\t") @@ -855,7 +853,7 @@ def set_meta( _BamOrSam().set_meta(dataset, overwrite=overwrite, **kwd) @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """ Multiple SAM files may each have headers. 
Since the headers should all be the same, remove the headers from files 1-n, keeping them in the first file only @@ -1066,7 +1064,7 @@ def set_meta(self, dataset: DatasetProtocol, overwrite: bool = True, **kwd) -> N dataset.metadata.sample_names = line.split()[9:] @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: stderr_f = tempfile.NamedTemporaryFile(prefix="bam_merge_stderr") stderr_name = stderr_f.name command = ["bcftools", "concat"] + split_files + ["-o", output_file] @@ -1237,7 +1235,7 @@ def __init__(self, **kwd): ] def make_html_table( - self, dataset: DatasetProtocol, skipchars: Optional[List] = None, peek: Optional[List] = None, **kwargs + self, dataset: DatasetProtocol, skipchars: Optional[list] = None, peek: Optional[list] = None, **kwargs ) -> str: """Create HTML table, used for displaying peek""" skipchars = skipchars or [] diff --git a/lib/galaxy/datatypes/text.py b/lib/galaxy/datatypes/text.py index 11f3a7fc2282..e6226282e1d8 100644 --- a/lib/galaxy/datatypes/text.py +++ b/lib/galaxy/datatypes/text.py @@ -11,7 +11,6 @@ from typing import ( IO, Optional, - Tuple, ) import yaml @@ -229,7 +228,7 @@ def _display_data_trusted( filename: Optional[str] = None, to_ext: Optional[str] = None, **kwd, - ) -> Tuple[IO, Headers]: + ) -> tuple[IO, Headers]: headers = kwd.pop("headers", {}) preview = string_as_bool(preview) if to_ext or not preview: diff --git a/lib/galaxy/datatypes/util/maf_utilities.py b/lib/galaxy/datatypes/util/maf_utilities.py index 29b420d36e86..eba48f71aa76 100644 --- a/lib/galaxy/datatypes/util/maf_utilities.py +++ b/lib/galaxy/datatypes/util/maf_utilities.py @@ -12,7 +12,6 @@ import tempfile from copy import deepcopy from errno import EMFILE -from typing import Dict import bx.align.maf import bx.interval_index_file @@ -147,7 +146,7 @@ def __init__(self, size: int, species=None, temp_file_handler=None): if not temp_file_handler: temp_file_handler = TempFileHandler() self.temp_file_handler = temp_file_handler - self.sequences: Dict[str, int] = {} + self.sequences: dict[str, int] = {} if not isinstance(species, list): species = [species] for spec in species: diff --git a/lib/galaxy/datatypes/xml.py b/lib/galaxy/datatypes/xml.py index d3060c93ad9d..24117b1227ee 100644 --- a/lib/galaxy/datatypes/xml.py +++ b/lib/galaxy/datatypes/xml.py @@ -4,7 +4,6 @@ import logging import re -from typing import List from galaxy import util from galaxy.datatypes.dataproviders.dataset import DatasetDataProvider @@ -67,7 +66,7 @@ def sniff_prefix(self, file_prefix: FilePrefix) -> bool: return file_prefix.startswith("<?xml ") @staticmethod - def merge(split_files: List[str], output_file: str) -> None: + def merge(split_files: list[str], output_file: str) -> None: """Merging multiple XML files is non-trivial and must be done in subclasses.""" if len(split_files) > 1: raise NotImplementedError( diff --git a/lib/galaxy/di.py b/lib/galaxy/di.py index 93ce9cd1b6a8..d5342e5da2c6 100644 --- a/lib/galaxy/di.py +++ b/lib/galaxy/di.py @@ -2,7 +2,6 @@ from typing import ( Optional, - Type, TypeVar, ) @@ -21,7 +20,7 @@ class Container(LagomContainer): config variables for instance). 
""" - def _register_singleton(self, dep_type: Type[T], instance: Optional[T] = None) -> T: + def _register_singleton(self, dep_type: type[T], instance: Optional[T] = None) -> T: if instance is None: # create an instance from the context and register it as a singleton instance = self[dep_type] @@ -29,12 +28,12 @@ def _register_singleton(self, dep_type: Type[T], instance: Optional[T] = None) - return self[dep_type] def _register_abstract_singleton( - self, abstract_type: Type[T], concrete_type: Type[T], instance: Optional[T] = None + self, abstract_type: type[T], concrete_type: type[T], instance: Optional[T] = None ) -> T: self[abstract_type] = instance if instance is not None else concrete_type return self[abstract_type] - def resolve_or_none(self, dep_type: Type[T]) -> Optional[T]: + def resolve_or_none(self, dep_type: type[T]) -> Optional[T]: """Resolve the dependent type or just return None. If resolution is impossible assume caller has a backup plan for @@ -47,7 +46,7 @@ def resolve_or_none(self, dep_type: Type[T]) -> Optional[T]: except UnresolvableType: return None - def __getitem__(self, dep_type: Type[T]) -> T: + def __getitem__(self, dep_type: type[T]) -> T: if isinstance(dep_type, str): # Workaround for accessing attributes of $app inside cheetah templates. # Cheetah's searchList implementation tests access via __getitem__ before __getattr__. diff --git a/lib/galaxy/files/__init__.py b/lib/galaxy/files/__init__.py index 52f1706cffa9..3d7674a91758 100644 --- a/lib/galaxy/files/__init__.py +++ b/lib/galaxy/files/__init__.py @@ -4,12 +4,9 @@ from typing import ( Any, Callable, - Dict, - List, NamedTuple, Optional, Protocol, - Set, ) from galaxy import exceptions @@ -66,9 +63,9 @@ def user_file_sources_to_dicts( for_serialization: bool, user_context: "FileSourcesUserContext", browsable_only: Optional[bool] = False, - include_kind: Optional[Set[PluginKind]] = None, - exclude_kind: Optional[Set[PluginKind]] = None, - ) -> List[FilesSourceProperties]: + include_kind: Optional[set[PluginKind]] = None, + exclude_kind: Optional[set[PluginKind]] = None, + ) -> list[FilesSourceProperties]: """Write out user file sources as list of config dictionaries.""" # config_dicts: List[FilesSourceProperties] = [] # for file_source in self.user_file_sources(): @@ -90,9 +87,9 @@ def user_file_sources_to_dicts( for_serialization: bool, user_context: "FileSourcesUserContext", browsable_only: Optional[bool] = False, - include_kind: Optional[Set[PluginKind]] = None, - exclude_kind: Optional[Set[PluginKind]] = None, - ) -> List[FilesSourceProperties]: + include_kind: Optional[set[PluginKind]] = None, + exclude_kind: Optional[set[PluginKind]] = None, + ) -> list[FilesSourceProperties]: return [] @@ -126,7 +123,7 @@ def from_app_config(config): class ConfiguredFileSources: """Load plugins and resolve Galaxy URIs to FileSource objects.""" - _file_sources: List[BaseFilesSource] + _file_sources: list[BaseFilesSource] _plugin_loader: FileSourcePluginLoader _user_defined_file_sources: UserDefinedFileSources @@ -141,7 +138,7 @@ def __init__( self._file_sources_config = file_sources_config self._plugin_loader = plugin_loader or FileSourcePluginLoader() self._user_defined_file_sources = _ensure_user_defined_file_sources(user_defined_file_sources) - file_sources: List[BaseFilesSource] = [] + file_sources: list[BaseFilesSource] = [] if configured_file_source_conf is None: configured_file_source_conf = ConfiguredFileSourcesConf(conf_dict=[]) if configured_file_source_conf.conf_file is not None: @@ -258,10 +255,10 @@ def 
plugins_to_dict( for_serialization: bool = False, user_context: "OptionalUserContext" = None, browsable_only: Optional[bool] = False, - include_kind: Optional[Set[PluginKind]] = None, - exclude_kind: Optional[Set[PluginKind]] = None, - ) -> List[FilesSourceProperties]: - rval: List[FilesSourceProperties] = [] + include_kind: Optional[set[PluginKind]] = None, + exclude_kind: Optional[set[PluginKind]] = None, + ) -> list[FilesSourceProperties]: + rval: list[FilesSourceProperties] = [] for file_source in self._file_sources: if not file_source.user_has_access(user_context): continue @@ -285,7 +282,7 @@ def plugins_to_dict( ) return rval - def to_dict(self, for_serialization: bool = False, user_context: "OptionalUserContext" = None) -> Dict[str, Any]: + def to_dict(self, for_serialization: bool = False, user_context: "OptionalUserContext" = None) -> dict[str, Any]: return { "file_sources": self.plugins_to_dict(for_serialization=for_serialization, user_context=user_context), "config": self._file_sources_config.to_dict(), @@ -315,23 +312,23 @@ def __init__( class DictifiableFilesSourceContext(Protocol): @property - def role_names(self) -> Set[str]: ... + def role_names(self) -> set[str]: ... @property - def group_names(self) -> Set[str]: ... + def group_names(self) -> set[str]: ... @property def file_sources(self) -> ConfiguredFileSources: ... def to_dict( - self, view: str = "collection", value_mapper: Optional[Dict[str, Callable]] = None - ) -> Dict[str, Any]: ... + self, view: str = "collection", value_mapper: Optional[dict[str, Callable]] = None + ) -> dict[str, Any]: ... class FileSourceDictifiable(Dictifiable, DictifiableFilesSourceContext): dict_collection_visible_keys = ("email", "username", "ftp_dir", "preferences", "is_admin") - def to_dict(self, view="collection", value_mapper: Optional[Dict[str, Callable]] = None) -> Dict[str, Any]: + def to_dict(self, view="collection", value_mapper: Optional[dict[str, Callable]] = None) -> dict[str, Any]: rval = super().to_dict(view=view, value_mapper=value_mapper) rval["role_names"] = list(self.role_names) rval["group_names"] = list(self.group_names) @@ -350,16 +347,16 @@ def username(self) -> Optional[str]: ... def ftp_dir(self) -> Optional[str]: ... @property - def preferences(self) -> Dict[str, Any]: ... + def preferences(self) -> dict[str, Any]: ... @property def is_admin(self) -> bool: ... @property - def user_vault(self) -> Dict[str, Any]: ... + def user_vault(self) -> dict[str, Any]: ... @property - def app_vault(self) -> Dict[str, Any]: ... + def app_vault(self) -> dict[str, Any]: ... @property def anonymous(self) -> bool: ... @@ -394,17 +391,16 @@ def preferences(self): return user and user.extra_preferences or defaultdict(lambda: None) @property - def role_names(self) -> Set[str]: + def role_names(self) -> set[str]: """The set of role names of this user.""" - user = self.trans.user role_names = set() - if user: + if user := self.trans.user: role_names = {ura.role.name for ura in user.roles} role_names.add(user.email) # User's private role may have a generic name, so add user's email explicitly. 
return role_names @property - def group_names(self) -> Set[str]: + def group_names(self) -> set[str]: """The set of group names to which this user belongs.""" user = self.trans.user return {ugr.group.name for ugr in user.groups} if user else set() diff --git a/lib/galaxy/files/plugins.py b/lib/galaxy/files/plugins.py index fcbee3fdeaf8..002a3af5e97a 100644 --- a/lib/galaxy/files/plugins.py +++ b/lib/galaxy/files/plugins.py @@ -1,8 +1,6 @@ from typing import ( cast, - List, Optional, - Type, TYPE_CHECKING, ) @@ -18,8 +16,8 @@ class FileSourcePluginsConfig: - symlink_allowlist: List[str] - fetch_url_allowlist: List[str] + symlink_allowlist: list[str] + fetch_url_allowlist: list[str] library_import_dir: Optional[str] user_library_import_dir: Optional[str] ftp_upload_dir: Optional[str] @@ -108,12 +106,12 @@ def _file_source_plugins_dict(self): return plugins_dict(galaxy.files.sources, "plugin_type") - def get_plugin_type_class(self, plugin_type: str) -> Type["BaseFilesSource"]: - return cast(Type["BaseFilesSource"], self._plugin_classes[plugin_type]) + def get_plugin_type_class(self, plugin_type: str) -> type["BaseFilesSource"]: + return cast(type["BaseFilesSource"], self._plugin_classes[plugin_type]) def load_plugins( self, plugin_source: PluginConfigSource, file_source_plugin_config: FileSourcePluginsConfig - ) -> List["BaseFilesSource"]: + ) -> list["BaseFilesSource"]: extra_kwds = { "file_sources_config": file_source_plugin_config, } diff --git a/lib/galaxy/files/sources/__init__.py b/lib/galaxy/files/sources/__init__.py index 766b330711a7..ad643c5148af 100644 --- a/lib/galaxy/files/sources/__init__.py +++ b/lib/galaxy/files/sources/__init__.py @@ -1,4 +1,5 @@ import abc +import builtins import os import time from dataclasses import ( @@ -9,11 +10,7 @@ from typing import ( Any, ClassVar, - List, Optional, - Set, - Tuple, - Type, TYPE_CHECKING, Union, ) @@ -309,7 +306,7 @@ def list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: """Return a list of 'Directory's and 'File's and the total count in a tuple.""" @@ -326,7 +323,7 @@ def get_browsable(self) -> bool: """Return true if the filesource implements the SupportsBrowsing interface.""" -def file_source_type_is_browsable(target_type: Type["BaseFilesSource"]) -> bool: +def file_source_type_is_browsable(target_type: type["BaseFilesSource"]) -> bool: # Check whether the list method has been overridden return target_type.list != BaseFilesSource.list or target_type._list != BaseFilesSource._list @@ -456,7 +453,7 @@ def list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: self._check_user_access(user_context) if not self.supports_pagination and (limit is not None or offset is not None): raise RequestParameterInvalidException("Pagination is not supported by this file source.") @@ -482,7 +479,7 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[builtins.list[AnyRemoteEntry], int]: raise NotImplementedError() def create_entry( @@ -593,7 +590,7 @@ def _user_has_required_groups(self, user_context: "FileSourcesUserContext") -> b return self._evaluate_security_rules(self.requires_groups, user_context.group_names) return True - def _evaluate_security_rules(self, rule_expression: str, user_credentials: 
Set[str]) -> bool: + def _evaluate_security_rules(self, rule_expression: str, user_credentials: set[str]) -> bool: token_evaluator = TokenContainedEvaluator(user_credentials) evaluator = BooleanExpressionEvaluator(token_evaluator) return evaluator.evaluate_expression(rule_expression) diff --git a/lib/galaxy/files/sources/_pyfilesystem2.py b/lib/galaxy/files/sources/_pyfilesystem2.py index 5b3add6e6119..6bce4349497e 100644 --- a/lib/galaxy/files/sources/_pyfilesystem2.py +++ b/lib/galaxy/files/sources/_pyfilesystem2.py @@ -4,10 +4,7 @@ import os from typing import ( ClassVar, - List, Optional, - Tuple, - Type, ) import fs @@ -34,7 +31,7 @@ class PyFilesystem2FilesSource(BaseFilesSource): - required_module: ClassVar[Optional[Type[FS]]] + required_module: ClassVar[Optional[type[FS]]] required_package: ClassVar[str] supports_pagination = True supports_search = True @@ -60,12 +57,12 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: """Return dictionary of 'Directory's and 'File's.""" try: with self._open_fs(user_context=user_context, opts=opts) as h: if recursive: - recursive_result: List[AnyRemoteEntry] = [] + recursive_result: list[AnyRemoteEntry] = [] try: for p, dirs, files in h.walk(path, namespaces=["details"]): to_dict = functools.partial(self._resource_info_to_dict, p) @@ -89,10 +86,10 @@ def _list( except fs.errors.FSError as e: raise MessageException(f"Problem listing file source path {path}. Reason: {e}") from e - def _get_total_matches_count(self, fs: FS, path: str, filter: Optional[List[str]] = None) -> int: + def _get_total_matches_count(self, fs: FS, path: str, filter: Optional[list[str]] = None) -> int: return sum(1 for _ in fs.filterdir(path, namespaces=["basic"], files=filter, dirs=filter)) - def _to_page(self, limit: Optional[int] = None, offset: Optional[int] = None) -> Optional[Tuple[int, int]]: + def _to_page(self, limit: Optional[int] = None, offset: Optional[int] = None) -> Optional[tuple[int, int]]: if limit is None and offset is None: return None limit = limit or DEFAULT_PAGE_LIMIT @@ -100,7 +97,7 @@ def _to_page(self, limit: Optional[int] = None, offset: Optional[int] = None) -> end = start + limit return (start, end) - def _query_to_filter(self, query: Optional[str]) -> Optional[List[str]]: + def _query_to_filter(self, query: Optional[str]) -> Optional[list[str]]: if not query: return None return [f"*{query}*"] diff --git a/lib/galaxy/files/sources/_rdm.py b/lib/galaxy/files/sources/_rdm.py index b8d398bfee03..1da6d0aec182 100644 --- a/lib/galaxy/files/sources/_rdm.py +++ b/lib/galaxy/files/sources/_rdm.py @@ -1,9 +1,7 @@ import logging from typing import ( - List, NamedTuple, Optional, - Tuple, ) from typing_extensions import Unpack @@ -74,7 +72,7 @@ def get_file_containers( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[RemoteDirectory], int]: + ) -> tuple[list[RemoteDirectory], int]: """Returns the list of file containers in the repository and the total count containers. If writeable is True, only containers that the user can write to will be returned. @@ -88,7 +86,7 @@ def get_files_in_container( writeable: bool, user_context: OptionalUserContext = None, query: Optional[str] = None, - ) -> List[RemoteFile]: + ) -> list[RemoteFile]: """Returns the list of files of a file container. If writeable is True, we are signaling that the user intends to write to the container. 
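A note on the `import builtins` added to lib/galaxy/files/sources/__init__.py above, the one non-mechanical typing change in this region: `_list` is annotated as returning `tuple[builtins.list[AnyRemoteEntry], int]` while the public `list` method keeps the plain spelling, because by the time `_list` is defined the name `list` inside the class body refers to the `list()` method defined above it, not the builtin type. A minimal sketch of the pitfall (hypothetical names, not Galaxy code):

    import builtins

    class Source:
        # While this def executes, "list" is not yet bound in the class
        # namespace, so the annotation resolves to the builtin type.
        def list(self) -> tuple[list[str], int]:
            return self._list()

        # Here "list" already names the method above; a bare list[str] would
        # raise "TypeError: 'function' object is not subscriptable" at class
        # creation time, hence the explicit builtins.list.
        def _list(self) -> tuple[builtins.list[str], int]:
            return ["a.txt", "b.txt"], 2

    assert Source().list() == (["a.txt", "b.txt"], 2)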
diff --git a/lib/galaxy/files/sources/dataverse.py b/lib/galaxy/files/sources/dataverse.py index 2db95b85decc..d1bd786f3229 100644 --- a/lib/galaxy/files/sources/dataverse.py +++ b/lib/galaxy/files/sources/dataverse.py @@ -4,10 +4,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Tuple, ) from urllib.error import HTTPError from urllib.parse import quote @@ -139,7 +136,7 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: """This method lists the datasets or files from dataverse.""" writeable = opts and opts.writeable or False is_root_path = path == "/" @@ -147,10 +144,10 @@ def _list( datasets, total_hits = self.repository.get_file_containers( writeable, user_context, limit=limit, offset=offset, query=query ) - return cast(List[AnyRemoteEntry], datasets), total_hits + return cast(list[AnyRemoteEntry], datasets), total_hits dataset_id = self.get_container_id_from_path(path) files = self.repository.get_files_in_container(dataset_id, writeable, user_context, query) - return cast(List[AnyRemoteEntry], files), len(files) + return cast(list[AnyRemoteEntry], files), len(files) def _create_entry( self, @@ -256,7 +253,7 @@ def get_file_containers( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[RemoteDirectory], int]: + ) -> tuple[list[RemoteDirectory], int]: """Lists the Dataverse datasets in the repository.""" request_url = self.search_url params = { @@ -278,7 +275,7 @@ def get_files_in_container( writeable: bool, user_context: OptionalUserContext = None, query: Optional[str] = None, - ) -> List[RemoteFile]: + ) -> list[RemoteFile]: """This method lists the files in a dataverse dataset.""" request_url = self.files_of_dataset_url(dataset_id=container_id) response_data = self._get_response(user_context, request_url) @@ -286,7 +283,7 @@ def get_files_in_container( files = self._filter_files_by_name(files, query) return files - def _filter_files_by_name(self, files: List[RemoteFile], query: Optional[str] = None) -> List[RemoteFile]: + def _filter_files_by_name(self, files: list[RemoteFile], query: Optional[str] = None) -> list[RemoteFile]: if not query: return files return [file for file in files if query in file["name"]] @@ -385,8 +382,8 @@ def _download_file( f"Cannot download file from URL '{file_path}'. Please make sure the dataset and/or file exists and it is public." 
) - def _get_datasets_from_response(self, response: dict) -> List[RemoteDirectory]: - rval: List[RemoteDirectory] = [] + def _get_datasets_from_response(self, response: dict) -> list[RemoteDirectory]: + rval: list[RemoteDirectory] = [] for dataset in response["items"]: uri = self.to_plugin_uri(dataset_id=dataset["global_id"]) rval.append( @@ -399,8 +396,8 @@ def _get_datasets_from_response(self, response: dict) -> List[RemoteDirectory]: ) return rval - def _get_files_from_response(self, dataset_id: str, response: dict) -> List[RemoteFile]: - rval: List[RemoteFile] = [] + def _get_files_from_response(self, dataset_id: str, response: dict) -> list[RemoteFile]: + rval: list[RemoteFile] = [] for entry in response: dataFile = entry.get("dataFile") uri = self.to_plugin_uri(dataset_id, dataFile.get("persistentId")) @@ -420,7 +417,7 @@ def _get_response( self, user_context: OptionalUserContext, request_url: str, - params: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, auth_required: bool = False, ) -> dict: headers = self._get_request_headers(user_context, auth_required) diff --git a/lib/galaxy/files/sources/drs.py b/lib/galaxy/files/sources/drs.py index 00f1a3643385..72e35f2b70bf 100644 --- a/lib/galaxy/files/sources/drs.py +++ b/lib/galaxy/files/sources/drs.py @@ -2,7 +2,6 @@ import re from typing import ( cast, - Dict, Optional, ) @@ -23,7 +22,7 @@ class DRSFilesSourceProperties(FilesSourceProperties, total=False): url_regex: str force_http: bool - http_headers: Dict[str, str] + http_headers: dict[str, str] class DRSFilesSource(BaseFilesSource): diff --git a/lib/galaxy/files/sources/elabftw.py b/lib/galaxy/files/sources/elabftw.py index 9861aefce56d..ea0134d3881c 100644 --- a/lib/galaxy/files/sources/elabftw.py +++ b/lib/galaxy/files/sources/elabftw.py @@ -46,6 +46,10 @@ import logging import re from abc import ABC +from collections.abc import ( + AsyncIterator, + Iterable, +) from datetime import ( datetime, timezone, @@ -55,16 +59,11 @@ from time import time from typing import ( Any, - AsyncIterator, cast, - Dict, Generic, get_type_hints, - Iterable, - List, Literal, Optional, - Tuple, TypeVar, ) from urllib.parse import ( @@ -282,7 +281,7 @@ def _get_endpoint( return urlparse(endpoint) def _serialization_props(self, user_context: OptionalUserContext = None) -> eLabFTWFilesSourceProperties: - effective_props: Dict[str, Any] = {} + effective_props: dict[str, Any] = {} for key, val in self._props.items(): if key in {"api_key", "endpoint"} and user_context is None: @@ -308,7 +307,7 @@ def _list( # `sort_by: Optional[Literal["name", "uri", "path", "class", "size", "ctime"]] = None,` # from Python 3.9 on, the following would be possible, although barely readable # `sort_by: Optional[Literal[*(get_type_hints(RemoteDirectory) | get_type_hints(RemoteFile)).keys()]] = None,` - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: """ List the contents of an eLabFTW endpoint. @@ -350,7 +349,7 @@ async def _list_async( sort_by: Optional[str] = None, # in particular, expecting # `sort_by: Optional[Literal["name", "uri", "path", "class", "size", "ctime"]] = None,` - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: """ List remote entries in a remote directory. 
@@ -408,7 +407,7 @@ async def collect_async_iterator(async_iter: AsyncIterator) -> list: """ return [value async for value in async_iter] - fetch_entity_types_tasks: List[asyncio.Task] = ( + fetch_entity_types_tasks: list[asyncio.Task] = ( # fmt: off [ asyncio.create_task( @@ -425,7 +424,7 @@ async def collect_async_iterator(async_iter: AsyncIterator) -> list: if retrieve_entity_types else [] ) - fetch_entities_tasks: List[asyncio.Task] = ( + fetch_entities_tasks: list[asyncio.Task] = ( [ asyncio.create_task( collect_async_iterator( @@ -474,7 +473,7 @@ async def collect_async_iterator(async_iter: AsyncIterator) -> list: if retrieve_entities else [] ) - fetch_attachments_tasks: List[asyncio.Task] = ( + fetch_attachments_tasks: list[asyncio.Task] = ( # fetching attachments is "bearable" for the user up to ~500 experiments + resources with attachments; # if eLabFTW allowed listing attachments without having to send individual requests for each experiment # or resource, this would not be a concern @@ -507,17 +506,17 @@ async def collect_async_iterator(async_iter: AsyncIterator) -> list: else [] ) - wrapped_entity_types: List[eLabFTWRemoteEntryWrapper[RemoteDirectory]] = [ + wrapped_entity_types: list[eLabFTWRemoteEntryWrapper[RemoteDirectory]] = [ wrapped_entity_type for wrapped_entity_types in await asyncio.gather(*fetch_entity_types_tasks) for wrapped_entity_type in wrapped_entity_types ] - wrapped_entities: List[eLabFTWRemoteEntryWrapper[RemoteDirectory]] = [ + wrapped_entities: list[eLabFTWRemoteEntryWrapper[RemoteDirectory]] = [ wrapped_entity for wrapped_entities in await asyncio.gather(*fetch_entities_tasks) for wrapped_entity in wrapped_entities ] - wrapped_attachments: List[eLabFTWRemoteEntryWrapper[RemoteFile]] = [ + wrapped_attachments: list[eLabFTWRemoteEntryWrapper[RemoteFile]] = [ wrapped_attachment for wrapped_attachments in await asyncio.gather(*fetch_attachments_tasks) for wrapped_attachment in wrapped_attachments @@ -659,10 +658,10 @@ class Params(TypedDict): if query: params.update({"q": query}) - content: List[dict] = [{}] * params["limit"] # stores JSON responses (entities) from the server + content: list[dict] = [{}] * params["limit"] # stores JSON responses (entities) from the server start, timeout = time(), False while len(content) >= params["limit"] and not (timeout := ((time() - start) >= PAGINATION_TIMEOUT)): - entities: Dict[int, dict] = {} + entities: dict[int, dict] = {} async with session.get( url, @@ -674,7 +673,7 @@ class Params(TypedDict): status: int = response.status content = await response.json() - def validate_and_register_entity(item, mapping: Dict[int, dict]) -> Literal[True]: + def validate_and_register_entity(item, mapping: dict[int, dict]) -> Literal[True]: valid = isinstance(item, dict) and isinstance(item.get("id"), int) if not valid: raise ValidationError(err_msg="Invalid response from eLabFTW") @@ -904,7 +903,7 @@ def _realize_to( raise exception -def split_path(path: str) -> Tuple[Optional[str], Optional[str], Optional[str]]: +def split_path(path: str) -> tuple[Optional[str], Optional[str], Optional[str]]: """ Split and validate an eLabFTW path. diff --git a/lib/galaxy/files/sources/ftp.py b/lib/galaxy/files/sources/ftp.py index 5df4066e615d..05658e925ac0 100644 --- a/lib/galaxy/files/sources/ftp.py +++ b/lib/galaxy/files/sources/ftp.py @@ -10,7 +10,6 @@ from typing import ( cast, Optional, - Tuple, ) from . 
import ( @@ -72,7 +71,7 @@ def _write_from( def _get_props_and_rel_path( self, extra_props: FTPFilesSourceProperties, url: str - ) -> Tuple[str, FTPFilesSourceProperties]: + ) -> tuple[str, FTPFilesSourceProperties]: host = self._props.get("host") port = self._props.get("port") user = self._props.get("user") diff --git a/lib/galaxy/files/sources/googledrive.py b/lib/galaxy/files/sources/googledrive.py index e809d4cd03bf..4f32803ea70b 100644 --- a/lib/galaxy/files/sources/googledrive.py +++ b/lib/galaxy/files/sources/googledrive.py @@ -17,8 +17,7 @@ class GoogleDriveFilesSource(PyFilesystem2FilesSource): def _open_fs(self, user_context=None, opts: Optional[FilesSourceOptions] = None): props = self._serialization_props(user_context) - access_token = props.pop("oauth2_access_token") - if access_token: + if access_token := props.pop("oauth2_access_token"): props["token"] = access_token credentials = Credentials(**props) handle = GoogleDriveFS(credentials) diff --git a/lib/galaxy/files/sources/http.py b/lib/galaxy/files/sources/http.py index ed4ae1ea8d33..fb564cb7b74d 100644 --- a/lib/galaxy/files/sources/http.py +++ b/lib/galaxy/files/sources/http.py @@ -3,8 +3,6 @@ import urllib.request from typing import ( cast, - Dict, - List, Optional, ) @@ -30,8 +28,8 @@ class HTTPFilesSourceProperties(FilesSourceProperties, total=False): url_regex: str - http_headers: Dict[str, str] - fetch_url_allowlist: List[IpAllowedListEntryT] + http_headers: dict[str, str] + fetch_url_allowlist: list[IpAllowedListEntryT] class HTTPFilesSource(BaseFilesSource): diff --git a/lib/galaxy/files/sources/invenio.py b/lib/galaxy/files/sources/invenio.py index afe819193d8b..ff195bdbad74 100644 --- a/lib/galaxy/files/sources/invenio.py +++ b/lib/galaxy/files/sources/invenio.py @@ -5,10 +5,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Tuple, ) from urllib.parse import quote @@ -74,19 +71,19 @@ class RecordPersonOrOrg(TypedDict): given_name: str type: Literal["personal", "organizational"] name: str - identifiers: List[IdentifierEntry] + identifiers: list[IdentifierEntry] class Creator(TypedDict): person_or_org: RecordPersonOrOrg - affiliations: Optional[List[AffiliationEntry]] + affiliations: Optional[list[AffiliationEntry]] class RecordMetadata(TypedDict): title: str resource_type: ResourceType publication_date: str - creators: List[Creator] + creators: list[Creator] class RecordLinks(TypedDict): @@ -192,17 +189,17 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: writeable = opts and opts.writeable or False is_root_path = path == "/" if is_root_path: records, total_hits = self.repository.get_file_containers( writeable, user_context, limit=limit, offset=offset, query=query ) - return cast(List[AnyRemoteEntry], records), total_hits + return cast(list[AnyRemoteEntry], records), total_hits record_id = self.get_container_id_from_path(path) files = self.repository.get_files_in_container(record_id, writeable, user_context) - return cast(List[AnyRemoteEntry], files), len(files) + return cast(list[AnyRemoteEntry], files), len(files) def _create_entry( self, @@ -277,9 +274,9 @@ def get_file_containers( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[RemoteDirectory], int]: + ) -> tuple[list[RemoteDirectory], int]: """Gets the records in the repository and returns the total count of records.""" - params: Dict[str, Any] = {} + 
params: dict[str, Any] = {} request_url = self.records_url if writeable: # Only draft records owned by the user can be written to. @@ -295,7 +292,7 @@ def get_file_containers( total_hits = response_data["hits"]["total"] return self._get_records_from_response(response_data), total_hits - def _to_size_page(self, limit: Optional[int], offset: Optional[int]) -> Tuple[Optional[int], Optional[int]]: + def _to_size_page(self, limit: Optional[int], offset: Optional[int]) -> tuple[Optional[int], Optional[int]]: if limit is None and offset is None: return None, None size = limit or DEFAULT_PAGE_LIMIT @@ -308,7 +305,7 @@ def get_files_in_container( writeable: bool, user_context: OptionalUserContext = None, query: Optional[str] = None, - ) -> List[RemoteFile]: + ) -> list[RemoteFile]: conditionally_draft = "/draft" if writeable else "" request_url = f"{self.records_url}/{container_id}{conditionally_draft}/files" response_data = self._get_response(user_context, request_url) @@ -429,9 +426,9 @@ def _get_draft_record(self, record_id: str, user_context: OptionalUserContext = draft_record = self._get_response(user_context, request_url) return draft_record - def _get_records_from_response(self, response: dict) -> List[RemoteDirectory]: + def _get_records_from_response(self, response: dict) -> list[RemoteDirectory]: records = response["hits"]["hits"] - rval: List[RemoteDirectory] = [] + rval: list[RemoteDirectory] = [] for record in records: uri = self.to_plugin_uri(record_id=record["id"]) path = self.plugin.to_relative_path(uri) @@ -452,12 +449,12 @@ def _get_record_title(self, record: InvenioRecord) -> str: title = record["metadata"].get("title") return title or "No title" - def _get_record_files_from_response(self, record_id: str, response: dict) -> List[RemoteFile]: + def _get_record_files_from_response(self, record_id: str, response: dict) -> list[RemoteFile]: files_enabled = response.get("enabled", False) if not files_enabled: return [] entries = response["entries"] - rval: List[RemoteFile] = [] + rval: list[RemoteFile] = [] for entry in entries: if entry.get("status") == "completed": uri = self.to_plugin_uri(record_id=record_id, filename=entry["key"]) @@ -499,7 +496,7 @@ def _get_response( self, user_context: OptionalUserContext, request_url: str, - params: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, auth_required: bool = False, ) -> dict: headers = self._get_request_headers(user_context, auth_required) diff --git a/lib/galaxy/files/sources/posix.py b/lib/galaxy/files/sources/posix.py index 258d5ef20a69..79840e9e7d2a 100644 --- a/lib/galaxy/files/sources/posix.py +++ b/lib/galaxy/files/sources/posix.py @@ -2,9 +2,7 @@ import os import shutil from typing import ( - List, Optional, - Tuple, ) from typing_extensions import Unpack @@ -70,14 +68,14 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: if not self.root: raise exceptions.ItemAccessibilityException("Listing files at file:// URLs has been disabled.") dir_path = self._to_native_path(path, user_context=user_context) if not self._safe_directory(dir_path): raise exceptions.ObjectNotFound(f"The specified directory does not exist [{dir_path}].") if recursive: - res: List[AnyRemoteEntry] = [] + res: list[AnyRemoteEntry] = [] effective_root = self._effective_root(user_context) for p, dirs, files in safe_walk(dir_path, allowlist=self._allowlist): rel_dir = os.path.relpath(p, effective_root) 
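A few hunks in this area are walrus rewrites rather than typing changes; googledrive.py above folds `access_token = props.pop(...)` followed by `if access_token:` into a single assignment expression (PEP 572, available since Python 3.8), which is the rewrite the auto-walrus pass automates. A minimal before/after sketch (hypothetical names, not Galaxy code):

    import logging

    log = logging.getLogger(__name__)

    def pending_count() -> int:
        return 3  # hypothetical stand-in for a real lookup

    # Before:
    #     count = pending_count()
    #     if count:
    #         log.info("dispatched %d items", count)
    # After the walrus rewrite, the binding happens inside the condition:
    if count := pending_count():
        log.info("dispatched %d items", count)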
diff --git a/lib/galaxy/files/sources/remotezip.py b/lib/galaxy/files/sources/remotezip.py index 06289245d8e3..b64ef4d1d586 100644 --- a/lib/galaxy/files/sources/remotezip.py +++ b/lib/galaxy/files/sources/remotezip.py @@ -6,7 +6,6 @@ ) from struct import unpack from typing import ( - Dict, Optional, ) from urllib.parse import ( @@ -110,13 +109,13 @@ def _serialization_props(self, user_context: OptionalUserContext = None): return effective_props -def extract_query_parameters(url: str) -> Dict[str, str]: +def extract_query_parameters(url: str) -> dict[str, str]: parsed_url = urlparse(url) query_params = parse_qs(parsed_url.query) return {key: value[0] for key, value in query_params.items()} -def validate_params(params: Dict[str, str]) -> FileExtractParameters: +def validate_params(params: dict[str, str]) -> FileExtractParameters: """Validates and converts the params dictionary to a FileExtractParameters instance.""" required_fields = [field.name for field in fields(FileExtractParameters)] diff --git a/lib/galaxy/files/sources/s3fs.py b/lib/galaxy/files/sources/s3fs.py index 7adeef035306..57d1f7d9f268 100644 --- a/lib/galaxy/files/sources/s3fs.py +++ b/lib/galaxy/files/sources/s3fs.py @@ -3,9 +3,7 @@ import os from typing import ( cast, - List, Optional, - Tuple, ) from typing_extensions import ( @@ -79,13 +77,13 @@ def _list( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[List[AnyRemoteEntry], int]: + ) -> tuple[list[AnyRemoteEntry], int]: _props = self._serialization_props(user_context) # we need to pop the 'bucket' here, because the argument is not recognised in a downstream function _bucket_name = _props.pop("bucket", "") fs = self._open_fs(props=_props, opts=opts) if recursive: - res: List[AnyRemoteEntry] = [] + res: list[AnyRemoteEntry] = [] bucket_path = self._bucket_path(_bucket_name, path) for p, dirs, files in fs.walk(bucket_path, detail=True): to_dict = functools.partial(self._resource_info_to_dict, p) diff --git a/lib/galaxy/files/sources/util.py b/lib/galaxy/files/sources/util.py index a3af04f6bff4..1faeb2e5726d 100644 --- a/lib/galaxy/files/sources/util.py +++ b/lib/galaxy/files/sources/util.py @@ -1,8 +1,6 @@ import time from typing import ( - List, Optional, - Tuple, ) from galaxy import exceptions @@ -53,7 +51,7 @@ def retry_and_get(get_url: str, retry_options: RetryOptions, headers: Optional[d return response -def _get_access_info(obj_url: str, access_method: dict, headers=None) -> Tuple[str, dict]: +def _get_access_info(obj_url: str, access_method: dict, headers=None) -> tuple[str, dict]: try: access_url = access_method["access_url"] except KeyError: @@ -81,7 +79,7 @@ def fetch_drs_to_file( force_http=False, retry_options: Optional[RetryOptions] = None, headers: Optional[dict] = None, - fetch_url_allowlist: Optional[List[IpAllowedListEntryT]] = None, + fetch_url_allowlist: Optional[list[IpAllowedListEntryT]] = None, ): """Fetch contents of drs:// URI to a target path.""" if not drs_uri.startswith("drs://"): diff --git a/lib/galaxy/files/templates/manager.py b/lib/galaxy/files/templates/manager.py index 79a649093999..8e573026aec8 100644 --- a/lib/galaxy/files/templates/manager.py +++ b/lib/galaxy/files/templates/manager.py @@ -1,6 +1,5 @@ import os from typing import ( - List, Optional, Protocol, ) @@ -27,7 +26,7 @@ class AppConfigProtocol(Protocol): - file_source_templates: Optional[List[RawTemplateConfig]] + file_source_templates: Optional[list[RawTemplateConfig]] file_source_templates_config_file: Optional[str] @@ 
-81,6 +80,6 @@ def validate(self, instance: InstanceDefinition): validate_secrets_and_variables(instance, template) -def raw_config_to_catalog(raw_config: List[RawTemplateConfig]) -> FileSourceTemplateCatalog: +def raw_config_to_catalog(raw_config: list[RawTemplateConfig]) -> FileSourceTemplateCatalog: effective_root = apply_syntactic_sugar(raw_config) return FileSourceTemplateCatalog.model_validate(effective_root) diff --git a/lib/galaxy/files/templates/models.py b/lib/galaxy/files/templates/models.py index f123ed3fd6d9..5e656642d00e 100644 --- a/lib/galaxy/files/templates/models.py +++ b/lib/galaxy/files/templates/models.py @@ -1,9 +1,7 @@ from typing import ( + Annotated, Any, - Dict, - List, Optional, - Type, Union, ) @@ -12,7 +10,6 @@ RootModel, ) from typing_extensions import ( - Annotated, Literal, ) @@ -359,8 +356,8 @@ class FileSourceTemplateBase(StrictModel): # template by hiding but keep it in the catalog for backward # compatibility for users with existing stores of that template. hidden: bool = False - variables: Optional[List[TemplateVariable]] = None - secrets: Optional[List[TemplateSecret]] = None + variables: Optional[list[TemplateVariable]] = None + secrets: Optional[list[TemplateSecret]] = None class FileSourceTemplateSummary(FileSourceTemplateBase): @@ -369,23 +366,23 @@ class FileSourceTemplateSummary(FileSourceTemplateBase): class FileSourceTemplate(FileSourceTemplateBase): configuration: FileSourceTemplateConfiguration - environment: Optional[List[TemplateEnvironmentEntry]] = None + environment: Optional[list[TemplateEnvironmentEntry]] = None @property def type(self): return self.configuration.type -FileSourceTemplateCatalog = RootModel[List[FileSourceTemplate]] +FileSourceTemplateCatalog = RootModel[list[FileSourceTemplate]] class FileSourceTemplateSummaries(RootModel): - root: List[FileSourceTemplateSummary] + root: list[FileSourceTemplateSummary] def template_to_configuration( template: FileSourceTemplate, - variables: Dict[str, TemplateVariableValueType], + variables: dict[str, TemplateVariableValueType], secrets: SecretsDict, user_details: UserDetailsDict, environment: EnvironmentDict, @@ -398,7 +395,7 @@ def template_to_configuration( return to_configuration_object(raw_config) -TypesToConfigurationClasses: Dict[FileSourceTemplateType, Type[FileSourceConfiguration]] = { +TypesToConfigurationClasses: dict[FileSourceTemplateType, type[FileSourceConfiguration]] = { "ftp": FtpFileSourceConfiguration, "posix": PosixFileSourceConfiguration, "s3fs": S3FSFileSourceConfiguration, @@ -440,7 +437,7 @@ def get_oauth2_config_or_none(template: FileSourceTemplate) -> Optional[OAuth2Co return get_oauth2_config(template) -def to_configuration_object(configuration_dict: Dict[str, Any]) -> FileSourceConfiguration: +def to_configuration_object(configuration_dict: dict[str, Any]) -> FileSourceConfiguration: if "type" not in configuration_dict: raise KeyError("Configuration objects require a file source 'type' key, none found.") object_store_type = configuration_dict["type"] diff --git a/lib/galaxy/files/unittest_utils/__init__.py b/lib/galaxy/files/unittest_utils/__init__.py index 6a2386cdacd6..f211c39ebf22 100644 --- a/lib/galaxy/files/unittest_utils/__init__.py +++ b/lib/galaxy/files/unittest_utils/__init__.py @@ -2,7 +2,6 @@ import tempfile from typing import ( Optional, - Tuple, ) from galaxy.files import ( @@ -37,7 +36,7 @@ def setup_root(): return tmp, root -def write_file_fixtures(tmp: str, root: str) -> Tuple[str, str]: +def write_file_fixtures(tmp: str, root: str) -> 
tuple[str, str]: if not os.path.exists(root): os.mkdir(root) os.symlink(os.path.join(tmp, "b"), os.path.join(root, "unsafe")) diff --git a/lib/galaxy/files/uris.py b/lib/galaxy/files/uris.py index 9d48853294d6..8697836aefca 100644 --- a/lib/galaxy/files/uris.py +++ b/lib/galaxy/files/uris.py @@ -4,9 +4,7 @@ import socket import tempfile from typing import ( - List, Optional, - Tuple, ) from urllib.parse import urlparse @@ -73,7 +71,7 @@ def stream_to_file(stream, suffix="", prefix="", dir=None, text=False, **kwd): return stream_to_open_named_file(stream, fd, temp_name, **kwd) -def validate_uri_access(uri: str, is_admin: bool, ip_allowlist: List[IpAllowedListEntryT]) -> None: +def validate_uri_access(uri: str, is_admin: bool, ip_allowlist: list[IpAllowedListEntryT]) -> None: """Perform uniform checks on supplied URIs. - Prevent access to local IPs not found in ip_allowlist. @@ -84,7 +82,7 @@ def validate_uri_access(uri: str, is_admin: bool, ip_allowlist: List[IpAllowedLi raise AdminRequiredException() -def split_port(parsed_url: str, url: str) -> Tuple[str, int]: +def split_port(parsed_url: str, url: str) -> tuple[str, int]: try: idx = parsed_url.rindex(":") # We parse as an int and let this fail ungracefully if parsing @@ -96,7 +94,7 @@ def split_port(parsed_url: str, url: str) -> Tuple[str, int]: raise RequestParameterInvalidException(f"Could not verify url '{url}'.") -def validate_non_local(uri: str, ip_allowlist: List[IpAllowedListEntryT]) -> str: +def validate_non_local(uri: str, ip_allowlist: list[IpAllowedListEntryT]) -> str: # If it doesn't look like a URL, ignore it. if not (uri.lstrip().startswith("http://") or uri.lstrip().startswith("https://")): return uri diff --git a/lib/galaxy/job_execution/actions/post.py b/lib/galaxy/job_execution/actions/post.py index c6a9e5d27e8f..2d83681601b2 100644 --- a/lib/galaxy/job_execution/actions/post.py +++ b/lib/galaxy/job_execution/actions/post.py @@ -4,10 +4,6 @@ """ import datetime -from typing import ( - Dict, - Type, -) from markupsafe import escape @@ -506,7 +502,7 @@ def _execute(cls, tag_handler, user, output, tags): class ActionBox: - actions: Dict[str, Type[DefaultJobAction]] = { + actions: dict[str, type[DefaultJobAction]] = { "RenameDatasetAction": RenameDatasetAction, "HideDatasetAction": HideDatasetAction, "ChangeDatatypeAction": ChangeDatatypeAction, diff --git a/lib/galaxy/job_execution/compute_environment.py b/lib/galaxy/job_execution/compute_environment.py index bdd5b2ed96e7..dfd8329106c4 100644 --- a/lib/galaxy/job_execution/compute_environment.py +++ b/lib/galaxy/job_execution/compute_environment.py @@ -5,7 +5,6 @@ ) from typing import ( Any, - Dict, ) from galaxy.job_execution.datasets import DeferrableObjectsT @@ -24,7 +23,7 @@ class ComputeEnvironment(metaclass=ABCMeta): """ def __init__(self): - self.materialized_objects: Dict[str, DeferrableObjectsT] = {} + self.materialized_objects: dict[str, DeferrableObjectsT] = {} @abstractmethod def output_names(self): @@ -95,7 +94,7 @@ def galaxy_url(self): """URL to access Galaxy API from for this compute environment.""" @abstractmethod - def get_file_sources_dict(self) -> Dict[str, Any]: + def get_file_sources_dict(self) -> dict[str, Any]: """Return file sources dict for current user.""" @@ -120,7 +119,7 @@ def __init__(self, job_io: JobIO, job: Job): self.job_io = job_io self.job = job - def get_file_sources_dict(self) -> Dict[str, Any]: + def get_file_sources_dict(self) -> dict[str, Any]: return self.job_io.file_sources_dict def output_names(self): diff --git 
a/lib/galaxy/job_execution/output_collect.py b/lib/galaxy/job_execution/output_collect.py index d09f767d38ae..539986eb4653 100644 --- a/lib/galaxy/job_execution/output_collect.py +++ b/lib/galaxy/job_execution/output_collect.py @@ -9,8 +9,6 @@ from typing import ( Any, Callable, - Dict, - List, Optional, TYPE_CHECKING, Union, @@ -112,7 +110,7 @@ def get_metadata_source(self, input_name): def collect_dynamic_outputs( job_context: "BaseJobContext", - output_collections: Dict[str, Any], + output_collections: dict[str, Any], ): # unmapped outputs do not correspond to explicit outputs of the tool, they were inferred entirely # from the tool provided metadata (e.g. galaxy.json). @@ -208,7 +206,7 @@ def add_dataset_collection(self, collection): pass def find_files(self, output_name, collection, dataset_collectors): - discovered_files: List[DiscoveredFile] = [] + discovered_files: list[DiscoveredFile] = [] for discovered_file in discover_files( output_name, self.tool_provided_metadata, dataset_collectors, self.job_working_directory, collection ): @@ -221,7 +219,7 @@ def get_job_id(self) -> int: ... @property @abc.abstractmethod - def change_datatype_actions(self) -> Dict[str, Any]: ... + def change_datatype_actions(self) -> dict[str, Any]: ... @abc.abstractmethod def create_hdca(self, name: str, structure: UninitializedTree) -> Union[HistoryDatasetCollectionAssociation]: ... @@ -230,7 +228,7 @@ def create_hdca(self, name: str, structure: UninitializedTree) -> Union[HistoryD def get_hdca(self, object_id) -> HistoryDatasetCollectionAssociation: ... @abc.abstractmethod - def get_library_folder(self, destination: Dict[str, Any]) -> "LibraryFolder": ... + def get_library_folder(self, destination: dict[str, Any]) -> "LibraryFolder": ... @abc.abstractmethod def output_collection_def(self, name: str) -> Union[None, ToolOutputCollection]: ... @@ -322,14 +320,14 @@ def get_implicit_collection_jobs_association_id(self): return self.metadata_params.get("implicit_collection_jobs_association_id") -def collect_primary_datasets(job_context: BaseJobContext, output: Dict[str, DatasetInstance], input_ext): +def collect_primary_datasets(job_context: BaseJobContext, output: dict[str, DatasetInstance], input_ext): job_working_directory = job_context.job_working_directory # Loop through output file names, looking for generated primary # datasets in form specified by discover dataset patterns or in tool provided metadata. 
new_outdata_name = None - primary_datasets: Dict[str, Dict[str, DatasetInstance]] = {} - storage_callbacks: List[Callable] = [] + primary_datasets: dict[str, dict[str, DatasetInstance]] = {} + storage_callbacks: list[Callable] = [] for name, outdata in output.items(): primary_output_assigned = False dataset_collectors = [DEFAULT_DATASET_COLLECTOR] diff --git a/lib/galaxy/job_execution/setup.py b/lib/galaxy/job_execution/setup.py index 75714f71e14a..3234a4c31551 100644 --- a/lib/galaxy/job_execution/setup.py +++ b/lib/galaxy/job_execution/setup.py @@ -6,11 +6,8 @@ from typing import ( Any, cast, - Dict, - List, NamedTuple, Optional, - Tuple, Union, ) @@ -38,8 +35,8 @@ TOOL_PROVIDED_JOB_METADATA_KEYS = ["name", "info", "dbkey", "created_from_basename"] -OutputHdasAndType = Dict[str, Tuple[DatasetInstance, DatasetPath]] -OutputPaths = List[DatasetPath] +OutputHdasAndType = dict[str, tuple[DatasetInstance, DatasetPath]] +OutputPaths = list[DatasetPath] class JobOutput(NamedTuple): @@ -58,7 +55,7 @@ def __init__(self) -> None: def populated(self) -> bool: return self.output_hdas_and_paths is not None - def set_job_outputs(self, job_outputs: List[JobOutput]) -> None: + def set_job_outputs(self, job_outputs: list[JobOutput]) -> None: self.output_paths = [t[2] for t in job_outputs] self.output_hdas_and_paths = {t.output_name: (t.dataset, t.dataset_path) for t in job_outputs} @@ -109,8 +106,8 @@ def __init__( check_job_script_integrity: bool, check_job_script_integrity_count: int, check_job_script_integrity_sleep: float, - file_sources_dict: Dict[str, Any], - user_context: Union[FileSourcesUserContext, Dict[str, Any]], + file_sources_dict: dict[str, Any], + user_context: Union[FileSourcesUserContext, dict[str, Any]], tool_source: Optional[str] = None, tool_source_class: Optional["str"] = "XmlToolSource", tool_dir: Optional[str] = None, @@ -207,7 +204,7 @@ def output_hdas_and_paths(self) -> OutputHdasAndType: self.compute_outputs() return cast(OutputHdasAndType, self.job_outputs.output_hdas_and_paths) - def get_input_dataset_fnames(self, ds: DatasetInstance) -> List[str]: + def get_input_dataset_fnames(self, ds: DatasetInstance) -> list[str]: filenames = [ds.get_file_name()] # we will need to stage in metadata file names also # TODO: would be better to only stage in metadata files that are actually needed (found in command line, referenced in config files, etc.) 
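Note: the OutputHdasAndType / OutputPaths rewrite above is the PEP 585 pattern applied throughout this patch. Module-level type aliases are evaluated at import time, so the subscripted builtins genuinely require Python 3.9+; `from __future__ import annotations` (PEP 563) only defers annotations, not aliases. A minimal sketch with stand-in types (the real code uses Galaxy's DatasetInstance and DatasetPath):

    from typing import Optional

    # was: OutputHdasAndType = Dict[str, Tuple[str, str]]; OutputPaths = List[str]
    OutputHdasAndType = dict[str, tuple[str, str]]  # evaluated eagerly: 3.9+ only
    OutputPaths = list[str]

    def output_paths(outputs: Optional[OutputHdasAndType] = None) -> OutputPaths:
        # Keep just the path half of each (dataset, path) pair.
        return [pair[1] for pair in (outputs or {}).values()]

    print(output_paths({"out1": ("hda-1", "/tmp/out1.dat")}))  # ['/tmp/out1.dat']
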
@@ -219,10 +216,10 @@ def get_input_dataset_fnames(self, ds: DatasetInstance) -> List[str]: return filenames def get_input_datasets( - self, materialized_objects: Optional[Dict[str, DeferrableObjectsT]] = None - ) -> List[DatasetInstance]: + self, materialized_objects: Optional[dict[str, DeferrableObjectsT]] = None + ) -> list[DatasetInstance]: job = self.job - datasets: List[DatasetInstance] = [] + datasets: list[DatasetInstance] = [] for da in job.input_datasets + job.input_library_datasets: if materialized_objects and da.name in materialized_objects: materialized_object = materialized_objects[da.name] @@ -232,13 +229,13 @@ def get_input_datasets( datasets.append(da.dataset) return datasets - def get_input_fnames(self) -> List[str]: + def get_input_fnames(self) -> list[str]: filenames = [] for ds in self.get_input_datasets(): filenames.extend(self.get_input_dataset_fnames(ds)) return filenames - def get_input_paths(self, materialized_objects: Optional[Dict[str, DeferrableObjectsT]]) -> List[DatasetPath]: + def get_input_paths(self, materialized_objects: Optional[dict[str, DeferrableObjectsT]]) -> list[DatasetPath]: paths = [] for ds in self.get_input_datasets(materialized_objects): paths.append(self.get_input_path(ds)) @@ -256,7 +253,7 @@ def get_input_path(self, dataset: DatasetInstance) -> DatasetPath: object_store_id=dataset.dataset.object_store_id, ) - def get_output_basenames(self) -> List[str]: + def get_output_basenames(self) -> list[str]: return [os.path.basename(str(fname)) for fname in self.get_output_fnames()] def get_output_fnames(self) -> OutputPaths: diff --git a/lib/galaxy/jobs/__init__.py b/lib/galaxy/jobs/__init__.py index 116b82738759..94915a1672da 100644 --- a/lib/galaxy/jobs/__init__.py +++ b/lib/galaxy/jobs/__init__.py @@ -15,6 +15,7 @@ import sys import time import traceback +from collections.abc import Iterable from dataclasses import ( dataclass, field, @@ -23,9 +24,6 @@ from typing import ( Any, Callable, - Dict, - Iterable, - List, Optional, TYPE_CHECKING, ) @@ -317,10 +315,10 @@ class JobConfigurationLimits: anonymous_user_concurrent_jobs: Optional[int] = None walltime: Optional[str] = None walltime_delta: Optional[datetime.timedelta] = None - total_walltime: Dict[str, Any] = field(default_factory=dict) + total_walltime: dict[str, Any] = field(default_factory=dict) output_size: Optional[int] = None - destination_user_concurrent_jobs: Dict[str, int] = field(default_factory=dict) - destination_total_concurrent_jobs: Dict[str, int] = field(default_factory=dict) + destination_user_concurrent_jobs: dict[str, int] = field(default_factory=dict) + destination_total_concurrent_jobs: dict[str, int] = field(default_factory=dict) class JobConfiguration(ConfiguresHandlers): @@ -329,14 +327,14 @@ class JobConfiguration(ConfiguresHandlers): These features are configured in the job configuration, by default, ``job_conf.yml`` """ - runner_plugins: List[dict] + runner_plugins: list[dict] handlers: dict - handler_runner_plugins: Dict[str, str] - tools: Dict[str, list] - tool_classes: Dict[str, list] - resource_groups: Dict[str, list] - destinations: Dict[str, tuple] - resource_parameters: Dict[str, Any] + handler_runner_plugins: dict[str, str] + tools: dict[str, list] + tool_classes: dict[str, list] + resource_groups: dict[str, list] + destinations: dict[str, tuple] + resource_parameters: dict[str, Any] DEFAULT_BASE_HANDLER_POOLS = ("job-handlers",) DEFAULT_NWORKERS = 4 @@ -356,7 +354,7 @@ def __init__(self, app: MinimalManagerApp): """Parse the job configuration XML.""" self.app 
= app self.runner_plugins = [] - self.dynamic_params: Optional[Dict[str, Any]] = None + self.dynamic_params: Optional[dict[str, Any]] = None self.handlers = {} self.handler_runner_plugins = {} self.default_handler_id = None @@ -864,7 +862,7 @@ def get_job_runner_plugins(self, handler_id: str): :returns: list of job runner plugins """ - rval: Dict[str, BaseJobRunner] = {} + rval: dict[str, BaseJobRunner] = {} if handler_id in self.handler_runner_plugins: plugins_to_load = [rp for rp in self.runner_plugins if rp["id"] in self.handler_runner_plugins[handler_id]] log.info( @@ -1028,9 +1026,9 @@ def __init__( self.app = app self.tool = tool self.sa_session = self.app.model.context - self.extra_filenames: List[str] = [] - self.environment_variables: List[Dict[str, str]] = [] - self.interactivetools: List[Dict[str, Any]] = [] + self.extra_filenames: list[str] = [] + self.environment_variables: list[dict[str, str]] = [] + self.interactivetools: list[dict[str, Any]] = [] self.command_line = None self.version_command_line = None self._dependency_shell_commands = None @@ -1851,7 +1849,7 @@ def _set_object_store_ids_full(self, job: Job): user = job.user object_store_id = self.get_destination_configuration("object_store_id", None) split_object_stores: Optional[Callable[[str], ObjectStorePopulator]] = None - object_store_id_overrides: Optional[Dict[str, Optional[str]]] = None + object_store_id_overrides: Optional[dict[str, Optional[str]]] = None if object_store_id is None: object_store_id = job.preferred_object_store_id diff --git a/lib/galaxy/jobs/dynamic_tool_destination.py b/lib/galaxy/jobs/dynamic_tool_destination.py index b771fa92a35e..1b9f8ab55cba 100755 --- a/lib/galaxy/jobs/dynamic_tool_destination.py +++ b/lib/galaxy/jobs/dynamic_tool_destination.py @@ -9,10 +9,7 @@ import sys from functools import reduce from typing import ( - Dict, - List, Optional, - Set, TYPE_CHECKING, Union, ) @@ -42,7 +39,7 @@ list of all valid priorities, inferred from the global default_desinations section of the config """ -priority_list: Set[str] = set() +priority_list: set[str] = set() """ Instantiated to a list of all valid destinations in the job configuration file @@ -50,7 +47,7 @@ to see if app is None, because if it is then we'll try using the destination_list instead. 
-""" -destination_list: Set[str] = set() +destination_list: set[str] = set() """ The largest the edit distance can be for a word to be considered @@ -81,7 +78,7 @@ class ScannerError(Exception): pass -def get_keys_from_dict(dl: Union[Dict, List], keys_list: List) -> None: +def get_keys_from_dict(dl: Union[dict, list], keys_list: list) -> None: """ This function builds a list using the keys from nest dictionaries """ @@ -1313,7 +1310,7 @@ def map_tool_to_destination( raise JobMappingException(e) # Get all inputs from tool and databases - inp_data: Dict[str, DatasetInstance] = {da.name: da.dataset for da in job.input_datasets} + inp_data: dict[str, DatasetInstance] = {da.name: da.dataset for da in job.input_datasets} inp_data.update([(da.name, da.dataset) for da in job.input_library_datasets]) if config is not None and str(tool.old_id) in config["tools"]: @@ -1503,7 +1500,7 @@ def map_tool_to_destination( # check if the args in the config file are available for arg in rule["arguments"]: arg_dict = {arg: rule["arguments"][arg]} - arg_keys_list: List = [] + arg_keys_list: list = [] get_keys_from_dict(arg_dict, arg_keys_list) try: options_value = reduce(dict.__getitem__, arg_keys_list, options) diff --git a/lib/galaxy/jobs/handler.py b/lib/galaxy/jobs/handler.py index 666547d352ef..020df8b2edeb 100644 --- a/lib/galaxy/jobs/handler.py +++ b/lib/galaxy/jobs/handler.py @@ -12,11 +12,7 @@ Queue, ) from typing import ( - Dict, - List, Optional, - Tuple, - Type, Union, ) @@ -111,7 +107,7 @@ def shutdown(self): class ItemGrabber: - grab_model: Union[Type[model.Job], Type[model.WorkflowInvocation]] + grab_model: Union[type[model.Job], type[model.WorkflowInvocation]] def __init__( self, @@ -248,7 +244,7 @@ def __init__(self, app: MinimalManagerApp, dispatcher: "DefaultJobDispatcher"): # Keep track of the pid that started the job manager, only it has valid threads self.parent_pid = os.getpid() # This queue is not used if track_jobs_in_database is True. - self.queue: Queue[Tuple[int, Optional[str]]] = Queue() + self.queue: Queue[tuple[int, Optional[str]]] = Queue() class JobHandlerQueue(BaseJobHandlerQueue): @@ -264,9 +260,9 @@ def __init__(self, app: MinimalManagerApp, dispatcher): # Initialize structures for handling job limits self.__clear_job_count() # Contains job ids for jobs that are waiting (only use from monitor thread) - self.waiting_jobs: List[int] = [] + self.waiting_jobs: list[int] = [] # Contains wrappers of jobs that are limited or ready (so they aren't created unnecessarily/multiple times) - self.job_wrappers: Dict[int, JobWrapper] = {} + self.job_wrappers: dict[int, JobWrapper] = {} name = "JobHandlerQueue.monitor_thread" self._init_monitor_thread(name, target=self.__monitor, config=app.config) self.job_grabber = None @@ -1104,7 +1100,7 @@ def __monitor_step(self): Called repeatedly by `monitor` to stop jobs. 
""" # Pull all new jobs from the queue at once - jobs_to_check: List[Tuple[model.Job, Optional[str]]] = [] + jobs_to_check: list[tuple[model.Job, Optional[str]]] = [] with self.sa_session.begin(): self._add_newly_deleted_jobs(jobs_to_check) try: @@ -1130,7 +1126,7 @@ def shutdown(self): self.shutdown_monitor() log.info("job handler stop queue stopped") - def _add_newly_deleted_jobs(self, jobs_to_check: List[Tuple[model.Job, Optional[str]]]): + def _add_newly_deleted_jobs(self, jobs_to_check: list[tuple[model.Job, Optional[str]]]): if self.track_jobs_in_database: newly_deleted_jobs = self._get_new_jobs() for job in newly_deleted_jobs: @@ -1146,7 +1142,7 @@ def _get_new_jobs(self): ) return self.sa_session.scalars(stmt).all() - def _pull_from_queue(self, jobs_to_check: List[Tuple[model.Job, Optional[str]]]): + def _pull_from_queue(self, jobs_to_check: list[tuple[model.Job, Optional[str]]]): # Pull jobs from the queue (in the case of Administrative stopped jobs) try: while 1: @@ -1160,7 +1156,7 @@ def _pull_from_queue(self, jobs_to_check: List[Tuple[model.Job, Optional[str]]]) except Empty: pass - def _check_jobs(self, jobs_to_check: List[Tuple[model.Job, Optional[str]]]): + def _check_jobs(self, jobs_to_check: list[tuple[model.Job, Optional[str]]]): for job, error_msg in jobs_to_check: if ( job.state diff --git a/lib/galaxy/jobs/runners/__init__.py b/lib/galaxy/jobs/runners/__init__.py index eca2dd756cd4..b1dcf0f66acc 100644 --- a/lib/galaxy/jobs/runners/__init__.py +++ b/lib/galaxy/jobs/runners/__init__.py @@ -17,7 +17,6 @@ ) from typing import ( Any, - Dict, Optional, TYPE_CHECKING, Union, @@ -267,7 +266,7 @@ def url_to_destination(self, url: str): """ return galaxy.jobs.JobDestination(runner=url.split(":")[0]) - def parse_destination_params(self, params: Dict[str, Any]): + def parse_destination_params(self, params: dict[str, Any]): """Parse the JobDestination ``params`` dict and return the runner's native representation of those params.""" raise NotImplementedError() diff --git a/lib/galaxy/jobs/runners/aws.py b/lib/galaxy/jobs/runners/aws.py index 5205bb517f83..d457c96c3486 100644 --- a/lib/galaxy/jobs/runners/aws.py +++ b/lib/galaxy/jobs/runners/aws.py @@ -9,7 +9,6 @@ import time from queue import Empty from typing import ( - Set, TYPE_CHECKING, ) @@ -462,11 +461,11 @@ def monitor(self): time.sleep(max(self.app.config.job_runner_monitor_sleep, self.MIN_QUERY_INTERVAL)) def check_watched_items(self): - done: Set[str] = set() + done: set[str] = set() self.check_watched_items_by_batch(0, len(self.watched), done) self.watched = [x for x in self.watched if x[0] not in done] - def check_watched_items_by_batch(self, start: int, end: int, done: Set[str]): + def check_watched_items_by_batch(self, start: int, end: int, done: set[str]): jobs = self.watched[start : start + self.MAX_JOBS_PER_QUERY] if not jobs: return diff --git a/lib/galaxy/jobs/runners/local.py b/lib/galaxy/jobs/runners/local.py index 1b05e594d788..4686eaf49437 100644 --- a/lib/galaxy/jobs/runners/local.py +++ b/lib/galaxy/jobs/runners/local.py @@ -10,7 +10,6 @@ import threading from time import sleep from typing import ( - Tuple, TYPE_CHECKING, ) @@ -57,7 +56,7 @@ def __init__(self, app, nworkers): super().__init__(app, nworkers) - def _command_line(self, job_wrapper: "MinimalJobWrapper") -> Tuple[str, str]: + def _command_line(self, job_wrapper: "MinimalJobWrapper") -> tuple[str, str]: """ """ command_line = job_wrapper.runner_command_line diff --git a/lib/galaxy/jobs/runners/pulsar.py b/lib/galaxy/jobs/runners/pulsar.py index 
4dbe3632a2bd..ed33154569c0 100644 --- a/lib/galaxy/jobs/runners/pulsar.py +++ b/lib/galaxy/jobs/runners/pulsar.py @@ -12,7 +12,6 @@ from time import sleep from typing import ( Any, - Dict, Optional, ) @@ -931,7 +930,7 @@ def __build_metadata_configuration( remote_job_config, compute_environment: Optional["PulsarComputeEnvironment"] = None, ): - metadata_kwds: Dict[str, Any] = {} + metadata_kwds: dict[str, Any] = {} if remote_metadata: working_directory = remote_job_config["working_directory"] metadata_directory = remote_job_config["metadata_directory"] @@ -1061,7 +1060,7 @@ def _populate_parameter_defaults(self, job_destination): pulsar_app_config["staging_directory"] = params.get("jobs_directory") -KUBERNETES_DESTINATION_DEFAULTS: Dict[str, Any] = {"k8s_enabled": True, **COEXECUTION_DESTINATION_DEFAULTS} +KUBERNETES_DESTINATION_DEFAULTS: dict[str, Any] = {"k8s_enabled": True, **COEXECUTION_DESTINATION_DEFAULTS} class PulsarKubernetesJobRunner(PulsarCoexecutionJobRunner): @@ -1069,7 +1068,7 @@ class PulsarKubernetesJobRunner(PulsarCoexecutionJobRunner): poll = True # Poll so we can check API for pod IP for ITs. -TES_DESTINATION_DEFAULTS: Dict[str, Any] = { +TES_DESTINATION_DEFAULTS: dict[str, Any] = { "tes_url": PARAMETER_SPECIFICATION_REQUIRED, **COEXECUTION_DESTINATION_DEFAULTS, } diff --git a/lib/galaxy/jobs/runners/util/cli/job/__init__.py b/lib/galaxy/jobs/runners/util/cli/job/__init__.py index e444e1f84657..cdf296f490b8 100644 --- a/lib/galaxy/jobs/runners/util/cli/job/__init__.py +++ b/lib/galaxy/jobs/runners/util/cli/job/__init__.py @@ -7,10 +7,6 @@ abstractmethod, ) from enum import Enum -from typing import ( - Dict, - List, -) from typing_extensions import TypeAlias @@ -67,7 +63,7 @@ def get_single_status(self, job_id): """ @abstractmethod - def parse_status(self, status: str, job_ids: List[str]) -> Dict[str, job_states]: + def parse_status(self, status: str, job_ids: list[str]) -> dict[str, job_states]: """ Parse the statuses of output from get_status command. 
""" diff --git a/lib/galaxy/jobs/runners/util/job_script/__init__.py b/lib/galaxy/jobs/runners/util/job_script/__init__.py index e212a1ab2a8d..9ae3f49cd0c7 100644 --- a/lib/galaxy/jobs/runners/util/job_script/__init__.py +++ b/lib/galaxy/jobs/runners/util/job_script/__init__.py @@ -6,7 +6,6 @@ from string import Template from typing import ( Any, - Dict, Optional, ) @@ -58,7 +57,7 @@ DEFAULT_INTEGRITY_COUNT = 35 DEFAULT_INTEGRITY_SLEEP = 0.25 REQUIRED_TEMPLATE_PARAMS = ["working_directory", "command"] -OPTIONAL_TEMPLATE_PARAMS: Dict[str, Any] = { +OPTIONAL_TEMPLATE_PARAMS: dict[str, Any] = { "galaxy_lib": None, "galaxy_virtual_env": None, "headers": "", diff --git a/lib/galaxy/main_config.py b/lib/galaxy/main_config.py index 1db053323223..23ba644da47b 100644 --- a/lib/galaxy/main_config.py +++ b/lib/galaxy/main_config.py @@ -5,7 +5,6 @@ import os from typing import ( - List, NamedTuple, Optional, ) @@ -17,7 +16,7 @@ DEFAULT_CONFIG_SECTION = "galaxy" -def default_relative_config_paths_for(app_name: str) -> List[str]: +def default_relative_config_paths_for(app_name: str) -> list[str]: paths = [f"config/{app_name}.yml", f"config/{app_name}.ini", "universe_wsgi.ini"] # Do not load sample config for galaxy if app_name != "galaxy": diff --git a/lib/galaxy/managers/_config_templates.py b/lib/galaxy/managers/_config_templates.py index 22d798a22076..5b5dda62af0c 100644 --- a/lib/galaxy/managers/_config_templates.py +++ b/lib/galaxy/managers/_config_templates.py @@ -3,10 +3,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Type, TypeVar, Union, ) @@ -60,8 +57,8 @@ log = logging.getLogger(__name__) -SuppliedVariables = Dict[str, TemplateVariableValueType] -SuppliedSecrets = Dict[str, str] +SuppliedVariables = dict[str, TemplateVariableValueType] +SuppliedSecrets = dict[str, str] class CreateInstancePayload(BaseModel): @@ -123,9 +120,9 @@ def __init__(self, instance: HasConfigTemplate, payload: TestUpdateInstancePaylo class CreateTestTarget: payload: CreateInstancePayload - instance_class: Type[HasConfigSecrets] + instance_class: type[HasConfigSecrets] - def __init__(self, payload: CreateInstancePayload, instance_class: Type[HasConfigSecrets]): + def __init__(self, payload: CreateInstancePayload, instance_class: type[HasConfigSecrets]): self.payload = payload self.instance_class = instance_class @@ -161,7 +158,7 @@ class TemplateParameters(TypedDict): secrets: SuppliedSecrets variables: SuppliedVariables environment: EnvironmentDict - user_details: Dict[str, Any] + user_details: dict[str, Any] implicit: Optional[ImplicitConfigurationParameters] @@ -269,7 +266,7 @@ def prepare_environment( def prepare_environment_from_root( - root: Optional[List[TemplateEnvironmentEntry]], vault: Vault, app_config: UsesTemplatesAppConfig + root: Optional[list[TemplateEnvironmentEntry]], vault: Vault, app_config: UsesTemplatesAppConfig ) -> EnvironmentDict: environment: EnvironmentDict = {} for environment_entry in root or []: @@ -363,8 +360,7 @@ def update_instance_secret( app_config: UsesTemplatesAppConfig, ): template_secrets = secrets_as_dict(template.secrets or []) - secret_name = payload.secret_name - if secret_name not in template_secrets: + if (secret_name := payload.secret_name) not in template_secrets: raise RequestParameterInvalidException(f"Configuration template does not specify a secret named {secret_name}") user_vault = trans.user_vault @@ -401,7 +397,7 @@ def upgrade_secrets( if secret_name not in recorded_secrets: recorded_secrets.append(secret_name) - secrets_to_delete: List[str] = [] + 
secrets_to_delete: list[str] = [] for recorded_secret in recorded_secrets: if recorded_secret not in upgraded_template_secrets: key = template_instance.vault_key(recorded_secret, app_config) @@ -424,7 +420,7 @@ def save_template_instance(sa_session: galaxy_scoped_session, template_instance: T = TypeVar("T", bound=Template, covariant=True) -def sort_templates(config, catalog: List[T], instance: HasConfigTemplate) -> List[T]: +def sort_templates(config, catalog: list[T], instance: HasConfigTemplate) -> list[T]: configured_template: Optional[T] = None try: configured_template = find_template_by( diff --git a/lib/galaxy/managers/annotatable.py b/lib/galaxy/managers/annotatable.py index e8b62b5d600e..487855dba836 100644 --- a/lib/galaxy/managers/annotatable.py +++ b/lib/galaxy/managers/annotatable.py @@ -5,7 +5,6 @@ import abc import logging from typing import ( - Dict, Optional, ) @@ -78,7 +77,7 @@ def _delete_annotation(self, item, user, flush=True): class AnnotatableSerializerMixin: - serializers: Dict[str, Serializer] + serializers: dict[str, Serializer] def add_serializers(self): self.serializers["annotation"] = self.serialize_annotation @@ -92,7 +91,7 @@ def serialize_annotation(self, item, key, user=None, **context): class AnnotatableDeserializerMixin: - deserializers: Dict[str, Deserializer] + deserializers: dict[str, Deserializer] def add_deserializers(self): self.deserializers["annotation"] = self.deserialize_annotation diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py index 2a798338e28e..99f995af03f2 100644 --- a/lib/galaxy/managers/base.py +++ b/lib/galaxy/managers/base.py @@ -26,6 +26,7 @@ # such as: a single flat class, serializers being singletons in the manager, etc. # instead of the three separate classes. With no 'apparent' perfect scheme # I'm opting to just keep them separate. +import builtins import datetime import logging import re @@ -33,14 +34,9 @@ from typing import ( Any, Callable, - Dict, Generic, - List, NamedTuple, Optional, - Set, - Tuple, - Type, TypeVar, Union, ) @@ -79,10 +75,10 @@ class ParsedFilter(NamedTuple): parsed_filter = ParsedFilter -OrmFilterParserType = Union[None, Dict[str, Any], Callable] -OrmFilterParsersType = Dict[str, OrmFilterParserType] -FunctionFilterParserType = Dict[str, Any] -FunctionFilterParsersType = Dict[str, Any] +OrmFilterParserType = Union[None, dict[str, Any], Callable] +OrmFilterParsersType = dict[str, OrmFilterParserType] +FunctionFilterParserType = dict[str, Any] +FunctionFilterParsersType = dict[str, Any] # ==== accessors from base/controller.py @@ -204,7 +200,7 @@ class ModelManager(Generic[U]): over the ORM. """ - model_class: Type[U] + model_class: type[U] foreign_key_name: str app: BasicSharedApp @@ -402,7 +398,7 @@ def _split_filters(self, filters): orm_filters.append(filter_.filter) return (orm_filters, fn_filters) - def _orm_list(self, query: Optional[Query] = None, **kwargs) -> List[U]: + def _orm_list(self, query: Optional[Query] = None, **kwargs) -> builtins.list[U]: """ Sends kwargs to build the query return all models found. """ @@ -498,7 +494,7 @@ def copy(self, item, **kwargs) -> U: """ raise exceptions.NotImplemented("Abstract method") - def update(self, item: U, new_values: Dict[str, Any], flush: bool = True, **kwargs) -> U: + def update(self, item: U, new_values: dict[str, Any], flush: bool = True, **kwargs) -> U: """ Given a dictionary of new values, update `item` and return it. 
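Note: the `import builtins` added to base.py above is the one non-mechanical edge case of this migration. Assuming ModelManager also defines a method named `list` (which the `_orm_list` helper suggests), a bare `list[U]` annotation evaluated later in the class body would subscript that method instead of the builtin. A sketch of the failure mode and the fix:

    import builtins
    from typing import Generic, TypeVar

    U = TypeVar("U")

    class Manager(Generic[U]):
        def list(self, **kwargs) -> builtins.list[U]:
            return self._orm_list(**kwargs)

        def _orm_list(self, **kwargs) -> builtins.list[U]:
            # Bare `list[U]` here would resolve to the `list` method already bound
            # in the class namespace and raise "'function' object is not subscriptable".
            return []

    print(Manager[int]().list())  # []
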
@@ -550,7 +546,7 @@ class HasAModelManager(Generic[T]): """ #: the class used to create this serializer's generically accessible model_manager - model_manager_class: Type[ + model_manager_class: type[ T ] # ideally this would be Type[ModelManager] but HistoryContentsManager cannot be a ModelManager # examples where this doesn't really work are ConfigurationSerializer (no manager) @@ -620,7 +616,7 @@ class ModelSerializer(HasAModelManager[T]): """ default_view: Optional[str] - views: Dict[str, List[str]] + views: dict[str, list[str]] def __init__(self, app: MinimalManagerApp, **kwargs): """ @@ -632,9 +628,9 @@ def __init__(self, app: MinimalManagerApp, **kwargs): # this allows us to: 'mention' the key without adding the default serializer # TODO: we may want to eventually error if a key is requested # that is in neither serializable_keyset or serializers - self.serializable_keyset: Set[str] = set() + self.serializable_keyset: set[str] = set() # a map of dictionary keys to the functions (often lambdas) that create the values for those keys - self.serializers: Dict[str, Serializer] = {} + self.serializers: dict[str, Serializer] = {} # add subclass serializers defined there self.add_serializers() # update the keyset by the serializers (removing the responsibility from subclasses) @@ -803,7 +799,7 @@ class ModelValidator: """ @staticmethod - def matches_type(key: str, val: Any, types: Union[type, Tuple[Union[type, Tuple[Any, ...]], ...]]): + def matches_type(key: str, val: Any, types: Union[type, tuple[Union[type, tuple[Any, ...]], ...]]): """ Check `val` against the type (or tuple of types) in `types`. @@ -843,7 +839,7 @@ def int_range(key: str, val: Any, min: Optional[int] = None, max: Optional[int] return val_ @staticmethod - def basestring_list(key: str, val: Any) -> List[str]: + def basestring_list(key: str, val: Any) -> list[str]: """ Must be a list of basestrings. """ @@ -899,8 +895,8 @@ def __init__(self, app: MinimalManagerApp, **kwargs): """ super().__init__(app, **kwargs) - self.deserializers: Dict[str, Deserializer] = {} - self.deserializable_keyset: Set[str] = set() + self.deserializers: dict[str, Deserializer] = {} + self.deserializable_keyset: set[str] = set() self.add_deserializers() def add_deserializers(self): @@ -994,7 +990,7 @@ class ModelFilterParser(HasAModelManager): # (as the model informs how the filter params are parsed) # I have no great idea where this 'belongs', so it's here for now - model_class: Type[model._HasTable] + model_class: type[model._HasTable] parsed_filter = parsed_filter orm_filter_parsers: OrmFilterParsersType fn_filter_parsers: FunctionFilterParsersType @@ -1044,7 +1040,7 @@ def build_filter_params( filter_attr_key: str = "q", filter_value_key: str = "qv", attr_op_split_char: str = "-", - ) -> List[Tuple[str, str, str]]: + ) -> list[tuple[str, str, str]]: """ Builds a list of tuples containing filtering information in the form of (attribute, operator, value). 
""" @@ -1266,7 +1262,7 @@ def parse_date(self, date_string): return date_string raise ValueError("datetime strings must be in the ISO 8601 format and in the UTC") - def contains_non_orm_filter(self, filters: List[ParsedFilter]) -> bool: + def contains_non_orm_filter(self, filters: list[ParsedFilter]) -> bool: """Whether the list of filters contains any non-orm filter.""" return any(filter.filter_type == "function" for filter in filters) @@ -1303,7 +1299,7 @@ class StorageCleanerManager(Protocol): # TODO: refactor this interface to be more generic and allow for more types of cleanable items - sort_map: Dict[StoredItemOrderBy, Any] + sort_map: dict[StoredItemOrderBy, Any] def get_discarded_summary(self, user: model.User) -> CleanableItemsSummary: """Returns information with the total storage space taken by discarded items for the given user. @@ -1318,7 +1314,7 @@ def get_discarded( offset: Optional[int], limit: Optional[int], order: Optional[StoredItemOrderBy], - ) -> List[StoredItem]: + ) -> list[StoredItem]: """Returns a paginated list of items deleted by the given user that are not yet purged.""" raise NotImplementedError @@ -1336,16 +1332,16 @@ def get_archived( offset: Optional[int], limit: Optional[int], order: Optional[StoredItemOrderBy], - ) -> List[StoredItem]: + ) -> list[StoredItem]: """Returns a paginated list of items archived by the given user that are not yet purged.""" raise NotImplementedError - def cleanup_items(self, user: model.User, item_ids: Set[int]) -> StorageItemsCleanupResult: + def cleanup_items(self, user: model.User, item_ids: set[int]) -> StorageItemsCleanupResult: """Purges the given list of items by ID. The items must be owned by the user.""" raise NotImplementedError -def combine_lists(listA: Any, listB: Any) -> List: +def combine_lists(listA: Any, listB: Any) -> list: """ Combine two lists into a single list. diff --git a/lib/galaxy/managers/citations.py b/lib/galaxy/managers/citations.py index e856232bbd02..baa2c47083b2 100644 --- a/lib/galaxy/managers/citations.py +++ b/lib/galaxy/managers/citations.py @@ -1,9 +1,7 @@ import functools import logging from typing import ( - Dict, Optional, - Type, Union, ) @@ -175,7 +173,7 @@ def to_bibtex(self) -> str: return str(self.raw_bibtex) -CITATION_CLASSES: Dict[str, Type[CitationT]] = dict( +CITATION_CLASSES: dict[str, type[CitationT]] = dict( bibtex=BibtexCitation, doi=DoiCitation, ) diff --git a/lib/galaxy/managers/collections.py b/lib/galaxy/managers/collections.py index e917ce69b0e7..8a62b608b585 100644 --- a/lib/galaxy/managers/collections.py +++ b/lib/galaxy/managers/collections.py @@ -1,8 +1,6 @@ import logging from typing import ( Any, - Dict, - List, Optional, overload, TYPE_CHECKING, @@ -189,7 +187,7 @@ def create( flush=True, completed_job=None, output_name=None, - fields: Optional[Union[str, List["FieldDict"]]] = None, + fields: Optional[Union[str, list["FieldDict"]]] = None, ) -> "DatasetCollectionInstance": """ PRECONDITION: security checks on ability to add to parent @@ -305,7 +303,7 @@ def create_dataset_collection( hide_source_items: bool = False, copy_elements: bool = False, history=None, - fields: Optional[Union[str, List["FieldDict"]]] = None, + fields: Optional[Union[str, list["FieldDict"]]] = None, ) -> DatasetCollection: # Make sure at least one of these is None. 
assert element_identifiers is None or elements is None @@ -393,7 +391,7 @@ def _element_identifiers_to_elements( hide_source_items: bool = False, copy_elements: bool = False, history=None, - ) -> Dict[str, HDCAElementObjectType]: + ) -> dict[str, HDCAElementObjectType]: if collection_type_description.has_subcollections(): # Nested collection - recursively create collections and update identifiers. self.__recursively_create_collections_for_identifiers( @@ -480,7 +478,7 @@ def copy( source: Literal[HistoryContentSource.hdca], encoded_source_id, copy_elements: bool = False, - dataset_instance_attributes: Optional[Dict[str, Any]] = None, + dataset_instance_attributes: Optional[dict[str, Any]] = None, ): """ PRECONDITION: security checks on ability to add to parent occurred @@ -618,8 +616,8 @@ def __recursively_create_collections_for_elements( def __load_elements( self, trans, element_identifiers, hide_source_items: bool = False, copy_elements: bool = False, history=None - ) -> Dict[str, HDCAElementObjectType]: - elements: Dict[str, HDCAElementObjectType] = {} + ) -> dict[str, HDCAElementObjectType]: + elements: dict[str, HDCAElementObjectType] = {} for element_identifier in element_identifiers: elements[element_identifier["name"]] = self.__load_element( trans, @@ -737,7 +735,7 @@ def apply_rules(self, hdca, rule_set, handle_dataset): def _build_elements_from_rule_data(self, collection_type_description, rule_set, data, sources, handle_dataset): identifier_columns = rule_set.identifier_columns mapping_as_dict = rule_set.mapping_as_dict - elements: Dict[str, Any] = {} + elements: dict[str, Any] = {} for data_index, row_data in enumerate(data): # For each row, find place in depth for this element. collection_type_at_depth = collection_type_description @@ -787,7 +785,7 @@ def _build_elements_from_rule_data(self, collection_type_description, rule_set, if not found: # Create a new collection whose elements are defined in the next loop - sub_collection: Dict[str, Any] = {} + sub_collection: dict[str, Any] = {} sub_collection["src"] = "new_collection" sub_collection["collection_type"] = collection_type_at_depth.collection_type sub_collection["elements"] = {} @@ -816,8 +814,8 @@ def update_unpaired_identifiers(elements): def __init_rule_data(self, elements, collection_type_description, parent_identifiers=None, parent_indices=None): parent_identifiers = parent_identifiers or [] parent_indices = parent_indices or [] - data: List[List[str]] = [] - sources: List[Dict[str, str]] = [] + data: list[list[str]] = [] + sources: list[dict[str, str]] = [] for i, element in enumerate(elements): indices = parent_indices.copy() indices.append(i) diff --git a/lib/galaxy/managers/collections_util.py b/lib/galaxy/managers/collections_util.py index 7f129992c754..d766cbcb311d 100644 --- a/lib/galaxy/managers/collections_util.py +++ b/lib/galaxy/managers/collections_util.py @@ -2,7 +2,6 @@ import math from typing import ( Any, - Dict, ) from galaxy import ( @@ -168,7 +167,7 @@ def dictify_element_reference( """ dictified = element.to_dict(view="element") if (element_object := element.element_object) is not None: - object_details: Dict[str, Any] = dict( + object_details: dict[str, Any] = dict( id=element_object.id, model_class=element_object.__class__.__name__, ) diff --git a/lib/galaxy/managers/configuration.py b/lib/galaxy/managers/configuration.py index 3ce983f6d83f..ac9e981ad6a1 100644 --- a/lib/galaxy/managers/configuration.py +++ b/lib/galaxy/managers/configuration.py @@ -9,8 +9,6 @@ import sys from typing import ( 
Any, - Dict, - List, ) from galaxy.managers import base @@ -30,14 +28,14 @@ def __init__(self, app: StructuredApp): def get_configuration( self, trans: ProvidesUserContext, serialization_params: SerializationParams - ) -> Dict[str, Any]: + ) -> dict[str, Any]: is_admin = trans.user_is_admin host = getattr(trans, "host", None) serializer_class = AdminConfigSerializer if is_admin else ConfigSerializer serializer = serializer_class(self._app) return serializer.serialize_to_view(self._app.config, host=host, **serialization_params.model_dump()) - def version(self) -> Dict[str, Any]: + def version(self) -> dict[str, Any]: version_info = { "version_major": self._app.config.version_major, "version_minor": self._app.config.version_minor, @@ -49,7 +47,7 @@ def version(self) -> Dict[str, Any]: def decode_id( self, encoded_id: str, - ) -> Dict[str, int]: + ) -> dict[str, int]: # Handle the special case for library folders if (len(encoded_id) % 16 == 1) and encoded_id.startswith("F"): encoded_id = encoded_id[1:] @@ -59,11 +57,11 @@ def decode_id( def encode_id( self, decoded_id: int, - ) -> Dict[str, str]: + ) -> dict[str, str]: encoded_id = self._app.security.encode_id(decoded_id) return {"encoded_id": encoded_id} - def tool_lineages(self) -> List[Dict[str, Dict]]: + def tool_lineages(self) -> list[dict[str, dict]]: rval = [] for id, tool in self._app.toolbox.tools(): try: @@ -75,7 +73,7 @@ def tool_lineages(self) -> List[Dict[str, Dict]]: rval.append(entry) return rval - def dynamic_tool_confs(self) -> List[Dict[str, str]]: + def dynamic_tool_confs(self) -> list[dict[str, str]]: # WARNING: If this method is ever changed so as not to require admin privileges, update the nginx proxy # documentation, since this path is used as an authentication-by-proxy method for securing other paths on the # server. A dedicated endpoint should probably be added to do that instead. 
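Note: besides the typing rewrites, this patch also carries auto-walrus output, e.g. the `(secret_name := payload.secret_name)` collapse in _config_templates.py above. The tool folds an assignment that is immediately tested into a single assignment expression; a tiny self-contained sketch with made-up data:

    template_secrets = {"api_key": "stored"}  # hypothetical contents
    payload_secret_name = "token"

    # before: secret_name = payload_secret_name
    #         if secret_name not in template_secrets: ...
    if (secret_name := payload_secret_name) not in template_secrets:
        print(f"Configuration template does not specify a secret named {secret_name}")
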
@@ -120,7 +118,7 @@ def _config_is_truthy(item, key, **context): return True if item.get(key) else False object_store = self.app.object_store - self.serializers: Dict[str, base.Serializer] = { + self.serializers: dict[str, base.Serializer] = { # TODO: this is available from user data, remove "is_admin_user": lambda *a, **c: False, "brand": _use_config, @@ -258,5 +256,5 @@ def _defaults_to(default): } ) - def _serialize_tool_shed_urls(self, item: Any, key: str, **context) -> List[str]: + def _serialize_tool_shed_urls(self, item: Any, key: str, **context) -> list[str]: return list(self.app.tool_shed_registry.tool_sheds.values()) if self.app.tool_shed_registry else [] diff --git a/lib/galaxy/managers/context.py b/lib/galaxy/managers/context.py index fb89d1f82508..c6ec8eff2479 100644 --- a/lib/galaxy/managers/context.py +++ b/lib/galaxy/managers/context.py @@ -42,11 +42,8 @@ Any, Callable, cast, - Dict, - List, Literal, Optional, - Tuple, ) from sqlalchemy import select @@ -207,12 +204,12 @@ class ProvidesUserContext(ProvidesAppContext): workflow_building_mode: Literal[1, True, False] = False galaxy_session: Optional[GalaxySession] = None _tag_handler: Optional[GalaxyTagHandlerSession] = None - _short_term_cache: Dict[Tuple[str, ...], Any] + _short_term_cache: dict[tuple[str, ...], Any] - def set_cache_value(self, args: Tuple[str, ...], value: Any): + def set_cache_value(self, args: tuple[str, ...], value: Any): self._short_term_cache[args] = value - def get_cache_value(self, args: Tuple[str, ...], default: Any = None) -> Any: + def get_cache_value(self, args: tuple[str, ...], default: Any = None) -> Any: return self._short_term_cache.get(args, default) @property @@ -245,7 +242,7 @@ def get_user(self) -> Optional[User]: def anonymous(self) -> bool: return self.user is None - def get_current_user_roles(self) -> List[Role]: + def get_current_user_roles(self) -> list[Role]: if user := self.user: roles = user.all_roles() else: diff --git a/lib/galaxy/managers/datasets.py b/lib/galaxy/managers/datasets.py index 35a1414c8f35..c508487595d8 100644 --- a/lib/galaxy/managers/datasets.py +++ b/lib/galaxy/managers/datasets.py @@ -7,11 +7,7 @@ import os from typing import ( Any, - Dict, - List, Optional, - Set, - Type, TypeVar, ) @@ -270,7 +266,7 @@ def __init__(self, app: MinimalManagerApp, user_manager: users.UserManager): def add_serializers(self): super().add_serializers() deletable.PurgableSerializerMixin.add_serializers(self) - serializers: Dict[str, base.Serializer] = { + serializers: dict[str, base.Serializer] = { "create_time": self.serialize_date, "update_time": self.serialize_date, "uuid": lambda item, key, **context: str(item.uuid) if item.uuid else None, @@ -459,7 +455,7 @@ def serialize_dataset_association_roles(self, dataset_assoc: U): access_roles = set(dataset.get_access_roles(self.app.security_agent)) manage_roles = set(dataset.get_manage_permissions_roles(self.app.security_agent)) - def make_tuples(roles: Set): + def make_tuples(roles: set): tuples = [] for role in roles: # use role name for non-private roles, and user.email from private rules @@ -633,7 +629,7 @@ def add_serializers(self): super().add_serializers() deletable.PurgableSerializerMixin.add_serializers(self) - serializers: Dict[str, base.Serializer] = { + serializers: dict[str, base.Serializer] = { "create_time": self.serialize_date, "update_time": self.serialize_date, # underlying dataset @@ -927,7 +923,7 @@ def isinstance_datatype(self, dataset_assoc, class_strs): datatypes in the comma separated string `class_strs`? 
""" parse_datatype_fn = self.app.datatypes_registry.get_datatype_class_by_name - comparison_classes: List[Type] = [] + comparison_classes: list[type] = [] for class_str in class_strs.split(","): datatype_class = parse_datatype_fn(class_str) if datatype_class: diff --git a/lib/galaxy/managers/datatypes.py b/lib/galaxy/managers/datatypes.py index 21f0bfca47ea..77aa9310f53a 100644 --- a/lib/galaxy/managers/datatypes.py +++ b/lib/galaxy/managers/datatypes.py @@ -1,6 +1,4 @@ from typing import ( - Dict, - List, Optional, Union, ) @@ -22,7 +20,7 @@ def view_index( datatypes_registry: Registry, extension_only: Optional[bool] = True, upload_only: Optional[bool] = True -) -> Union[List[DatatypeDetails], List[str]]: +) -> Union[list[DatatypeDetails], list[str]]: if extension_only: if upload_only: return datatypes_registry.upload_file_formats @@ -38,13 +36,13 @@ def view_index( def view_mapping(datatypes_registry: Registry) -> DatatypesMap: - ext_to_class_name: Dict[str, str] = {} + ext_to_class_name: dict[str, str] = {} classes = [] for k, v in datatypes_registry.datatypes_by_extension.items(): c = v.__class__ ext_to_class_name[k] = f"{c.__module__}.{c.__name__}" classes.append(c) - class_to_classes: Dict[str, Dict[str, bool]] = {} + class_to_classes: dict[str, dict[str, bool]] = {} def visit_bases(types, cls): for base in cls.__bases__: @@ -69,8 +67,8 @@ def view_types_and_mapping( ) -def view_sniffers(datatypes_registry: Registry) -> List[str]: - rval: List[str] = [] +def view_sniffers(datatypes_registry: Registry) -> list[str]: + rval: list[str] = [] for sniffer_elem in datatypes_registry.sniffer_elems: datatype = sniffer_elem.get("type") if datatype is not None: @@ -92,7 +90,7 @@ def view_converters(datatypes_registry: Registry) -> DatatypeConverterList: return parse_obj_as(DatatypeConverterList, converters) -def _get_edam_details(datatypes_registry: Registry, edam_ids: Dict[str, str]) -> Dict[str, Dict]: +def _get_edam_details(datatypes_registry: Registry, edam_ids: dict[str, str]) -> dict[str, dict]: details_dict = {} for format, edam_iri in edam_ids.items(): edam_details = datatypes_registry.edam.get(edam_iri, {}) @@ -108,7 +106,7 @@ def _get_edam_details(datatypes_registry: Registry, edam_ids: Dict[str, str]) -> def view_edam_formats( datatypes_registry: Registry, detailed: Optional[bool] = False -) -> Union[Dict[str, str], Dict[str, Dict[str, str]]]: +) -> Union[dict[str, str], dict[str, dict[str, str]]]: if detailed: return _get_edam_details(datatypes_registry, datatypes_registry.edam_formats) else: @@ -117,7 +115,7 @@ def view_edam_formats( def view_edam_data( datatypes_registry: Registry, detailed: Optional[bool] = False -) -> Union[Dict[str, str], Dict[str, Dict[str, str]]]: +) -> Union[dict[str, str], dict[str, dict[str, str]]]: if detailed: return _get_edam_details(datatypes_registry, datatypes_registry.edam_data) else: @@ -163,7 +161,7 @@ def view_visualization_mappings( return parse_obj_as(DatatypeVisualizationMappingsList, mappings) -def get_preferred_visualization(datatypes_registry: Registry, datatype_extension: str) -> Optional[Dict[str, str]]: +def get_preferred_visualization(datatypes_registry: Registry, datatype_extension: str) -> Optional[dict[str, str]]: """ Get the preferred visualization mapping for a specific datatype extension. Returns a dictionary with 'visualization' and 'default_params' keys, or None if no mapping exists. 
diff --git a/lib/galaxy/managers/dbkeys.py b/lib/galaxy/managers/dbkeys.py index 99168133a69e..3315dc050c04 100644 --- a/lib/galaxy/managers/dbkeys.py +++ b/lib/galaxy/managers/dbkeys.py @@ -7,10 +7,7 @@ import re from json import loads from typing import ( - Dict, - List, Optional, - Tuple, ) from sqlalchemy import select @@ -26,11 +23,11 @@ log = logging.getLogger(__name__) -def read_dbnames(filename: Optional[str]) -> List[Tuple[str, str]]: +def read_dbnames(filename: Optional[str]) -> list[tuple[str, str]]: """Read build names from file""" - db_names: List[Tuple[str, str]] = [] + db_names: list[tuple[str, str]] = [] try: - ucsc_builds: Dict[str, List[Tuple[int, str, str]]] = {} + ucsc_builds: dict[str, list[tuple[int, str, str]]] = {} man_builds = [] # assume these are integers name_to_db_base = {} if filename is None: diff --git a/lib/galaxy/managers/deletable.py b/lib/galaxy/managers/deletable.py index 113cfa22dc99..fae0f518bf96 100644 --- a/lib/galaxy/managers/deletable.py +++ b/lib/galaxy/managers/deletable.py @@ -11,8 +11,6 @@ from typing import ( Any, - Dict, - Set, ) from galaxy.model import Base @@ -49,7 +47,7 @@ def undelete(self, item, flush=True, **kwargs): class DeletableSerializerMixin: - serializable_keyset: Set[str] + serializable_keyset: set[str] def add_serializers(self): self.serializable_keyset.add("deleted") @@ -57,7 +55,7 @@ def add_serializers(self): # TODO: these are of questionable value if we don't want to enable users to delete/purge via update class DeletableDeserializerMixin: - deserializers: Dict[str, Deserializer] + deserializers: dict[str, Deserializer] def add_deserializers(self): self.deserializers["deleted"] = self.deserialize_deleted @@ -104,7 +102,7 @@ def purge(self, item, flush=True, **kwargs): class PurgableSerializerMixin(DeletableSerializerMixin): - serializable_keyset: Set[str] + serializable_keyset: set[str] def add_serializers(self): DeletableSerializerMixin.add_serializers(self) @@ -112,7 +110,7 @@ def add_serializers(self): class PurgableDeserializerMixin(DeletableDeserializerMixin): - deserializers: Dict[str, Deserializer] = {} + deserializers: dict[str, Deserializer] = {} def add_deserializers(self): DeletableDeserializerMixin.add_deserializers(self) diff --git a/lib/galaxy/managers/display_applications.py b/lib/galaxy/managers/display_applications.py index 3c8ceb9b122f..672f3a33390a 100644 --- a/lib/galaxy/managers/display_applications.py +++ b/lib/galaxy/managers/display_applications.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -21,13 +20,13 @@ class DisplayApplication(BaseModel): name: str version: str filename_: str - links: List[Link] + links: list[Link] class ReloadFeedback(BaseModel): message: str - reloaded: List[Optional[str]] - failed: List[Optional[str]] + reloaded: list[Optional[str]] + failed: list[Optional[str]] class DisplayApplicationsManager: @@ -40,7 +39,7 @@ def __init__(self, app: StructuredApp): def datatypes_registry(self) -> Registry: return self._app.datatypes_registry - def index(self) -> List[DisplayApplication]: + def index(self) -> list[DisplayApplication]: """ Returns the list of display applications. @@ -60,7 +59,7 @@ def index(self) -> List[DisplayApplication]: ) return rval - def reload(self, ids: List[str]) -> ReloadFeedback: + def reload(self, ids: list[str]) -> ReloadFeedback: """ Reloads the list of display applications. 
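Note: the DisplayApplication and ReloadFeedback models above show that Pydantic accepts PEP 585 builtin generics in field annotations on Python 3.9+. A runnable sketch assuming a minimal Link model (the real one lives elsewhere in Galaxy):

    from typing import Optional

    from pydantic import BaseModel

    class Link(BaseModel):
        name: str

    class DisplayApplication(BaseModel):
        name: str
        version: str
        filename_: str
        links: list[Link]  # was: List[Link]

    class ReloadFeedback(BaseModel):
        message: str
        reloaded: list[Optional[str]]
        failed: list[Optional[str]]

    fb = ReloadFeedback(message="Reloaded 1 of 1.", reloaded=["igv"], failed=[])
    print(fb.model_dump())  # Pydantic v2; use fb.dict() on v1
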
diff --git a/lib/galaxy/managers/executables.py b/lib/galaxy/managers/executables.py index 0993b32856b1..9293eeeda498 100644 --- a/lib/galaxy/managers/executables.py +++ b/lib/galaxy/managers/executables.py @@ -2,7 +2,6 @@ from typing import ( Any, - Dict, Optional, ) @@ -12,7 +11,7 @@ from galaxy.util import in_directory -def artifact_class(trans, as_dict: Dict[str, Any], allow_in_directory: Optional[str] = None): +def artifact_class(trans, as_dict: dict[str, Any], allow_in_directory: Optional[str] = None): object_id = as_dict.get("object_id", None) if as_dict.get("src", None) == "from_path": workflow_path = as_dict.get("path") diff --git a/lib/galaxy/managers/export_tracker.py b/lib/galaxy/managers/export_tracker.py index 73853073010b..14dd67dfe412 100644 --- a/lib/galaxy/managers/export_tracker.py +++ b/lib/galaxy/managers/export_tracker.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -55,7 +54,7 @@ def get_export_association(self, export_association_id: int) -> StoreExportAssoc def get_object_exports( self, object_id: int, object_type: ExportObjectType, limit: Optional[int] = None, offset: Optional[int] = None - ) -> List[StoreExportAssociation]: + ) -> list[StoreExportAssociation]: stmt = ( select( StoreExportAssociation, diff --git a/lib/galaxy/managers/file_source_instances.py b/lib/galaxy/managers/file_source_instances.py index 9c4f579dc920..bccef23f71e2 100644 --- a/lib/galaxy/managers/file_source_instances.py +++ b/lib/galaxy/managers/file_source_instances.py @@ -2,12 +2,8 @@ from typing import ( Any, cast, - Dict, - List, Literal, Optional, - Set, - Tuple, Union, ) from uuid import uuid4 @@ -124,8 +120,8 @@ class UserFileSourceModel(BaseModel): type: FileSourceTemplateType template_id: str template_version: int - variables: Optional[Dict[str, TemplateVariableValueType]] - secrets: List[str] + variables: Optional[dict[str, TemplateVariableValueType]] + secrets: list[str] class UserDefinedFileSourcesConfig(BaseModel): @@ -232,7 +228,7 @@ def _redirect_uri(self, trans: SessionRequestContext) -> str: redirect_uri = f"{galaxy_root}/oauth2_callback" return redirect_uri - def index(self, trans: ProvidesUserContext) -> List[UserFileSourceModel]: + def index(self, trans: ProvidesUserContext) -> list[UserFileSourceModel]: stores = self._sa_session.query(UserFileSource).filter(UserFileSource.user_id == trans.user.id).all() return [self._to_model(trans, s) for s in stores] @@ -411,7 +407,7 @@ def _template_parameter_generation( trans: ProvidesUserContext, payload: CanTestPluginStatus, template: FileSourceTemplate, - ) -> Tuple[Optional[TemplateParameters], Optional[PluginAspectStatus]]: + ) -> tuple[Optional[TemplateParameters], Optional[PluginAspectStatus]]: template_server_configuration = self._resolver.template_server_configuration( trans.user, template.id, template.version ) @@ -434,7 +430,7 @@ def _template_settings_status( payload: CanTestPluginStatus, template: FileSourceTemplate, template_parameters: TemplateParameters, - ) -> Tuple[Optional[FileSourceConfiguration], PluginAspectStatus]: + ) -> tuple[Optional[FileSourceConfiguration], PluginAspectStatus]: configuration = None exception = None try: @@ -445,7 +441,7 @@ def _template_settings_status( def _connection_status( self, trans: ProvidesUserContext, target: CanTestPluginStatus, configuration: FileSourceConfiguration - ) -> Tuple[Optional[BaseFilesSource], PluginAspectStatus]: + ) -> tuple[Optional[BaseFilesSource], PluginAspectStatus]: file_source = None exception = None if isinstance(target, (UpgradeTestTarget, 
UpdateTestTarget)): @@ -610,19 +606,19 @@ def find_best_match(self, url: str) -> Optional[FileSourceScore]: return FileSourceScore(file_source, len(url)) def _file_source(self, files_source_properties: FilesSourceProperties) -> BaseFilesSource: - plugin_source = plugin_source_from_dict([cast(Dict[str, Any], files_source_properties)]) + plugin_source = plugin_source_from_dict([cast(dict[str, Any], files_source_properties)]) file_source = self._plugin_loader.load_plugins( plugin_source, self._file_sources_config, )[0] return file_source - def _all_user_file_source_properties(self, user_context: FileSourcesUserContext) -> List[FilesSourceProperties]: + def _all_user_file_source_properties(self, user_context: FileSourcesUserContext) -> list[FilesSourceProperties]: username_filter = User.__table__.c.username == user_context.username user: Optional[User] = self._sa_session.query(User).filter(username_filter).one_or_none() if user is None: return [] - all_file_source_properties: List[FilesSourceProperties] = [] + all_file_source_properties: list[FilesSourceProperties] = [] for user_file_source in user.file_sources: if user_file_source.hidden: continue @@ -664,9 +660,9 @@ def user_file_sources_to_dicts( for_serialization: bool, user_context: FileSourcesUserContext, browsable_only: Optional[bool] = False, - include_kind: Optional[Set[PluginKind]] = None, - exclude_kind: Optional[Set[PluginKind]] = None, - ) -> List[FilesSourceProperties]: + include_kind: Optional[set[PluginKind]] = None, + exclude_kind: Optional[set[PluginKind]] = None, + ) -> list[FilesSourceProperties]: """Write out user file sources as list of config dictionaries.""" if user_context.anonymous: return [] diff --git a/lib/galaxy/managers/folders.py b/lib/galaxy/managers/folders.py index 34f1642ec8ac..8de5aa6bcb21 100644 --- a/lib/galaxy/managers/folders.py +++ b/lib/galaxy/managers/folders.py @@ -5,10 +5,7 @@ import logging from dataclasses import dataclass from typing import ( - List, Optional, - Set, - Tuple, TYPE_CHECKING, Union, ) @@ -65,7 +62,7 @@ class SecurityParams: """Contains security data bundled for reusability.""" - user_role_ids: List[model.Role] + user_role_ids: list[model.Role] security_agent: RBACAgent is_admin: bool @@ -328,7 +325,7 @@ def get_current_roles(self, trans, folder): ) ) - def make_tuples(roles: Set): + def make_tuples(roles: set): tuples = [] for role in roles: # use role name for non-private roles, and user.email from private rules @@ -409,7 +406,7 @@ def get_contents( trans, folder: LibraryFolder, payload: LibraryFolderContentsIndexQueryPayload, - ) -> Tuple[List[Union[LibraryFolder, LibraryDataset]], int]: + ) -> tuple[list[Union[LibraryFolder, LibraryDataset]], int]: """Retrieves the contents of the given folder that match the provided filters and pagination parameters. 
Returns a tuple with the list of paginated contents and the total number of items contained in the folder.""" limit = payload.limit @@ -421,7 +418,7 @@ def get_contents( is_admin=trans.user_is_admin, ) - content_items: List[Union[LibraryFolder, LibraryDataset]] = [] + content_items: list[Union[LibraryFolder, LibraryDataset]] = [] sub_folders_stmt = self._get_sub_folders_statement(sa_session, folder, security_params, payload) total_sub_folders = get_count(sa_session, sub_folders_stmt) if payload.order_by in FOLDER_SORT_COLUMN_MAP: @@ -554,7 +551,7 @@ def _filter_by_include_deleted( def build_folder_path( self, sa_session: galaxy_scoped_session, folder: model.LibraryFolder - ) -> List[Tuple[int, Optional[str]]]: + ) -> list[tuple[int, Optional[str]]]: """ Returns the folder path from root to the given folder. diff --git a/lib/galaxy/managers/genomes.py b/lib/galaxy/managers/genomes.py index 8d2bd2e8d576..1934bfa820f8 100644 --- a/lib/galaxy/managers/genomes.py +++ b/lib/galaxy/managers/genomes.py @@ -1,6 +1,5 @@ from typing import ( Any, - List, Optional, TYPE_CHECKING, ) @@ -32,7 +31,7 @@ def __init__(self, app: StructuredApp): self._app = app self.genomes = app.genomes - def get_dbkeys(self, user: Optional[User], chrom_info: bool) -> List[List[str]]: + def get_dbkeys(self, user: Optional[User], chrom_info: bool) -> list[list[str]]: return self.genomes.get_dbkeys(user, chrom_info) def is_registered_dbkey(self, dbkey: str, user: Optional[User]) -> bool: diff --git a/lib/galaxy/managers/group_roles.py b/lib/galaxy/managers/group_roles.py index e48fa3001ac1..b8d59703beab 100644 --- a/lib/galaxy/managers/group_roles.py +++ b/lib/galaxy/managers/group_roles.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -22,7 +21,7 @@ class GroupRolesManager: def __init__(self, app: MinimalManagerApp) -> None: self._app = app - def index(self, trans: ProvidesAppContext, group_id: int) -> List[model.GroupRoleAssociation]: + def index(self, trans: ProvidesAppContext, group_id: int) -> list[model.GroupRoleAssociation]: """ Returns a collection roles associated with the given group. """ diff --git a/lib/galaxy/managers/group_users.py b/lib/galaxy/managers/group_users.py index 8b4d0650a1ad..04472046e34c 100644 --- a/lib/galaxy/managers/group_users.py +++ b/lib/galaxy/managers/group_users.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -25,7 +24,7 @@ class GroupUsersManager: def __init__(self, app: MinimalManagerApp) -> None: self._app = app - def index(self, trans: ProvidesAppContext, group_id: int) -> List[model.User]: + def index(self, trans: ProvidesAppContext, group_id: int) -> list[model.User]: """ Returns a collection (list) with some information about users associated with the given group. 
""" diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py index b9066075027c..5778d282329f 100644 --- a/lib/galaxy/managers/hdas.py +++ b/lib/galaxy/managers/hdas.py @@ -10,10 +10,7 @@ import os from typing import ( Any, - Dict, - List, Optional, - Set, TYPE_CHECKING, Union, ) @@ -405,7 +402,7 @@ def get_discarded( offset: Optional[int], limit: Optional[int], order: Optional[StoredItemOrderBy], - ) -> List[StoredItem]: + ) -> list[StoredItem]: stmt = ( select( HistoryDatasetAssociation.id, @@ -437,11 +434,11 @@ def get_discarded( ] return discarded - def cleanup_items(self, user: model.User, item_ids: Set[int]) -> StorageItemsCleanupResult: + def cleanup_items(self, user: model.User, item_ids: set[int]) -> StorageItemsCleanupResult: success_item_count = 0 total_free_bytes = 0 - errors: List[StorageItemCleanupError] = [] - dataset_ids_to_remove: Set[int] = set() + errors: list[StorageItemCleanupError] = [] + dataset_ids_to_remove: set[int] = set() for hda_id in item_ids: try: @@ -469,7 +466,7 @@ def cleanup_items(self, user: model.User, item_ids: Set[int]) -> StorageItemsCle errors=errors, ) - def _request_full_delete_all(self, dataset_ids_to_remove: Set[int], user: Optional[model.User]): + def _request_full_delete_all(self, dataset_ids_to_remove: set[int], user: Optional[model.User]): use_tasks = self.dataset_manager.app.config.enable_celery_tasks request = PurgeDatasetsTaskRequest(dataset_ids=list(dataset_ids_to_remove)) if use_tasks: @@ -589,7 +586,7 @@ def add_serializers(self): taggable.TaggableSerializerMixin.add_serializers(self) annotatable.AnnotatableSerializerMixin.add_serializers(self) - serializers: Dict[str, base.Serializer] = { + serializers: dict[str, base.Serializer] = { "hid": lambda item, key, **context: item.hid if item.hid is not None else -1, "model_class": lambda item, key, **context: "HistoryDatasetAssociation", "history_content_type": lambda item, key, **context: "dataset", @@ -651,7 +648,7 @@ def serialize_display_apps(self, item, key, trans=None, **context): Return dictionary containing new-style display app urls. """ hda = item - display_apps: List[Dict[str, Any]] = [] + display_apps: list[dict[str, Any]] = [] if hda.state == HistoryDatasetAssociation.states.OK and not hda.deleted: for display_app in hda.get_display_applications(trans).values(): app_links = [] @@ -673,7 +670,7 @@ def serialize_old_display_applications(self, item, key, trans=None, **context): Return dictionary containing old-style display app urls. 
""" hda = item - display_apps: List[Dict[str, Any]] = [] + display_apps: list[dict[str, Any]] = [] if ( self.app.config.enable_old_display_applications and hda.state == HistoryDatasetAssociation.states.OK diff --git a/lib/galaxy/managers/hdcas.py b/lib/galaxy/managers/hdcas.py index a5c1530eda16..9e273f42f3fe 100644 --- a/lib/galaxy/managers/hdcas.py +++ b/lib/galaxy/managers/hdcas.py @@ -7,7 +7,6 @@ import logging from typing import ( - Dict, Optional, ) @@ -106,7 +105,7 @@ def map_datasets(self, content, fn, *parents): returned.append(processed) return returned - def update_attributes(self, content, payload: Dict): + def update_attributes(self, content, payload: dict): # pre-requisite checked that attributes are valid self.map_datasets(content, fn=lambda item, *args: set_collection_attributes(item, payload.items())) @@ -315,7 +314,7 @@ def add_serializers(self): super().add_serializers() taggable.TaggableSerializerMixin.add_serializers(self) annotatable.AnnotatableSerializerMixin.add_serializers(self) - serializers: Dict[str, base.Serializer] = { + serializers: dict[str, base.Serializer] = { "model_class": lambda item, key, **context: self.hdca_manager.model_class.__class__.__name__, # TODO: remove "type": lambda item, key, **context: "collection", diff --git a/lib/galaxy/managers/histories.py b/lib/galaxy/managers/histories.py index b05572962966..e68ad5c05150 100644 --- a/lib/galaxy/managers/histories.py +++ b/lib/galaxy/managers/histories.py @@ -9,11 +9,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Set, - Tuple, TYPE_CHECKING, Union, ) @@ -124,7 +120,7 @@ def __init__( def index_query( self, trans: ProvidesUserContext, payload: HistoryIndexQueryPayload, include_total_count: bool = False - ) -> Tuple["ScalarResult[model.History]", Union[int, None]]: + ) -> tuple["ScalarResult[model.History]", Union[int, None]]: show_deleted = False show_own = payload.show_own show_published = payload.show_published @@ -244,7 +240,7 @@ def p_tag_filter(term_text: str, quoted: bool): # overriding to handle anonymous users' current histories in both cases def by_user( self, user: model.User, current_history: Optional[model.History] = None, **kwargs: Any - ) -> List[model.History]: + ) -> list[model.History]: """ Get all the histories for a given user (allowing anon users' theirs) ordered by update time. 
@@ -424,7 +420,7 @@ def queue_history_export( return job def get_sharing_extra_information( - self, trans, item, users: Set[model.User], errors: Set[str], option: Optional[sharable.SharingOptions] = None + self, trans, item, users: set[model.User], errors: set[str], option: Optional[sharable.SharingOptions] = None ) -> ShareHistoryExtra: """Returns optional extra information about the datasets of the history that can be accessed by the users.""" extra = ShareHistoryExtra() @@ -574,7 +570,7 @@ def get_discarded( offset: Optional[int], limit: Optional[int], order: Optional[StoredItemOrderBy], - ) -> List[StoredItem]: + ) -> list[StoredItem]: stmt = select(model.History).where( model.History.user_id == user.id, model.History.deleted == true(), @@ -609,7 +605,7 @@ def get_archived( offset: Optional[int], limit: Optional[int], order: Optional[StoredItemOrderBy], - ) -> List[StoredItem]: + ) -> list[StoredItem]: stmt = select(model.History).where( model.History.user_id == user.id, model.History.archived == true(), @@ -625,10 +621,10 @@ def get_archived( archived = [self._history_to_stored_item(item) for item in result] return archived - def cleanup_items(self, user: model.User, item_ids: Set[int]) -> StorageItemsCleanupResult: + def cleanup_items(self, user: model.User, item_ids: set[int]) -> StorageItemsCleanupResult: success_item_count = 0 total_free_bytes = 0 - errors: List[StorageItemCleanupError] = [] + errors: list[StorageItemCleanupError] = [] for history_id in item_ids: try: @@ -840,7 +836,7 @@ def add_serializers(self): super().add_serializers() deletable.PurgableSerializerMixin.add_serializers(self) - serializers: Dict[str, Serializer] = { + serializers: dict[str, Serializer] = { "model_class": lambda item, key, **context: "History", "size": lambda item, key, **context: int(item.disk_size), "nice_size": lambda item, key, **context: item.disk_nice_size, @@ -877,7 +873,7 @@ def serialize_state_ids(self, item, key, **context): containing the ids of each HDA in that state. 
""" history = item - state_ids: Dict[str, List[str]] = {} + state_ids: dict[str, list[str]] = {} for state in model.Dataset.states.values(): state_ids[state] = [] diff --git a/lib/galaxy/managers/history_contents.py b/lib/galaxy/managers/history_contents.py index 63f17e942aa6..ed91d601d6ac 100644 --- a/lib/galaxy/managers/history_contents.py +++ b/lib/galaxy/managers/history_contents.py @@ -7,8 +7,6 @@ import logging from typing import ( Any, - Dict, - List, ) from sqlalchemy import ( @@ -236,7 +234,7 @@ def _union_of_contents(self, container, expand_models=True, **kwargs): return contents_results # partition ids into a map of { component_class names -> list of ids } from the above union query - id_map: Dict[str, List[int]] = {self.contained_class_type_name: [], self.subcontainer_class_type_name: []} + id_map: dict[str, list[int]] = {self.contained_class_type_name: [], self.subcontainer_class_type_name: []} for result in contents_results: result_type = self._get_union_type(result) contents_id = self._get_union_id(result) @@ -473,7 +471,7 @@ def __init__(self, app: MinimalManagerApp, **kwargs): def add_serializers(self): super().add_serializers() deletable.PurgableSerializerMixin.add_serializers(self) - serializers: Dict[str, Serializer] = { + serializers: dict[str, Serializer] = { "type_id": self.serialize_type_id, "history_id": self.serialize_id, "dataset_id": self.serialize_id_or_skip, diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py index ef8f1b22568c..e4ff76bbd0bb 100644 --- a/lib/galaxy/managers/jobs.py +++ b/lib/galaxy/managers/jobs.py @@ -1,5 +1,6 @@ import json import logging +from collections.abc import Iterable from datetime import ( date, datetime, @@ -8,11 +9,7 @@ from typing import ( Any, cast, - Dict, - Iterable, - List, Optional, - Set, Union, ) @@ -215,7 +212,7 @@ def add_search_criteria(stmt): elif key == "runner": stmt = stmt.where(text_column_filter(Job.job_runner_name, term)) elif isinstance(term, RawTextTerm): - columns: List = [Job.tool_id] + columns: list = [Job.tool_id] if user_details: columns.append(User.email) if is_admin: @@ -474,7 +471,7 @@ def replace_dataset_ids(path, key, value): stmt = select(model.Job.id.label("job_id")) - data_conditions: List = [] + data_conditions: list = [] # We now build the stmt filters that relate to the input datasets # that this job uses. We keep track of the requested dataset id in `requested_ids`, @@ -482,7 +479,7 @@ def replace_dataset_ids(path, key, value): # and the ids that have been used in the job that has already been run in `used_ids`. requested_ids = [] data_types = [] - used_ids: List = [] + used_ids: list = [] for k, input_list in input_data.items(): # k will be matched against the JobParameter.name column. 
This can be prefixed depending on whether # the input is in a repeat, or not (section and conditional) @@ -610,7 +607,7 @@ def _filter_jobs( stmt = stmt.where(Job.tool_version == str(tool_version)) if job_state is None: - job_states: Set[str] = { + job_states: set[str] = { Job.states.NEW, Job.states.QUEUED, Job.states.WAITING, @@ -1368,7 +1365,7 @@ def _build_stmt_for_dce(self, stmt, data_conditions, used_ids, k, v, user_id, va return stmt -def view_show_job(trans, job: Job, full: bool) -> Dict: +def view_show_job(trans, job: Job, full: bool) -> dict: is_admin = trans.user_is_admin job_dict = job.to_dict("element", system_details=is_admin) if trans.app.config.expose_dataset_path and "command_line" not in job_dict: @@ -1577,7 +1574,7 @@ def merge_states(component_states): class JobsSummary(TypedDict): populated_state: str - states: Dict[str, int] + states: dict[str, int] model: str id: int @@ -1669,7 +1666,7 @@ def summarize_destination_params(trans, job): return destination_params -def summarize_job_parameters(trans: ProvidesUserContext, job: Job) -> Dict[str, Any]: +def summarize_job_parameters(trans: ProvidesUserContext, job: Job) -> dict[str, Any]: """Produce a dict-ified version of job parameters ready for tabular rendering. Precondition: the caller has verified the job is accessible to the user @@ -1746,7 +1743,7 @@ def inputs_recursive(input_params, param_values, depth=1, upgrade_messages=None) or input.type == "data_collection" or isinstance(input_value, model.HistoryDatasetAssociation) ): - value: List[Union[Dict[str, Any], None]] = [] + value: list[Union[dict[str, Any], None]] = [] for element in listify(input_value): if isinstance(element, model.HistoryDatasetAssociation): hda = element @@ -1844,7 +1841,7 @@ def summarize_job_outputs(job: model.Job, tool, params): ("hdca", "dataset_collection_id", job.output_dataset_collection_instances), ) for src, attribute, output_associations in possible_outputs: - output_associations = cast(List, output_associations) # during iteration, mypy sees it as object + output_associations = cast(list, output_associations) # during iteration, mypy sees it as object for output_association in output_associations: output_name = output_association.name if output_name not in output_labels and tool: diff --git a/lib/galaxy/managers/libraries.py b/lib/galaxy/managers/libraries.py index 0a857536202c..19333e6f72c6 100644 --- a/lib/galaxy/managers/libraries.py +++ b/lib/galaxy/managers/libraries.py @@ -4,10 +4,7 @@ import logging from typing import ( - Dict, Optional, - Set, - Tuple, ) from sqlalchemy.exc import ( @@ -133,7 +130,7 @@ def delete(self, trans, library: Library, undelete: Optional[bool] = False) -> L trans.sa_session.commit() return library - def list(self, trans, deleted: Optional[bool] = False) -> Tuple[Query, Dict[str, Set]]: + def list(self, trans, deleted: Optional[bool] = False) -> tuple[Query, dict[str, set]]: """ Return a list of libraries from the DB. @@ -215,7 +212,7 @@ def check_accessible(self, trans, library: Library) -> Library: else: return library - def get_library_dict(self, trans, library: Library, prefetched_ids: Optional[Dict[str, Set]] = None) -> dict: + def get_library_dict(self, trans, library: Library, prefetched_ids: Optional[dict[str, set]] = None) -> dict: """ Return library data in the form of a dictionary. 
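
The jobs.py hunk above also moves Iterable from typing to collections.abc, and later files in this patch (metrics.py, markdown_util.py, model/__init__.py) do the same for Generator, Match, and Callable; the typing copies of these ABCs are deprecated by the same PEP 585 cleanup, and Match has lived in the re module since Python 3.8. A short sketch of the replacement imports, with illustrative helpers that are not from this diff:

    import re
    from collections.abc import Generator, Iterable
    from re import Match
    from typing import Optional

    def first_token(line: str) -> Optional[Match]:
        # re.Match replaces the deprecated typing.Match alias.
        return re.match(r"\w+", line)

    def evens(values: Iterable[int]) -> Generator[int, None, None]:
        # Generator[yield_type, send_type, return_type] keeps the same shape
        # under collections.abc as it had under typing.
        for value in values:
            if value % 2 == 0:
                yield value
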
@@ -281,7 +278,7 @@ def get_current_roles(self, trans, library: Library) -> dict: manage_roles = self.get_manage_roles(trans, library) add_roles = self.get_add_roles(trans, library) - def make_tuples(roles: Set): + def make_tuples(roles: set): tuples = [] for role in roles: # use role name for non-private roles, and user.email from private rules @@ -297,13 +294,13 @@ def make_tuples(roles: Set): add_library_item_role_list=make_tuples(add_roles), ) - def get_access_roles(self, trans, library: Library) -> Set[Role]: + def get_access_roles(self, trans, library: Library) -> set[Role]: """ Load access roles for all library permissions """ return set(library.get_access_roles(trans.app.security_agent)) - def get_modify_roles(self, trans, library: Library) -> Set[Role]: + def get_modify_roles(self, trans, library: Library) -> set[Role]: """ Load modify roles for all library permissions """ @@ -313,7 +310,7 @@ def get_modify_roles(self, trans, library: Library) -> Set[Role]: ) ) - def get_manage_roles(self, trans, library: Library) -> Set[Role]: + def get_manage_roles(self, trans, library: Library) -> set[Role]: """ Load manage roles for all library permissions """ @@ -323,7 +320,7 @@ def get_manage_roles(self, trans, library: Library) -> Set[Role]: ) ) - def get_add_roles(self, trans, library: Library) -> Set[Role]: + def get_add_roles(self, trans, library: Library) -> set[Role]: """ Load add roles for all library permissions """ diff --git a/lib/galaxy/managers/library_datasets.py b/lib/galaxy/managers/library_datasets.py index 56d9e9fa82f1..068a0ae03c7e 100644 --- a/lib/galaxy/managers/library_datasets.py +++ b/lib/galaxy/managers/library_datasets.py @@ -3,7 +3,6 @@ import logging from typing import ( Any, - Dict, ) from sqlalchemy import select @@ -57,7 +56,7 @@ def get(self, trans, decoded_library_dataset_id, check_accessible=True) -> Libra ld = self.secure(trans, ld, check_accessible) return ld - def update(self, item: LibraryDataset, new_values: Dict[str, Any], flush: bool = True, **kwargs) -> LibraryDataset: + def update(self, item: LibraryDataset, new_values: dict[str, Any], flush: bool = True, **kwargs) -> LibraryDataset: """ Update the given library dataset - the latest linked ldda. Updating older lddas (versions) is not allowed. 
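
Not every hunk in this patch is a typing change: the markdown_util.py edits a little further down (and a workflows.py hunk later still) fold an assignment followed by a truthiness test into a single assignment expression, the walrus operator from PEP 572, available since Python 3.8 and presumably produced here by an automated rewrite. A minimal sketch of the pattern, using an illustrative regex rather than Galaxy's INVOCATION_ID_PATTERN:

    import re
    from typing import Optional

    ID_PATTERN = r"id=([0-9a-f]+)"  # illustrative stand-in, not the real pattern

    def extract_id(line: str) -> Optional[str]:
        # Before:
        #     match = re.search(ID_PATTERN, line)
        #     if match:
        #         return match.group(1)
        # After: bind and test in one expression.
        if match := re.search(ID_PATTERN, line):
            return match.group(1)
        return None
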
@@ -93,7 +92,7 @@ def _set_from_dict( self, trans: ProvidesUserContext, ldda: LibraryDatasetDatasetAssociation, - new_data: Dict[str, Any], + new_data: dict[str, Any], flush: bool = True, ) -> None: changed = False @@ -226,7 +225,7 @@ def check_modifiable(self, trans, ld): else: return ld - def serialize(self, trans, ld: LibraryDataset) -> Dict[str, Any]: + def serialize(self, trans, ld: LibraryDataset) -> dict[str, Any]: """Serialize the library dataset into a dictionary.""" current_user_roles = trans.get_current_user_roles() diff --git a/lib/galaxy/managers/licenses.py b/lib/galaxy/managers/licenses.py index f1c3b8f92585..b8bd2257e8b3 100644 --- a/lib/galaxy/managers/licenses.py +++ b/lib/galaxy/managers/licenses.py @@ -1,6 +1,5 @@ import json import logging -from typing import List from pydantic import ( BaseModel, @@ -34,7 +33,7 @@ class LicenseMetadataModel(BaseModel): description="Indicates if the [OSI](https://opensource.org/) has approved the license", examples=[True], ) - seeAlso: List[HttpUrl] = Field( + seeAlso: list[HttpUrl] = Field( title="Reference URLs", description="Cross reference URL pointing to additional copies of the license" ) detailsUrl: HttpUrl = Field( @@ -98,7 +97,7 @@ def get(self, uri): log.warning(f"Unknown license URI encountered [{uri}]") return {"url": uri} - def get_licenses(self) -> List[LicenseMetadataModel]: + def get_licenses(self) -> list[LicenseMetadataModel]: return SPDX_LICENSES["licenses"] def get_license_by_id(self, id: str) -> LicenseMetadataModel: diff --git a/lib/galaxy/managers/markdown_parse.py b/lib/galaxy/managers/markdown_parse.py index 8dda7fac99c1..20e13c595781 100644 --- a/lib/galaxy/managers/markdown_parse.py +++ b/lib/galaxy/managers/markdown_parse.py @@ -8,8 +8,6 @@ import re from typing import ( - Dict, - List, Union, ) @@ -24,8 +22,8 @@ class DynamicArguments: DYNAMIC_ARGUMENTS = DynamicArguments() -SHARED_ARGUMENTS: List[str] = ["collapse"] -VALID_ARGUMENTS: Dict[str, Union[List[str], DynamicArguments]] = { +SHARED_ARGUMENTS: list[str] = ["collapse"] +VALID_ARGUMENTS: dict[str, Union[list[str], DynamicArguments]] = { "generate_galaxy_version": [], "generate_time": [], "history_dataset_as_image": ["history_dataset_id", "input", "invocation_id", "output", "path"], diff --git a/lib/galaxy/managers/markdown_util.py b/lib/galaxy/managers/markdown_util.py index 4b016ce96243..2f926b34c8df 100644 --- a/lib/galaxy/managers/markdown_util.py +++ b/lib/galaxy/managers/markdown_util.py @@ -19,11 +19,9 @@ import re import shutil import tempfile +from re import Match from typing import ( Any, - Dict, - List, - Match, Optional, ) @@ -722,7 +720,7 @@ def handle_tool_stderr(self, line, job): def handle_job_metrics(self, line, job): job_metrics = summarize_job_metrics(self.trans, job) - metrics_by_plugin: Dict[str, Dict[str, Any]] = {} + metrics_by_plugin: dict[str, dict[str, Any]] = {} for job_metric in job_metrics: plugin = job_metric["plugin"] if plugin not in metrics_by_plugin: @@ -846,7 +844,7 @@ def to_html(basic_markdown: str) -> str: return html -def to_pdf_raw(basic_markdown: str, css_paths: Optional[List[str]] = None) -> bytes: +def to_pdf_raw(basic_markdown: str, css_paths: Optional[list[str]] = None) -> bytes: """Convert RAW markdown with specified CSS paths into bytes of a PDF.""" css_paths = css_paths or [] as_html = to_html(basic_markdown) @@ -987,8 +985,7 @@ def resolve_invocation_markdown(trans, workflow_markdown): def get_invocation(trans, line): workflow_manager = trans.app.workflow_manager - invocation_id_match = 
re.search(INVOCATION_ID_PATTERN, line) - if invocation_id_match: + if invocation_id_match := re.search(INVOCATION_ID_PATTERN, line): invocation_id = invocation_id_match.group(1) invocation = workflow_manager.get_invocation( trans, invocation_id, check_ownership=False, check_accessible=True @@ -1132,8 +1129,6 @@ def _remap(container, line): elif container == "job_metrics": return (f"job_metrics(job_id={job.id})\n", False) ref_object_type = None - output_match = re.search(OUTPUT_LABEL_PATTERN, line) - input_match = re.search(INPUT_LABEL_PATTERN, line) def find_non_empty_group(match): for group in match.groups(): @@ -1142,7 +1137,7 @@ def find_non_empty_group(match): target_match: Optional[Match] ref_object: Optional[Any] - if output_match: + if output_match := re.search(OUTPUT_LABEL_PATTERN, line): target_match = output_match name = find_non_empty_group(target_match) if name in io_dicts.out_data: @@ -1151,7 +1146,7 @@ def find_non_empty_group(match): ref_object = io_dicts.out_collections[name] else: raise Exception("Unknown exception") - elif input_match: + elif input_match := re.search(INPUT_LABEL_PATTERN, line): target_match = input_match name = find_non_empty_group(target_match) ref_object = io_dicts.inp_data[name] diff --git a/lib/galaxy/managers/metrics.py b/lib/galaxy/managers/metrics.py index 3ce8ea9bac6a..c1f78049b09f 100644 --- a/lib/galaxy/managers/metrics.py +++ b/lib/galaxy/managers/metrics.py @@ -1,11 +1,9 @@ import logging +from collections.abc import Generator from datetime import datetime from typing import ( Any, - Generator, - List, Optional, - Tuple, ) from pydantic import ( @@ -46,7 +44,7 @@ class Metric(BaseModel): class CreateMetricsPayload(BaseModel): - metrics: List[Metric] = Field( + metrics: list[Metric] = Field( default=[], title="List of metrics to be recorded.", examples=[ @@ -55,7 +53,7 @@ class CreateMetricsPayload(BaseModel): ) -TimeSeriesTuple = Tuple[str, datetime, Any] +TimeSeriesTuple = tuple[str, datetime, Any] TimeSeriesTupleGenerator = Generator[TimeSeriesTuple, None, None] @@ -93,7 +91,7 @@ def create(self, trans, payload: CreateMetricsPayload): return response def _parse_metrics( - self, metrics: Optional[List[Metric]] = None, user_id=None, session_id=None + self, metrics: Optional[list[Metric]] = None, user_id=None, session_id=None ) -> TimeSeriesTupleGenerator: """ Return a generator yielding the each given metric as a tuple: diff --git a/lib/galaxy/managers/notification.py b/lib/galaxy/managers/notification.py index 3e2983db2a54..c6f220583d35 100644 --- a/lib/galaxy/managers/notification.py +++ b/lib/galaxy/managers/notification.py @@ -3,13 +3,8 @@ from enum import Enum from typing import ( cast, - Dict, - List, NamedTuple, Optional, - Set, - Tuple, - Type, ) from urllib.parse import urlparse @@ -81,7 +76,7 @@ class CleanupResultSummary(NamedTuple): class NotificationRecipientResolverStrategy(Protocol): - def resolve_users(self, recipients: NotificationRecipients) -> List[User]: + def resolve_users(self, recipients: NotificationRecipients) -> list[User]: pass @@ -102,7 +97,7 @@ def __init__(self, sa_session: galaxy_scoped_session, config: GalaxyAppConfigura self.sa_session = sa_session self.config = config self.recipient_resolver = NotificationRecipientResolver(strategy=DefaultStrategy(sa_session)) - self.user_notification_columns: List[InstrumentedAttribute] = [ + self.user_notification_columns: list[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -115,7 +110,7 @@ def __init__(self, sa_session: 
galaxy_scoped_session, config: GalaxyAppConfigura UserNotificationAssociation.seen_time, UserNotificationAssociation.deleted, ] - self.broadcast_notification_columns: List[InstrumentedAttribute] = [ + self.broadcast_notification_columns: list[InstrumentedAttribute] = [ Notification.id, Notification.source, Notification.category, @@ -152,7 +147,7 @@ def ensure_notifications_enabled(self): def can_send_notifications_async(self): return self.config.enable_celery_tasks - def send_notification_to_recipients(self, request: NotificationCreateRequest) -> Tuple[Optional[Notification], int]: + def send_notification_to_recipients(self, request: NotificationCreateRequest) -> tuple[Optional[Notification], int]: """ Creates a new notification and associates it with all the recipient users. @@ -170,7 +165,7 @@ def send_notification_to_recipients(self, request: NotificationCreateRequest) -> return notification, notifications_sent - def _create_associations(self, notification: Notification, users: List[User]) -> int: + def _create_associations(self, notification: Notification, users: list[User]) -> int: success_count = 0 for user in users: try: @@ -361,7 +356,7 @@ def get_all_broadcasted_notifications(self, since: Optional[datetime] = None, ac return result def update_user_notifications( - self, user: User, notification_ids: Set[int], request: UserNotificationUpdateRequest + self, user: User, notification_ids: set[int], request: UserNotificationUpdateRequest ) -> int: """Updates a batch of notifications associated with the user using the requested values.""" updated_row_count = 0 @@ -426,9 +421,9 @@ def update_user_notification_preferences( self.sa_session.commit() return preferences - def _register_supported_channels(self) -> Dict[str, NotificationChannelPlugin]: + def _register_supported_channels(self) -> dict[str, NotificationChannelPlugin]: """Registers the supported notification channels in this server.""" - supported_channels: Dict[str, NotificationChannelPlugin] = { + supported_channels: dict[str, NotificationChannelPlugin] = { # Push notifications are handled client-side so no real plugin is needed "push": NoOpNotificationChannelPlugin(self.config), } @@ -440,7 +435,7 @@ def _register_supported_channels(self) -> Dict[str, NotificationChannelPlugin]: return supported_channels - def get_supported_channels(self) -> Set[str]: + def get_supported_channels(self) -> set[str]: """Returns the set of supported notification channels in this server.""" return set(self.channel_plugins.keys()) @@ -532,7 +527,7 @@ class NotificationRecipientResolver: def __init__(self, strategy: NotificationRecipientResolverStrategy): self.strategy = strategy - def resolve(self, recipients: NotificationRecipients) -> List[User]: + def resolve(self, recipients: NotificationRecipients) -> list[User]: """Given individual user, group and roles ids as recipients, obtains the unique list of users. 
The resulting list will contain only unique users even if the same user id might have been provided more @@ -547,8 +542,8 @@ class DefaultStrategy(NotificationRecipientResolverStrategy): def __init__(self, sa_session: galaxy_scoped_session): self.sa_session = sa_session - def resolve_users(self, recipients: NotificationRecipients) -> List[User]: - unique_user_ids: Set[int] = set(recipients.user_ids) + def resolve_users(self, recipients: NotificationRecipients) -> list[User]: + unique_user_ids: set[int] = set(recipients.user_ids) all_group_ids, all_role_ids = self._expand_group_and_roles_ids( set(recipients.group_ids), set(recipients.role_ids) @@ -564,7 +559,7 @@ def resolve_users(self, recipients: NotificationRecipients) -> List[User]: stmt = select(User).where(User.id.in_(unique_user_ids)) return self.sa_session.scalars(stmt).all() # type:ignore[return-value] - def _get_all_user_ids_from_roles_query(self, role_ids: Set[int]) -> Select: + def _get_all_user_ids_from_roles_query(self, role_ids: set[int]) -> Select: stmt = ( select(UserRoleAssociation.user_id) .select_from(UserRoleAssociation) @@ -573,7 +568,7 @@ def _get_all_user_ids_from_roles_query(self, role_ids: Set[int]) -> Select: ) return stmt - def _get_all_user_ids_from_groups_query(self, group_ids: Set[int]) -> Select: + def _get_all_user_ids_from_groups_query(self, group_ids: set[int]) -> Select: stmt = ( select(UserGroupAssociation.user_id) .select_from(UserGroupAssociation) @@ -582,12 +577,12 @@ def _get_all_user_ids_from_groups_query(self, group_ids: Set[int]) -> Select: ) return stmt - def _expand_group_and_roles_ids(self, group_ids: Set[int], role_ids: Set[int]) -> Tuple[Set[int], Set[int]]: + def _expand_group_and_roles_ids(self, group_ids: set[int], role_ids: set[int]) -> tuple[set[int], set[int]]: """Given a set of group and roles IDs, it expands those sets (non-recursively) by including sub-groups or sub-roles indirectly associated with them. """ - processed_group_ids: Set[int] = set() - processed_role_ids: Set[int] = set() + processed_group_ids: set[int] = set() + processed_role_ids: set[int] = set() while True: # Get group IDs associated with any of the given role IDs @@ -626,7 +621,7 @@ def _expand_group_and_roles_ids(self, group_ids: Set[int], role_ids: Set[int]) - class RecursiveCTEStrategy(NotificationRecipientResolverStrategy): - def resolve_users(self, recipients: NotificationRecipients) -> List[User]: + def resolve_users(self, recipients: NotificationRecipients) -> list[User]: # TODO Implement resolver using recursive CTEs? 
return [] @@ -749,7 +744,7 @@ def get_subject(self) -> str: class EmailNotificationChannelPlugin(NotificationChannelPlugin): # Register the supported email templates here - email_templates_by_category: Dict[PersonalNotificationCategory, Type[EmailNotificationTemplateBuilder]] = { + email_templates_by_category: dict[PersonalNotificationCategory, type[EmailNotificationTemplateBuilder]] = { PersonalNotificationCategory.message: MessageEmailNotificationTemplateBuilder, PersonalNotificationCategory.new_shared_item: NewSharedItemEmailNotificationTemplateBuilder, } diff --git a/lib/galaxy/managers/object_store_instances.py b/lib/galaxy/managers/object_store_instances.py index 2473c39b117b..899e1c4b15cf 100644 --- a/lib/galaxy/managers/object_store_instances.py +++ b/lib/galaxy/managers/object_store_instances.py @@ -9,10 +9,7 @@ import logging from typing import ( - Dict, - List, Optional, - Tuple, Union, ) from uuid import uuid4 @@ -91,8 +88,8 @@ class UserConcreteObjectStoreModel(ConcreteObjectStoreModel): type: ObjectStoreTemplateType template_id: str template_version: int - variables: Optional[Dict[str, TemplateVariableValueType]] - secrets: List[str] + variables: Optional[dict[str, TemplateVariableValueType]] + secrets: list[str] hidden: bool active: bool purged: bool @@ -218,7 +215,7 @@ def create_instance( self._save(persisted_object_store) return self._to_model(trans, persisted_object_store) - def index(self, trans: ProvidesUserContext) -> List[UserConcreteObjectStoreModel]: + def index(self, trans: ProvidesUserContext) -> list[UserConcreteObjectStoreModel]: stores = self._sa_session.query(UserObjectStore).filter(UserObjectStore.user_id == trans.user.id).all() return [self._to_model(trans, s) for s in stores] @@ -304,7 +301,7 @@ def _template_settings_status( trans: ProvidesUserContext, payload: CanTestPluginStatus, template: ObjectStoreTemplate, - ) -> Tuple[Optional[ObjectStoreConfiguration], PluginAspectStatus]: + ) -> tuple[Optional[ObjectStoreConfiguration], PluginAspectStatus]: template_parameters = prepare_template_parameters_for_testing( trans, template, TemplateServerConfiguration(), payload, self._app_vault, self._app_config ) @@ -319,7 +316,7 @@ def _template_settings_status( def _connection_status( self, trans: ProvidesUserContext, payload: CanTestPluginStatus, configuration: ObjectStoreConfiguration - ) -> Tuple[Optional[BaseObjectStore], PluginAspectStatus]: + ) -> tuple[Optional[BaseObjectStore], PluginAspectStatus]: object_store = None exception = None try: diff --git a/lib/galaxy/managers/pages.py b/lib/galaxy/managers/pages.py index 69a356c6630c..97d81c109fc0 100644 --- a/lib/galaxy/managers/pages.py +++ b/lib/galaxy/managers/pages.py @@ -13,7 +13,6 @@ from typing import ( Callable, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -144,7 +143,7 @@ def __init__(self, app: MinimalManagerApp): def index_query( self, trans: ProvidesUserContext, payload: PageIndexQueryPayload, include_total_count: bool = False - ) -> Tuple["ScalarResult[model.Page]", Union[int, None]]: + ) -> tuple["ScalarResult[model.Page]", Union[int, None]]: show_deleted = payload.deleted show_own = payload.show_own show_published = payload.show_published diff --git a/lib/galaxy/managers/quotas.py b/lib/galaxy/managers/quotas.py index 8801f2cc304c..5b7726923a9b 100644 --- a/lib/galaxy/managers/quotas.py +++ b/lib/galaxy/managers/quotas.py @@ -8,7 +8,6 @@ from typing import ( cast, Optional, - Tuple, Union, ) @@ -53,7 +52,7 @@ def sa_session(self): def quota_agent(self) -> DatabaseQuotaAgent: return 
cast(DatabaseQuotaAgent, self.app.quota_agent) - def create_quota(self, payload: dict, decode_id=None) -> Tuple[model.Quota, str]: + def create_quota(self, payload: dict, decode_id=None) -> tuple[model.Quota, str]: params = CreateQuotaParams.parse_obj(payload) create_amount = self._parse_amount(params.amount) stmt = select(Quota).where(Quota.name == params.name).limit(1) diff --git a/lib/galaxy/managers/ratable.py b/lib/galaxy/managers/ratable.py index 9551f1403f3b..e149a91a98f9 100644 --- a/lib/galaxy/managers/ratable.py +++ b/lib/galaxy/managers/ratable.py @@ -3,7 +3,6 @@ """ import logging -from typing import Type from sqlalchemy import select from sqlalchemy.sql.expression import func @@ -15,7 +14,7 @@ class RatableManagerMixin: - rating_assoc: Type[ItemRatingAssociation] + rating_assoc: type[ItemRatingAssociation] def rating(self, item, user, as_int=True): """Returns the integer rating given to this item by the user. diff --git a/lib/galaxy/managers/remote_files.py b/lib/galaxy/managers/remote_files.py index 165efe3c95b2..c9e4aa210953 100644 --- a/lib/galaxy/managers/remote_files.py +++ b/lib/galaxy/managers/remote_files.py @@ -3,8 +3,6 @@ from operator import itemgetter from typing import ( Optional, - Set, - Tuple, ) from galaxy import exceptions @@ -55,7 +53,7 @@ def index( offset: Optional[int] = None, query: Optional[str] = None, sort_by: Optional[str] = None, - ) -> Tuple[AnyRemoteFilesListResponse, int]: + ) -> tuple[AnyRemoteFilesListResponse, int]: """Returns a list of remote files and directories available to the user and the total count of them.""" user_file_source_context = ProvidesFileSourcesUserContext(user_ctx) @@ -148,8 +146,8 @@ def get_files_source_plugins( self, user_context: ProvidesUserContext, browsable_only: Optional[bool] = True, - include_kind: Optional[Set[PluginKind]] = None, - exclude_kind: Optional[Set[PluginKind]] = None, + include_kind: Optional[set[PluginKind]] = None, + exclude_kind: Optional[set[PluginKind]] = None, ): """Display plugin information for each of the gxfiles:// URI targets available.""" user_file_source_context = ProvidesFileSourcesUserContext(user_context) diff --git a/lib/galaxy/managers/roles.py b/lib/galaxy/managers/roles.py index 760ab018d755..99d6bc1441e3 100644 --- a/lib/galaxy/managers/roles.py +++ b/lib/galaxy/managers/roles.py @@ -3,7 +3,6 @@ """ import logging -from typing import List from sqlalchemy import select from sqlalchemy.exc import ( @@ -67,7 +66,7 @@ def get(self, trans: ProvidesUserContext, role_id: int) -> model.Role: return role - def list_displayable_roles(self, trans: ProvidesUserContext) -> List[Role]: + def list_displayable_roles(self, trans: ProvidesUserContext) -> list[Role]: return get_displayable_roles(trans.sa_session, trans.user, trans.user_is_admin, trans.app.security_agent) def create_role(self, trans: ProvidesUserContext, role_definition_model: RoleDefinitionModel) -> model.Role: diff --git a/lib/galaxy/managers/secured.py b/lib/galaxy/managers/secured.py index 6a70cf5b756c..bee37a76fb80 100644 --- a/lib/galaxy/managers/secured.py +++ b/lib/galaxy/managers/secured.py @@ -9,7 +9,6 @@ Any, Generic, Optional, - Type, TypeVar, ) @@ -29,7 +28,7 @@ class AccessibleManagerMixin(Generic[U]): """ # declare what we are using from base ModelManager - model_class: Type[U] + model_class: type[U] @abc.abstractmethod def by_id(self, id: int) -> U: ... 
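
The ratable.py hunk above and the secured.py hunks here apply the same PEP 585 treatment to class objects: typing.Type[C] becomes the builtin type[C], subscriptable since Python 3.9. A sketch of the mixin-style class attribute, with an invented manager rather than Galaxy's real base classes:

    from typing import Generic, TypeVar

    U = TypeVar("U")

    class ManagerSketch(Generic[U]):
        # Builtin type is generic at runtime since 3.9; typing.Type is redundant.
        model_class: type[U]

        def __init__(self, model_class: type[U]) -> None:
            self.model_class = model_class

        def create(self) -> U:
            # Calling the stored class object produces an instance of U.
            return self.model_class()
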
@@ -74,7 +73,7 @@ class OwnableManagerMixin(Generic[U]): """ # declare what we are using from base ModelManager - model_class: Type[U] + model_class: type[U] @abc.abstractmethod def by_id(self, id: int) -> U: ... diff --git a/lib/galaxy/managers/sharable.py b/lib/galaxy/managers/sharable.py index 5f208067a50f..f84ab15f30f4 100644 --- a/lib/galaxy/managers/sharable.py +++ b/lib/galaxy/managers/sharable.py @@ -13,10 +13,7 @@ import logging from typing import ( Any, - List, Optional, - Set, - Type, TypeVar, ) @@ -70,7 +67,7 @@ class SharableModelManager( # base.DeleteableModelMixin? (all four are deletable) #: the model used for UserShareAssociations with this model - user_share_model: Type[UserShareAssociation] + user_share_model: type[UserShareAssociation] #: the single character abbreviation used in username_and_slug: e.g. 'h' for histories: u/user/h/slug SINGLE_CHAR_ABBR: Optional[str] = None @@ -82,7 +79,7 @@ def __init__(self, app: MinimalManagerApp): self.tag_handler = app[GalaxyTagHandler] # .... has a user - def by_user(self, user: User, **kwargs: Any) -> List[Any]: + def by_user(self, user: User, **kwargs: Any) -> list[Any]: """ Return list for all items (of model_class type) associated with the given `user`. @@ -248,7 +245,7 @@ def list_shared_with(self, user, filters=None, order_by=None, limit=None, offset return list(self._apply_fn_limit_offset_gen(items, limit, offset)) def get_sharing_extra_information( - self, trans, item, users: Set[User], errors: Set[str], option: Optional[SharingOptions] = None + self, trans, item, users: set[User], errors: set[str], option: Optional[SharingOptions] = None ) -> Optional[ShareWithExtra]: """Returns optional extra information about the shareability of the given item. @@ -263,7 +260,7 @@ def make_members_public(self, trans, item): contained associated with the given item. 
""" - def update_current_sharing_with_users(self, item, new_users_shared_with: Set[User], flush=True): + def update_current_sharing_with_users(self, item, new_users_shared_with: set[User], flush=True): """Updates the currently list of users this item is shared with by adding new users and removing missing ones.""" current_shares = self.get_share_assocs(item) diff --git a/lib/galaxy/managers/tool_data.py b/lib/galaxy/managers/tool_data.py index 921c736d08a2..9f20a3786226 100644 --- a/lib/galaxy/managers/tool_data.py +++ b/lib/galaxy/managers/tool_data.py @@ -1,7 +1,6 @@ from pathlib import Path from typing import ( cast, - Dict, Optional, ) @@ -38,7 +37,7 @@ def __init__(self, app: StructuredApp): self._app = app @property - def data_tables(self) -> Dict[str, ToolDataTable]: + def data_tables(self) -> dict[str, ToolDataTable]: return self._app.tool_data_tables.data_tables def index(self) -> ToolDataEntryList: diff --git a/lib/galaxy/managers/tools.py b/lib/galaxy/managers/tools.py index 0d4dcc96600e..b44d2e2e81a0 100644 --- a/lib/galaxy/managers/tools.py +++ b/lib/galaxy/managers/tools.py @@ -1,7 +1,6 @@ import logging from typing import ( Any, - Dict, Optional, TYPE_CHECKING, Union, @@ -47,7 +46,7 @@ from galaxy.managers.base import OrmFilterParsersType -def tool_payload_to_tool(app, tool_dict: Dict[str, Any]) -> Optional[Tool]: +def tool_payload_to_tool(app, tool_dict: dict[str, Any]) -> Optional[Tool]: tool_source = YamlToolSource(tool_dict) tool = create_tool_from_source(app, tool_source=tool_source, tool_dir=None) return tool diff --git a/lib/galaxy/managers/users.py b/lib/galaxy/managers/users.py index 8a0b81c53ad2..80922d057f72 100644 --- a/lib/galaxy/managers/users.py +++ b/lib/galaxy/managers/users.py @@ -10,8 +10,6 @@ from datetime import datetime from typing import ( Any, - Dict, - List, Optional, ) @@ -171,7 +169,7 @@ def _stop_all_jobs_from_user(self, user): job.mark_deleted(self.app.config.track_jobs_in_database) session.commit() - def _get_all_active_jobs_from_user(self, user: User) -> List[Job]: + def _get_all_active_jobs_from_user(self, user: User) -> list[Job]: """Get all jobs that are not ready yet and belong to the given user.""" stmt = select(Job).where(and_(Job.user_id == user.id, Job.state.in_(Job.non_ready_states))) jobs = self.session().scalars(stmt) @@ -697,9 +695,9 @@ def add_serializers(self): } ) - def serialize_disk_usage(self, user: model.User) -> List[UserQuotaUsage]: + def serialize_disk_usage(self, user: model.User) -> list[UserQuotaUsage]: usages = user.dictify_usage(self.app.object_store) - rval: List[UserQuotaUsage] = [] + rval: list[UserQuotaUsage] = [] for usage in usages: quota_source_label = usage.quota_source_label quota_percent = self.user_manager.quota(user, quota_source_label=quota_source_label) @@ -741,7 +739,7 @@ class UserDeserializer(base.ModelDeserializer): def add_deserializers(self): super().add_deserializers() - user_deserializers: Dict[str, base.Deserializer] = { + user_deserializers: dict[str, base.Deserializer] = { "active": self.default_deserializer, "username": self.deserialize_username, "preferred_object_store_id": self.deserialize_preferred_object_store_id, diff --git a/lib/galaxy/managers/visualizations.py b/lib/galaxy/managers/visualizations.py index 03c36b9d0c24..2b785a3b4668 100644 --- a/lib/galaxy/managers/visualizations.py +++ b/lib/galaxy/managers/visualizations.py @@ -7,8 +7,6 @@ import logging from typing import ( - Dict, - Tuple, TYPE_CHECKING, Union, ) @@ -80,7 +78,7 @@ class 
VisualizationManager(sharable.SharableModelManager[model.Visualization]): def index_query( self, trans: ProvidesUserContext, payload: VisualizationIndexQueryPayload, include_total_count: bool = False - ) -> Tuple["ScalarResult[model.Visualization]", Union[int, None]]: + ) -> tuple["ScalarResult[model.Visualization]", Union[int, None]]: show_deleted = payload.deleted show_own = payload.show_own show_published = payload.show_published @@ -196,7 +194,7 @@ def __init__(self, app: MinimalManagerApp): def add_serializers(self): super().add_serializers() - serializers: Dict[str, base.Serializer] = {} + serializers: dict[str, base.Serializer] = {} self.serializers.update(serializers) diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py index 98a29628aaa2..253a501a7f7e 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -3,13 +3,11 @@ import os import uuid from typing import ( + Annotated, Any, cast, - Dict, - List, NamedTuple, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -44,7 +42,6 @@ subqueryload, ) from typing_extensions import ( - Annotated, TypeAlias, ) @@ -171,7 +168,7 @@ def __init__(self, app: MinimalManagerApp): def index_query( self, trans: ProvidesUserContext, payload: WorkflowIndexQueryPayload, include_total_count: bool = False - ) -> Tuple["ScalarResult[model.StoredWorkflow]", Optional[int]]: + ) -> tuple["ScalarResult[model.StoredWorkflow]", Optional[int]]: show_published = payload.show_published show_hidden = payload.show_hidden show_deleted = payload.show_deleted @@ -517,7 +514,7 @@ def build_invocations_query( sort_desc=None, include_nested_invocations=True, check_ownership=True, - ) -> Tuple[List, int]: + ) -> tuple[list, int]: """Get invocations owned by the current user.""" stmt = select(WorkflowInvocation) @@ -568,7 +565,7 @@ def build_invocations_query( return invocations, total_matches -MissingToolsT = List[Tuple[str, str, Optional[str], str]] +MissingToolsT = list[tuple[str, str, Optional[str], str]] class CreatedWorkflow(NamedTuple): @@ -797,7 +794,7 @@ def _workflow_from_raw_description( name, is_subworkflow: bool = False, **kwds, - ) -> Tuple[model.Workflow, MissingToolsT]: + ) -> tuple[model.Workflow, MissingToolsT]: # don't commit the workflow or attach its part to the sa session - just build a # a transient model to operate on or render. dry_run = kwds.pop("dry_run", False) @@ -818,8 +815,7 @@ def _workflow_from_raw_description( if "logo_url" in data: workflow.logo_url = data["logo_url"] - dois = data.get("doi", None) - if dois: + if dois := data.get("doi", None): for doi in dois: if not util.validate_doi(doi): raise exceptions.RequestParameterInvalidException(f"Invalid DOI format: {doi}") @@ -847,10 +843,10 @@ def _workflow_from_raw_description( # Assume no errors until we find a step that has some workflow.has_errors = False # Create each step - steps: List[model.WorkflowStep] = [] + steps: list[model.WorkflowStep] = [] # The editor will provide ids for each step that we don't need to save, # but do need to use to make connections - steps_by_external_id: Dict[str, model.WorkflowStep] = {} + steps_by_external_id: dict[str, model.WorkflowStep] = {} # Preload dependent workflows with locally defined content_ids. 
subworkflow_id_map = None @@ -894,8 +890,8 @@ def _workflow_from_raw_description( if ensure_object_added_to_session(workflow, object_in_session=step): break - comments: List[model.WorkflowComment] = [] - comments_by_external_id: Dict[str, model.WorkflowComment] = {} + comments: list[model.WorkflowComment] = [] + comments_by_external_id: dict[str, model.WorkflowComment] = {} for comment_dict in data.get("comments", []): comment = model.WorkflowComment.from_dict(comment_dict) comments.append(comment) @@ -1058,7 +1054,7 @@ def _workflow_to_dict_run(self, trans: ProvidesUserContext, stored, workflow, hi for step in workflow.steps: step_model = None if step.type == "tool": - incoming: Dict[str, Any] = {} + incoming: dict[str, Any] = {} tool = trans.app.toolbox.get_tool( step.tool_id, tool_version=step.tool_version, tool_uuid=step.tool_uuid, user=trans.user ) @@ -1499,7 +1495,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F annotation_str = util.unicodify(annotations[0].annotation) # Pack workflow data into a dictionary and return - data: Dict[str, Any] = {} + data: dict[str, Any] = {} data["a_galaxy_workflow"] = "true" # Placeholder for identifying galaxy workflow data["format-version"] = "0.1" data["name"] = workflow.name @@ -1507,7 +1503,7 @@ def _workflow_to_dict_export(self, trans, stored=None, workflow=None, internal=F data["tags"] = tags_list if workflow.uuid is not None: data["uuid"] = str(workflow.uuid) - steps: Dict[int, Dict[str, Any]] = {} + steps: dict[int, dict[str, Any]] = {} data["steps"] = steps data["comments"] = [comment.to_dict() for comment in workflow.comments] if workflow.reports_config: @@ -1659,7 +1655,7 @@ def callback(input, prefixed_name, **kwargs): visit_input_values(module.tool.inputs, module.state.inputs, callback) # Encode input connections as dictionary - input_conn_dict: Dict[str, List[Dict[str, Any]]] = {} + input_conn_dict: dict[str, list[dict[str, Any]]] = {} unique_input_names = {conn.input_name for conn in input_connections} for input_name in unique_input_names: input_conn_dicts = [] @@ -1847,11 +1843,11 @@ def __load_subworkflows( def __module_from_dict( self, trans, - steps: List[model.WorkflowStep], - steps_by_external_id: Dict[str, model.WorkflowStep], + steps: list[model.WorkflowStep], + steps_by_external_id: dict[str, model.WorkflowStep], step_dict, **kwds, - ) -> Tuple[WorkflowModule, model.WorkflowStep]: + ) -> tuple[WorkflowModule, model.WorkflowStep]: """Create a WorkflowStep model object and corresponding module representing type-specific functionality from the incoming dictionary. 
""" @@ -1873,8 +1869,8 @@ def __module_from_dict( self.add_item_annotation(sa_session, trans.get_user(), step, annotation) # Stick this in the step temporarily - DictConnection: TypeAlias = Dict[str, Union[int, str]] - temp_input_connections: Dict[str, Union[List[DictConnection], DictConnection]] = step_dict.get( + DictConnection: TypeAlias = dict[str, Union[int, str]] + temp_input_connections: dict[str, Union[list[DictConnection], DictConnection]] = step_dict.get( "input_connections", {} ) step.temp_input_connections = temp_input_connections # type: ignore[assignment] @@ -1970,7 +1966,7 @@ def __build_embedded_subworkflow(self, trans, data, workflow_state_resolution_op ).workflow return subworkflow - def __connect_workflow_steps(self, steps: List[model.WorkflowStep], steps_by_external_id, dry_run: bool) -> None: + def __connect_workflow_steps(self, steps: list[model.WorkflowStep], steps_by_external_id, dry_run: bool) -> None: """Second pass to deal with connections between steps. Create workflow connection objects using externally specified ids @@ -2154,7 +2150,7 @@ def safe_wraps(v: Any, nxt: SerializerFunctionWrapHandler) -> str: class RefactorResponse(BaseModel): - action_executions: List[RefactorActionExecution] + action_executions: list[RefactorActionExecution] workflow: Annotated[dict, WrapSerializer(safe_wraps, when_used="json")] dry_run: bool @@ -2192,7 +2188,7 @@ class WorkflowCreateOptions(WorkflowStateResolutionOptions): install_tool_dependencies: bool = False new_tool_panel_section_label: str = "" tool_panel_section_id: str = "" - tool_panel_section_mapping: Dict = {} + tool_panel_section_mapping: dict = {} shed_tool_conf: Optional[str] = None # for workflows imported by archive source diff --git a/lib/galaxy/metadata/__init__.py b/lib/galaxy/metadata/__init__.py index c60737883bd4..db6dfdeb566e 100644 --- a/lib/galaxy/metadata/__init__.py +++ b/lib/galaxy/metadata/__init__.py @@ -7,7 +7,6 @@ from logging import getLogger from typing import ( Any, - Dict, Optional, TYPE_CHECKING, ) @@ -247,7 +246,7 @@ def _metadata_path(what): json.dump(object_store_conf, f) # setup tool - tool_as_dict: Dict[str, Any] = {} + tool_as_dict: dict[str, Any] = {} tool_as_dict["stdio_exit_codes"] = [e.to_dict() for e in tool.stdio_exit_codes] tool_as_dict["stdio_regexes"] = [r.to_dict() for r in tool.stdio_regexes] tool_as_dict["outputs"] = {name: output.to_dict() for name, output in tool.outputs.items()} diff --git a/lib/galaxy/metadata/set_metadata.py b/lib/galaxy/metadata/set_metadata.py index 0dadee4a6098..227bed9172c5 100644 --- a/lib/galaxy/metadata/set_metadata.py +++ b/lib/galaxy/metadata/set_metadata.py @@ -20,7 +20,6 @@ from functools import partial from pathlib import Path from typing import ( - List, Optional, ) @@ -223,7 +222,7 @@ def set_meta(new_dataset_instance, file_dict): export_store = None final_job_state = Job.states.OK - job_messages: List[AnyJobMessage] = [] + job_messages: list[AnyJobMessage] = [] if extended_metadata_collection: tool_dict = metadata_params["tool"] stdio_exit_code_dicts, stdio_regex_dicts = tool_dict["stdio_exit_codes"], tool_dict["stdio_regexes"] diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 66f47f0421fb..97605c5626c0 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -17,7 +17,10 @@ import random import string from collections import defaultdict -from collections.abc import Callable +from collections.abc import ( + Callable, + Iterable, +) from dataclasses import dataclass from datetime import ( 
datetime, @@ -31,16 +34,10 @@ Any, cast, ClassVar, - Dict, Generic, - Iterable, - List, NamedTuple, Optional, overload, - Set, - Tuple, - Type, TYPE_CHECKING, TypeVar, Union, @@ -247,7 +244,7 @@ MAX_WORKFLOW_README_SIZE = 20000 MAX_WORKFLOW_HELP_SIZE = 40000 -STR_TO_STR_DICT = Dict[str, str] +STR_TO_STR_DICT = dict[str, str] def required_object_session(obj) -> Session: @@ -272,11 +269,11 @@ class ConfigurationTemplateEnvironmentVariable(TypedDict): CONFIGURATION_TEMPLATE_ENVIRONMENT_ENTRY = Union[ ConfigurationTemplateEnvironmentSecret, ConfigurationTemplateEnvironmentVariable ] -CONFIGURATION_TEMPLATE_ENVIRONMENT = List[CONFIGURATION_TEMPLATE_ENVIRONMENT_ENTRY] +CONFIGURATION_TEMPLATE_ENVIRONMENT = list[CONFIGURATION_TEMPLATE_ENVIRONMENT_ENTRY] CONFIGURATION_TEMPLATE_CONFIGURATION_VALUE_TYPE = Union[str, bool, int] -CONFIGURATION_TEMPLATE_CONFIGURATION_VARIABLES_TYPE = Dict[str, CONFIGURATION_TEMPLATE_CONFIGURATION_VALUE_TYPE] -CONFIGURATION_TEMPLATE_CONFIGURATION_SECRET_NAMES_TYPE = List[str] -CONFIGURATION_TEMPLATE_DEFINITION_TYPE = Dict[str, Any] +CONFIGURATION_TEMPLATE_CONFIGURATION_VARIABLES_TYPE = dict[str, CONFIGURATION_TEMPLATE_CONFIGURATION_VALUE_TYPE] +CONFIGURATION_TEMPLATE_CONFIGURATION_SECRET_NAMES_TYPE = list[str] +CONFIGURATION_TEMPLATE_DEFINITION_TYPE = dict[str, Any] class TransformAction(TypedDict): @@ -291,8 +288,8 @@ class RequestedTransformAction(TypedDict): action: DatasetSourceTransformActionTypeLiteral -TRANSFORM_ACTIONS = List[TransformAction] -REQUESTED_TRANSFORM_ACTIONS = List[RequestedTransformAction] +TRANSFORM_ACTIONS = list[TransformAction] +REQUESTED_TRANSFORM_ACTIONS = list[RequestedTransformAction] mapper_registry = registry( type_annotation_map={ @@ -344,7 +341,7 @@ def get_uuid(uuid: Optional[Union[UUID, str]] = None) -> UUID: return UUID(str(uuid)) -def to_json(sa_session, column, keys: List[str]): +def to_json(sa_session, column, keys: list[str]): assert sa_session.bind if sa_session.bind.dialect.name == "postgresql": cast: Union[ColumnElement[Any], Cast[Any]] = func.cast(func.convert_from(column, "UTF8"), JSONB) @@ -443,7 +440,7 @@ def auto_propagated_tags(self): class SerializeFilesHandler(Protocol): - def serialize_files(self, dataset: "DatasetInstance", as_dict: Dict[str, Any]) -> None: + def serialize_files(self, dataset: "DatasetInstance", as_dict: dict[str, Any]) -> None: pass @@ -502,7 +499,7 @@ def serialize_files(self, dataset, as_dict): class Serializable(RepresentById): def serialize( self, id_encoder: IdEncodingHelper, serialization_options: SerializationOptions, for_link: bool = False - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Serialize model for a re-population in (potentially) another Galaxy instance.""" if for_link: rval = dict_for(self) @@ -511,7 +508,7 @@ def serialize( return self._serialize(id_encoder, serialization_options) @abc.abstractmethod - def _serialize(self, id_encoder: IdEncodingHelper, serialization_options: SerializationOptions) -> Dict[str, Any]: + def _serialize(self, id_encoder: IdEncodingHelper, serialization_options: SerializationOptions) -> dict[str, Any]: """Serialize model for a re-population in (potentially) another Galaxy instance.""" @@ -581,7 +578,7 @@ def cached_id(galaxy_model_object): class JobLike: - job_messages: Mapped[Optional[List[AnyJobMessage]]] + job_messages: Mapped[Optional[list[AnyJobMessage]]] MAX_NUMERIC = 10 ** (JOB_METRIC_PRECISION - JOB_METRIC_SCALE) - 1 def _init_metrics(self): @@ -622,7 +619,7 @@ def set_streams( tool_stderr, job_stdout=None, job_stderr=None, - job_messages: 
Optional[List[AnyJobMessage]] = None, + job_messages: Optional[list[AnyJobMessage]] = None, ): def shrink_and_unicodify(what, stream): if stream and len(stream) > galaxy.util.DATABASE_MAX_STRING_SIZE: @@ -648,7 +645,7 @@ def shrink_and_unicodify(what, stream): self.job_stderr = None if job_messages is not None: - self.job_messages = cast(Optional[List[AnyJobMessage]], job_messages) + self.job_messages = cast(Optional[list[AnyJobMessage]], job_messages) def log_str(self): extra = "" @@ -708,7 +705,7 @@ def stderr(self, stderr): def calculate_user_disk_usage_statements(user_id: int, quota_source_map: "QuotaSourceMap", for_sqlite: bool = False): """Standalone function so can be reused for postgres directly in pgcleanup.py.""" - statements: List[Tuple[str, Dict[str, Any]]] = [] + statements: list[tuple[str, dict[str, Any]]] = [] default_quota_enabled = quota_source_map.default_quota_enabled default_exclude_ids = quota_source_map.default_usage_excluded_ids() default_cond = "dataset.object_store_id IS NULL" if default_quota_enabled and default_exclude_ids else "" @@ -724,7 +721,7 @@ def calculate_user_disk_usage_statements(user_id: int, quota_source_map: "QuotaS UPDATE galaxy_user SET disk_usage = ({default_usage}) WHERE id = :id """ - params: Dict[str, Any] = {"id": user_id} + params: dict[str, Any] = {"id": user_id} if default_exclude_ids: params["exclude_object_store_ids"] = default_exclude_ids statements.append((default_usage, params)) @@ -854,30 +851,30 @@ class User(Base, Dictifiable, RepresentById): active: Mapped[bool] = mapped_column(index=True, default=True) activation_token: Mapped[Optional[str]] = mapped_column(TrimmedString(64), index=True) - addresses: Mapped[List["UserAddress"]] = relationship( + addresses: Mapped[list["UserAddress"]] = relationship( back_populates="user", order_by=lambda: desc(UserAddress.update_time) ) - custos_auth: Mapped[List["CustosAuthnzToken"]] = relationship(back_populates="user") - chat_exchanges: Mapped[List["ChatExchange"]] = relationship(back_populates="user") - default_permissions: Mapped[List["DefaultUserPermissions"]] = relationship(back_populates="user") - groups: Mapped[List["UserGroupAssociation"]] = relationship(back_populates="user") - histories: Mapped[List["History"]] = relationship( + custos_auth: Mapped[list["CustosAuthnzToken"]] = relationship(back_populates="user") + chat_exchanges: Mapped[list["ChatExchange"]] = relationship(back_populates="user") + default_permissions: Mapped[list["DefaultUserPermissions"]] = relationship(back_populates="user") + groups: Mapped[list["UserGroupAssociation"]] = relationship(back_populates="user") + histories: Mapped[list["History"]] = relationship( back_populates="user", order_by=lambda: desc(History.update_time) # type: ignore[has-type] ) - active_histories: Mapped[List["History"]] = relationship( + active_histories: Mapped[list["History"]] = relationship( primaryjoin=(lambda: (History.user_id == User.id) & (not_(History.deleted)) & (not_(History.archived))), viewonly=True, order_by=lambda: desc(History.update_time), # type: ignore[has-type] ) - galaxy_sessions: Mapped[List["GalaxySession"]] = relationship( + galaxy_sessions: Mapped[list["GalaxySession"]] = relationship( back_populates="user", order_by=lambda: desc(GalaxySession.update_time) ) - object_stores: Mapped[List["UserObjectStore"]] = relationship(back_populates="user") - file_sources: Mapped[List["UserFileSource"]] = relationship(back_populates="user") - quotas: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="user") - 
quota_source_usages: Mapped[List["UserQuotaSourceUsage"]] = relationship(back_populates="user")
-    social_auth: Mapped[List["UserAuthnzToken"]] = relationship(back_populates="user")
-    stored_workflow_menu_entries: Mapped[List["StoredWorkflowMenuEntry"]] = relationship(
+    object_stores: Mapped[list["UserObjectStore"]] = relationship(back_populates="user")
+    file_sources: Mapped[list["UserFileSource"]] = relationship(back_populates="user")
+    quotas: Mapped[list["UserQuotaAssociation"]] = relationship(back_populates="user")
+    quota_source_usages: Mapped[list["UserQuotaSourceUsage"]] = relationship(back_populates="user")
+    social_auth: Mapped[list["UserAuthnzToken"]] = relationship(back_populates="user")
+    stored_workflow_menu_entries: Mapped[list["StoredWorkflowMenuEntry"]] = relationship(
         primaryjoin=(
             lambda: (StoredWorkflowMenuEntry.user_id == User.id)
             & (StoredWorkflowMenuEntry.stored_workflow_id == StoredWorkflow.id)
@@ -887,10 +884,10 @@ class User(Base, Dictifiable, RepresentById):
         cascade="all, delete-orphan",
         collection_class=ordering_list("order_index"),
     )
-    _preferences: Mapped[Dict[str, "UserPreference"]] = relationship(collection_class=attribute_keyed_dict("name"))
-    values: Mapped[List["FormValues"]] = relationship(primaryjoin=(lambda: User.form_values_id == FormValues.id))
+    _preferences: Mapped[dict[str, "UserPreference"]] = relationship(collection_class=attribute_keyed_dict("name"))
+    values: Mapped[list["FormValues"]] = relationship(primaryjoin=(lambda: User.form_values_id == FormValues.id))
     # Add type hint (will this work w/SA?)
-    api_keys: Mapped[List["APIKeys"]] = relationship(
+    api_keys: Mapped[list["APIKeys"]] = relationship(
         back_populates="user",
         order_by=lambda: desc(APIKeys.create_time),
         primaryjoin=(
@@ -900,13 +897,13 @@ class User(Base, Dictifiable, RepresentById):
             )
         ),
     )
-    data_manager_histories: Mapped[List["DataManagerHistoryAssociation"]] = relationship(back_populates="user")
-    roles: Mapped[List["UserRoleAssociation"]] = relationship(back_populates="user")
-    stored_workflows: Mapped[List["StoredWorkflow"]] = relationship(
+    data_manager_histories: Mapped[list["DataManagerHistoryAssociation"]] = relationship(back_populates="user")
+    roles: Mapped[list["UserRoleAssociation"]] = relationship(back_populates="user")
+    stored_workflows: Mapped[list["StoredWorkflow"]] = relationship(
         back_populates="user",
         primaryjoin=(lambda: User.id == StoredWorkflow.user_id),
     )
-    all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship(back_populates="user")
+    all_notifications: Mapped[list["UserNotificationAssociation"]] = relationship(back_populates="user")

     preferences: AssociationProxy[Any]
@@ -1208,7 +1205,7 @@ def _calculate_or_set_disk_usage(self, object_store: "BaseObjectStore"):
         statements = calculate_user_disk_usage_statements(self.id, quota_source_map, for_sqlite)
         for sql, args in statements:
             statement = text(sql)
-            binds: List[BindParameter] = []
+            binds: list[BindParameter] = []
             for key, _ in args.items():
                 expand_binding = key.endswith("s")
                 binds.append(bindparam(key, expanding=expand_binding))
@@ -1247,7 +1244,7 @@ def user_template_environment(user: Optional["User"]):
         user_id = "Anonymous"
         user_email = "Anonymous"
         user_name = "Anonymous"
-    environment: Dict[str, Any] = {}
+    environment: dict[str, Any] = {}
     environment["__user__"] = user
     environment["__user_id__"] = environment["userId"] = user_id
     environment["__user_email__"] = environment["userEmail"] = user_email
@@ -1263,7 +1260,7 @@ def expand_user_properties(user, in_string: str):
     # above templating is for Cheetah in tools where we discouraged user details from being exposed.
     # the following templating if user details in Jinja for object stores and file sources where user
     # details are critical and documented.
-    def config_template_details(self) -> Dict[str, Any]:
+    def config_template_details(self) -> dict[str, Any]:
         return {
             "username": self.username,
             "email": self.email,
@@ -1292,7 +1289,7 @@ def attempt_create_private_role(self):
         session.add(assoc)
         session.commit()

-    def dictify_objectstore_usage(self) -> List[UserObjectstoreUsage]:
+    def dictify_objectstore_usage(self) -> list[UserObjectstoreUsage]:
         session = required_object_session(self)
         rows = calculate_disk_usage_per_objectstore(session, self.id)
         return [
@@ -1301,10 +1298,10 @@ def dictify_objectstore_usage(self) -> List[UserObjectstoreUsage]:
             if r.object_store_id
         ]

-    def dictify_usage(self, object_store=None) -> List[UserQuotaBasicUsage]:
+    def dictify_usage(self, object_store=None) -> list[UserQuotaBasicUsage]:
         """Include object_store to include empty/unused usage info."""
-        used_labels: Set[Union[str, None]] = set()
-        rval: List[UserQuotaBasicUsage] = [
+        used_labels: set[Union[str, None]] = set()
+        rval: list[UserQuotaBasicUsage] = [
             UserQuotaBasicUsage(
                 quota_source_label=None,
                 total_disk_usage=float(self.disk_usage or 0),
@@ -1434,7 +1431,7 @@ class DynamicTool(Base, Dictifiable, RepresentById):
     tool_directory: Mapped[Optional[str]] = mapped_column(Unicode(255))
     hidden: Mapped[Optional[bool]] = mapped_column(default=True)
     active: Mapped[Optional[bool]] = mapped_column(default=True)
-    value: Mapped[Optional[Dict[str, Any]]] = mapped_column(MutableJSONType)
+    value: Mapped[Optional[dict[str, Any]]] = mapped_column(MutableJSONType)
     public: Mapped[bool] = mapped_column(default=False, server_default=false())

     dict_collection_visible_keys = (
@@ -1541,9 +1538,9 @@ class TaskMetricNumeric(BaseJobMetric, RepresentById):


 class IoDicts(NamedTuple):
-    inp_data: Dict[str, Optional["DatasetInstance"]]
-    out_data: Dict[str, "DatasetInstance"]
-    out_collections: Dict[str, Union["DatasetCollectionInstance", "DatasetCollection"]]
+    inp_data: dict[str, Optional["DatasetInstance"]]
+    out_data: dict[str, "DatasetInstance"]
+    out_collections: dict[str, Union["DatasetCollectionInstance", "DatasetCollection"]]


 class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
@@ -1568,7 +1565,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
     copied_from_job_id: Mapped[Optional[int]]
     command_line: Mapped[Optional[str]] = mapped_column(TEXT)
     dependencies: Mapped[Optional[bytes]] = mapped_column(MutableJSONType)
-    job_messages: Mapped[Optional[List[AnyJobMessage]]] = mapped_column(MutableJSONType)
+    job_messages: Mapped[Optional[list[AnyJobMessage]]] = mapped_column(MutableJSONType)
     param_filename: Mapped[Optional[str]] = mapped_column(String(1024))
     runner_name: Mapped[Optional[str]] = mapped_column(String(255))
     job_stdout: Mapped[Optional[str]] = mapped_column(TEXT)
@@ -1588,7 +1585,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
     params: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     handler: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True)
     preferred_object_store_id: Mapped[Optional[str]] = mapped_column(String(255))
-    object_store_id_overrides: Mapped[Optional[Dict[str, Optional[str]]]] = mapped_column(JSONType)
+    object_store_id_overrides: Mapped[Optional[dict[str, Optional[str]]]] = mapped_column(JSONType)
     tool_request_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tool_request.id"), index=True)

     dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship()
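The hunks above are mechanical PEP 585 rewrites. As a minimal standalone sketch of what `pyupgrade --py39-plus` substitutes (not part of the patch; the function and its names are invented for illustration), the built-in generics are subscriptable at runtime on Python 3.9+, so the `typing.List`/`Dict`/`Tuple`/`Set` imports can simply be dropped:

```python
# Hypothetical sketch, not Galaxy code: built-in generics replace the
# deprecated typing aliases; Optional/Union still come from typing pre-3.10.
from typing import Optional


def io_summary(
    inp_data: dict[str, Optional[str]],  # was: Dict[str, Optional[str]]
    out_names: list[str],                # was: List[str]
) -> tuple[int, int]:                    # was: Tuple[int, int]
    return len(inp_data), len(out_names)


print(io_summary({"input1": None}, ["output1"]))  # -> (1, 1)
```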
@@ -1598,31 +1595,31 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
     history: Mapped[Optional["History"]] = relationship(back_populates="jobs")
     library_folder: Mapped[Optional["LibraryFolder"]] = relationship()
     parameters = relationship("JobParameter")
-    input_datasets: Mapped[List["JobToInputDatasetAssociation"]] = relationship(
+    input_datasets: Mapped[list["JobToInputDatasetAssociation"]] = relationship(
         "JobToInputDatasetAssociation", back_populates="job"
     )
-    input_dataset_collections: Mapped[List["JobToInputDatasetCollectionAssociation"]] = relationship(
+    input_dataset_collections: Mapped[list["JobToInputDatasetCollectionAssociation"]] = relationship(
         back_populates="job"
     )
-    input_dataset_collection_elements: Mapped[List["JobToInputDatasetCollectionElementAssociation"]] = relationship(
+    input_dataset_collection_elements: Mapped[list["JobToInputDatasetCollectionElementAssociation"]] = relationship(
         back_populates="job"
     )
-    output_dataset_collection_instances: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship(
+    output_dataset_collection_instances: Mapped[list["JobToOutputDatasetCollectionAssociation"]] = relationship(
         back_populates="job"
     )
-    output_dataset_collections: Mapped[List["JobToImplicitOutputDatasetCollectionAssociation"]] = relationship(
+    output_dataset_collections: Mapped[list["JobToImplicitOutputDatasetCollectionAssociation"]] = relationship(
         back_populates="job"
     )
-    post_job_actions: Mapped[List["PostJobActionAssociation"]] = relationship(back_populates="job")
-    input_library_datasets: Mapped[List["JobToInputLibraryDatasetAssociation"]] = relationship(back_populates="job")
-    output_library_datasets: Mapped[List["JobToOutputLibraryDatasetAssociation"]] = relationship(back_populates="job")
-    external_output_metadata: Mapped[List["JobExternalOutputMetadata"]] = relationship(back_populates="job")
-    tasks: Mapped[List["Task"]] = relationship(back_populates="job")
-    output_datasets: Mapped[List["JobToOutputDatasetAssociation"]] = relationship(back_populates="job")
-    state_history: Mapped[List["JobStateHistory"]] = relationship()
-    text_metrics: Mapped[List["JobMetricText"]] = relationship()
-    numeric_metrics: Mapped[List["JobMetricNumeric"]] = relationship()
-    interactivetool_entry_points: Mapped[List["InteractiveToolEntryPoint"]] = relationship(
+    post_job_actions: Mapped[list["PostJobActionAssociation"]] = relationship(back_populates="job")
+    input_library_datasets: Mapped[list["JobToInputLibraryDatasetAssociation"]] = relationship(back_populates="job")
+    output_library_datasets: Mapped[list["JobToOutputLibraryDatasetAssociation"]] = relationship(back_populates="job")
+    external_output_metadata: Mapped[list["JobExternalOutputMetadata"]] = relationship(back_populates="job")
+    tasks: Mapped[list["Task"]] = relationship(back_populates="job")
+    output_datasets: Mapped[list["JobToOutputDatasetAssociation"]] = relationship(back_populates="job")
+    state_history: Mapped[list["JobStateHistory"]] = relationship()
+    text_metrics: Mapped[list["JobMetricText"]] = relationship()
+    numeric_metrics: Mapped[list["JobMetricNumeric"]] = relationship()
+    interactivetool_entry_points: Mapped[list["InteractiveToolEntryPoint"]] = relationship(
         back_populates="job", uselist=True
     )
     implicit_collection_jobs_association: Mapped["ImplicitCollectionJobsJobAssociation"] = relationship(
@@ -1632,7 +1629,7 @@ class Job(Base, JobLike, UsesCreateAndUpdateTime, Dictifiable, Serializable):
     data_manager_association: Mapped[Optional["DataManagerJobAssociation"]] = relationship(
         back_populates="job", uselist=False
     )
-    history_dataset_collection_associations: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship(
+    history_dataset_collection_associations: Mapped[list["HistoryDatasetCollectionAssociation"]] = relationship(
         back_populates="job"
     )
     workflow_invocation_step: Mapped[Optional["WorkflowInvocationStep"]] = relationship(
@@ -1761,12 +1758,12 @@ def copy_from_job(self, job: "Job", copy_outputs: bool = False):
             job.history.add_pending_items()

     def io_dicts(self, exclude_implicit_outputs=False) -> IoDicts:
-        inp_data: Dict[str, Optional[DatasetInstance]] = {da.name: da.dataset for da in self.input_datasets}
-        out_data: Dict[str, DatasetInstance] = {da.name: da.dataset for da in self.output_datasets}
+        inp_data: dict[str, Optional[DatasetInstance]] = {da.name: da.dataset for da in self.input_datasets}
+        out_data: dict[str, DatasetInstance] = {da.name: da.dataset for da in self.output_datasets}
         inp_data.update([(da.name, da.dataset) for da in self.input_library_datasets])
         out_data.update([(da.name, da.dataset) for da in self.output_library_datasets])

-        out_collections: Dict[str, Union[DatasetCollectionInstance, DatasetCollection]]
+        out_collections: dict[str, Union[DatasetCollectionInstance, DatasetCollection]]
         if not exclude_implicit_outputs:
             out_collections = {
                 obj.name: obj.dataset_collection_instance for obj in self.output_dataset_collection_instances
@@ -2402,7 +2399,7 @@ class Task(Base, JobLike, RepresentById):
     tool_stdout: Mapped[Optional[str]] = mapped_column(TEXT)
     tool_stderr: Mapped[Optional[str]] = mapped_column(TEXT)
     exit_code: Mapped[Optional[int]]
-    job_messages: Mapped[Optional[List[AnyJobMessage]]] = mapped_column(MutableJSONType)
+    job_messages: Mapped[Optional[list[AnyJobMessage]]] = mapped_column(MutableJSONType)
     info: Mapped[Optional[str]] = mapped_column(TrimmedString(255))
     traceback: Mapped[Optional[str]] = mapped_column(TEXT)
     job_id: Mapped[int] = mapped_column(ForeignKey("job.id"), index=True)
@@ -2411,8 +2408,8 @@ class Task(Base, JobLike, RepresentById):
     task_runner_external_id: Mapped[Optional[str]] = mapped_column(String(255))
     prepare_input_files_cmd: Mapped[Optional[str]] = mapped_column(TEXT)
     job: Mapped["Job"] = relationship(back_populates="tasks")
-    text_metrics: Mapped[List["TaskMetricText"]] = relationship()
-    numeric_metrics: Mapped[List["TaskMetricNumeric"]] = relationship()
+    text_metrics: Mapped[list["TaskMetricText"]] = relationship()
+    numeric_metrics: Mapped[list["TaskMetricNumeric"]] = relationship()

     _numeric_metric = TaskMetricNumeric
     _text_metric = TaskMetricText
@@ -2575,7 +2572,7 @@ class JobToInputDatasetAssociation(Base, RepresentById):
     dataset_id: Mapped[int] = mapped_column(ForeignKey("history_dataset_association.id"), index=True, nullable=True)
     dataset_version: Mapped[Optional[int]]
     name: Mapped[str] = mapped_column(String(255), nullable=True)
-    adapter: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONType, nullable=True)
+    adapter: Mapped[Optional[dict[str, Any]]] = mapped_column(JSONType, nullable=True)
     dataset: Mapped["HistoryDatasetAssociation"] = relationship(lazy="joined", back_populates="dependent_jobs")
     job: Mapped["Job"] = relationship(back_populates="input_datasets")
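For the SQLAlchemy 2.0 `Mapped[...]` annotations that dominate these hunks, `List["X"]` and `list["X"]` are interchangeable: the ORM reads the annotation to pick both the Python type and the collection class, and quoted forward references still resolve. A minimal sketch assuming SQLAlchemy 2.0 (`Parent`/`Child` are invented names, not Galaxy models):

```python
# Minimal SQLAlchemy 2.0 sketch showing Mapped[list["X"]] with a quoted
# forward reference configuring an ordinary one-to-many collection.
from typing import Optional

from sqlalchemy import ForeignKey
from sqlalchemy.orm import (
    DeclarativeBase,
    Mapped,
    configure_mappers,
    mapped_column,
    relationship,
)


class Base(DeclarativeBase):
    pass


class Parent(Base):
    __tablename__ = "parent"
    id: Mapped[int] = mapped_column(primary_key=True)
    # was: Mapped[List["Child"]] -- same mapping, no typing import needed
    children: Mapped[list["Child"]] = relationship(back_populates="parent")


class Child(Base):
    __tablename__ = "child"
    id: Mapped[int] = mapped_column(primary_key=True)
    parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("parent.id"))
    parent: Mapped[Optional["Parent"]] = relationship(back_populates="children")


configure_mappers()  # resolves the string forward references eagerly
print(Parent(children=[Child()]).children)  # a plain Python list of Child
```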
ForeignKey("history_dataset_collection_association.id"), index=True, nullable=True ) name: Mapped[str] = mapped_column(String(255), nullable=True) - adapter: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONType, nullable=True) + adapter: Mapped[Optional[dict[str, Any]]] = mapped_column(JSONType, nullable=True) dataset_collection: Mapped["HistoryDatasetCollectionAssociation"] = relationship(lazy="joined") job: Mapped["Job"] = relationship(back_populates="input_dataset_collections") @@ -2637,7 +2634,7 @@ class JobToInputDatasetCollectionElementAssociation(Base, RepresentById): ForeignKey("dataset_collection_element.id"), index=True, nullable=True ) name: Mapped[str] = mapped_column(Unicode(255), nullable=True) - adapter: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONType, nullable=True) + adapter: Mapped[Optional[dict[str, Any]]] = mapped_column(JSONType, nullable=True) dataset_collection_element: Mapped["DatasetCollectionElement"] = relationship(lazy="joined") job: Mapped["Job"] = relationship(back_populates="input_dataset_collection_elements") @@ -2770,7 +2767,7 @@ class ImplicitCollectionJobs(Base, Serializable): id: Mapped[int] = mapped_column(primary_key=True) populated_state: Mapped[str] = mapped_column(TrimmedString(64), default="new") - jobs: Mapped[List["ImplicitCollectionJobsJobAssociation"]] = relationship(back_populates="implicit_collection_jobs") + jobs: Mapped[list["ImplicitCollectionJobsJobAssociation"]] = relationship(back_populates="implicit_collection_jobs") class populated_states(str, Enum): NEW = "new" # New implicit jobs object, unpopulated job associations @@ -2816,7 +2813,7 @@ class PostJobAction(Base, RepresentById): workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"), index=True) action_type: Mapped[str] = mapped_column(String(255)) output_name: Mapped[Optional[str]] = mapped_column(String(255)) - _action_arguments: Mapped[Optional[Dict[str, Any]]] = mapped_column("action_arguments", MutableJSONType) + _action_arguments: Mapped[Optional[dict[str, Any]]] = mapped_column("action_arguments", MutableJSONType) workflow_step: Mapped[Optional["WorkflowStep"]] = relationship( back_populates="post_job_actions", primaryjoin=(lambda: WorkflowStep.id == PostJobAction.workflow_step_id), @@ -2838,7 +2835,7 @@ def action_arguments(self): return self._action_arguments @action_arguments.setter - def action_arguments(self, value: Dict[str, Any]): + def action_arguments(self, value: dict[str, Any]): self._action_arguments = value @@ -2912,7 +2909,7 @@ class FakeDatasetAssociation: def __init__(self, dataset: Optional["Dataset"] = None) -> None: self.dataset = dataset - self.metadata: Dict = {} + self.metadata: dict = {} self.has_deferred_data = False def get_file_name(self, sync_cache: bool = True) -> str: @@ -3152,7 +3149,7 @@ class ChatExchange(Base, RepresentById): job_id: Mapped[Optional[int]] = mapped_column(ForeignKey("job.id"), index=True, nullable=True) user: Mapped["User"] = relationship(back_populates="chat_exchanges") - messages: Mapped[List["ChatExchangeMessage"]] = relationship(back_populates="chat_exchange") + messages: Mapped[list["ChatExchangeMessage"]] = relationship(back_populates="chat_exchange") def __init__(self, user, job_id=None, message=None, **kwargs): self.user = user @@ -3188,9 +3185,9 @@ class Group(Base, Dictifiable, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) name: Mapped[Optional[str]] = mapped_column(String(255), index=True, unique=True) deleted: 
Mapped[Optional[bool]] = mapped_column(index=True, default=False) - quotas: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="group") - roles: Mapped[List["GroupRoleAssociation"]] = relationship(back_populates="group") - users: Mapped[List["UserGroupAssociation"]] = relationship("UserGroupAssociation", back_populates="group") + quotas: Mapped[list["GroupQuotaAssociation"]] = relationship(back_populates="group") + roles: Mapped[list["GroupRoleAssociation"]] = relationship(back_populates="group") + users: Mapped[list["UserGroupAssociation"]] = relationship("UserGroupAssociation", back_populates="group") dict_collection_visible_keys = ["id", "name"] dict_element_visible_keys = ["id", "name"] @@ -3249,7 +3246,7 @@ class Notification(Base, Dictifiable, RepresentById): # content should always be a dict content: Mapped[Optional[bytes]] = mapped_column(DoubleEncodedJsonType) - user_notification_associations: Mapped[List["UserNotificationAssociation"]] = relationship( + user_notification_associations: Mapped[list["UserNotificationAssociation"]] = relationship( back_populates="notification" ) @@ -3349,15 +3346,15 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable archived: Mapped[Optional[bool]] = mapped_column(index=True, default=False, server_default=false()) archive_export_id: Mapped[Optional[int]] = mapped_column(ForeignKey("store_export_association.id"), default=None) - datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( + datasets: Mapped[list["HistoryDatasetAssociation"]] = relationship( back_populates="history", order_by=lambda: asc(HistoryDatasetAssociation.hid) # type: ignore[has-type] ) - exports: Mapped[List["JobExportHistoryArchive"]] = relationship( + exports: Mapped[list["JobExportHistoryArchive"]] = relationship( back_populates="history", primaryjoin=lambda: JobExportHistoryArchive.history_id == History.id, order_by=lambda: desc(JobExportHistoryArchive.id), ) - active_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( + active_datasets: Mapped[list["HistoryDatasetAssociation"]] = relationship( primaryjoin=( lambda: and_( HistoryDatasetAssociation.history_id == History.id, @@ -3367,8 +3364,8 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetAssociation.hid), # type: ignore[has-type] viewonly=True, ) - dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="history") - active_dataset_collections: Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( + dataset_collections: Mapped[list["HistoryDatasetCollectionAssociation"]] = relationship(back_populates="history") + active_dataset_collections: Mapped[list["HistoryDatasetCollectionAssociation"]] = relationship( primaryjoin=( lambda: ( and_( @@ -3380,7 +3377,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), viewonly=True, ) - visible_datasets: Mapped[List["HistoryDatasetAssociation"]] = relationship( + visible_datasets: Mapped[list["HistoryDatasetAssociation"]] = relationship( primaryjoin=( lambda: and_( HistoryDatasetAssociation.history_id == History.id, @@ -3391,7 +3388,7 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetAssociation.hid), # type: ignore[has-type] viewonly=True, ) - visible_dataset_collections: 
Mapped[List["HistoryDatasetCollectionAssociation"]] = relationship( + visible_dataset_collections: Mapped[list["HistoryDatasetCollectionAssociation"]] = relationship( primaryjoin=( lambda: and_( HistoryDatasetCollectionAssociation.history_id == History.id, @@ -3402,23 +3399,23 @@ class History(Base, HasTags, Dictifiable, UsesAnnotations, HasName, Serializable order_by=lambda: asc(HistoryDatasetCollectionAssociation.hid), viewonly=True, ) - tags: Mapped[List["HistoryTagAssociation"]] = relationship( + tags: Mapped[list["HistoryTagAssociation"]] = relationship( order_by=lambda: HistoryTagAssociation.id, back_populates="history" ) - annotations: Mapped[List["HistoryAnnotationAssociation"]] = relationship( + annotations: Mapped[list["HistoryAnnotationAssociation"]] = relationship( order_by=lambda: HistoryAnnotationAssociation.id, back_populates="history" ) - ratings: Mapped[List["HistoryRatingAssociation"]] = relationship( + ratings: Mapped[list["HistoryRatingAssociation"]] = relationship( order_by=lambda: HistoryRatingAssociation.id, back_populates="history", ) - default_permissions: Mapped[List["DefaultHistoryPermissions"]] = relationship(back_populates="history") - users_shared_with: Mapped[List["HistoryUserShareAssociation"]] = relationship(back_populates="history") + default_permissions: Mapped[list["DefaultHistoryPermissions"]] = relationship(back_populates="history") + users_shared_with: Mapped[list["HistoryUserShareAssociation"]] = relationship(back_populates="history") galaxy_sessions = relationship("GalaxySessionToHistoryAssociation", back_populates="history") - workflow_invocations: Mapped[List["WorkflowInvocation"]] = relationship(back_populates="history") + workflow_invocations: Mapped[list["WorkflowInvocation"]] = relationship(back_populates="history") user: Mapped[Optional["User"]] = relationship(back_populates="histories") - jobs: Mapped[List["Job"]] = relationship(back_populates="history") - tool_requests: Mapped[List["ToolRequest"]] = relationship(back_populates="history") + jobs: Mapped[list["Job"]] = relationship(back_populates="history") + tool_requests: Mapped[list["ToolRequest"]] = relationship(back_populates="history") update_time = column_property( select(func.max(HistoryAudit.update_time)).where(HistoryAudit.history_id == id).scalar_subquery(), @@ -3968,9 +3965,9 @@ class Role(Base, Dictifiable, RepresentById): description: Mapped[Optional[str]] = mapped_column(TEXT) type: Mapped[Optional[str]] = mapped_column(String(40), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - dataset_actions: Mapped[List["DatasetPermissions"]] = relationship(back_populates="role") - groups: Mapped[List["GroupRoleAssociation"]] = relationship(back_populates="role") - users: Mapped[List["UserRoleAssociation"]] = relationship(back_populates="role") + dataset_actions: Mapped[list["DatasetPermissions"]] = relationship(back_populates="role") + groups: Mapped[list["GroupRoleAssociation"]] = relationship(back_populates="role") + users: Mapped[list["UserRoleAssociation"]] = relationship(back_populates="role") dict_collection_visible_keys = ["id", "name"] dict_element_visible_keys = ["id", "name", "description", "type"] @@ -4061,9 +4058,9 @@ class Quota(Base, Dictifiable, RepresentById): operation: Mapped[Optional[str]] = mapped_column(String(8)) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) quota_source_label: Mapped[Optional[str]] = mapped_column(String(32), default=None) - default: Mapped[List["DefaultQuotaAssociation"]] = 
relationship("DefaultQuotaAssociation", back_populates="quota") - groups: Mapped[List["GroupQuotaAssociation"]] = relationship(back_populates="quota") - users: Mapped[List["UserQuotaAssociation"]] = relationship(back_populates="quota") + default: Mapped[list["DefaultQuotaAssociation"]] = relationship("DefaultQuotaAssociation", back_populates="quota") + groups: Mapped[list["GroupQuotaAssociation"]] = relationship(back_populates="quota") + users: Mapped[list["UserQuotaAssociation"]] = relationship(back_populates="quota") dict_collection_visible_keys = ["id", "name", "quota_source_label"] dict_element_visible_keys = [ @@ -4312,9 +4309,9 @@ class Dataset(Base, StorableObject, Serializable): total_size: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0)) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType(), unique=True) - actions: Mapped[List["DatasetPermissions"]] = relationship(back_populates="dataset") + actions: Mapped[list["DatasetPermissions"]] = relationship(back_populates="dataset") job: Mapped[Optional["Job"]] = relationship(primaryjoin=(lambda: Dataset.job_id == Job.id)) - active_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( + active_history_associations: Mapped[list["HistoryDatasetAssociation"]] = relationship( primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, @@ -4324,7 +4321,7 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - purged_history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship( + purged_history_associations: Mapped[list["HistoryDatasetAssociation"]] = relationship( primaryjoin=( lambda: and_( Dataset.id == HistoryDatasetAssociation.dataset_id, @@ -4333,7 +4330,7 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - active_library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( + active_library_associations: Mapped[list["LibraryDatasetDatasetAssociation"]] = relationship( primaryjoin=( lambda: and_( Dataset.id == LibraryDatasetDatasetAssociation.dataset_id, @@ -4342,10 +4339,10 @@ class Dataset(Base, StorableObject, Serializable): ), viewonly=True, ) - hashes: Mapped[List["DatasetHash"]] = relationship(back_populates="dataset") - sources: Mapped[List["DatasetSource"]] = relationship(back_populates="dataset") - history_associations: Mapped[List["HistoryDatasetAssociation"]] = relationship(back_populates="dataset") - library_associations: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( + hashes: Mapped[list["DatasetHash"]] = relationship(back_populates="dataset") + sources: Mapped[list["DatasetSource"]] = relationship(back_populates="dataset") + history_associations: Mapped[list["HistoryDatasetAssociation"]] = relationship(back_populates="dataset") + library_associations: Mapped[list["LibraryDatasetDatasetAssociation"]] = relationship( primaryjoin=(lambda: LibraryDatasetDatasetAssociation.table.c.dataset_id == Dataset.id), back_populates="dataset", ) @@ -4682,7 +4679,7 @@ class DatasetSource(Base, Dictifiable, Serializable): # actions that may be applied to this source when creating the dataset requested_transform: Mapped[Optional[REQUESTED_TRANSFORM_ACTIONS]] = mapped_column(MutableJSONType) dataset: Mapped[Optional["Dataset"]] = relationship(back_populates="sources") - hashes: Mapped[List["DatasetSourceHash"]] = relationship(back_populates="source") + hashes: Mapped[list["DatasetSourceHash"]] = relationship(back_populates="source") dict_collection_visible_keys = ["id", 
"source_uri", "extra_files_path", "transform"] dict_element_visible_keys = [ "id", @@ -4809,12 +4806,12 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable): states = Dataset.states conversion_messages = Dataset.conversion_messages permitted_actions = Dataset.permitted_actions - creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]] + creating_job_associations: list[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]] copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"] copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"] - dependent_jobs: List[JobToInputLibraryDatasetAssociation] - implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"] - implicitly_converted_parent_datasets: List["ImplicitlyConvertedDatasetAssociation"] + dependent_jobs: list[JobToInputLibraryDatasetAssociation] + implicitly_converted_datasets: list["ImplicitlyConvertedDatasetAssociation"] + implicitly_converted_parent_datasets: list["ImplicitlyConvertedDatasetAssociation"] validated_states = DatasetValidatedState @@ -5005,7 +5002,7 @@ def metadata_file_types(self): meta_types.append(meta_type) return meta_types - def get_metadata_file_paths_and_extensions(self) -> List[Tuple[str, str]]: + def get_metadata_file_paths_and_extensions(self) -> list[tuple[str, str]]: metadata = self.metadata metadata_files = [] for metadata_name in self.metadata_file_types: @@ -5261,8 +5258,8 @@ def can_convert_to(self, format): return format in self.get_converter_types() def find_conversion_destination( - self, accepted_formats: List[str], **kwd - ) -> Tuple[bool, Optional[str], Optional["DatasetInstance"]]: + self, accepted_formats: list[str], **kwd + ) -> tuple[bool, Optional[str], Optional["DatasetInstance"]]: """Returns ( target_ext, existing converted dataset )""" return self.datatype.find_conversion_destination(self, accepted_formats, _get_datatypes_registry(), **kwd) @@ -5471,7 +5468,7 @@ class HistoryDatasetAssociation(DatasetInstance, HasTags, Dictifiable, UsesAnnot history_id: Mapped[Optional[int]] dataset_id: Mapped[Optional[int]] hidden_beneath_collection_instance: Mapped[Optional["HistoryDatasetCollectionAssociation"]] - tags: Mapped[List["HistoryDatasetAssociationTagAssociation"]] + tags: Mapped[list["HistoryDatasetAssociationTagAssociation"]] def __init__( self, @@ -5925,7 +5922,7 @@ class Library(Base, Dictifiable, HasName, Serializable): description: Mapped[Optional[str]] = mapped_column(TEXT) synopsis: Mapped[Optional[str]] = mapped_column(TEXT) root_folder = relationship("LibraryFolder", back_populates="library_root") - actions: Mapped[List["LibraryPermissions"]] = relationship(back_populates="library") + actions: Mapped[list["LibraryPermissions"]] = relationship(back_populates="library") permitted_actions = get_permitted_actions(filter="LIBRARY") dict_collection_visible_keys = ["id", "name"] @@ -6005,14 +6002,14 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): purged: Mapped[Optional[bool]] = mapped_column(index=True, default=False) genome_build: Mapped[Optional[str]] = mapped_column(TrimmedString(40)) - folders: Mapped[List["LibraryFolder"]] = relationship( + folders: Mapped[list["LibraryFolder"]] = relationship( primaryjoin=(lambda: LibraryFolder.id == LibraryFolder.parent_id), order_by=asc(name), back_populates="parent", ) parent: Mapped[Optional["LibraryFolder"]] = relationship(back_populates="folders", 
remote_side=[id]) - active_folders: Mapped[List["LibraryFolder"]] = relationship( + active_folders: Mapped[list["LibraryFolder"]] = relationship( primaryjoin=("and_(LibraryFolder.parent_id == LibraryFolder.id, not_(LibraryFolder.deleted))"), order_by=asc(name), # """sqlalchemy.exc.ArgumentError: Error creating eager relationship 'active_folders' @@ -6022,7 +6019,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): viewonly=True, ) - datasets: Mapped[List["LibraryDataset"]] = relationship( + datasets: Mapped[list["LibraryDataset"]] = relationship( primaryjoin=( lambda: LibraryDataset.folder_id == LibraryFolder.id and LibraryDataset.library_dataset_dataset_association_id.isnot(None) @@ -6031,7 +6028,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): viewonly=True, ) - active_datasets: Mapped[List["LibraryDataset"]] = relationship( + active_datasets: Mapped[list["LibraryDataset"]] = relationship( primaryjoin=( "and_(LibraryDataset.folder_id == LibraryFolder.id, not_(LibraryDataset.deleted), LibraryDataset.library_dataset_dataset_association_id.isnot(None))" ), @@ -6040,7 +6037,7 @@ class LibraryFolder(Base, Dictifiable, HasName, Serializable): ) library_root = relationship("Library", back_populates="root_folder") - actions: Mapped[List["LibraryFolderPermissions"]] = relationship(back_populates="folder") + actions: Mapped[list["LibraryFolderPermissions"]] = relationship(back_populates="folder") dict_element_visible_keys = [ "id", @@ -6153,7 +6150,7 @@ class LibraryDataset(Base, Serializable): library_dataset_dataset_association = relationship( "LibraryDatasetDatasetAssociation", foreign_keys=library_dataset_dataset_association_id, post_update=True ) - expired_datasets: Mapped[List["LibraryDatasetDatasetAssociation"]] = relationship( + expired_datasets: Mapped[list["LibraryDatasetDatasetAssociation"]] = relationship( foreign_keys=[id, library_dataset_dataset_association_id], primaryjoin=( "and_(LibraryDataset.id == LibraryDatasetDatasetAssociation.library_dataset_id, \ @@ -6162,7 +6159,7 @@ class LibraryDataset(Base, Serializable): viewonly=True, uselist=True, ) - actions: Mapped[List["LibraryDatasetPermissions"]] = relationship(back_populates="library_dataset") + actions: Mapped[list["LibraryDatasetPermissions"]] = relationship(back_populates="library_dataset") # This class acts as a proxy to the currently selected LDDA upload_options = [ @@ -6255,7 +6252,7 @@ def to_dict(self, view="collection"): class LibraryDatasetDatasetAssociation(DatasetInstance, HasName, Serializable): message: Mapped[Optional[str]] - tags: Mapped[List["LibraryDatasetDatasetAssociationTagAssociation"]] + tags: Mapped[list["LibraryDatasetDatasetAssociationTagAssociation"]] def __init__( self, @@ -6436,7 +6433,7 @@ class ExtendedMetadata(Base, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) data: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - children: Mapped[List["ExtendedMetadataIndex"]] = relationship(back_populates="extended_metadata") + children: Mapped[list["ExtendedMetadataIndex"]] = relationship(back_populates="extended_metadata") def __init__(self, data): self.data = data @@ -6679,7 +6676,7 @@ class DatasetCollection(Base, Dictifiable, UsesAnnotations, Serializable): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) fields: Mapped[Optional[bytes]] = mapped_column(JSONType, nullable=True) - elements: Mapped[List["DatasetCollectionElement"]] = relationship( + elements: Mapped[list["DatasetCollectionElement"]] = 
         primaryjoin=(lambda: DatasetCollection.id == DatasetCollectionElement.dataset_collection_id),
         back_populates="collection",
         order_by=lambda: DatasetCollectionElement.element_index,
@@ -6716,11 +6713,11 @@ def _build_nested_collection_attributes_stmt(
         return_entities: Optional[
             Iterable[
                 Union[
-                    Type[HistoryDatasetAssociation],
-                    Type[Dataset],
-                    Type[DatasetPermissions],
-                    Type["DatasetCollection"],
-                    Type["DatasetCollectionElement"],
+                    type[HistoryDatasetAssociation],
+                    type[Dataset],
+                    type[DatasetPermissions],
+                    type["DatasetCollection"],
+                    type["DatasetCollectionElement"],
                 ]
             ]
         ] = None,
@@ -6901,7 +6898,7 @@ def dataset_action_tuples(self):
     @property
     def element_identifiers_extensions_paths_and_metadata_files(
         self,
-    ) -> List[List[Any]]:
+    ) -> list[list[Any]]:
         results = []
         if session := object_session(self):
             stmt = self._build_nested_collection_attributes_stmt(
@@ -7040,7 +7037,7 @@ def copy(
         self,
         destination: Optional["HistoryDatasetCollectionAssociation"] = None,
         element_destination: Optional["History"] = None,
-        dataset_instance_attributes: Optional[Dict[str, Any]] = None,
+        dataset_instance_attributes: Optional[dict[str, Any]] = None,
         flush=True,
         minimize_copies=False,
         copy_hid=True,
@@ -7073,7 +7070,7 @@ def copy_from(self, other_collection: "DatasetCollection", history: "History"):
         for element in other_collection.elements:
             element.copy_to_collection(self, element_destination=history, flush=False, copy_hid=False)

-    def replace_elements_with_copies(self, replacements: List["DatasetCollectionElement"], history: "History"):
+    def replace_elements_with_copies(self, replacements: list["DatasetCollectionElement"], history: "History"):
         assert len(replacements) == len(self.elements)
         for element, replacement in zip(self.elements, replacements):
             assert replacement.element_object
@@ -7222,7 +7219,7 @@ class HistoryDatasetCollectionAssociation(
         remote_side=[id],
         uselist=False,
     )
-    implicit_input_collections: Mapped[List["ImplicitlyCreatedDatasetCollectionInput"]] = relationship(
+    implicit_input_collections: Mapped[list["ImplicitlyCreatedDatasetCollectionInput"]] = relationship(
         primaryjoin=(
             lambda: HistoryDatasetCollectionAssociation.id
             == ImplicitlyCreatedDatasetCollectionInput.dataset_collection_id
@@ -7233,19 +7230,19 @@ class HistoryDatasetCollectionAssociation(
         back_populates="history_dataset_collection_associations",
         uselist=False,
     )
-    tags: Mapped[List["HistoryDatasetCollectionTagAssociation"]] = relationship(
+    tags: Mapped[list["HistoryDatasetCollectionTagAssociation"]] = relationship(
         order_by=lambda: HistoryDatasetCollectionTagAssociation.id,
         back_populates="dataset_collection",
     )
-    annotations: Mapped[List["HistoryDatasetCollectionAssociationAnnotationAssociation"]] = relationship(
+    annotations: Mapped[list["HistoryDatasetCollectionAssociationAnnotationAssociation"]] = relationship(
         order_by=lambda: HistoryDatasetCollectionAssociationAnnotationAssociation.id,
         back_populates="history_dataset_collection",
     )
-    ratings: Mapped[List["HistoryDatasetCollectionRatingAssociation"]] = relationship(
+    ratings: Mapped[list["HistoryDatasetCollectionRatingAssociation"]] = relationship(
         order_by=lambda: HistoryDatasetCollectionRatingAssociation.id,
         back_populates="dataset_collection",
     )
-    creating_job_associations: Mapped[List["JobToOutputDatasetCollectionAssociation"]] = relationship(viewonly=True)
+    creating_job_associations: Mapped[list["JobToOutputDatasetCollectionAssociation"]] = relationship(viewonly=True)

     dict_dbkeysandextensions_visible_keys = ["dbkeys", "extensions"]
     editable_keys = ("name", "deleted", "visible")
@@ -7392,11 +7389,11 @@ def touch(self):
     def to_hda_representative(self, multiple: Literal[False] = False) -> Optional[HistoryDatasetAssociation]: ...

     @overload
-    def to_hda_representative(self, multiple: Literal[True]) -> List[HistoryDatasetAssociation]: ...
+    def to_hda_representative(self, multiple: Literal[True]) -> list[HistoryDatasetAssociation]: ...

     def to_hda_representative(
         self, multiple: bool = False
-    ) -> Union[List[HistoryDatasetAssociation], Optional[HistoryDatasetAssociation]]:
+    ) -> Union[list[HistoryDatasetAssociation], Optional[HistoryDatasetAssociation]]:
         rval = []
         for dataset in self.collection.dataset_elements:
             rval.append(dataset.dataset_instance)
@@ -7487,7 +7484,7 @@ def find_implicit_input_collection(self, name):
     def copy(
         self,
         element_destination: Optional[History] = None,
-        dataset_instance_attributes: Optional[Dict[str, Any]] = None,
+        dataset_instance_attributes: Optional[dict[str, Any]] = None,
         flush: bool = True,
         set_hid: bool = True,
         minimize_copies: bool = False,
@@ -7589,15 +7586,15 @@ class LibraryDatasetCollectionAssociation(Base, DatasetCollectionInstance, Repre
     collection = relationship("DatasetCollection")
     folder = relationship("LibraryFolder")

-    tags: Mapped[List["LibraryDatasetCollectionTagAssociation"]] = relationship(
+    tags: Mapped[list["LibraryDatasetCollectionTagAssociation"]] = relationship(
         order_by=lambda: LibraryDatasetCollectionTagAssociation.id,
         back_populates="dataset_collection",
     )
-    annotations: Mapped[List["LibraryDatasetCollectionAnnotationAssociation"]] = relationship(
+    annotations: Mapped[list["LibraryDatasetCollectionAnnotationAssociation"]] = relationship(
         order_by=lambda: LibraryDatasetCollectionAnnotationAssociation.id,
         back_populates="dataset_collection",
     )
-    ratings: Mapped[List["LibraryDatasetCollectionRatingAssociation"]] = relationship(
+    ratings: Mapped[list["LibraryDatasetCollectionRatingAssociation"]] = relationship(
         order_by=lambda: LibraryDatasetCollectionRatingAssociation.id,
         back_populates="dataset_collection",
     )
@@ -7729,8 +7726,7 @@ def element_object(

     @property
     def auto_propagated_tags(self):
-        first_dataset_instance = self.first_dataset_instance()
-        if first_dataset_instance:
+        if first_dataset_instance := self.first_dataset_instance():
             return [t for t in first_dataset_instance.tags if t.user_tname in AUTO_PROPAGATED_TAGS]
         return []
@@ -7769,7 +7765,7 @@ def copy_to_collection(
         collection: DatasetCollection,
         destination: Optional[HistoryDatasetCollectionAssociation] = None,
         element_destination: Optional[History] = None,
-        dataset_instance_attributes: Optional[Dict[str, Any]] = None,
+        dataset_instance_attributes: Optional[dict[str, Any]] = None,
         flush=True,
         minimize_copies=False,
         copy_hid=True,
@@ -7874,7 +7870,7 @@ class GalaxySession(Base, RepresentById):
     disk_usage: Mapped[Optional[Decimal]] = mapped_column(Numeric(15, 0), index=True)
     last_action: Mapped[Optional[datetime]]
     current_history: Mapped[Optional["History"]] = relationship()
-    histories: Mapped[List["GalaxySessionToHistoryAssociation"]] = relationship(
+    histories: Mapped[list["GalaxySessionToHistoryAssociation"]] = relationship(
         back_populates="galaxy_session",
     )
     user: Mapped[Optional["User"]] = relationship(back_populates="galaxy_sessions")
@@ -7948,7 +7944,7 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpd
     user: Mapped["User"] = relationship(
         primaryjoin=(lambda: User.id == StoredWorkflow.user_id), back_populates="stored_workflows"
     )
-    workflows: Mapped[List["Workflow"]] = relationship(
+    workflows: Mapped[list["Workflow"]] = relationship(
Mapped[List["Workflow"]] = relationship( + workflows: Mapped[list["Workflow"]] = relationship( back_populates="stored_workflow", cascade="all, delete-orphan", primaryjoin=(lambda: StoredWorkflow.id == Workflow.stored_workflow_id), @@ -7960,11 +7956,11 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpd primaryjoin=(lambda: StoredWorkflow.latest_workflow_id == Workflow.id), lazy=False, ) - tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( + tags: Mapped[list["StoredWorkflowTagAssociation"]] = relationship( order_by=lambda: StoredWorkflowTagAssociation.id, back_populates="stored_workflow", ) - owner_tags: Mapped[List["StoredWorkflowTagAssociation"]] = relationship( + owner_tags: Mapped[list["StoredWorkflowTagAssociation"]] = relationship( primaryjoin=( lambda: and_( StoredWorkflow.id == StoredWorkflowTagAssociation.stored_workflow_id, @@ -7974,15 +7970,15 @@ class StoredWorkflow(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpd viewonly=True, order_by=lambda: StoredWorkflowTagAssociation.id, ) - annotations: Mapped[List["StoredWorkflowAnnotationAssociation"]] = relationship( + annotations: Mapped[list["StoredWorkflowAnnotationAssociation"]] = relationship( order_by=lambda: StoredWorkflowAnnotationAssociation.id, back_populates="stored_workflow", ) - ratings: Mapped[List["StoredWorkflowRatingAssociation"]] = relationship( + ratings: Mapped[list["StoredWorkflowRatingAssociation"]] = relationship( order_by=lambda: StoredWorkflowRatingAssociation.id, back_populates="stored_workflow", ) - users_shared_with: Mapped[List["StoredWorkflowUserShareAssociation"]] = relationship( + users_shared_with: Mapped[list["StoredWorkflowUserShareAssociation"]] = relationship( back_populates="stored_workflow" ) @@ -8121,16 +8117,16 @@ class Workflow(Base, Dictifiable, RepresentById): has_cycles: Mapped[Optional[bool]] has_errors: Mapped[Optional[bool]] reports_config: Mapped[Optional[bytes]] = mapped_column(JSONType) - creator_metadata: Mapped[Optional[List[Dict[str, Any]]]] = mapped_column(JSONType) + creator_metadata: Mapped[Optional[list[dict[str, Any]]]] = mapped_column(JSONType) license: Mapped[Optional[str]] = mapped_column(TEXT) - source_metadata: Mapped[Optional[Dict[str, str]]] = mapped_column(JSONType) + source_metadata: Mapped[Optional[dict[str, str]]] = mapped_column(JSONType) readme: Mapped[Optional[str]] = mapped_column(Text) logo_url: Mapped[Optional[str]] = mapped_column(Text) help: Mapped[Optional[str]] = mapped_column(Text) uuid: Mapped[Optional[Union[UUID, str]]] = mapped_column(UUIDType) - doi: Mapped[Optional[List[str]]] = mapped_column(JSON) + doi: Mapped[Optional[list[str]]] = mapped_column(JSON) - steps: Mapped[List["WorkflowStep"]] = relationship( + steps: Mapped[list["WorkflowStep"]] = relationship( "WorkflowStep", back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowStep.workflow_id), @@ -8138,7 +8134,7 @@ class Workflow(Base, Dictifiable, RepresentById): cascade="all, delete-orphan", lazy=False, ) - comments: Mapped[List["WorkflowComment"]] = relationship( + comments: Mapped[list["WorkflowComment"]] = relationship( back_populates="workflow", primaryjoin=(lambda: Workflow.id == WorkflowComment.workflow_id), cascade="all, delete-orphan", @@ -8169,8 +8165,7 @@ def __init__(self, uuid=None): def validates_readme(self, key, readme): if readme is None: return None - size = len(readme) - if size > MAX_WORKFLOW_README_SIZE: + if (size := len(readme)) > MAX_WORKFLOW_README_SIZE: raise ValueError( f"Workflow readme too 
large ({size}), maximum allowed length ({MAX_WORKFLOW_README_SIZE})." ) @@ -8180,8 +8175,7 @@ def validates_readme(self, key, readme): def validates_help(self, key, help): if help is None: return None - size = len(help) - if size > MAX_WORKFLOW_HELP_SIZE: + if (size := len(help)) > MAX_WORKFLOW_HELP_SIZE: raise ValueError(f"Workflow help too large ({size}), maximum allowed length ({MAX_WORKFLOW_HELP_SIZE}).") return help @@ -8200,7 +8194,7 @@ def to_dict(self, view="collection", value_mapper=None): return rval @property - def steps_by_id(self) -> Dict[int, "WorkflowStep"]: + def steps_by_id(self) -> dict[int, "WorkflowStep"]: steps = {} for step in self.steps: step_id = step.id @@ -8323,7 +8317,7 @@ def log_str(self): return f"Workflow[id={self.id}{extra}]" -InputConnDictType = Dict[str, Union[Dict[str, Any], List[Dict[str, Any]]]] +InputConnDictType = dict[str, Union[dict[str, Any], list[dict[str, Any]]]] class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime): @@ -8344,7 +8338,7 @@ class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime): type: Mapped[Optional[str]] = mapped_column(String(64)) tool_id: Mapped[Optional[str]] = mapped_column(TEXT) tool_version: Mapped[Optional[str]] = mapped_column(TEXT) - tool_inputs: Mapped[Optional[Dict[str, Any]]] = mapped_column(JSONType) + tool_inputs: Mapped[Optional[dict[str, Any]]] = mapped_column(JSONType) tool_errors: Mapped[Optional[bytes]] = mapped_column(JSONType) position: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) config: Mapped[Optional[bytes]] = mapped_column(JSONType) @@ -8367,17 +8361,17 @@ class WorkflowStep(Base, RepresentById, UsesCreateAndUpdateTime): dynamic_tool: Mapped[Optional["DynamicTool"]] = relationship( primaryjoin=(lambda: DynamicTool.id == WorkflowStep.dynamic_tool_id) ) - tags: Mapped[List["WorkflowStepTagAssociation"]] = relationship( + tags: Mapped[list["WorkflowStepTagAssociation"]] = relationship( order_by=lambda: WorkflowStepTagAssociation.id, back_populates="workflow_step" ) - annotations: Mapped[List["WorkflowStepAnnotationAssociation"]] = relationship( + annotations: Mapped[list["WorkflowStepAnnotationAssociation"]] = relationship( order_by=lambda: WorkflowStepAnnotationAssociation.id, back_populates="workflow_step", ) post_job_actions = relationship("PostJobAction", back_populates="workflow_step") - inputs: Mapped[List["WorkflowStepInput"]] = relationship("WorkflowStepInput", back_populates="workflow_step") - workflow_outputs: Mapped[List["WorkflowOutput"]] = relationship(back_populates="workflow_step") - output_connections: Mapped[List["WorkflowStepConnection"]] = relationship( + inputs: Mapped[list["WorkflowStepInput"]] = relationship("WorkflowStepInput", back_populates="workflow_step") + workflow_outputs: Mapped[list["WorkflowOutput"]] = relationship(back_populates="workflow_step") + output_connections: Mapped[list["WorkflowStepConnection"]] = relationship( primaryjoin=(lambda: WorkflowStepConnection.output_step_id == WorkflowStep.id) ) workflow: Mapped["Workflow"] = relationship( @@ -8400,7 +8394,7 @@ def __init__(self): # TODO: code using these should be refactored to not depend on these non-persistent fields self.module: Optional[WorkflowModule] self.state: Optional[DefaultToolState] - self.upgrade_messages: Optional[Dict] + self.upgrade_messages: Optional[dict] @reconstructor def init_on_load(self): @@ -8443,7 +8437,7 @@ def setup_inputs_by_name(self): # Ensure input_connections has already been set. # Make connection information available on each step by input name. 
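Alongside the typing rewrites, `auto-walrus` collapses an assignment immediately followed by a test on the assigned name into one assignment expression, as in `validates_readme`/`validates_help` above and `effective_label` below. A hypothetical sketch of the pattern (the constant and function are invented, not Galaxy code):

```python
# Hypothetical sketch of the auto-walrus rewrite: assign-then-test
# becomes a single `if` with an assignment expression (Python 3.8+).
MAX_README_SIZE = 20  # invented limit, for illustration only


def validate_readme(readme: str) -> str:
    # before:
    #     size = len(readme)
    #     if size > MAX_README_SIZE:
    if (size := len(readme)) > MAX_README_SIZE:
        raise ValueError(f"readme too large ({size} > {MAX_README_SIZE})")
    return readme


print(validate_readme("short enough"))
```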
@@ -8515,7 +8509,7 @@ def unique_workflow_outputs(self):
         # Older Galaxy workflows may have multiple WorkflowOutputs
         # per "output_name", when serving these back to the editor
         # feed only a "best" output per "output_name.""
-        outputs: Dict[str, WorkflowOutput] = {}
+        outputs: dict[str, WorkflowOutput] = {}
         for workflow_output in self.workflow_outputs:
             output_name = workflow_output.output_name
@@ -8574,7 +8568,7 @@ def workflow_output_for(self, output_name):
                 break
         return target_output

-    def copy_to(self, copied_step: "WorkflowStep", step_mapping: Dict[int, "WorkflowStep"], user: User):
+    def copy_to(self, copied_step: "WorkflowStep", step_mapping: dict[int, "WorkflowStep"], user: User):
         copied_step.order_index = self.order_index
         copied_step.type = self.type
         copied_step.tool_id = self.tool_id
@@ -8637,8 +8631,7 @@ def log_str(self):

     @property
     def effective_label(self) -> Optional[str]:
-        label = self.label
-        if label is not None:
+        if (label := self.label) is not None:
             return label
         elif self.is_input_type:
             tool_inputs = self.tool_inputs
@@ -8684,7 +8677,7 @@ class WorkflowStepInput(Base, RepresentById):
         cascade="all",
         primaryjoin=(lambda: WorkflowStepInput.workflow_step_id == WorkflowStep.id),
     )
-    connections: Mapped[List["WorkflowStepConnection"]] = relationship(
+    connections: Mapped[list["WorkflowStepConnection"]] = relationship(
         back_populates="input_step_input",
         primaryjoin=(lambda: WorkflowStepConnection.input_step_input_id == WorkflowStepInput.id),
     )
@@ -8832,7 +8825,7 @@ class WorkflowComment(Base, RepresentById):
         remote_side=[id],
     )

-    child_comments: Mapped[List["WorkflowComment"]] = relationship(
+    child_comments: Mapped[list["WorkflowComment"]] = relationship(
         primaryjoin=(lambda: WorkflowComment.parent_comment_id == WorkflowComment.id),
         back_populates="parent_comment",
     )
@@ -8913,7 +8906,7 @@ class StoredWorkflowMenuEntry(Base, RepresentById):
 @dataclass
 class InputWithRequest:
     input: Any
-    request: Dict[str, Any]
+    request: dict[str, Any]


 @dataclass
@@ -8941,10 +8934,10 @@ class WorkflowInvocation(Base, UsesCreateAndUpdateTime, Dictifiable, Serializabl
     input_parameters = relationship("WorkflowRequestInputParameter", back_populates="workflow_invocation")
     step_states = relationship("WorkflowRequestStepState", back_populates="workflow_invocation")
     input_step_parameters = relationship("WorkflowRequestInputStepParameter", back_populates="workflow_invocation")
-    input_datasets: Mapped[List["WorkflowRequestToInputDatasetAssociation"]] = relationship(
+    input_datasets: Mapped[list["WorkflowRequestToInputDatasetAssociation"]] = relationship(
         "WorkflowRequestToInputDatasetAssociation", back_populates="workflow_invocation"
     )
-    input_dataset_collections: Mapped[List["WorkflowRequestToInputDatasetCollectionAssociation"]] = relationship(
+    input_dataset_collections: Mapped[list["WorkflowRequestToInputDatasetCollectionAssociation"]] = relationship(
         "WorkflowRequestToInputDatasetCollectionAssociation",
         back_populates="workflow_invocation",
     )
@@ -9235,8 +9228,8 @@ def input_associations(self):
                 inputs.append(input_dataset_collection_assoc)
         return inputs

-    def inputs_requiring_materialization(self) -> List[InputToMaterialize]:
-        hdas_to_materialize: List[InputToMaterialize] = []
+    def inputs_requiring_materialization(self) -> list[InputToMaterialize]:
+        hdas_to_materialize: list[InputToMaterialize] = []
         for input_dataset_assoc in self.input_datasets:
             request = input_dataset_assoc.request
             if request:
@@ -9421,7 +9414,7 @@ def attach_step(request_to_content):
             else:
                 request_to_content.workflow_step = step

-        request: Optional[Dict[str, Any]] = None
+        request: Optional[dict[str, Any]] = None
         if isinstance(content, InputWithRequest):
             request = content.request
             content = content.input
@@ -9446,15 +9439,14 @@ def attach_step(request_to_content):
             attach_step(request_to_content)
             self.input_step_parameters.append(request_to_content)

-    def recover_inputs(self) -> Tuple[Dict[str, Any], str]:
-        inputs: Dict[str, Any] = {}
+    def recover_inputs(self) -> tuple[dict[str, Any], str]:
+        inputs: dict[str, Any] = {}
         inputs_by = "name"
         have_referenced_steps_by_order_index = False

         def best_step_reference(workflow_step: "WorkflowStep") -> str:
-            label = workflow_step.effective_label
-            if label is not None:
+            if (label := workflow_step.effective_label) is not None:
                 return label
             nonlocal have_referenced_steps_by_order_index
             have_referenced_steps_by_order_index = True
@@ -9619,7 +9611,7 @@ class WorkflowInvocationStepObjectStores(NamedTuple):
     preferred_object_store_id: Optional[str]
     preferred_outputs_object_store_id: Optional[str]
     preferred_intermediate_object_store_id: Optional[str]
-    step_effective_outputs: Optional[List["EffectiveOutput"]]
+    step_effective_outputs: Optional[list["EffectiveOutput"]]

     def is_output_name_an_effective_output(self, output_name: str) -> bool:
         if self.step_effective_outputs is None:
@@ -9742,7 +9734,7 @@ def preferred_object_stores(self) -> WorkflowInvocationStepObjectStores:
         preferred_object_store_id = None
         preferred_outputs_object_store_id = None
         preferred_intermediate_object_store_id = None
-        step_effective_outputs: Optional[List[EffectiveOutput]] = None
+        step_effective_outputs: Optional[list[EffectiveOutput]] = None

         workflow_invocation = self.workflow_invocation
         for input_parameter in workflow_invocation.input_parameters:
@@ -9885,7 +9877,7 @@ class WorkflowRequestStepState(Base, Dictifiable, Serializable):
         ForeignKey("workflow_invocation.id", onupdate="CASCADE", ondelete="CASCADE"), index=True
     )
     workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"))
-    value: Mapped[Optional[Dict[str, Any]]] = mapped_column(MutableJSONType)
+    value: Mapped[Optional[dict[str, Any]]] = mapped_column(MutableJSONType)
     workflow_step: Mapped[Optional["WorkflowStep"]] = relationship()
     workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="step_states")
@@ -9908,7 +9900,7 @@ class WorkflowRequestToInputDatasetAssociation(Base, Dictifiable, Serializable):
     workflow_invocation_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_invocation.id"), index=True)
     workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id"))
     dataset_id: Mapped[Optional[int]] = mapped_column(ForeignKey("history_dataset_association.id"), index=True)
-    request: Mapped[Optional[Dict]] = mapped_column(JSONType)
+    request: Mapped[Optional[dict]] = mapped_column(JSONType)

     workflow_step: Mapped[Optional["WorkflowStep"]] = relationship()
     dataset: Mapped[Optional["HistoryDatasetAssociation"]] = relationship()
@@ -9944,7 +9936,7 @@ class WorkflowRequestToInputDatasetCollectionAssociation(Base, Dictifiable, Seri
     workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(
         back_populates="input_dataset_collections"
     )
-    request: Mapped[Optional[Dict]] = mapped_column(JSONType)
+    request: Mapped[Optional[dict]] = mapped_column(JSONType)
mapped_column(JSONType) history_content_type = "dataset_collection" dict_collection_visible_keys = ["id", "workflow_invocation_id", "workflow_step_id", "dataset_collection_id", "name"] @@ -9968,7 +9960,7 @@ class WorkflowRequestInputStepParameter(Base, Dictifiable, Serializable): workflow_invocation_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_invocation.id"), index=True) workflow_step_id: Mapped[Optional[int]] = mapped_column(ForeignKey("workflow_step.id")) parameter_value: Mapped[Optional[bytes]] = mapped_column(MutableJSONType) - request: Mapped[Optional[Dict]] = mapped_column(JSONType) + request: Mapped[Optional[dict]] = mapped_column(JSONType) workflow_step: Mapped[Optional["WorkflowStep"]] = relationship() workflow_invocation: Mapped[Optional["WorkflowInvocation"]] = relationship(back_populates="input_step_parameters") @@ -10292,7 +10284,7 @@ class FormDefinitionCurrent(Base, RepresentById): update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) latest_form_id: Mapped[Optional[int]] = mapped_column(ForeignKey("form_definition.id"), index=True) deleted: Mapped[Optional[bool]] = mapped_column(index=True, default=False) - forms: Mapped[List["FormDefinition"]] = relationship( + forms: Mapped[list["FormDefinition"]] = relationship( back_populates="form_definition_current", cascade="all, delete-orphan", primaryjoin=(lambda: FormDefinitionCurrent.id == FormDefinition.form_definition_current_id), @@ -10717,7 +10709,7 @@ class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime): slug: Mapped[Optional[str]] = mapped_column(TEXT) published: Mapped[Optional[bool]] = mapped_column(index=True, default=False) user: Mapped["User"] = relationship() - revisions: Mapped[List["PageRevision"]] = relationship( + revisions: Mapped[list["PageRevision"]] = relationship( cascade="all, delete-orphan", primaryjoin=(lambda: Page.id == PageRevision.page_id), back_populates="page", @@ -10727,17 +10719,17 @@ class Page(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpdateTime): primaryjoin=(lambda: Page.latest_revision_id == PageRevision.id), lazy=False, ) - tags: Mapped[List["PageTagAssociation"]] = relationship( + tags: Mapped[list["PageTagAssociation"]] = relationship( order_by=lambda: PageTagAssociation.id, back_populates="page" ) - annotations: Mapped[List["PageAnnotationAssociation"]] = relationship( + annotations: Mapped[list["PageAnnotationAssociation"]] = relationship( order_by=lambda: PageAnnotationAssociation.id, back_populates="page" ) - ratings: Mapped[List["PageRatingAssociation"]] = relationship( + ratings: Mapped[list["PageRatingAssociation"]] = relationship( order_by=lambda: PageRatingAssociation.id, back_populates="page", ) - users_shared_with: Mapped[List["PageUserShareAssociation"]] = relationship(back_populates="page") + users_shared_with: Mapped[list["PageUserShareAssociation"]] = relationship(back_populates="page") # Set up proxy so that # Page.users_shared_with @@ -10845,7 +10837,7 @@ class Visualization(Base, HasTags, Dictifiable, RepresentById, UsesCreateAndUpda published: Mapped[Optional[bool]] = mapped_column(default=False, index=True) user: Mapped["User"] = relationship() - revisions: Mapped[List["VisualizationRevision"]] = relationship( + revisions: Mapped[list["VisualizationRevision"]] = relationship( back_populates="visualization", cascade="all, delete-orphan", primaryjoin=(lambda: Visualization.id == VisualizationRevision.visualization_id), @@ -10855,18 +10847,18 @@ class Visualization(Base, HasTags, 
Dictifiable, RepresentById, UsesCreateAndUpda primaryjoin=(lambda: Visualization.latest_revision_id == VisualizationRevision.id), lazy=False, ) - tags: Mapped[List["VisualizationTagAssociation"]] = relationship( + tags: Mapped[list["VisualizationTagAssociation"]] = relationship( order_by=lambda: VisualizationTagAssociation.id, back_populates="visualization" ) - annotations: Mapped[List["VisualizationAnnotationAssociation"]] = relationship( + annotations: Mapped[list["VisualizationAnnotationAssociation"]] = relationship( order_by=lambda: VisualizationAnnotationAssociation.id, back_populates="visualization", ) - ratings: Mapped[List["VisualizationRatingAssociation"]] = relationship( + ratings: Mapped[list["VisualizationRatingAssociation"]] = relationship( order_by=lambda: VisualizationRatingAssociation.id, back_populates="visualization", ) - users_shared_with: Mapped[List["VisualizationUserShareAssociation"]] = relationship(back_populates="visualization") + users_shared_with: Mapped[list["VisualizationUserShareAssociation"]] = relationship(back_populates="visualization") average_rating = None @@ -10978,7 +10970,7 @@ class Tag(Base, RepresentById): type: Mapped[Optional[int]] parent_id: Mapped[Optional[int]] = mapped_column(ForeignKey("tag.id")) name: Mapped[Optional[str]] = mapped_column(TrimmedString(255)) - children: Mapped[List["Tag"]] = relationship(back_populates="parent") + children: Mapped[list["Tag"]] = relationship(back_populates="parent") parent: Mapped[Optional["Tag"]] = relationship(back_populates="children", remote_side=[id]) def __str__(self): @@ -11263,7 +11255,7 @@ class Vault(Base): key: Mapped[str] = mapped_column(Text, primary_key=True) parent_key: Mapped[Optional[str]] = mapped_column(Text, ForeignKey(key), index=True) - children: Mapped[List["Vault"]] = relationship(back_populates="parent") + children: Mapped[list["Vault"]] = relationship(back_populates="parent") parent: Mapped[Optional["Vault"]] = relationship(back_populates="children", remote_side=[key]) value: Mapped[Optional[str]] = mapped_column(Text) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) @@ -11536,7 +11528,7 @@ def template(self) -> ObjectStoreTemplate: return ObjectStoreTemplate(**self.template_definition or {}) def object_store_configuration( - self, secrets: SecretsDict, environment: EnvironmentDict, templates: Optional[List[ObjectStoreTemplate]] = None + self, secrets: SecretsDict, environment: EnvironmentDict, templates: Optional[list[ObjectStoreTemplate]] = None ) -> ObjectStoreConfiguration: if templates is None: templates = [self.template] @@ -11608,7 +11600,7 @@ def file_source_configuration( secrets: SecretsDict, environment: EnvironmentDict, implicit: ImplicitConfigurationParameters, - templates: Optional[List[FileSourceTemplate]] = None, + templates: Optional[list[FileSourceTemplate]] = None, ) -> FileSourceConfiguration: if templates is None: templates = [self.template] @@ -11646,7 +11638,7 @@ class ToolLandingRequest(Base): uuid: Mapped[Union[UUID, str]] = mapped_column(UUIDType(), index=True) tool_id: Mapped[str] = mapped_column(String(255)) tool_version: Mapped[Optional[str]] = mapped_column(String(255), default=None) - request_state: Mapped[Optional[Dict]] = mapped_column(JSONType) + request_state: Mapped[Optional[dict]] = mapped_column(JSONType) client_secret: Mapped[Optional[str]] = mapped_column(String(255), default=None) public: Mapped[bool] = mapped_column(Boolean) @@ -11665,7 +11657,7 @@ class WorkflowLandingRequest(Base): create_time: Mapped[datetime] = 
mapped_column(default=now, nullable=True) update_time: Mapped[Optional[datetime]] = mapped_column(index=True, default=now, onupdate=now, nullable=True) uuid: Mapped[Union[UUID, str]] = mapped_column(UUIDType(), index=True) - request_state: Mapped[Optional[Dict]] = mapped_column(JSONType) + request_state: Mapped[Optional[dict]] = mapped_column(JSONType) client_secret: Mapped[Optional[str]] = mapped_column(String(255), default=None) workflow_source: Mapped[Optional[str]] = mapped_column(String(255), default=None) workflow_source_type: Mapped[Optional[str]] = mapped_column(String(255), default=None) diff --git a/lib/galaxy/model/base.py b/lib/galaxy/model/base.py index 88557be25be5..a100f044a07a 100644 --- a/lib/galaxy/model/base.py +++ b/lib/galaxy/model/base.py @@ -13,9 +13,6 @@ ) from types import ModuleType from typing import ( - Dict, - List, - Type, TYPE_CHECKING, Union, ) @@ -49,7 +46,7 @@ # of a request (which run within a threadpool) to see changes to the ContextVar # state. See https://github.com/tiangolo/fastapi/issues/953#issuecomment-586006249 # for details -REQUEST_ID: ContextVar[Union[Dict[str, str], None]] = ContextVar("request_id", default=None) +REQUEST_ID: ContextVar[Union[dict[str, str], None]] = ContextVar("request_id", default=None) def check_database_connection(session): @@ -71,7 +68,7 @@ def check_database_connection(session): # TODO: Refactor this to be a proper class, not a bunch. class ModelMapping(Bunch): - def __init__(self, model_modules: List[ModuleType], engine): + def __init__(self, model_modules: list[ModuleType], engine): self.engine = engine self._SessionLocal = sessionmaker(autoflush=False) versioned_session(self._SessionLocal) @@ -79,7 +76,7 @@ def __init__(self, model_modules: List[ModuleType], engine): self.session = context self.scoped_registry = context.registry - model_classes: Dict[str, type] = {} + model_classes: dict[str, type] = {} for module in model_modules: name_class_pairs = getmembers(module, isclass) filtered_module_classes_dict = dict(m for m in name_class_pairs if m[1].__module__ == module.__name__) @@ -143,10 +140,10 @@ class SharedModelMapping(ModelMapping): a way to do app.model. for common code shared by the tool shed and Galaxy. 
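The model and base-mapping hunks above are mechanical PEP 585 rewrites: `Mapped[List[...]]`, `Mapped[Optional[Dict]]`, and `ContextVar[Union[Dict[str, str], None]]` all switch to the builtin generics. One subtlety worth noting: unlike most annotations, SQLAlchemy 2.0 inspects `Mapped[...]` at runtime to configure columns and relationships, so the builtin forms genuinely require Python 3.9+ (the `--py39-plus` pyupgrade target); on 3.8 a module like this would fail at import time with `TypeError: 'type' object is not subscriptable`. A minimal sketch of the pattern, using simplified stand-ins loosely modeled on the patch's `Page`/`PageRevision` (not the real Galaxy models):

```python
from typing import Optional

from sqlalchemy import create_engine, ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship


class Base(DeclarativeBase):
    pass


class Page(Base):
    __tablename__ = "page"

    id: Mapped[int] = mapped_column(primary_key=True)
    # The builtin generic works because SQLAlchemy resolves this annotation
    # at runtime, which PEP 585 supports from Python 3.9 onward.
    revisions: Mapped[list["PageRevision"]] = relationship(back_populates="page")


class PageRevision(Base):
    __tablename__ = "page_revision"

    id: Mapped[int] = mapped_column(primary_key=True)
    page_id: Mapped[Optional[int]] = mapped_column(ForeignKey("page.id"))
    page: Mapped[Optional["Page"]] = relationship(back_populates="revisions")


# Force mapper configuration and table creation to prove the
# lowercase-generic annotations resolve correctly.
Base.registry.configure()
Base.metadata.create_all(create_engine("sqlite://"))
```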
""" - User: Union[Type["GalaxyUser"], Type["ToolShedUser"]] - GalaxySession: Union[Type["GalaxyGalaxySession"], Type["ToolShedGalaxySession"]] - APIKeys: Union[Type["GalaxyAPIKeys"], Type["ToolShedAPIKeys"]] - PasswordResetToken: Union[Type["GalaxyPasswordResetToken"], Type["ToolShedPasswordResetToken"]] + User: Union[type["GalaxyUser"], type["ToolShedUser"]] + GalaxySession: Union[type["GalaxyGalaxySession"], type["ToolShedGalaxySession"]] + APIKeys: Union[type["GalaxyAPIKeys"], type["ToolShedAPIKeys"]] + PasswordResetToken: Union[type["GalaxyPasswordResetToken"], type["ToolShedPasswordResetToken"]] def versioned_objects(iter): diff --git a/lib/galaxy/model/database_object_names.py b/lib/galaxy/model/database_object_names.py index 864f821e56a6..928f8c64d0d2 100644 --- a/lib/galaxy/model/database_object_names.py +++ b/lib/galaxy/model/database_object_names.py @@ -4,7 +4,6 @@ """ from typing import ( - List, Union, ) @@ -22,12 +21,12 @@ } -def build_foreign_key_name(table_name: str, column_names: Union[str, List]) -> str: +def build_foreign_key_name(table_name: str, column_names: Union[str, list]) -> str: columns = _as_str(column_names) return f"{table_name}_{columns}_fkey" -def build_unique_constraint_name(table_name: str, column_names: Union[str, List]) -> str: +def build_unique_constraint_name(table_name: str, column_names: Union[str, list]) -> str: columns = _as_str(column_names) return f"{table_name}_{columns}_key" @@ -36,10 +35,10 @@ def build_check_constraint_name(table_name: str, column_name: str) -> str: return f"{table_name}_{column_name}_check" -def build_index_name(table_name: str, column_names: Union[str, List]) -> str: +def build_index_name(table_name: str, column_names: Union[str, list]) -> str: columns = _as_str(column_names) return f"ix_{table_name}_{columns}" -def _as_str(column_names: Union[str, List]) -> str: +def _as_str(column_names: Union[str, list]) -> str: return "_".join(listify(column_names)) diff --git a/lib/galaxy/model/dataset_collections/adapters.py b/lib/galaxy/model/dataset_collections/adapters.py index 0cbf080aa91f..c231cedc1037 100644 --- a/lib/galaxy/model/dataset_collections/adapters.py +++ b/lib/galaxy/model/dataset_collections/adapters.py @@ -1,5 +1,4 @@ from typing import ( - List, TYPE_CHECKING, ) @@ -190,9 +189,9 @@ def collection_type(self) -> str: class PromoteDatasetsToCollection(CollectionAdapter): _collection_type: str - _elements: List["TransientCollectionAdapterDatasetInstanceElement"] + _elements: list["TransientCollectionAdapterDatasetInstanceElement"] - def __init__(self, elements: List["TransientCollectionAdapterDatasetInstanceElement"], collection_type: str): + def __init__(self, elements: list["TransientCollectionAdapterDatasetInstanceElement"], collection_type: str): assert collection_type in ["paired", "paired_or_unpaired"] self._collection_type = collection_type self._elements = elements diff --git a/lib/galaxy/model/dataset_collections/auto_identifiers.py b/lib/galaxy/model/dataset_collections/auto_identifiers.py index 519f50915f90..e0fc26d3799a 100644 --- a/lib/galaxy/model/dataset_collections/auto_identifiers.py +++ b/lib/galaxy/model/dataset_collections/auto_identifiers.py @@ -2,10 +2,7 @@ import os.path from typing import ( - List, Optional, - Set, - Tuple, ) from urllib.parse import urlparse @@ -29,13 +26,13 @@ def filename_to_element_identifier(filename_or_uri: str): def fill_in_identifiers( - uris_to_identifiers: List[Tuple[str, Optional[str]]], config: Optional[FillIdentifiers] -) -> List[Optional[str]]: + 
uris_to_identifiers: list[tuple[str, Optional[str]]], config: Optional[FillIdentifiers] +) -> list[Optional[str]]: if config is None: config = FillIdentifiers() - new_identifiers: List[Optional[str]] = [] - seen_identifiers: Set[Optional[str]] = set() + new_identifiers: list[Optional[str]] = [] + seen_identifiers: set[Optional[str]] = set() for uri, identifier in uris_to_identifiers: if identifier is None and config.fill_inner_list_identifiers: basename = filename_to_element_identifier(uri) diff --git a/lib/galaxy/model/dataset_collections/auto_pairing.py b/lib/galaxy/model/dataset_collections/auto_pairing.py index f74908985299..351c4657e52c 100644 --- a/lib/galaxy/model/dataset_collections/auto_pairing.py +++ b/lib/galaxy/model/dataset_collections/auto_pairing.py @@ -1,12 +1,9 @@ import re from dataclasses import dataclass from typing import ( - Dict, Generic, - List, Optional, Protocol, - Tuple, TypeVar, ) @@ -20,7 +17,7 @@ class HasName(Protocol): T = TypeVar("T", bound=HasName) # matches pairing.ts in the client -COMMON_FILTERS: Dict[str, Tuple[str, str]] = { +COMMON_FILTERS: dict[str, tuple[str, str]] = { "illumina": ("_1", "_2"), "Rs": ("_R1", "_R2"), "dot12s": (".1.fastq", ".2.fastq"), @@ -73,16 +70,15 @@ def to_pair(self) -> Pair[T]: @dataclass class AutoPairResponse(Generic[T]): - paired: List[Pair[T]] - unpaired: List[T] + paired: list[Pair[T]] + unpaired: list[T] -def auto_pair(elements: List[T]) -> AutoPairResponse[T]: - filter_type = guess_initial_filter_type(elements) - if filter_type: +def auto_pair(elements: list[T]) -> AutoPairResponse[T]: + if filter_type := guess_initial_filter_type(elements): forward_filter, reverse_filter = COMMON_FILTERS[filter_type] forward_elements, reverse_elements = split_elements_by_filter(elements, forward_filter, reverse_filter) - partial_pairs: Dict[str, PartialPair[T]] = {} + partial_pairs: dict[str, PartialPair[T]] = {} for forward_element in forward_elements: forward_base = filename_to_element_identifier(re.sub(f"{forward_filter}", "", forward_element.name)) @@ -95,13 +91,13 @@ def auto_pair(elements: List[T]) -> AutoPairResponse[T]: else: partial_pairs[reverse_base].reverse = reverse_element - unpaired: List[T] = elements.copy() + unpaired: list[T] = elements.copy() for forward_element in forward_elements: unpaired.remove(forward_element) for reverse_element in reverse_elements: unpaired.remove(reverse_element) - full_pairs: List[Pair[T]] = [] + full_pairs: list[Pair[T]] = [] for partial_pair in partial_pairs.values(): if partial_pair.forward is None: assert partial_pair.reverse @@ -116,7 +112,7 @@ def auto_pair(elements: List[T]) -> AutoPairResponse[T]: return AutoPairResponse(paired=[], unpaired=elements) -def guess_initial_filter_type(elements: List[T]) -> Optional[str]: +def guess_initial_filter_type(elements: list[T]) -> Optional[str]: illumina = 0 dot12s = 0 Rs = 0 @@ -143,9 +139,9 @@ def guess_initial_filter_type(elements: List[T]) -> Optional[str]: return "illumina" -def split_elements_by_filter(elements: List[T], forward_filter: str, reverse_filter: str) -> Tuple[List[T], List[T]]: +def split_elements_by_filter(elements: list[T], forward_filter: str, reverse_filter: str) -> tuple[list[T], list[T]]: filters = [re.compile(forward_filter), re.compile(reverse_filter)] - split: Tuple[List[T], List[T]] = ([], []) + split: tuple[list[T], list[T]] = ([], []) for element in elements: for i, filter in enumerate(filters): if element.name and filter.search(element.name): diff --git a/lib/galaxy/model/dataset_collections/builder.py 
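`auto_pair` above shows the `auto-walrus` half of this cleanup: `filter_type = guess_initial_filter_type(elements)` followed by `if filter_type:` collapses into one assignment expression. The same rewrite appears for `dispatch_pending_notifications` earlier and, in parenthesized form, for `_validate_hash` later in the patch. A before/after sketch of the pattern with a hypothetical `lookup` helper:

```python
from typing import Optional

KNOWN_FILTERS = {"illumina", "dot12s", "Rs"}


def lookup(name: str) -> Optional[str]:
    return name if name in KNOWN_FILTERS else None


# Before: bind to a name, then test it on a separate line.
def describe(name: str) -> str:
    match = lookup(name)
    if match:
        return f"matched {match}"
    return "no match"


# After: the PEP 572 walrus operator binds and tests in one expression.
def describe_walrus(name: str) -> str:
    if match := lookup(name):
        return f"matched {match}"
    return "no match"


# When the bound value is compared rather than truth-tested, the assignment
# expression must be parenthesized, as in the _validate_hash hunk below.
def is_known(name: str) -> bool:
    return (match := lookup(name)) is not None and match in KNOWN_FILTERS


print(describe("illumina"), describe_walrus("nope"), is_known("Rs"))
```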
b/lib/galaxy/model/dataset_collections/builder.py index 2518be6c9dee..77edeb4f13fe 100644 --- a/lib/galaxy/model/dataset_collections/builder.py +++ b/lib/galaxy/model/dataset_collections/builder.py @@ -1,9 +1,6 @@ from typing import ( cast, - Dict, - List, Optional, - Set, TYPE_CHECKING, Union, ) @@ -30,8 +27,8 @@ def build_collection( type: "BaseDatasetCollectionType", dataset_instances: "DatasetInstanceMapping", collection: Optional[DatasetCollection] = None, - associated_identifiers: Optional[Set[str]] = None, - fields: Optional[Union[str, List["FieldDict"]]] = None, + associated_identifiers: Optional[set[str]] = None, + fields: Optional[Union[str, list["FieldDict"]]] = None, ) -> DatasetCollection: """ Build DatasetCollection with populated DatasetcollectionElement objects @@ -48,8 +45,8 @@ def set_collection_elements( dataset_collection: DatasetCollection, type: "BaseDatasetCollectionType", dataset_instances: "DatasetInstanceMapping", - associated_identifiers: Set[str], - fields: Optional[Union[str, List["FieldDict"]]] = None, + associated_identifiers: set[str], + fields: Optional[Union[str, list["FieldDict"]]] = None, ) -> DatasetCollection: new_element_keys = OrderedSet(dataset_instances.keys()) - associated_identifiers new_dataset_instances = {k: dataset_instances[k] for k in new_element_keys} @@ -72,8 +69,8 @@ def set_collection_elements( return dataset_collection -def guess_fields(dataset_instances: "DatasetInstanceMapping") -> List["FieldDict"]: - fields: List[FieldDict] = [] +def guess_fields(dataset_instances: "DatasetInstanceMapping") -> list["FieldDict"]: + fields: list[FieldDict] = [] for identifier, element in dataset_instances.items(): if isinstance(element, DatasetCollection): return [] @@ -83,7 +80,7 @@ def guess_fields(dataset_instances: "DatasetInstanceMapping") -> List["FieldDict return fields -ElementsDict = Dict[str, Union["CollectionBuilder", DatasetInstance]] +ElementsDict = dict[str, Union["CollectionBuilder", DatasetInstance]] class CollectionBuilder: @@ -94,12 +91,12 @@ def __init__(self, collection_type_description: "CollectionTypeDescription"): self._current_elements: ElementsDict = {} # Store collection here so we don't recreate the collection all the time self.collection: Optional[DatasetCollection] = None - self.associated_identifiers: Set[str] = set() + self.associated_identifiers: set[str] = set() def replace_elements_in_collection( self, template_collection: Union["CollectionAdapter", DatasetCollection], - replacement_dict: Dict[DatasetInstance, DatasetInstance], + replacement_dict: dict[DatasetInstance, DatasetInstance], ) -> None: self._current_elements = self._replace_elements_in_collection( template_collection=template_collection, @@ -109,7 +106,7 @@ def replace_elements_in_collection( def _replace_elements_in_collection( self, template_collection: Union["CollectionAdapter", DatasetCollection], - replacement_dict: Dict[DatasetInstance, DatasetInstance], + replacement_dict: dict[DatasetInstance, DatasetInstance], ) -> ElementsDict: elements: ElementsDict = {} for element in template_collection.elements: @@ -155,7 +152,7 @@ def build_elements(self) -> "DatasetInstanceMapping": return new_elements else: self._current_elements = {} - return cast(Dict[str, DatasetInstance], elements) + return cast(dict[str, DatasetInstance], elements) def build(self) -> DatasetCollection: type_plugin = self._collection_type_description.rank_type_plugin() diff --git a/lib/galaxy/model/dataset_collections/query.py b/lib/galaxy/model/dataset_collections/query.py index 
01ca4f19f20b..3c66b11e3ff5 100644 --- a/lib/galaxy/model/dataset_collections/query.py +++ b/lib/galaxy/model/dataset_collections/query.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -21,7 +20,7 @@ class HdcaLike(Protocol): class DataCollectionParameterLike(Protocol): @property - def collection_types(self) -> Optional[List[str]]: + def collection_types(self) -> Optional[list[str]]: """Return a list of collection type strings the parameter accepts.""" @@ -39,7 +38,7 @@ def from_collection_type(collection_type, collection_type_descriptions): return HistoryQuery(**kwargs) @staticmethod - def from_collection_types(collection_types: Optional[List[str]], collection_type_descriptions): + def from_collection_types(collection_types: Optional[list[str]], collection_type_descriptions): if collection_types: collection_type_descriptions = [ collection_type_descriptions.for_collection_type(t) for t in collection_types diff --git a/lib/galaxy/model/dataset_collections/registry.py b/lib/galaxy/model/dataset_collections/registry.py index 8c0a52faa012..ad86dfd7a924 100644 --- a/lib/galaxy/model/dataset_collections/registry.py +++ b/lib/galaxy/model/dataset_collections/registry.py @@ -1,19 +1,14 @@ -from typing import ( - List, - Type, -) - from galaxy import model from .types import ( BaseDatasetCollectionType, - list, paired, paired_or_unpaired, record, ) +from .types.list import ListDatasetCollectionType -PLUGIN_CLASSES: List[Type[BaseDatasetCollectionType]] = [ - list.ListDatasetCollectionType, +PLUGIN_CLASSES: list[type[BaseDatasetCollectionType]] = [ + ListDatasetCollectionType, paired.PairedDatasetCollectionType, record.RecordDatasetCollectionType, paired_or_unpaired.PairedOrUnpairedDatasetCollectionType, diff --git a/lib/galaxy/model/dataset_collections/rule_target_columns.py b/lib/galaxy/model/dataset_collections/rule_target_columns.py index 6232a1cf8ede..98cd51d8cc6e 100644 --- a/lib/galaxy/model/dataset_collections/rule_target_columns.py +++ b/lib/galaxy/model/dataset_collections/rule_target_columns.py @@ -1,8 +1,6 @@ import re from dataclasses import dataclass from typing import ( - Dict, - List, Optional, ) @@ -14,7 +12,7 @@ target_model_by_type, ) -COLUMN_TITLE_PREFIXES: Dict[str, RuleBuilderMappingTargetKey] = { +COLUMN_TITLE_PREFIXES: dict[str, RuleBuilderMappingTargetKey] = { "name": "name", "listname": "collection_name", "collectionname": "collection_name", @@ -128,9 +126,9 @@ def name(self): return f"{self.type}_{self.type_index}" -def column_titles_to_headers(column_titles: List[str]) -> List[HeaderColumn]: - headers: List[HeaderColumn] = [] - headers_of_type_seen: Dict[RuleBuilderMappingTargetKey, int] = {} +def column_titles_to_headers(column_titles: list[str]) -> list[HeaderColumn]: + headers: list[HeaderColumn] = [] + headers_of_type_seen: dict[RuleBuilderMappingTargetKey, int] = {} for column_title in column_titles: column_type_: Optional[RuleBuilderMappingTargetKey] = column_title_to_target_type(column_title) diff --git a/lib/galaxy/model/dataset_collections/rule_target_models.py b/lib/galaxy/model/dataset_collections/rule_target_models.py index f4986f189b28..a98d4c949e0d 100644 --- a/lib/galaxy/model/dataset_collections/rule_target_models.py +++ b/lib/galaxy/model/dataset_collections/rule_target_models.py @@ -1,6 +1,4 @@ from typing import ( - Dict, - List, Literal, Optional, ) @@ -26,13 +24,13 @@ class ColumnTarget(BaseModel): label: str help: Optional[str] - modes: Optional[List[RuleBuilderModes]] = None + modes: Optional[list[RuleBuilderModes]] = 
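The `registry.py` hunk above is the one change in this stretch that is not purely cosmetic. The old `from .types import list` bound the submodule `galaxy.model.dataset_collections.types.list` to the local name `list`, shadowing the builtin; once the annotation becomes `list[type[BaseDatasetCollectionType]]`, that name would subscript the module and blow up. Importing `ListDatasetCollectionType` directly sidesteps the collision. A self-contained simulation of the hazard; the module object here is fabricated purely for illustration:

```python
import types

# Simulate what ``from .types import list`` used to bind: a *module*
# object named "list" that shadows the builtin in this scope.
list = types.ModuleType("list")

try:
    plugins = list[type]  # subscripts the module, not the builtin
except TypeError as exc:
    print(f"shadowed: {exc}")  # 'module' object is not subscriptable

del list  # drop the shadow; the builtin becomes visible again

plugins: list[type] = [int, str]  # now a valid PEP 585 annotation
print(plugins)
```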
None importType: Optional[RuleBuilderImportType] = None multiple: Optional[bool] = False columnHeader: Optional[str] = None advanced: Optional[bool] = False requiresFtp: Optional[bool] = False - example_column_names: Optional[List[str]] = None + example_column_names: Optional[list[str]] = None @property def example_column_names_as_str(self) -> Optional[str]: @@ -63,7 +61,7 @@ def example_column_names_as_str(self) -> Optional[str]: ] -ColumnTargetsConfig = Dict[RuleBuilderMappingTargetKey, ColumnTarget] +ColumnTargetsConfig = dict[RuleBuilderMappingTargetKey, ColumnTarget] ColumnTargetsConfigRootModel = RootModel[ColumnTargetsConfig] diff --git a/lib/galaxy/model/dataset_collections/type_description.py b/lib/galaxy/model/dataset_collections/type_description.py index 3a1e72fe5b8e..bafd7a00bbdd 100644 --- a/lib/galaxy/model/dataset_collections/type_description.py +++ b/lib/galaxy/model/dataset_collections/type_description.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, TYPE_CHECKING, Union, @@ -17,7 +16,7 @@ def __init__(self, type_registry=DATASET_COLLECTION_TYPES_REGISTRY): # I think. self.type_registry = type_registry - def for_collection_type(self, collection_type, fields: Optional[Union[str, List["FieldDict"]]] = None): + def for_collection_type(self, collection_type, fields: Optional[Union[str, list["FieldDict"]]] = None): assert collection_type is not None return CollectionTypeDescription(collection_type, self, fields=fields) @@ -33,7 +32,7 @@ def __init__( self, collection_type: Union[str, "CollectionTypeDescription"], collection_type_description_factory: CollectionTypeDescriptionFactory, - fields: Optional[Union[str, List["FieldDict"]]] = None, + fields: Optional[Union[str, list["FieldDict"]]] = None, ): if isinstance(collection_type, CollectionTypeDescription): self.collection_type = collection_type.collection_type diff --git a/lib/galaxy/model/dataset_collections/types/__init__.py b/lib/galaxy/model/dataset_collections/types/__init__.py index af852b42e80b..c5d097160758 100644 --- a/lib/galaxy/model/dataset_collections/types/__init__.py +++ b/lib/galaxy/model/dataset_collections/types/__init__.py @@ -3,9 +3,11 @@ ABCMeta, abstractmethod, ) -from typing import ( +from collections.abc import ( Iterable, Mapping, +) +from typing import ( TYPE_CHECKING, Union, ) diff --git a/lib/galaxy/model/dataset_collections/types/list.py b/lib/galaxy/model/dataset_collections/types/list.py index a34697ea48e3..31aa3faeb4bf 100644 --- a/lib/galaxy/model/dataset_collections/types/list.py +++ b/lib/galaxy/model/dataset_collections/types/list.py @@ -1,5 +1,5 @@ +from collections.abc import Iterable from typing import ( - Iterable, TYPE_CHECKING, ) diff --git a/lib/galaxy/model/dataset_collections/types/paired.py b/lib/galaxy/model/dataset_collections/types/paired.py index 7194da7eba8c..cab9315df1e0 100644 --- a/lib/galaxy/model/dataset_collections/types/paired.py +++ b/lib/galaxy/model/dataset_collections/types/paired.py @@ -1,5 +1,5 @@ +from collections.abc import Iterable from typing import ( - Iterable, TYPE_CHECKING, ) diff --git a/lib/galaxy/model/dataset_collections/types/paired_or_unpaired.py b/lib/galaxy/model/dataset_collections/types/paired_or_unpaired.py index 12b3729ea9c3..98c1411e0443 100644 --- a/lib/galaxy/model/dataset_collections/types/paired_or_unpaired.py +++ b/lib/galaxy/model/dataset_collections/types/paired_or_unpaired.py @@ -1,5 +1,5 @@ +from collections.abc import Iterable from typing import ( - Iterable, TYPE_CHECKING, ) diff --git 
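The `types/*.py` hunks above move `Iterable` and `Mapping` from `typing` to `collections.abc`. The `typing` aliases for the container ABCs have been deprecated since Python 3.9, and the `collections.abc` originals are themselves subscriptable from 3.9 on, so a single import now serves as both the runtime ABC and the annotation. A short sketch with a hypothetical function, not from the patch:

```python
from collections.abc import Iterable, Mapping


# collections.abc classes are subscriptable on Python 3.9+ (PEP 585),
# replacing the deprecated typing.Iterable / typing.Mapping aliases.
def total_sizes(named_sizes: Mapping[str, Iterable[int]]) -> dict[str, int]:
    return {name: sum(sizes) for name, sizes in named_sizes.items()}


print(total_sizes({"a": [1, 2, 3], "b": (4, 5)}))  # {'a': 6, 'b': 9}
```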
a/lib/galaxy/model/dataset_collections/types/record.py b/lib/galaxy/model/dataset_collections/types/record.py index 3b8c3aea8093..0cafbd81a915 100644 --- a/lib/galaxy/model/dataset_collections/types/record.py +++ b/lib/galaxy/model/dataset_collections/types/record.py @@ -1,5 +1,5 @@ +from collections.abc import Iterable from typing import ( - Iterable, TYPE_CHECKING, ) diff --git a/lib/galaxy/model/dataset_collections/types/semantics.py b/lib/galaxy/model/dataset_collections/types/semantics.py index 051594830f46..8005ccbc169a 100644 --- a/lib/galaxy/model/dataset_collections/types/semantics.py +++ b/lib/galaxy/model/dataset_collections/types/semantics.py @@ -8,11 +8,8 @@ from io import StringIO from typing import ( Any, - Dict, - List, NamedTuple, Optional, - Tuple, Union, ) @@ -51,15 +48,15 @@ class ExampleTests(BaseModel): class DatasetsDeclaration(BaseModel): - datasets: List[str] + datasets: list[str] def as_latex(self) -> str: return ", ".join([f"$ {d} $" for d in self.datasets]) class ToolDefinition(BaseModel): - inputs: Dict[str, str] = Field(alias="in") - outputs: Dict[str, str] = Field(alias="out") + inputs: dict[str, str] = Field(alias="in") + outputs: dict[str, str] = Field(alias="out") def as_latex(self) -> str: inputs = ", ".join([f"{k}: \\text{{ {v} }}" for (k, v) in self.inputs.items()]) @@ -74,7 +71,7 @@ def as_latex(self) -> str: return self.tool.as_latex() -def elements_to_latex(elements: Dict[str, Any]): +def elements_to_latex(elements: dict[str, Any]): elements_as_strings = [] for identifier, value in elements.items(): if value is None: @@ -91,7 +88,7 @@ def elements_to_latex(elements: Dict[str, Any]): class CollectionDefinition(NamedTuple): collection_type: str - elements: Dict[str, Any] + elements: dict[str, Any] def as_latex(self) -> str: collection_type = self.collection_type.replace("_", "\\_") @@ -99,7 +96,7 @@ def as_latex(self) -> str: class CollectionDeclarations(BaseModel): - collections: Dict[str, CollectionDefinition] + collections: dict[str, CollectionDefinition] Expression = Union[str, DatasetsDeclaration, ToolDeclaration, CollectionDeclarations] @@ -107,7 +104,7 @@ class CollectionDeclarations(BaseModel): class Example(BaseModel): label: str - assumptions: Optional[List[Expression]] = None + assumptions: Optional[list[Expression]] = None then: Optional[str] = None is_valid: bool = True tests: Optional[ExampleTests] = None @@ -117,7 +114,7 @@ class ExampleEntry(BaseModel): example: Example -YAMLRootModel = RootModel[List[Union[DocEntry, ExampleEntry]]] +YAMLRootModel = RootModel[list[Union[DocEntry, ExampleEntry]]] WORDS_TO_TEXTIFY = ["list", "forward", "reverse", "mapOver"] @@ -137,11 +134,11 @@ def expression_to_latex(expression: str, wrap: bool = True): return f"{expression}" -def collect_docs_with_examples(root: YAMLRootModel) -> List[Tuple[DocEntry, List[ExampleEntry]]]: +def collect_docs_with_examples(root: YAMLRootModel) -> list[tuple[DocEntry, list[ExampleEntry]]]: docs_with_examples = [] current_doc: Optional[DocEntry] = None - current_examples: List[ExampleEntry] = [] + current_examples: list[ExampleEntry] = [] for entry in root.root: if isinstance(entry, DocEntry): if current_doc: diff --git a/lib/galaxy/model/dataset_collections/workbook_util.py b/lib/galaxy/model/dataset_collections/workbook_util.py index 47546464a8c0..c89dffa7d8ed 100644 --- a/lib/galaxy/model/dataset_collections/workbook_util.py +++ b/lib/galaxy/model/dataset_collections/workbook_util.py @@ -8,7 +8,6 @@ from dataclasses import dataclass from io import BytesIO from textwrap 
import wrap -from typing import List from openpyxl import ( load_workbook, @@ -100,8 +99,8 @@ class HasHelp: @dataclass class HelpConfiguration: - instructions: List[str] - columns: List[HasHelp] + instructions: list[str] + columns: list[HasHelp] text_width: int column_width: int help_row_start: int = 3 @@ -109,12 +108,12 @@ class HelpConfiguration: @dataclass class ExtraColumnsHelpConfiguration: - instructions: List[str] + instructions: list[str] text_width: int - column_targets: List[ColumnTarget] + column_targets: list[ColumnTarget] -def wrap_instructions(instruction: str, help_width: int) -> List[str]: +def wrap_instructions(instruction: str, help_width: int) -> list[str]: return wrap(instruction, width=help_width) @@ -179,7 +178,7 @@ def add_instructions_to_sheet(worksheet: Worksheet, help_configuration: HelpConf def write_instructions_to_sheet( - worksheet: Worksheet, instructions: List[str], start_row: int, help_label_index: int, help_width: int + worksheet: Worksheet, instructions: list[str], start_row: int, help_label_index: int, help_width: int ) -> int: current_row = start_row @@ -193,10 +192,10 @@ def write_instructions_to_sheet( return current_row -def read_column_header_titles(worksheet: Worksheet) -> List[str]: +def read_column_header_titles(worksheet: Worksheet) -> list[str]: """Read the first row of the worksheet and return a list of these column titles.""" index = 1 - titles: List[str] = [] + titles: list[str] = [] while True: value = worksheet.cell(1, index).value if not value: diff --git a/lib/galaxy/model/db/role.py b/lib/galaxy/model/db/role.py index 8362767c93b0..586427a1e59b 100644 --- a/lib/galaxy/model/db/role.py +++ b/lib/galaxy/model/db/role.py @@ -1,5 +1,3 @@ -from typing import Dict - from sqlalchemy import ( and_, false, @@ -55,7 +53,7 @@ def get_displayable_roles(session, trans_user, user_is_admin, security_agent): return roles -def get_private_role_user_emails_dict(session) -> Dict[int, str]: +def get_private_role_user_emails_dict(session) -> dict[int, str]: """Return a mapping of private role ids to user emails.""" stmt = select(UserRoleAssociation.role_id, User.email).join(Role).join(User).where(Role.type == Role.types.PRIVATE) roleid_email_tuples = session.execute(stmt).all() diff --git a/lib/galaxy/model/deferred.py b/lib/galaxy/model/deferred.py index 1e12b39aac51..d218746cb093 100644 --- a/lib/galaxy/model/deferred.py +++ b/lib/galaxy/model/deferred.py @@ -364,6 +364,5 @@ def materializer_factory( def _validate_hash(path: str, describes_hash: DescribesHash, what: str) -> None: - hash_value = describes_hash.hash_value - if hash_value is not None: + if (hash_value := describes_hash.hash_value) is not None: verify_hash(path, hash_func_name=describes_hash.hash_func_name, hash_value=hash_value) diff --git a/lib/galaxy/model/index_filter_util.py b/lib/galaxy/model/index_filter_util.py index 0050d514dbc8..1b339e3224df 100644 --- a/lib/galaxy/model/index_filter_util.py +++ b/lib/galaxy/model/index_filter_util.py @@ -1,7 +1,6 @@ """Utility functions used to adapt galaxy.util.search to Galaxy model index queries.""" from typing import ( - List, Union, ) @@ -33,7 +32,7 @@ def text_column_filter(column, term: FilteredTerm): RawTextSearchableT = Union[BinaryExpression, InstrumentedAttribute] -def raw_text_column_filter(columns: List[RawTextSearchableT], term: RawTextTerm): +def raw_text_column_filter(columns: list[RawTextSearchableT], term: RawTextTerm): like_text = f"%{term.text}%" return or_(*[c.ilike(like_text) if isinstance(c, InstrumentedAttribute) else c for 
c in columns]) diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py index e449c4f2fb45..5c92492f2c7a 100644 --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -2,7 +2,6 @@ from threading import local from typing import ( Optional, - Type, TYPE_CHECKING, ) @@ -27,7 +26,7 @@ class GalaxyModelMapping(SharedModelMapping): - User: Type["GalaxyUser"] + User: type["GalaxyUser"] security_agent: GalaxyRBACAgent thread_local_log: Optional[local] diff --git a/lib/galaxy/model/metadata.py b/lib/galaxy/model/metadata.py index 1213b66bf09a..5571d52358d0 100644 --- a/lib/galaxy/model/metadata.py +++ b/lib/galaxy/model/metadata.py @@ -10,11 +10,13 @@ import tempfile import weakref from collections import OrderedDict -from collections.abc import Mapping +from collections.abc import ( + Iterator, + Mapping, +) from os.path import abspath from typing import ( Any, - Iterator, Optional, TYPE_CHECKING, Union, diff --git a/lib/galaxy/model/migrations/__init__.py b/lib/galaxy/model/migrations/__init__.py index da0a1927c867..e5b4d92294d5 100644 --- a/lib/galaxy/model/migrations/__init__.py +++ b/lib/galaxy/model/migrations/__init__.py @@ -1,8 +1,8 @@ import logging import os +from collections.abc import Iterable from typing import ( cast, - Iterable, NamedTuple, NewType, NoReturn, diff --git a/lib/galaxy/model/migrations/alembic/env.py b/lib/galaxy/model/migrations/alembic/env.py index b7766e4e20bf..98091912b805 100644 --- a/lib/galaxy/model/migrations/alembic/env.py +++ b/lib/galaxy/model/migrations/alembic/env.py @@ -3,7 +3,6 @@ from typing import ( Callable, cast, - Dict, ) from alembic import context @@ -75,7 +74,7 @@ def _run_migrations_invoked_via_script(run_migrations: Callable[[str], None]) -> run_migrations(url) -def _process_cmd_current(urls: Dict[ModelId, str]) -> bool: +def _process_cmd_current(urls: dict[ModelId, str]) -> bool: if config.cmd_opts.cmd[0].__name__ == "current": # type: ignore[union-attr] # Run command for each url only if urls are different; otherwise run once. are_urls_equal = len(set(urls.values())) == 1 @@ -141,8 +140,8 @@ def _get_url_from_config() -> str: return cast(str, url) -def _load_urls() -> Dict[ModelId, str]: - context_dict = cast(Dict, context.get_x_argument(as_dictionary=True)) +def _load_urls() -> dict[ModelId, str]: + context_dict = cast(dict, context.get_x_argument(as_dictionary=True)) gxy_url = context_dict.get(f"{GXY}_url") tsi_url = context_dict.get(f"{TSI}_url") assert gxy_url and tsi_url diff --git a/lib/galaxy/model/migrations/base.py b/lib/galaxy/model/migrations/base.py index e03257d3f522..d48388875af7 100644 --- a/lib/galaxy/model/migrations/base.py +++ b/lib/galaxy/model/migrations/base.py @@ -11,11 +11,9 @@ ArgumentParser, Namespace, ) +from collections.abc import Iterable from typing import ( cast, - Dict, - Iterable, - List, Optional, Union, ) @@ -369,7 +367,7 @@ def __init__(self, engine: Engine) -> None: self._load_db(engine) @property - def tables(self) -> Dict[str, Table]: + def tables(self) -> dict[str, Table]: return self.db_metadata.tables def is_database_empty(self) -> bool: @@ -404,7 +402,7 @@ def _load_sqlalchemymigrate_version(self, conn: Connection) -> Optional[int]: return None -def pop_arg_from_args(args: List[str], arg_name) -> Optional[str]: +def pop_arg_from_args(args: list[str], arg_name) -> Optional[str]: """ Pop and return argument name and value from args if arg_name is in args. 
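The alembic `env.py` hunk above carries the rewrite into `typing.cast`, whose first argument is just an object describing a type, so the builtin `dict` drops in for `typing.Dict` unchanged. A minimal sketch of the pattern; the values are made up:

```python
from typing import cast

raw: object = {"gxy_url": "sqlite:///gxy.db", "tsi_url": "sqlite:///tsi.db"}

# cast() does nothing at runtime; it only tells the type checker what
# ``raw`` is. Parameterized builtins work here too: cast(dict[str, str], raw).
urls = cast(dict, raw)
print(sorted(urls))  # ['gxy_url', 'tsi_url']
```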
""" diff --git a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py index 711f266c5be1..805b6069a362 100644 --- a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py +++ b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py @@ -91,8 +91,7 @@ def __init__(self, connection): self.assoc_name = self.assoc_model.__tablename__ def run(self): - duplicate_assocs = self.select_duplicate_associations() - if duplicate_assocs: + if duplicate_assocs := self.select_duplicate_associations(): self.delete_duplicate_associations(duplicate_assocs) def select_duplicate_associations(self): diff --git a/lib/galaxy/model/migrations/scripts.py b/lib/galaxy/model/migrations/scripts.py index 6315c83ecd23..16ac4b1242ef 100644 --- a/lib/galaxy/model/migrations/scripts.py +++ b/lib/galaxy/model/migrations/scripts.py @@ -1,9 +1,7 @@ import os import sys from typing import ( - List, Optional, - Tuple, ) import alembic.config @@ -68,7 +66,7 @@ def verify_database_is_initialized(db_url: str) -> None: engine.dispose() -def get_configuration(argv: List[str], cwd: str) -> Tuple[DatabaseConfig, DatabaseConfig, bool]: +def get_configuration(argv: list[str], cwd: str) -> tuple[DatabaseConfig, DatabaseConfig, bool]: """ Return a 3-item-tuple with configuration values used for managing databases. """ @@ -78,7 +76,7 @@ def get_configuration(argv: List[str], cwd: str) -> Tuple[DatabaseConfig, Databa def get_configuration_from_file( cwd: str, config_file: Optional[str] = None -) -> Tuple[DatabaseConfig, DatabaseConfig, bool]: +) -> tuple[DatabaseConfig, DatabaseConfig, bool]: if config_file is None: cwds = [cwd, os.path.join(cwd, CONFIG_DIR_NAME)] config_file = find_config_file(DEFAULT_CONFIG_NAMES, dirs=cwds) @@ -103,7 +101,7 @@ def get_configuration_from_file( return (gxy_config, tsi_config, is_auto_migrate) -def add_db_urls_to_command_arguments(argv: List[str], gxy_url: str, tsi_url: str) -> None: +def add_db_urls_to_command_arguments(argv: list[str], gxy_url: str, tsi_url: str) -> None: _insert_x_argument(argv, f"{TSI}_url", tsi_url) _insert_x_argument(argv, f"{GXY}_url", gxy_url) @@ -182,7 +180,7 @@ def run_upgrade(self, gxy_db_url=None, tsi_db_url=None): self._upgrade(gxy_db_url, GXY) self._upgrade(tsi_db_url, TSI) - def rename_config_argument(self, argv: List[str]) -> None: + def rename_config_argument(self, argv: list[str]) -> None: """ Rename the optional config argument: we can't use '-c' because that option is used by Alembic. 
""" diff --git a/lib/galaxy/model/migrations/util.py b/lib/galaxy/model/migrations/util.py index ab6e097c5524..cfa3091152b1 100644 --- a/lib/galaxy/model/migrations/util.py +++ b/lib/galaxy/model/migrations/util.py @@ -3,12 +3,11 @@ ABC, abstractmethod, ) +from collections.abc import Sequence from contextlib import contextmanager from typing import ( Any, - List, Optional, - Sequence, ) import sqlalchemy as sa @@ -226,8 +225,8 @@ def __init__( foreign_key_name: str, table_name: str, referent_table: str, - local_cols: List[str], - remote_cols: List[str], + local_cols: list[str], + remote_cols: list[str], **kw: Any, ) -> None: super().__init__(table_name) @@ -258,7 +257,7 @@ def log_check_not_passed(self) -> None: class CreateUniqueConstraint(DDLAlterOperation): """Wraps alembic's create_unique_constraint directive.""" - def __init__(self, constraint_name: str, table_name: str, columns: List[str]) -> None: + def __init__(self, constraint_name: str, table_name: str, columns: list[str]) -> None: super().__init__(table_name) self.constraint_name = constraint_name self.columns = columns @@ -330,14 +329,14 @@ def create_foreign_key( foreign_key_name: str, table_name: str, referent_table: str, - local_cols: List[str], - remote_cols: List[str], + local_cols: list[str], + remote_cols: list[str], **kw: Any, ) -> None: CreateForeignKey(foreign_key_name, table_name, referent_table, local_cols, remote_cols, **kw).run() -def create_unique_constraint(constraint_name: str, table_name: str, columns: List[str]) -> None: +def create_unique_constraint(constraint_name: str, table_name: str, columns: list[str]) -> None: CreateUniqueConstraint(constraint_name, table_name, columns).run() diff --git a/lib/galaxy/model/orm/engine_factory.py b/lib/galaxy/model/orm/engine_factory.py index 877e39ca69f1..66771e934823 100644 --- a/lib/galaxy/model/orm/engine_factory.py +++ b/lib/galaxy/model/orm/engine_factory.py @@ -4,7 +4,6 @@ import threading import time from multiprocessing.util import register_after_fork -from typing import Dict from sqlalchemy import ( create_engine, @@ -126,7 +125,7 @@ def checkout(dbapi_connection, connection_record, connection_proxy): return engine -def set_sqlite_connect_args(engine_options: Dict, url: str) -> None: +def set_sqlite_connect_args(engine_options: dict, url: str) -> None: """ Add or update `connect_args` in `engine_options` if db is sqlite. Set check_same_thread to False for sqlite, handled by request-specific session. diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 3b555daa6f87..2ce7863a16d6 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -6,7 +6,6 @@ timedelta, ) from typing import ( - List, Optional, ) @@ -509,7 +508,7 @@ def can_access_datasets(self, user_roles, action_tuples): return True - def can_access_collection(self, user_roles: List[Role], collection: DatasetCollection): + def can_access_collection(self, user_roles: list[Role], collection: DatasetCollection): action_tuples = collection.dataset_action_tuples if not self.can_access_datasets(user_roles, action_tuples): return False @@ -1416,8 +1415,8 @@ def set_user_group_and_role_associations( self, user: User, *, - group_ids: Optional[List[int]] = None, - role_ids: Optional[List[int]] = None, + group_ids: Optional[list[int]] = None, + role_ids: Optional[list[int]] = None, ) -> None: """ Set user groups and user roles, replacing current associations. 
@@ -1438,8 +1437,8 @@ def set_group_user_and_role_associations( self, group: Group, *, - user_ids: Optional[List[int]] = None, - role_ids: Optional[List[int]] = None, + user_ids: Optional[list[int]] = None, + role_ids: Optional[list[int]] = None, ) -> None: """ Set group users and group roles, replacing current associations. @@ -1460,8 +1459,8 @@ def set_role_user_and_group_associations( self, role: Role, *, - user_ids: Optional[List[int]] = None, - group_ids: Optional[List[int]] = None, + user_ids: Optional[list[int]] = None, + group_ids: Optional[list[int]] = None, ) -> None: """ Set role users and role groups, replacing current associations. diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index 10200e41567a..4d4cfa745ae0 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -7,6 +7,10 @@ import tarfile import tempfile from collections import defaultdict +from collections.abc import ( + Iterable, + Iterator, +) from dataclasses import dataclass from enum import Enum from json import ( @@ -20,15 +24,8 @@ Any, Callable, cast, - Dict, - Iterable, - Iterator, - List, Literal, Optional, - Set, - Tuple, - Type, TYPE_CHECKING, Union, ) @@ -162,7 +159,7 @@ DICT_STORE_ATTRS_KEY_INVOCATIONS = "invocations" -JsonDictT = Dict[str, Any] +JsonDictT = dict[str, Any] class StoreAppProtocol(Protocol): @@ -225,7 +222,7 @@ def __init__( class SessionlessContext: def __init__(self) -> None: - self.objects: Dict[Type, Dict] = defaultdict(dict) + self.objects: dict[type, dict] = defaultdict(dict) def commit(self) -> None: pass @@ -236,7 +233,7 @@ def flush(self) -> None: def add(self, obj: model.RepresentById) -> None: self.objects[obj.__class__][obj.id] = obj - def query(self, model_class: Type[model.RepresentById]) -> Bunch: + def query(self, model_class: type[model.RepresentById]) -> Bunch: def find(obj_id): return self.objects.get(model_class, {}).get(obj_id) or None @@ -246,15 +243,15 @@ def filter_by(*args, **kwargs): return Bunch(find=find, get=find, filter_by=filter_by) - def get(self, model_class: Type[model.RepresentById], primary_key: Any): # patch for SQLAlchemy 2.0 compatibility + def get(self, model_class: type[model.RepresentById], primary_key: Any): # patch for SQLAlchemy 2.0 compatibility return self.query(model_class).get(primary_key) def replace_metadata_file( - metadata: Dict[str, Any], + metadata: dict[str, Any], dataset_instance: model.DatasetInstance, sa_session: Union[SessionlessContext, scoped_session], -) -> Dict[str, Any]: +) -> dict[str, Any]: def remap_objects(p, k, obj): if isinstance(obj, dict) and "model_class" in obj and obj["model_class"] == "MetadataFile": metadata_file = model.MetadataFile(dataset=dataset_instance, uuid=obj["uuid"]) @@ -299,41 +296,41 @@ def __init__( self.import_history_encoded_id = None @abc.abstractmethod - def workflow_paths(self) -> Iterator[Tuple[str, str]]: ... + def workflow_paths(self) -> Iterator[tuple[str, str]]: ... 
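`SessionlessContext` above combines several of the rewritten spellings in one place: a class-keyed registry annotated `dict[type, dict]` backed by a `defaultdict`, plus lookup methods taking `type[model.RepresentById]`. A toy version of that registry shape, with a stand-in class rather than a Galaxy model:

```python
from collections import defaultdict


class HDA:
    """Hypothetical stand-in for a mapped model class."""

    def __init__(self, id: int) -> None:
        self.id = id


# Same shape as SessionlessContext.objects: a registry keyed by class,
# mapping ids to instances, annotated with builtin generics.
objects: dict[type, dict[int, object]] = defaultdict(dict)

hda = HDA(7)
objects[HDA][hda.id] = hda  # defaultdict creates the inner dict on demand

assert objects[HDA].get(7) is hda
assert objects[HDA].get(99) is None
```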
@abc.abstractmethod def defines_new_history(self) -> bool: """Does this store define a new history to create.""" @abc.abstractmethod - def new_history_properties(self) -> Dict[str, Any]: + def new_history_properties(self) -> dict[str, Any]: """Dict of history properties if defines_new_history() is truthy.""" @abc.abstractmethod - def datasets_properties(self) -> List[Dict[str, Any]]: + def datasets_properties(self) -> list[dict[str, Any]]: """Return a list of HDA properties.""" - def library_properties(self) -> List[Dict[str, Any]]: + def library_properties(self) -> list[dict[str, Any]]: """Return a list of library properties.""" return [] @abc.abstractmethod - def invocations_properties(self) -> List[Dict[str, Any]]: ... + def invocations_properties(self) -> list[dict[str, Any]]: ... @abc.abstractmethod - def collections_properties(self) -> List[Dict[str, Any]]: + def collections_properties(self) -> list[dict[str, Any]]: """Return a list of HDCA properties.""" @abc.abstractmethod - def implicit_dataset_conversion_properties(self) -> List[Dict[str, Any]]: + def implicit_dataset_conversion_properties(self) -> list[dict[str, Any]]: """Return a list of ImplicitlyConvertedDatasetAssociation properties.""" @abc.abstractmethod - def jobs_properties(self) -> List[Dict[str, Any]]: + def jobs_properties(self) -> list[dict[str, Any]]: """Return a list of jobs properties.""" @abc.abstractmethod - def implicit_collection_jobs_properties(self) -> List[Dict[str, Any]]: ... + def implicit_collection_jobs_properties(self) -> list[dict[str, Any]]: ... @property @abc.abstractmethod @@ -349,7 +346,7 @@ def file_source_root(self) -> Optional[str]: """Source of valid file data.""" return None - def trust_hid(self, obj_attrs: Dict[str, Any]) -> bool: + def trust_hid(self, obj_attrs: dict[str, Any]) -> bool: """Trust HID when importing objects into a new History.""" return ( self.import_history_encoded_id is not None @@ -426,7 +423,7 @@ def perform_import( def _attach_dataset_hashes( self, - dataset_or_file_attrs: Dict[str, Any], + dataset_or_file_attrs: dict[str, Any], dataset_instance: model.DatasetInstance, ) -> None: if "hashes" in dataset_or_file_attrs: @@ -439,7 +436,7 @@ def _attach_dataset_hashes( def _attach_dataset_sources( self, - dataset_or_file_attrs: Dict[str, Any], + dataset_or_file_attrs: dict[str, Any], dataset_instance: model.DatasetInstance, ) -> None: if "sources" in dataset_or_file_attrs: @@ -471,7 +468,7 @@ def _attach_dataset_sources( def _import_datasets( self, object_import_tracker: "ObjectImportTracker", - datasets_attrs: List[Dict[str, Any]], + datasets_attrs: list[dict[str, Any]], history: Optional[model.History], new_history: bool, job: Optional[model.Job], @@ -710,7 +707,7 @@ def handle_dataset_object_edit(dataset_instance, dataset_attrs): # If dataset instance is discarded or deferred, don't attempt to regenerate # metadata for it. 
if dataset_instance.state == dataset_instance.states.OK: - regenerate_kwds: Dict[str, Any] = {} + regenerate_kwds: dict[str, Any] = {} if job: regenerate_kwds["user"] = job.user regenerate_kwds["session_id"] = job.session_id @@ -819,7 +816,7 @@ def import_folder(folder_attrs, root_folder=None): def _import_collection_instances( self, object_import_tracker: "ObjectImportTracker", - collections_attrs: List[Dict[str, Any]], + collections_attrs: list[dict[str, Any]], history: Optional[model.History], new_history: bool, ) -> None: @@ -923,13 +920,13 @@ def materialize_elements(dc): def _attach_raw_id_if_editing( self, obj: model.RepresentById, - attrs: Dict[str, Any], + attrs: dict[str, Any], ) -> None: if self.sessionless and "id" in attrs and self.import_options.allow_edit: obj.id = attrs["id"] def _import_collection_implicit_input_associations( - self, object_import_tracker: "ObjectImportTracker", collections_attrs: List[Dict[str, Any]] + self, object_import_tracker: "ObjectImportTracker", collections_attrs: list[dict[str, Any]] ) -> None: object_key = self.object_key @@ -949,7 +946,7 @@ def _import_collection_implicit_input_associations( hdca.add_implicit_input_collection(name, input_dataset_collection) def _import_dataset_copied_associations( - self, object_import_tracker: "ObjectImportTracker", datasets_attrs: List[Dict[str, Any]] + self, object_import_tracker: "ObjectImportTracker", datasets_attrs: list[dict[str, Any]] ) -> None: object_key = self.object_key @@ -989,7 +986,7 @@ def _import_dataset_copied_associations( hda_copied_from_sinks[copied_from_object_key] = dataset_key def _import_collection_copied_associations( - self, object_import_tracker: "ObjectImportTracker", collections_attrs: List[Dict[str, Any]] + self, object_import_tracker: "ObjectImportTracker", collections_attrs: list[dict[str, Any]] ) -> None: object_key = self.object_key @@ -1358,10 +1355,10 @@ def _flush(self) -> None: def _copied_from_object_key( - copied_from_chain: List[ObjectKeyType], + copied_from_chain: list[ObjectKeyType], objects_by_key: Union[ - Dict[ObjectKeyType, model.HistoryDatasetAssociation], - Dict[ObjectKeyType, model.HistoryDatasetCollectionAssociation], + dict[ObjectKeyType, model.HistoryDatasetAssociation], + dict[ObjectKeyType, model.HistoryDatasetCollectionAssociation], ], ) -> Optional[ObjectKeyType]: if len(copied_from_chain) == 0: @@ -1391,19 +1388,19 @@ class ObjectImportTracker: Needed to re-establish connections and such in multiple passes. 
""" - libraries_by_key: Dict[ObjectKeyType, model.Library] - hdas_by_key: Dict[ObjectKeyType, model.HistoryDatasetAssociation] - hdas_by_id: Dict[int, model.HistoryDatasetAssociation] - hdcas_by_key: Dict[ObjectKeyType, model.HistoryDatasetCollectionAssociation] - hdcas_by_id: Dict[int, model.HistoryDatasetCollectionAssociation] - dces_by_key: Dict[ObjectKeyType, model.DatasetCollectionElement] - dces_by_id: Dict[int, model.DatasetCollectionElement] - lddas_by_key: Dict[ObjectKeyType, model.LibraryDatasetDatasetAssociation] - hda_copied_from_sinks: Dict[ObjectKeyType, ObjectKeyType] - hdca_copied_from_sinks: Dict[ObjectKeyType, ObjectKeyType] - jobs_by_key: Dict[ObjectKeyType, model.Job] - requires_hid: List["HistoryItem"] - copy_hid_for: List[Tuple["HistoryItem", "HistoryItem"]] + libraries_by_key: dict[ObjectKeyType, model.Library] + hdas_by_key: dict[ObjectKeyType, model.HistoryDatasetAssociation] + hdas_by_id: dict[int, model.HistoryDatasetAssociation] + hdcas_by_key: dict[ObjectKeyType, model.HistoryDatasetCollectionAssociation] + hdcas_by_id: dict[int, model.HistoryDatasetCollectionAssociation] + dces_by_key: dict[ObjectKeyType, model.DatasetCollectionElement] + dces_by_id: dict[int, model.DatasetCollectionElement] + lddas_by_key: dict[ObjectKeyType, model.LibraryDatasetDatasetAssociation] + hda_copied_from_sinks: dict[ObjectKeyType, ObjectKeyType] + hdca_copied_from_sinks: dict[ObjectKeyType, ObjectKeyType] + jobs_by_key: dict[ObjectKeyType, model.Job] + requires_hid: list["HistoryItem"] + copy_hid_for: list[tuple["HistoryItem", "HistoryItem"]] def __init__(self) -> None: self.libraries_by_key = {} @@ -1417,9 +1414,9 @@ def __init__(self) -> None: self.hda_copied_from_sinks = {} self.hdca_copied_from_sinks = {} self.jobs_by_key = {} - self.invocations_by_key: Dict[str, model.WorkflowInvocation] = {} - self.implicit_collection_jobs_by_key: Dict[str, ImplicitCollectionJobs] = {} - self.workflows_by_key: Dict[str, model.Workflow] = {} + self.invocations_by_key: dict[str, model.WorkflowInvocation] = {} + self.implicit_collection_jobs_by_key: dict[str, ImplicitCollectionJobs] = {} + self.workflows_by_key: dict[str, model.Workflow] = {} self.requires_hid = [] self.copy_hid_for = [] @@ -1484,7 +1481,7 @@ class DictImportModelStore(ModelImportStore): def __init__( self, - store_as_dict: Dict[str, Any], + store_as_dict: dict[str, Any], **kwd, ) -> None: self._store_as_dict = store_as_dict @@ -1494,41 +1491,41 @@ def __init__( def defines_new_history(self) -> bool: return DICT_STORE_ATTRS_KEY_HISTORY in self._store_as_dict - def new_history_properties(self) -> Dict[str, Any]: + def new_history_properties(self) -> dict[str, Any]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_HISTORY) or {} def datasets_properties( self, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_DATASETS) or [] - def collections_properties(self) -> List[Dict[str, Any]]: + def collections_properties(self) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_COLLECTIONS) or [] - def implicit_dataset_conversion_properties(self) -> List[Dict[str, Any]]: + def implicit_dataset_conversion_properties(self) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_CONVERSIONS) or [] def library_properties( self, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_LIBRARIES) or [] - def jobs_properties(self) -> List[Dict[str, Any]]: + def jobs_properties(self) -> 
list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_JOBS) or [] - def implicit_collection_jobs_properties(self) -> List[Dict[str, Any]]: + def implicit_collection_jobs_properties(self) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_IMPLICIT_COLLECTION_JOBS) or [] - def invocations_properties(self) -> List[Dict[str, Any]]: + def invocations_properties(self) -> list[dict[str, Any]]: return self._store_as_dict.get(DICT_STORE_ATTRS_KEY_INVOCATIONS) or [] - def workflow_paths(self) -> Iterator[Tuple[str, str]]: + def workflow_paths(self) -> Iterator[tuple[str, str]]: return yield def get_import_model_store_for_dict( - as_dict: Dict[str, Any], + as_dict: dict[str, Any], **kwd, ) -> DictImportModelStore: return DictImportModelStore(as_dict, **kwd) @@ -1539,17 +1536,17 @@ class BaseDirectoryImportModelStore(ModelImportStore): def _normalize_job_parameters( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, - ) -> Dict[str, Any]: ... + ) -> dict[str, Any]: ... @abc.abstractmethod def _connect_job_io( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, @@ -1563,12 +1560,12 @@ def defines_new_history(self) -> bool: new_history_attributes = os.path.join(self.archive_dir, ATTRS_FILENAME_HISTORY) return os.path.exists(new_history_attributes) - def new_history_properties(self) -> Dict[str, Any]: + def new_history_properties(self) -> dict[str, Any]: new_history_attributes = os.path.join(self.archive_dir, ATTRS_FILENAME_HISTORY) history_properties = load(open(new_history_attributes)) return history_properties - def datasets_properties(self) -> List[Dict[str, Any]]: + def datasets_properties(self) -> list[dict[str, Any]]: datasets_attrs_file_name = os.path.join(self.archive_dir, ATTRS_FILENAME_DATASETS) datasets_attrs = load(open(datasets_attrs_file_name)) provenance_file_name = f"{datasets_attrs_file_name}.provenance" @@ -1579,25 +1576,25 @@ def datasets_properties(self) -> List[Dict[str, Any]]: return datasets_attrs - def collections_properties(self) -> List[Dict[str, Any]]: + def collections_properties(self) -> list[dict[str, Any]]: return self._read_list_if_exists(ATTRS_FILENAME_COLLECTIONS) - def implicit_dataset_conversion_properties(self) -> List[Dict[str, Any]]: + def implicit_dataset_conversion_properties(self) -> list[dict[str, Any]]: return self._read_list_if_exists(ATTRS_FILENAME_CONVERSIONS) def library_properties( self, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: libraries_attrs = self._read_list_if_exists(ATTRS_FILENAME_LIBRARIES) libraries_attrs.extend(self._read_list_if_exists(ATTRS_FILENAME_LIBRARY_FOLDERS)) return libraries_attrs def jobs_properties( self, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: return self._read_list_if_exists(ATTRS_FILENAME_JOBS) - def implicit_collection_jobs_properties(self) -> List[Dict[str, Any]]: + def implicit_collection_jobs_properties(self) -> list[dict[str, Any]]: implicit_collection_jobs_attrs_file_name = os.path.join( self.archive_dir, ATTRS_FILENAME_IMPLICIT_COLLECTION_JOBS ) @@ -1608,10 +1605,10 @@ def implicit_collection_jobs_properties(self) -> List[Dict[str, Any]]: def invocations_properties( self, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: return self._read_list_if_exists(ATTRS_FILENAME_INVOCATIONS) - def workflow_paths(self) -> Iterator[Tuple[str, str]]: + 
def workflow_paths(self) -> Iterator[tuple[str, str]]: workflows_directory = os.path.join(self.archive_dir, "workflows") if not os.path.exists(workflows_directory): return @@ -1624,7 +1621,7 @@ def workflow_paths(self) -> Iterator[Tuple[str, str]]: yield workflow_key, os.path.join(workflows_directory, name) def _set_job_attributes( - self, imported_job: model.Job, job_attrs: Dict[str, Any], force_terminal: bool = False + self, imported_job: model.Job, job_attrs: dict[str, Any], force_terminal: bool = False ) -> None: ATTRIBUTES = ( "info", @@ -1650,7 +1647,7 @@ def _set_job_attributes( if raw_state: imported_job.set_state(raw_state) - def _read_list_if_exists(self, file_name: str, required: bool = False) -> List[Dict[str, Any]]: + def _read_list_if_exists(self, file_name: str, required: bool = False) -> list[dict[str, Any]]: file_name = os.path.join(self.archive_dir, file_name) if os.path.exists(file_name): attrs = load(open(file_name)) @@ -1662,7 +1659,7 @@ def _read_list_if_exists(self, file_name: str, required: bool = False) -> List[D def restore_times( - model_object: Union[model.Job, model.WorkflowInvocation, model.WorkflowInvocationStep], attrs: Dict[str, Any] + model_object: Union[model.Job, model.WorkflowInvocation, model.WorkflowInvocationStep], attrs: dict[str, Any] ) -> None: try: model_object.create_time = datetime.datetime.strptime(attrs["create_time"], "%Y-%m-%dT%H:%M:%S.%f") @@ -1691,7 +1688,7 @@ def __init__(self, archive_dir: str, **kwd) -> None: def _connect_job_io( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, @@ -1713,11 +1710,11 @@ def _connect_job_io( def _normalize_job_parameters( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: def remap_objects(p, k, obj): if isinstance(obj, dict) and obj.get("__HistoryDatasetAssociation__", False): imported_hda = _find_hda(obj[self.object_key]) @@ -1729,7 +1726,7 @@ def remap_objects(p, k, obj): params = remap(params, remap_objects) return params - def trust_hid(self, obj_attrs: Dict[str, Any]) -> bool: + def trust_hid(self, obj_attrs: dict[str, Any]) -> bool: # We didn't do object tracking so we pretty much have to trust the HID and accept # that it will be wrong a lot. 
return True @@ -1746,7 +1743,7 @@ def __init__(self, archive_dir: str, **kwd) -> None: def _connect_job_io( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, @@ -1800,11 +1797,11 @@ def _connect_job_io( def _normalize_job_parameters( self, imported_job: model.Job, - job_attrs: Dict[str, Any], + job_attrs: dict[str, Any], _find_hda: Callable, _find_hdca: Callable, _find_dce: Callable, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: def remap_objects(p, k, obj): if isinstance(obj, dict) and "src" in obj and obj["src"] in ["hda", "hdca", "dce"]: if obj["src"] == "hda": @@ -1839,7 +1836,7 @@ def remap_objects(p, k, obj): params = job_attrs["params"] params = remap(params, remap_objects) - return cast(Dict[str, Any], params) + return cast(dict[str, Any], params) class BagArchiveImportModelStore(DirectoryImportModelStoreLatest): @@ -1951,19 +1948,19 @@ def __init__( serialize_files_handler=self, ) self.export_files = export_files - self.included_datasets: Dict[model.DatasetInstance, Tuple[model.DatasetInstance, bool]] = {} - self.dataset_implicit_conversions: Dict[model.DatasetInstance, model.ImplicitlyConvertedDatasetAssociation] = {} - self.included_collections: Dict[ + self.included_datasets: dict[model.DatasetInstance, tuple[model.DatasetInstance, bool]] = {} + self.dataset_implicit_conversions: dict[model.DatasetInstance, model.ImplicitlyConvertedDatasetAssociation] = {} + self.included_collections: dict[ Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation], Union[model.DatasetCollection, model.HistoryDatasetCollectionAssociation], ] = {} - self.included_libraries: List[model.Library] = [] - self.included_library_folders: List[model.LibraryFolder] = [] - self.included_invocations: List[model.WorkflowInvocation] = [] - self.collection_datasets: Set[int] = set() - self.dataset_id_to_path: Dict[int, Tuple[Optional[str], Optional[str]]] = {} + self.included_libraries: list[model.Library] = [] + self.included_library_folders: list[model.LibraryFolder] = [] + self.included_invocations: list[model.WorkflowInvocation] = [] + self.collection_datasets: set[int] = set() + self.dataset_id_to_path: dict[int, tuple[Optional[str], Optional[str]]] = {} - self.job_output_dataset_associations: Dict[int, Dict[str, model.DatasetInstance]] = {} + self.job_output_dataset_associations: dict[int, dict[str, model.DatasetInstance]] = {} @property def workflows_directory(self) -> str: @@ -2076,9 +2073,9 @@ def export_job(self, job: model.Job, tool=None, include_job_data=True): def export_jobs( self, jobs: Iterable[model.Job], - jobs_attrs: Optional[List[Dict[str, Any]]] = None, + jobs_attrs: Optional[list[dict[str, Any]]] = None, include_job_data: bool = True, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """ Export jobs. @@ -2093,12 +2090,12 @@ def export_jobs( if include_job_data: # -- Get input, output datasets. 
-- - input_dataset_mapping: Dict[str, List[Union[str, int]]] = {} - output_dataset_mapping: Dict[str, List[Union[str, int]]] = {} - input_dataset_collection_mapping: Dict[str, List[Union[str, int]]] = {} - input_dataset_collection_element_mapping: Dict[str, List[Union[str, int]]] = {} - output_dataset_collection_mapping: Dict[str, List[Union[str, int]]] = {} - implicit_output_dataset_collection_mapping: Dict[str, List[Union[str, int]]] = {} + input_dataset_mapping: dict[str, list[Union[str, int]]] = {} + output_dataset_mapping: dict[str, list[Union[str, int]]] = {} + input_dataset_collection_mapping: dict[str, list[Union[str, int]]] = {} + input_dataset_collection_element_mapping: dict[str, list[Union[str, int]]] = {} + output_dataset_collection_mapping: dict[str, list[Union[str, int]]] = {} + implicit_output_dataset_collection_mapping: dict[str, list[Union[str, int]]] = {} for id_assoc in job.input_datasets: # Optional data inputs will not have a dataset. @@ -2401,7 +2398,7 @@ def to_json(attributes): jobs_attrs = [] for job_id, job_output_dataset_associations in self.job_output_dataset_associations.items(): - output_dataset_mapping: Dict[str, List[Union[str, int]]] = {} + output_dataset_mapping: dict[str, list[Union[str, int]]] = {} for name, dataset in job_output_dataset_associations.items(): if name not in output_dataset_mapping: output_dataset_mapping[name] = [] @@ -2414,7 +2411,7 @@ def to_json(attributes): # # Get all jobs associated with included HDAs. - jobs_dict: Dict[int, model.Job] = {} + jobs_dict: dict[int, model.Job] = {} implicit_collection_jobs_dict = {} def record_job(job): @@ -2507,7 +2504,7 @@ def record_associated_jobs(obj): dump({"galaxy_export_version": GALAXY_EXPORT_VERSION}, export_attrs_out) def __exit__( - self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] + self, exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType] ) -> bool: if exc_type is None: self._finalize() @@ -2517,18 +2514,18 @@ def __exit__( class WriteCrates: - included_invocations: List[model.WorkflowInvocation] + included_invocations: list[model.WorkflowInvocation] export_directory: StrPath - included_datasets: Dict[model.DatasetInstance, Tuple[model.DatasetInstance, bool]] - dataset_implicit_conversions: Dict[model.DatasetInstance, model.ImplicitlyConvertedDatasetAssociation] - dataset_id_to_path: Dict[int, Tuple[Optional[str], Optional[str]]] + included_datasets: dict[model.DatasetInstance, tuple[model.DatasetInstance, bool]] + dataset_implicit_conversions: dict[model.DatasetInstance, model.ImplicitlyConvertedDatasetAssociation] + dataset_id_to_path: dict[int, tuple[Optional[str], Optional[str]]] @property @abc.abstractmethod def workflows_directory(self) -> str: ... 
def _generate_markdown_readme(self) -> str: - markdown_parts: List[str] = [] + markdown_parts: list[str] = [] if self._is_single_invocation_export(): invocation = self.included_invocations[0] name = invocation.workflow.name @@ -2649,10 +2646,10 @@ class BcoExportOptions: galaxy_url: str galaxy_version: str merge_history_metadata: bool = False - override_environment_variables: Optional[Dict[str, str]] = None - override_empirical_error: Optional[Dict[str, str]] = None - override_algorithmic_error: Optional[Dict[str, str]] = None - override_xref: Optional[List[XrefItem]] = None + override_environment_variables: Optional[dict[str, str]] = None + override_empirical_error: Optional[dict[str, str]] = None + override_algorithmic_error: Optional[dict[str, str]] = None + override_xref: Optional[list[XrefItem]] = None class FileSourceModelExportStore(abc.ABC, DirectoryModelExportStore): @@ -2718,7 +2715,7 @@ def _generate_output_file(self): core_biocompute_object, object_id = self._core_biocompute_object_and_object_id() write_to_file(object_id, core_biocompute_object, self.out_file) - def _core_biocompute_object_and_object_id(self) -> Tuple[BioComputeObjectCore, str]: + def _core_biocompute_object_and_object_id(self) -> tuple[BioComputeObjectCore, str]: assert self.app # need app.security to do anything... export_options = self.export_options workflow_invocation = self.only_invocation @@ -2750,17 +2747,17 @@ def get_dataset_url(encoded_dataset_id: str): keywords.append(tag.user_tname) # metrics = {} ... TODO - pipeline_steps: List[PipelineStep] = [] + pipeline_steps: list[PipelineStep] = [] software_prerequisite_tracker = SoftwarePrerequisiteTracker() - input_subdomain_items: List[InputSubdomainItem] = [] - output_subdomain_items: List[OutputSubdomainItem] = [] + input_subdomain_items: list[InputSubdomainItem] = [] + output_subdomain_items: list[OutputSubdomainItem] = [] for step in workflow_invocation.steps: workflow_step = step.workflow_step software_prerequisite_tracker.register_step(workflow_step) if workflow_step.type == "tool": workflow_outputs_list = set() - output_list: List[DescriptionDomainUri] = [] - input_list: List[DescriptionDomainUri] = [] + output_list: list[DescriptionDomainUri] = [] + input_list: list[DescriptionDomainUri] = [] for wo in workflow_step.workflow_outputs: workflow_outputs_list.add(wo.output_name) for job in step.jobs: @@ -2838,14 +2835,14 @@ def get_dataset_url(encoded_dataset_id: str): ) input_subdomain_items.append(input_obj) - usability_domain_str: List[str] = [] + usability_domain_str: list[str] = [] for a in stored_workflow.annotations: usability_domain_str.append(a.annotation) if export_options.merge_history_metadata: for h in history.annotations: usability_domain_str.append(h.annotation) - parametric_domain_items: List[ParametricDomainItem] = [] + parametric_domain_items: list[ParametricDomainItem] = [] for inv_step in workflow_invocation.steps: try: for k, v in inv_step.workflow_step.tool_inputs.items(): @@ -2959,7 +2956,7 @@ def get_export_store_factory( bco_export_options: Optional[BcoExportOptions] = None, user_context=None, ) -> Callable[[StrPath], FileSourceModelExportStore]: - export_store_class: Type[FileSourceModelExportStore] + export_store_class: type[FileSourceModelExportStore] export_store_class_kwds = { "app": app, "export_files": export_files, diff --git a/lib/galaxy/model/store/_bco_convert_utils.py b/lib/galaxy/model/store/_bco_convert_utils.py index 1763b2851665..6df2be1a9220 100644 --- a/lib/galaxy/model/store/_bco_convert_utils.py +++ 
b/lib/galaxy/model/store/_bco_convert_utils.py @@ -1,8 +1,4 @@ import urllib.parse -from typing import ( - List, - Set, -) from galaxy.model import ( Workflow, @@ -15,8 +11,8 @@ class SoftwarePrerequisiteTracker: - _recorded_tools: Set[str] = set() - _software_prerequisites: List[SoftwarePrerequisite] = [] + _recorded_tools: set[str] = set() + _software_prerequisites: list[SoftwarePrerequisite] = [] def register_step(self, step: WorkflowStep) -> None: if step.type != "tool": @@ -45,7 +41,7 @@ def register_step(self, step: WorkflowStep) -> None: self._software_prerequisites.append(software_prerequisite) @property - def software_prerequisites(self) -> List[SoftwarePrerequisite]: + def software_prerequisites(self) -> list[SoftwarePrerequisite]: return self._software_prerequisites diff --git a/lib/galaxy/model/store/build_objects.py b/lib/galaxy/model/store/build_objects.py index 38bcd59b11c0..ddb43cd8f328 100644 --- a/lib/galaxy/model/store/build_objects.py +++ b/lib/galaxy/model/store/build_objects.py @@ -2,10 +2,6 @@ import logging import os import sys -from typing import ( - Dict, - Type, -) import yaml @@ -107,7 +103,7 @@ def main(argv=None): if export_type is None: export_type = "directory" if not export_path.endswith(".tgz") else "bag_archive" - export_types: Dict[str, Type[store.DirectoryModelExportStore]] = { + export_types: dict[str, type[store.DirectoryModelExportStore]] = { "directory": store.DirectoryModelExportStore, "tar": store.TarModelExportStore, "bag_directory": store.BagDirectoryModelExportStore, diff --git a/lib/galaxy/model/store/discover.py b/lib/galaxy/model/store/discover.py index 9301509db792..0cf69cb69ce0 100644 --- a/lib/galaxy/model/store/discover.py +++ b/lib/galaxy/model/store/discover.py @@ -9,12 +9,10 @@ import abc import logging import os +from collections.abc import Iterable from typing import ( Any, Callable, - Dict, - Iterable, - List, NamedTuple, Optional, TYPE_CHECKING, @@ -369,7 +367,7 @@ def _populate_elements( final_job_state, change_datatype_actions, ): - element_datasets: Dict[str, List[Any]] = { + element_datasets: dict[str, list[Any]] = { "element_identifiers": [], "datasets": [], "tag_lists": [], @@ -561,7 +559,7 @@ def add_output_dataset_association(self, name: str, dataset: "DatasetInstance"): @abc.abstractmethod def add_datasets_to_history( - self, datasets: List["DatasetInstance"], for_output_dataset: Optional["DatasetInstance"] = None + self, datasets: list["DatasetInstance"], for_output_dataset: Optional["DatasetInstance"] = None ): """Add datasets to the history this context points at.""" @@ -740,7 +738,7 @@ def get_implicit_collection_jobs_association_id(self): def persist_target_to_export_store( - target_dict: Dict[str, Any], + target_dict: dict[str, Any], export_store: "DirectoryModelExportStore", object_store: ObjectStore, work_directory: str, @@ -792,7 +790,7 @@ def persist_elements_to_hdca( hdca, collector=None, ): - discovered_files: List[DiscoveredResult] = [] + discovered_files: list[DiscoveredResult] = [] def add_to_discovered_files(elements, parent_identifiers=None): parent_identifiers = parent_identifiers or [] @@ -864,7 +862,7 @@ def persist_elements_to_folder( def persist_hdas(elements, model_persistence_context: ModelPersistenceContext, final_job_state="ok"): # discover files as individual datasets for the target history datasets = [] - storage_callbacks: List[Callable] = [] + storage_callbacks: list[Callable] = [] def collect_elements_for_history(elements): for element in elements: @@ -984,7 +982,7 @@ class 
DiscoveredFile(NamedTuple): collector: Optional[CollectorT] match: "JsonCollectedDatasetMatch" - def discovered_state(self, element: Dict[str, Any], final_job_state="ok") -> "DiscoveredResultState": + def discovered_state(self, element: dict[str, Any], final_job_state="ok") -> "DiscoveredResultState": info = element.get("info", None) return DiscoveredResultState(info, final_job_state) @@ -998,7 +996,7 @@ class DiscoveredDeferredFile(NamedTuple): collector: Optional[CollectorT] match: "JsonCollectedDatasetMatch" - def discovered_state(self, element: Dict[str, Any], final_job_state="ok") -> DiscoveredResultState: + def discovered_state(self, element: dict[str, Any], final_job_state="ok") -> DiscoveredResultState: info = element.get("info", None) state = "deferred" if final_job_state == "ok" else final_job_state return DiscoveredResultState(info, state) @@ -1171,6 +1169,6 @@ class DiscoveredFileError(NamedTuple): match: JsonCollectedDatasetMatch path: Optional[str] = None - def discovered_state(self, element: Dict[str, Any], final_job_state="ok") -> DiscoveredResultState: + def discovered_state(self, element: dict[str, Any], final_job_state="ok") -> DiscoveredResultState: info = self.error_message return DiscoveredResultState(info, "error") diff --git a/lib/galaxy/model/store/load_objects.py b/lib/galaxy/model/store/load_objects.py index 8b6bcdb08dfc..c87e999bce5e 100644 --- a/lib/galaxy/model/store/load_objects.py +++ b/lib/galaxy/model/store/load_objects.py @@ -5,7 +5,6 @@ import sys from typing import ( Any, - Dict, ) import yaml @@ -48,7 +47,7 @@ def main(argv=None): if store_path.endswith(json_ext): is_json = True - data: Dict[str, Any] = {} + data: dict[str, Any] = {} if is_json: with open(store_path) as f: store_dict = yaml.safe_load(f) diff --git a/lib/galaxy/model/store/ro_crate_utils.py b/lib/galaxy/model/store/ro_crate_utils.py index 74a5b7caf5a7..2d71f50ee130 100644 --- a/lib/galaxy/model/store/ro_crate_utils.py +++ b/lib/galaxy/model/store/ro_crate_utils.py @@ -2,8 +2,6 @@ import os from typing import ( Any, - Dict, - List, Optional, ) @@ -81,13 +79,13 @@ def __init__(self, model_store: Any): "dbkey", "__input_ext", ] - self.workflow_entities: Dict[int, Any] = {} - self.collection_entities: Dict[int, Any] = {} - self.file_entities: Dict[int, Any] = {} - self.param_entities: Dict[int, Any] = {} - self.pv_entities: Dict[str, Any] = {} + self.workflow_entities: dict[int, Any] = {} + self.collection_entities: dict[int, Any] = {} + self.file_entities: dict[int, Any] = {} + self.param_entities: dict[int, Any] = {} + self.pv_entities: dict[str, Any] = {} # Cache for tools to avoid duplicating entities for the same tool - self.tool_cache: Dict[str, ContextEntity] = {} + self.tool_cache: dict[str, ContextEntity] = {} def build_crate(self): crate = ROCrate() @@ -103,7 +101,7 @@ def build_crate(self): self._add_attrs_files(crate) return crate - def _add_file(self, dataset: HistoryDatasetAssociation, properties: Dict[Any, Any], crate: ROCrate) -> File: + def _add_file(self, dataset: HistoryDatasetAssociation, properties: dict[Any, Any], crate: ROCrate) -> File: if dataset.dataset.id in self.model_store.dataset_id_to_path: filename, _ = self.model_store.dataset_id_to_path[dataset.dataset.id] description = "" @@ -156,7 +154,7 @@ def _add_collection( dataset_ids = [] for hda in hdca.dataset_instances: if hda.dataset: - properties: Dict[Any, Any] = {} + properties: dict[Any, Any] = {} self._add_file(hda, properties, crate) dataset_id = self.file_entities.get(hda.dataset.id) if dataset_id: @@ 
-288,7 +286,7 @@ def _add_steps(self, crate: ROCrate): """ Add workflow steps (HowToStep) to the RO-Crate. These are unique for each tool occurrence. """ - step_entities: List[ContextEntity] = [] + step_entities: list[ContextEntity] = [] # Initialize the position as a list with a single element to keep it mutable position = [1] self._add_steps_recursive(self.workflow.steps, crate, step_entities, position) @@ -337,7 +335,7 @@ def _add_steps_recursive(self, steps, crate: ROCrate, step_entities, position): self._add_steps_recursive(subworkflow.steps, crate, step_entities, position) def _add_tools(self, crate: ROCrate): - tool_entities: List[ContextEntity] = [] + tool_entities: list[ContextEntity] = [] self._add_tools_recursive(self.workflow.steps, crate, tool_entities) def _add_tools_recursive(self, steps, crate: ROCrate, tool_entities): diff --git a/lib/galaxy/model/tags.py b/lib/galaxy/model/tags.py index cc1753419c2e..6ac520a4c5ff 100644 --- a/lib/galaxy/model/tags.py +++ b/lib/galaxy/model/tags.py @@ -1,10 +1,7 @@ import logging import re from typing import ( - Dict, - List, Optional, - Tuple, TYPE_CHECKING, ) @@ -59,7 +56,7 @@ def __init__(self, sa_session: scoped_session, galaxy_session: Optional[GalaxySe # Key-value separator. self.key_value_separators = "=:" # Initialize with known classes - add to this in subclasses. - self.item_tag_assoc_info: Dict[str, ItemTagAssocInfo] = {} + self.item_tag_assoc_info: dict[str, ItemTagAssocInfo] = {} self.galaxy_session = galaxy_session def create_tag_handler_session(self, galaxy_session: Optional[GalaxySession]): @@ -263,13 +260,13 @@ def apply_item_tags( for name, value in parsed_tags: self.apply_item_tag(user, item, name, value, flush=flush) - def get_tags_list(self, tags) -> List[str]: + def get_tags_list(self, tags) -> list[str]: """Build a list of tags from an item's tags.""" # Return empty list if there are no tags. if not tags: return [] # Create list of tags. - tags_list: List[str] = [] + tags_list: list[str] = [] for tag in tags: tag_str = tag.user_tname if tag.value is not None: @@ -382,7 +379,7 @@ def parse_tags(self, tag_str): raw_tags = reg_exp.split(tag_str) return self.parse_tags_list(raw_tags) - def parse_tags_list(self, tags_list: List[str]) -> List[Tuple[str, Optional[str]]]: + def parse_tags_list(self, tags_list: list[str]) -> list[tuple[str, Optional[str]]]: """ Return a list of tag tuples (name, value) pairs derived from a list. Method scrubs tag names and values as well. @@ -433,13 +430,13 @@ def _scrub_tag_name_list(self, tag_name_list): scrubbed_tag_list.append(self._scrub_tag_name(tag)) return scrubbed_tag_list - def _get_name_value_pair(self, tag_str) -> List[Optional[str]]: + def _get_name_value_pair(self, tag_str) -> list[Optional[str]]: """Get name, value pair from a tag string.""" # Use regular expression to parse name, value. if tag_str.startswith("#"): tag_str = f"name:{tag_str[1:]}" reg_exp = re.compile(f"[{self.key_value_separators}]") - name_value_pair: List[Optional[str]] = list(reg_exp.split(tag_str, 1)) + name_value_pair: list[Optional[str]] = list(reg_exp.split(tag_str, 1)) # Add empty slot if tag does not have value. 
if len(name_value_pair) < 2: name_value_pair.append(None) @@ -447,7 +444,7 @@ def _get_name_value_pair(self, tag_str) -> List[Optional[str]]: class GalaxyTagHandler(TagHandler): - _item_tag_assoc_info: Dict[str, ItemTagAssocInfo] = {} + _item_tag_assoc_info: dict[str, ItemTagAssocInfo] = {} def __init__(self, sa_session: scoped_session, galaxy_session: Optional[GalaxySession] = None): super().__init__(sa_session, galaxy_session=galaxy_session) @@ -498,7 +495,7 @@ class GalaxyTagHandlerSession(GalaxyTagHandler): def __init__(self, sa_session: scoped_session, galaxy_session: Optional[GalaxySession]): super().__init__(sa_session, galaxy_session) - self.created_tags: Dict[str, Tag] = {} + self.created_tags: dict[str, Tag] = {} def _get_tag(self, tag_name): """Get tag from cache or database.""" diff --git a/lib/galaxy/model/tool_shed_install/__init__.py b/lib/galaxy/model/tool_shed_install/__init__.py index edd922600b1d..6b9824ba8dac 100644 --- a/lib/galaxy/model/tool_shed_install/__init__.py +++ b/lib/galaxy/model/tool_shed_install/__init__.py @@ -5,7 +5,6 @@ from typing import ( Any, Callable, - Dict, Optional, TYPE_CHECKING, ) @@ -192,7 +191,7 @@ def __init__( self.status = status self.error_message = error_message - def as_dict(self, value_mapper: Optional[Dict[str, Callable]] = None) -> Dict[str, Any]: + def as_dict(self, value_mapper: Optional[dict[str, Callable]] = None) -> dict[str, Any]: return self.to_dict(view="element", value_mapper=value_mapper) @property @@ -535,7 +534,7 @@ def revision_update_available(self): return asbool(self.tool_shed_status.get("revision_update", False)) return False - def to_dict(self, view="collection", value_mapper: Optional[Dict[str, Callable]] = None) -> Dict[str, Any]: + def to_dict(self, view="collection", value_mapper: Optional[dict[str, Callable]] = None) -> dict[str, Any]: if value_mapper is None: value_mapper = {} rval = {} diff --git a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py index b4704e1394e8..59dca9ce3032 100644 --- a/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/migration_scripts_testing_utils.py @@ -1,10 +1,6 @@ import os import subprocess import tempfile -from typing import ( - List, - Tuple, -) import pytest from alembic.config import Config @@ -18,7 +14,7 @@ @pytest.fixture(scope="session") -def alembic_config_text(migrations_dir) -> List[str]: +def alembic_config_text(migrations_dir) -> list[str]: """Contents of production alembic.ini as list of lines""" current_config_path = migrations_dir / "alembic.ini" with open(current_config_path) as f: @@ -31,7 +27,7 @@ def tmp_directory(): yield tmp_dir -def update_config_for_staging(config_text: List[str], script_location: str, version_locations: str, dburl: str) -> None: +def update_config_for_staging(config_text: list[str], script_location: str, version_locations: str, dburl: str) -> None: """ config_text is a list containing the text of an alembic.ini file split into lines. This function updates config_text in place, setting values to config options. 
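# --- Illustrative sketch, not part of the diff: the hunks in this file (and
# throughout the PR) are the mechanical pyupgrade rewrite from typing-module
# aliases to the PEP 585 builtin generics, which Python >= 3.9 accepts in
# annotations at runtime. A minimal hypothetical before/after pair:

from typing import List, Tuple  # only the "before" spelling needs these


def db_heads_before(revisions: List[str]) -> Tuple[str, ...]:
    return tuple(revisions)


# After `pyupgrade --py39-plus`, the typing import above becomes unnecessary:
def db_heads_after(revisions: list[str]) -> tuple[str, ...]:
    return tuple(revisions)
# --- end sketch ---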
@@ -72,7 +68,7 @@ def run_command(cmd: str) -> subprocess.CompletedProcess: return subprocess.run(args, capture_output=True, text=True) -def get_db_heads(config: Config) -> Tuple[str, ...]: +def get_db_heads(config: Config) -> tuple[str, ...]: """Return revision ids (version heads) stored in the database.""" dburl = config.get_main_option("sqlalchemy.url") assert dburl diff --git a/lib/galaxy/model/unittest_utils/model_testing_utils.py b/lib/galaxy/model/unittest_utils/model_testing_utils.py index 869cc40888e8..dacbde037a4d 100644 --- a/lib/galaxy/model/unittest_utils/model_testing_utils.py +++ b/lib/galaxy/model/unittest_utils/model_testing_utils.py @@ -1,9 +1,9 @@ import os import uuid +from collections.abc import Iterator from contextlib import contextmanager from typing import ( Callable, - Iterator, Optional, ) diff --git a/lib/galaxy/model/unittest_utils/store_fixtures.py b/lib/galaxy/model/unittest_utils/store_fixtures.py index 172a4d634f2e..c49890dd2d8b 100644 --- a/lib/galaxy/model/unittest_utils/store_fixtures.py +++ b/lib/galaxy/model/unittest_utils/store_fixtures.py @@ -2,7 +2,6 @@ from typing import ( Any, - Dict, Literal, ) from uuid import uuid4 @@ -48,7 +47,7 @@ def one_ld_library_model_store_dict(): hash_value=TEST_HASH_VALUE, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=TEST_SOURCE_URI, extra_files_path=None, @@ -90,7 +89,7 @@ def one_ld_library_model_store_dict(): }, } - root_folder: Dict[str, Any] = { + root_folder: dict[str, Any] = { "model_class": "LibraryFolder", "name": TEST_ROOT_FOLDER_NAME, "description": TEST_ROOT_FOLDER_DESCRIPTION, @@ -124,7 +123,7 @@ def one_ld_library_deferred_model_store_dict(): hash_value=TEST_HASH_VALUE, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=TEST_SOURCE_URI, extra_files_path=None, @@ -167,7 +166,7 @@ def one_ld_library_deferred_model_store_dict(): }, } - root_folder: Dict[str, Any] = { + root_folder: dict[str, Any] = { "model_class": "LibraryFolder", "name": TEST_ROOT_FOLDER_NAME, "description": TEST_ROOT_FOLDER_DESCRIPTION, @@ -203,7 +202,7 @@ def one_hda_model_store_dict( hash_value=TEST_HASH_VALUE, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=TEST_SOURCE_URI, extra_files_path=None, @@ -249,7 +248,7 @@ def history_model_store_dict(): hash_value=TEST_HASH_VALUE, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=TEST_SOURCE_URI, extra_files_path=None, @@ -300,7 +299,7 @@ def deferred_hda_model_store_dict( hash_value=TEST_HASH_VALUE, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=source_uri, extra_files_path=None, @@ -348,7 +347,7 @@ def deferred_hda_model_store_dict_space_to_tab( actions_type: TRANSFORM_ACTIONS_TYPE, apply_transform: bool = True, ): - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( model_class="DatasetSource", source_uri=TEST_SOURCE_URI_SIMPLE_LINE, extra_files_path=None, @@ -414,7 +413,7 @@ def deferred_hda_model_store_dict_bam( hash_value=TEST_HASH_VALUE_BAM, extra_files_path=None, ) - dataset_source: Dict[str, Any] = dict( + dataset_source: dict[str, Any] = dict( 
model_class="DatasetSource", source_uri=source_uri, extra_files_path=None, diff --git a/lib/galaxy/navigation/components.py b/lib/galaxy/navigation/components.py index e6484ce5d74a..9bd00cc8f774 100644 --- a/lib/galaxy/navigation/components.py +++ b/lib/galaxy/navigation/components.py @@ -3,11 +3,8 @@ import string from enum import Enum from typing import ( - Dict, - List, NamedTuple, Optional, - Tuple, Union, ) @@ -49,7 +46,7 @@ def component_locator(self) -> LocatorT: """Return a (by, selector) Selenium element locator tuple for this selector.""" @property - def selenium_locator(self) -> Tuple[str, str]: + def selenium_locator(self) -> tuple[str, str]: element_locator: LocatorT = self.component_locator return (element_locator.selenium_by, element_locator.locator) @@ -61,7 +58,7 @@ def element_locator(self): class SelectorTemplate(Target): def __init__( self, - selector: Union[str, List[str]], + selector: Union[str, list[str]], selector_type: str, children=None, kwds=None, @@ -253,7 +250,7 @@ def resolve_component_locator(self, path: Optional[str] = None) -> LocatorT: if not path: return self._selectors["_"].resolve_component_locator() - def arguments() -> Tuple[str, Optional[Dict[str, str]], Optional[str]]: + def arguments() -> tuple[str, Optional[dict[str, str]], Optional[str]]: assert path if match := CALL_ARGUMENTS_RE.match(path): component_name = match.group("SUBCOMPONENT") diff --git a/lib/galaxy/quota/_schema.py b/lib/galaxy/quota/_schema.py index 937ced98bdc0..faf9c3cc85eb 100644 --- a/lib/galaxy/quota/_schema.py +++ b/lib/galaxy/quota/_schema.py @@ -1,6 +1,5 @@ from enum import Enum from typing import ( - List, Optional, ) @@ -133,7 +132,7 @@ class QuotaSummary(QuotaBase): class QuotaSummaryList(RootModel): - root: List[QuotaSummary] = Field( + root: list[QuotaSummary] = Field( default=[], title="List with summary information of Quotas.", ) @@ -152,17 +151,17 @@ class QuotaDetails(QuotaBase): title="Display Amount", description="Human-readable representation of the `amount` field.", ) - default: List[DefaultQuota] = Field( + default: list[DefaultQuota] = Field( [], title="Default", description="A list indicating which types of default user quotas, if any, are associated with this quota.", ) - users: List[UserQuota] = Field( + users: list[UserQuota] = Field( [], title="Users", description="A list of specific users associated with this quota.", ) - groups: List[GroupQuota] = Field( + groups: list[GroupQuota] = Field( [], title="Groups", description="A list of specific groups of users associated with this quota.", @@ -200,12 +199,12 @@ class CreateQuotaParams(Model): title="Quota Source Label", description="If set, quota source label to apply this quota operation to. Otherwise, the default quota is used.", ) - in_users: Optional[List[str]] = Field( + in_users: Optional[list[str]] = Field( default=[], title="Users", description="A list of user IDs or user emails to associate with this quota.", ) - in_groups: Optional[List[str]] = Field( + in_groups: Optional[list[str]] = Field( default=[], title="Groups", description="A list of group IDs or names to associate with this quota.", @@ -247,12 +246,12 @@ class UpdateQuotaParams(Model): " passing this parameter is equivalent to passing ``no``." 
), ) - in_users: Optional[List[str]] = Field( + in_users: Optional[list[str]] = Field( default=None, title="Users", description="A list of user IDs or user emails to associate with this quota.", ) - in_groups: Optional[List[str]] = Field( + in_groups: Optional[list[str]] = Field( default=None, title="Groups", description="A list of group IDs or names to associate with this quota.", diff --git a/lib/galaxy/schema/__init__.py b/lib/galaxy/schema/__init__.py index b5dac468e512..de8303ca6f91 100644 --- a/lib/galaxy/schema/__init__.py +++ b/lib/galaxy/schema/__init__.py @@ -1,15 +1,11 @@ +from collections.abc import Iterable from copy import deepcopy from datetime import datetime from enum import Enum from typing import ( Any, Callable, - Dict, - Iterable, - List, Optional, - Tuple, - Type, TypeVar, Union, ) @@ -26,7 +22,7 @@ class BootstrapAdminUser(BaseModel): id: int = 0 email: Optional[str] = None username: Optional[str] = None - preferences: Dict[str, str] = {} + preferences: dict[str, str] = {} bootstrap_admin_user: bool = True def all_roles(*args) -> list: @@ -39,13 +35,13 @@ class ValueFilterQueryParams(BaseModel): Multiple `q/qv` queries can be concatenated. """ - q: Optional[Union[List[str], str]] = Field( + q: Optional[Union[list[str], str]] = Field( default=None, title="Filter Query", description="Generally a property name to filter by followed by an (often optional) hyphen and operator string.", examples=["create_time-gt"], ) - qv: Optional[Union[List[str], str]] = Field( + qv: Optional[Union[list[str], str]] = Field( default=None, title="Filter Value", description="The value to filter by.", @@ -97,7 +93,7 @@ class SerializationParams(BaseModel): ), examples=["summary"], ) - keys: Optional[List[str]] = Field( + keys: Optional[list[str]] = Field( default=None, title="Keys", description=( @@ -129,22 +125,22 @@ class APIKeyModel(BaseModel): # It should be removed when Python/pydantic supports this feature natively. 
# https://github.com/pydantic/pydantic/issues/1673 def partial_model( - include: Optional[List[str]] = None, exclude: Optional[List[str]] = None -) -> Callable[[Type[T]], Type[T]]: + include: Optional[list[str]] = None, exclude: Optional[list[str]] = None +) -> Callable[[type[T]], type[T]]: """Decorator to make all model fields optional""" if exclude is None: exclude = [] - def decorator(model: Type[T]) -> Type[T]: - def make_optional(field: FieldInfo, default: Any = None) -> Tuple[Any, FieldInfo]: + def decorator(model: type[T]) -> type[T]: + def make_optional(field: FieldInfo, default: Any = None) -> tuple[Any, FieldInfo]: new = deepcopy(field) new.default = default new.annotation = Optional[field.annotation or Any] # type:ignore[assignment] return new.annotation, new if include is None: - fields: Iterable[Tuple[str, FieldInfo]] = model.model_fields.items() + fields: Iterable[tuple[str, FieldInfo]] = model.model_fields.items() else: fields = ((k, v) for k, v in model.model_fields.items() if k in include) diff --git a/lib/galaxy/schema/fetch_data.py b/lib/galaxy/schema/fetch_data.py index daf9c905dfed..98e4a54fef54 100644 --- a/lib/galaxy/schema/fetch_data.py +++ b/lib/galaxy/schema/fetch_data.py @@ -1,9 +1,8 @@ import json from enum import Enum from typing import ( + Annotated, Any, - Dict, - List, Optional, Union, ) @@ -15,7 +14,6 @@ Json, ) from typing_extensions import ( - Annotated, Literal, ) @@ -83,7 +81,7 @@ class LibraryFolderDestination(FetchBaseModel): class BaseCollectionTarget(BaseFetchDataTarget): destination: HdcaDestination collection_type: Optional[str] = None - tags: Optional[List[str]] = None + tags: Optional[list[str]] = None name: Optional[str] = None @@ -118,7 +116,7 @@ class BaseDataElement(FetchBaseModel): space_to_tab: bool = False to_posix_lines: bool = False deferred: bool = False - tags: Optional[List[str]] = None + tags: Optional[list[str]] = None created_from_basename: Optional[str] = None extra_files: Optional[ExtraFiles] = None auto_decompress: bool = AutoDecompressField @@ -128,7 +126,7 @@ class BaseDataElement(FetchBaseModel): SHA1: Optional[str] = Field(None, alias="SHA-1") SHA256: Optional[str] = Field(None, alias="SHA-256") SHA512: Optional[str] = Field(None, alias="SHA-512") - hashes: Optional[List[FetchDatasetHash]] = None + hashes: Optional[list[FetchDatasetHash]] = None description: Optional[str] = None model_config = ConfigDict(extra="forbid") @@ -183,11 +181,11 @@ class PathDataElement(BaseDataElement): class CompositeDataElement(BaseDataElement): src: Literal["composite"] composite: "CompositeItems" - metadata: Optional[Dict[str, Any]] = None + metadata: Optional[dict[str, Any]] = None class CompositeItems(FetchBaseModel): - items: List[ + items: list[ Union[FileDataElement, PastedDataElement, UrlDataElement, PathDataElement, ServerDirElement, FtpImportElement] ] = Field(..., alias="elements") @@ -196,7 +194,7 @@ class CompositeItems(FetchBaseModel): class NestedElement(BaseDataElement): - items: List[Union["AnyElement", "NestedElement"]] = Field(..., alias="elements") + items: list[Union["AnyElement", "NestedElement"]] = Field(..., alias="elements") AnyElement = Annotated[ @@ -235,7 +233,7 @@ class BaseDataTarget(BaseFetchDataTarget): class DataElementsTarget(BaseDataTarget): - items: List[Union[AnyElement, NestedElement]] = Field(..., alias="elements") + items: list[Union[AnyElement, NestedElement]] = Field(..., alias="elements") class DataElementsFromTarget(BaseDataTarget, ItemsFromModel): @@ -243,7 +241,7 @@ class 
DataElementsFromTarget(BaseDataTarget, ItemsFromModel): class HdcaDataItemsTarget(BaseCollectionTarget): - items: List[Union[AnyElement2, NestedElement]] = Field(..., alias="elements") + items: list[Union[AnyElement2, NestedElement]] = Field(..., alias="elements") class HdcaDataItemsFromTarget(BaseCollectionTarget, ItemsFromModel): @@ -267,7 +265,7 @@ def targets_string_to_json(cls, v): return v -Targets = List[ +Targets = list[ Union[ DataElementsTarget, HdcaDataItemsTarget, diff --git a/lib/galaxy/schema/fields.py b/lib/galaxy/schema/fields.py index f3a81d47bce4..180e463d8777 100644 --- a/lib/galaxy/schema/fields.py +++ b/lib/galaxy/schema/fields.py @@ -1,5 +1,6 @@ import re from typing import ( + Annotated, Callable, get_origin, TYPE_CHECKING, @@ -14,7 +15,6 @@ ) from pydantic_core import PydanticCustomError from typing_extensions import ( - Annotated, get_args, ) diff --git a/lib/galaxy/schema/generics.py b/lib/galaxy/schema/generics.py index 7312a4b32869..b224cbea1cf4 100644 --- a/lib/galaxy/schema/generics.py +++ b/lib/galaxy/schema/generics.py @@ -2,8 +2,6 @@ from typing import ( Any, Generic, - Tuple, - Type, TypeVar, ) @@ -23,7 +21,7 @@ class GenericModel(BaseModel): @classmethod - def model_parametrized_name(cls, params: Tuple[Type[Any], ...]) -> str: + def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str: suffix = cls.__determine_suffix__(params) class_name = cls.__name__.split("Generic", 1)[-1] return f"{class_name}{suffix}" @@ -35,7 +33,7 @@ def __get_pydantic_core_schema__(cls, *args, **kwargs): return result @classmethod - def __determine_suffix__(cls, params: Tuple[Type[Any], ...]) -> str: + def __determine_suffix__(cls, params: tuple[type[Any], ...]) -> str: suffix = "Incoming" if params[0] is EncodedDatabaseIdField: suffix = "Response" diff --git a/lib/galaxy/schema/groups.py b/lib/galaxy/schema/groups.py index b513ba26fa41..e1acfd235fa4 100644 --- a/lib/galaxy/schema/groups.py +++ b/lib/galaxy/schema/groups.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -52,7 +51,7 @@ class GroupResponse(Model, WithModelClass): class GroupListResponse(RootModel): """Response schema for listing groups.""" - root: List[GroupResponse] + root: list[GroupResponse] class GroupCreatePayload(Model): @@ -62,11 +61,11 @@ class GroupCreatePayload(Model): ..., title="name of the group", ) - user_ids: List[DecodedDatabaseIdField] = Field( + user_ids: list[DecodedDatabaseIdField] = Field( [], title="user IDs", ) - role_ids: List[DecodedDatabaseIdField] = Field( + role_ids: list[DecodedDatabaseIdField] = Field( [], title="role IDs", ) @@ -80,11 +79,11 @@ class GroupUpdatePayload(Model): ..., title="name of the group", ) - user_ids: Optional[List[DecodedDatabaseIdField]] = Field( + user_ids: Optional[list[DecodedDatabaseIdField]] = Field( None, title="user IDs", ) - role_ids: Optional[List[DecodedDatabaseIdField]] = Field( + role_ids: Optional[list[DecodedDatabaseIdField]] = Field( None, title="role IDs", ) diff --git a/lib/galaxy/schema/help.py b/lib/galaxy/schema/help.py index 6ecb83513b49..31bfa1c489a2 100644 --- a/lib/galaxy/schema/help.py +++ b/lib/galaxy/schema/help.py @@ -1,6 +1,6 @@ from typing import ( + Annotated, Any, - List, Optional, ) @@ -8,7 +8,6 @@ ConfigDict, Field, ) -from typing_extensions import Annotated from galaxy.schema.schema import Model @@ -60,7 +59,7 @@ class HelpForumTopic(Model): archived: Annotated[bool, Field(description="Whether the topic is archived.")] bookmarked: Annotated[Optional[bool], Field(default=None, description="Whether the 
topic is bookmarked.")] liked: Annotated[Optional[bool], Field(default=None, description="Whether the topic is liked.")] - tags: Annotated[List[str], Field(description="The tags of the topic.")] + tags: Annotated[list[str], Field(description="The tags of the topic.")] tags_descriptions: Annotated[ Optional[Any], Field(default=None, description="The descriptions of the tags of the topic.") ] @@ -104,22 +103,22 @@ class HelpForumSearchResponse(Model): This model is based on the Discourse API response for the search endpoint. """ - posts: Annotated[List[HelpForumPost], Field(default=None, description="The list of posts returned by the search.")] + posts: Annotated[list[HelpForumPost], Field(default=None, description="The list of posts returned by the search.")] topics: Annotated[ - List[HelpForumTopic], Field(default=None, description="The list of topics returned by the search.") + list[HelpForumTopic], Field(default=None, description="The list of topics returned by the search.") ] users: Annotated[ - Optional[List[HelpForumUser]], Field(default=None, description="The list of users returned by the search.") + Optional[list[HelpForumUser]], Field(default=None, description="The list of users returned by the search.") ] categories: Annotated[ - Optional[List[HelpForumCategory]], + Optional[list[HelpForumCategory]], Field(default=None, description="The list of categories returned by the search."), ] tags: Annotated[ - Optional[List[HelpForumTag]], Field(default=None, description="The list of tags returned by the search.") + Optional[list[HelpForumTag]], Field(default=None, description="The list of tags returned by the search.") ] groups: Annotated[ - Optional[List[HelpForumGroup]], Field(default=None, description="The list of groups returned by the search.") + Optional[list[HelpForumGroup]], Field(default=None, description="The list of groups returned by the search.") ] grouped_search_result: Annotated[ Optional[HelpForumGroupedSearchResult], Field(default=None, description="The grouped search result.") diff --git a/lib/galaxy/schema/history.py b/lib/galaxy/schema/history.py index be02370c2555..b0978dc67168 100644 --- a/lib/galaxy/schema/history.py +++ b/lib/galaxy/schema/history.py @@ -1,6 +1,5 @@ from datetime import datetime from typing import ( - List, Optional, ) @@ -78,7 +77,7 @@ class HistoryQueryResult(Model): class HistoryQueryResultList(RootModel): - root: List[HistoryQueryResult] = Field( + root: list[HistoryQueryResult] = Field( default=[], title="List with detailed information of Histories.", ) diff --git a/lib/galaxy/schema/invocation.py b/lib/galaxy/schema/invocation.py index 76d7ca72761f..8f5b9c16552f 100644 --- a/lib/galaxy/schema/invocation.py +++ b/lib/galaxy/schema/invocation.py @@ -1,10 +1,9 @@ from datetime import datetime from enum import Enum from typing import ( + Annotated, Any, - Dict, Generic, - List, Optional, Union, ) @@ -18,7 +17,6 @@ UUID4, ) from typing_extensions import ( - Annotated, Literal, TypeAliasType, ) @@ -411,17 +409,17 @@ class InvocationStep(Model, WithModelClass): title="UUID", description="Universal unique identifier of the workflow step.", ) - outputs: Dict[str, InvocationStepOutput] = Field( + outputs: dict[str, InvocationStepOutput] = Field( {}, title="Outputs", description="The outputs of the workflow invocation step.", ) - output_collections: Dict[str, InvocationStepCollectionOutput] = Field( + output_collections: dict[str, InvocationStepCollectionOutput] = Field( {}, title="Output collections", description="The dataset collection outputs of the 
workflow invocation step.", ) - jobs: List[schema.JobBaseModel] = Field( + jobs: list[schema.JobBaseModel] = Field( [], title="Jobs", description="Jobs associated with the workflow invocation step.", @@ -470,38 +468,38 @@ class InvocationReport(Model, WithModelClass): generate_time: Optional[str] = schema.GenerateTimeField generate_version: Optional[str] = schema.GenerateVersionField - errors: Optional[Dict[str, Any]] = Field( + errors: Optional[dict[str, Any]] = Field( default=None, title="Errors", description="Errors associated with the invocation.", ) - history_datasets: Optional[Dict[str, Any]] = Field( + history_datasets: Optional[dict[str, Any]] = Field( default=None, title="History datasets", description="History datasets associated with the invocation.", ) - workflows: Optional[Dict[str, Any]] = Field( + workflows: Optional[dict[str, Any]] = Field( default=None, title="Workflows", description="Workflows associated with the invocation.", ) - history_dataset_collections: Optional[Dict[str, Any]] = Field( + history_dataset_collections: Optional[dict[str, Any]] = Field( default=None, title="History dataset collections", description="History dataset collections associated with the invocation.", ) - jobs: Optional[Dict[str, Any]] = Field( + jobs: Optional[dict[str, Any]] = Field( default=None, title="Jobs", description="Jobs associated with the invocation.", ) - histories: Optional[Dict[str, Any]] = Field( + histories: Optional[dict[str, Any]] = Field( default=None, title="Histories", description="Histories associated with the invocation.", ) - invocations: Optional[Dict[str, Any]] = Field( + invocations: Optional[dict[str, Any]] = Field( default=None, title="Invocations", description="Other invocations associated with the invocation.", @@ -583,25 +581,25 @@ class WorkflowInvocationCollectionView(Model, WithModelClass): class WorkflowInvocationElementView(WorkflowInvocationCollectionView): - steps: List[InvocationStep] = Field(default=..., title="Steps", description="Steps of the workflow invocation.") - inputs: Dict[str, InvocationInput] = Field( + steps: list[InvocationStep] = Field(default=..., title="Steps", description="Steps of the workflow invocation.") + inputs: dict[str, InvocationInput] = Field( default=..., title="Inputs", description="Input datasets/dataset collections of the workflow invocation." ) - input_step_parameters: Dict[str, InvocationInputParameter] = Field( + input_step_parameters: dict[str, InvocationInputParameter] = Field( default=..., title="Input step parameters", description="Input step parameters of the workflow invocation." ) - outputs: Dict[str, InvocationOutput] = Field( + outputs: dict[str, InvocationOutput] = Field( default=..., title="Outputs", description="Output datasets of the workflow invocation." ) - output_collections: Dict[str, InvocationOutputCollection] = Field( + output_collections: dict[str, InvocationOutputCollection] = Field( default=..., title="Output collections", description="Output dataset collections of the workflow invocation.", ) - output_values: Dict[str, Any] = Field( + output_values: dict[str, Any] = Field( default=..., title="Output values", description="Output values of the workflow invocation." 
) - messages: List[InvocationMessageResponseUnion] = Field( + messages: list[InvocationMessageResponseUnion] = Field( default=..., title="Messages", description="A list of messages about why the invocation did not succeed.", @@ -623,7 +621,7 @@ class WorkflowInvocationRequestModel(Model): description="The encoded history id the workflow was run in.", ) workflow_id: str = Field(title="Workflow ID", description="The encoded Workflow ID associated with the invocation.") - inputs: Dict[str, Any] = Field( + inputs: dict[str, Any] = Field( ..., title="Inputs", description="Values for inputs", @@ -633,8 +631,8 @@ class WorkflowInvocationRequestModel(Model): title="Inputs by", description=INPUTS_BY_DESCRIPTION, ) - replacement_params: Optional[Dict[str, Any]] = ReplacementParametersField - resource_params: Optional[Dict[str, Any]] = ResourceParametersField + replacement_params: Optional[dict[str, Any]] = ReplacementParametersField + resource_params: Optional[dict[str, Any]] = ResourceParametersField use_cached_job: bool = UseCachedJobField preferred_object_store_id: Optional[str] = PreferredObjectStoreIdField preferred_intermediate_object_store_id: Optional[str] = PreferredIntermediateObjectStoreIdField @@ -644,7 +642,7 @@ class WorkflowInvocationRequestModel(Model): title=STEP_PARAMETERS_NORMALIZED_TITLE, description=STEP_PARAMETERS_NORMALIZED_DESCRIPTION, ) - parameters: Optional[Dict[str, Any]] = Field( + parameters: Optional[dict[str, Any]] = Field( None, title=STEP_PARAMETERS_TITLE, description=f"{STEP_PARAMETERS_DESCRIPTION} If these are set, the workflow was not executed in a best-practice fashion and the resulting invocation request may not fully reflect the executed workflow state.", @@ -658,7 +656,7 @@ class WorkflowInvocationRequestModel(Model): class InvocationJobsSummaryBaseModel(Model): id: EncodedDatabaseIdField = InvocationIdField - states: Dict[JobState, int] = Field( + states: dict[JobState, int] = Field( default=..., title="States", description="The states of all the jobs related to the Invocation."
) populated_state: JobState = Field( diff --git a/lib/galaxy/schema/item_tags.py b/lib/galaxy/schema/item_tags.py index 8643a926ee89..bdf5f9948fb1 100644 --- a/lib/galaxy/schema/item_tags.py +++ b/lib/galaxy/schema/item_tags.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -36,7 +35,7 @@ class ItemTagsResponse(Model): class ItemTagsListResponse(RootModel): """Response schema for listing item tags.""" - root: List[ItemTagsResponse] + root: list[ItemTagsResponse] class ItemTagsCreatePayload(Model): diff --git a/lib/galaxy/schema/jobs.py b/lib/galaxy/schema/jobs.py index 4a60e355e8ce..404012e02060 100644 --- a/lib/galaxy/schema/jobs.py +++ b/lib/galaxy/schema/jobs.py @@ -1,8 +1,6 @@ import json from typing import ( Any, - Dict, - List, Optional, Union, ) @@ -45,7 +43,7 @@ class JobInputSummary(Model): # TODO: Use Tuple again when `make update-client-api-schema` supports them class JobErrorSummary(Model): # messages: List[Union[Tuple[str, str], List[str]]] - messages: List[List[str]] = Field( + messages: list[list[str]] = Field( default=..., title="Error messages", description="The error messages for the specified job.", @@ -105,7 +103,7 @@ class SearchJobsPayload(Model): title="Tool ID", description="The tool ID related to the job.", ) - inputs: Dict[str, Any] = Field( + inputs: dict[str, Any] = Field( default=..., title="Inputs", description="The inputs of the job.", @@ -170,12 +168,12 @@ class EncodedJobDetails(JobSummary): "The specific parameters depend on the tool itself." ), ) - inputs: Dict[str, EncodedDatasetJobInfo] = Field( + inputs: dict[str, EncodedDatasetJobInfo] = Field( {}, title="Inputs", description="Dictionary mapping all the tool inputs (by name) to the corresponding data references.", ) - outputs: Dict[str, EncodedDatasetJobInfo] = Field( + outputs: dict[str, EncodedDatasetJobInfo] = Field( {}, title="Outputs", description="Dictionary mapping all the tool outputs (by name) to the corresponding data references.", @@ -185,7 +183,7 @@ class EncodedJobDetails(JobSummary): title="Copied from Job-ID", description="Reference to cached job if job execution was cached.", ) - output_collections: Dict[str, EncodedHdcaSourceId] = Field( + output_collections: dict[str, EncodedHdcaSourceId] = Field( default={}, title="Output collections", description="", @@ -229,20 +227,20 @@ class JobParameter(Model): title="Depth", description="The depth of the job parameter.", ) - value: Optional[Union[List[Optional[EncodedJobParameterHistoryItem]], float, int, bool, str]] = Field( + value: Optional[Union[list[Optional[EncodedJobParameterHistoryItem]], float, int, bool, str]] = Field( default=None, title="Value", description="The values of the job parameter", union_mode="left_to_right" ) notes: Optional[str] = Field(default=None, title="Notes", description="Notes associated with the job parameter.") class JobDisplayParametersSummary(Model): - parameters: List[JobParameter] = Field( + parameters: list[JobParameter] = Field( default=..., title="Parameters", description="The parameters of the job in a nested format." 
) has_parameter_errors: bool = Field( default=..., title="Has parameter errors", description="The job has parameter errors" ) - outputs: Dict[str, List[JobOutput]] = Field( + outputs: dict[str, list[JobOutput]] = Field( default=..., title="Outputs", description="Dictionary mapping all the tool outputs (by name) with the corresponding dataset information in a nested format.", diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index 52b41c7c82d1..1e0d1f05b65c 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -1,9 +1,8 @@ import json from enum import Enum from typing import ( + Annotated, Any, - Dict, - List, Optional, Union, ) @@ -15,7 +14,6 @@ ) from pydantic.functional_validators import field_validator from typing_extensions import ( - Annotated, Literal, ) @@ -67,7 +65,7 @@ class LibraryContentsCreatePayload(Model): False, description="create tags on datasets using the file's original name", ) - tags: List[str] = Field( + tags: list[str] = Field( [], description="create the given list of tags on datasets", ) @@ -83,7 +81,7 @@ class LibraryContentsCreatePayload(Model): "", description="the new message attribute of the LDDA created", ) - extended_metadata: Optional[Dict[str, Any]] = Field( + extended_metadata: Optional[dict[str, Any]] = Field( None, description="sub-dictionary containing any extended metadata to associate with the item", ) @@ -131,7 +129,7 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): None, title="UUID of the dataset to upload", ) - upload_files: Optional[List[Dict[str, Any]]] = Field( + upload_files: Optional[list[dict[str, Any]]] = Field( None, title="list of the uploaded files", ) @@ -156,7 +154,7 @@ class LibraryContentsCollectionCreatePayload(LibraryContentsCreatePayload): ..., title="the type of collection to create", ) - element_identifiers: List[Dict[str, Any]] = Field( + element_identifiers: list[dict[str, Any]] = Field( ..., title="list of dictionaries containing the element identifiers for the collection", ) @@ -203,7 +201,7 @@ class LibraryContentsIndexDatasetResponse(LibraryContentsIndexResponse): class LibraryContentsIndexListResponse(RootModel): - root: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] + root: list[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] class LibraryContentsShowResponse(Model): @@ -220,7 +218,7 @@ class LibraryContentsShowFolderResponse(LibraryContentsShowResponse): description: str item_count: int deleted: bool - library_path: List[str] + library_path: list[str] class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): @@ -261,11 +259,11 @@ class LibraryContentsCreateFileResponse(LibraryContentsCreateResponse): class LibraryContentsCreateFolderListResponse(RootModel): - root: List[LibraryContentsCreateFolderResponse] + root: list[LibraryContentsCreateFolderResponse] class LibraryContentsCreateFileListResponse(RootModel): - root: List[LibraryContentsCreateFileResponse] + root: list[LibraryContentsCreateFileResponse] class LibraryContentsCreateDatasetResponse(Model): @@ -299,7 +297,7 @@ class LibraryContentsCreateDatasetResponse(Model): class LibraryContentsCreateDatasetCollectionResponse(RootModel): - root: List[LibraryContentsCreateDatasetResponse] + root: list[LibraryContentsCreateDatasetResponse] class LibraryContentsDeleteResponse(Model): diff --git a/lib/galaxy/schema/notifications.py b/lib/galaxy/schema/notifications.py index 
1c8204a2369b..a7f59906924d 100644 --- a/lib/galaxy/schema/notifications.py +++ b/lib/galaxy/schema/notifications.py @@ -1,10 +1,9 @@ from datetime import datetime from enum import Enum from typing import ( + Annotated, Any, - Dict, Generic, - List, Optional, Union, ) @@ -15,7 +14,6 @@ RootModel, ) from typing_extensions import ( - Annotated, Literal, ) @@ -92,7 +90,7 @@ class ActionLink(Model): class BroadcastNotificationContent(MessageNotificationContentBase): category: Literal[MandatoryNotificationCategory.broadcast] = MandatoryNotificationCategory.broadcast - action_links: Optional[List[ActionLink]] = Field( + action_links: Optional[list[ActionLink]] = Field( None, title="Action links", description="The optional action links (buttons) to be displayed in the notification.", @@ -234,13 +232,13 @@ class BroadcastNotificationResponse(NotificationResponse): class UserNotificationListResponse(RootModel): """A list of user notifications.""" - root: List[UserNotificationResponse] + root: list[UserNotificationResponse] class BroadcastNotificationListResponse(RootModel): """A list of broadcast notifications.""" - root: List[BroadcastNotificationResponse] + root: list[BroadcastNotificationResponse] class NotificationStatusSummary(Model): @@ -249,10 +247,10 @@ class NotificationStatusSummary(Model): total_unread_count: int = Field( ..., title="Total unread count", description="The total number of unread notifications for the user." ) - notifications: List[UserNotificationResponse] = Field( + notifications: list[UserNotificationResponse] = Field( ..., title="Notifications", description="The list of updated notifications for the user." ) - broadcasts: List[BroadcastNotificationResponse] = Field( + broadcasts: list[BroadcastNotificationResponse] = Field( ..., title="Broadcasts", description="The list of updated broadcasts." ) @@ -279,17 +277,17 @@ class NotificationCreateData(Model): class GenericNotificationRecipients(GenericModel, Generic[DatabaseIdT], PatchGenericPickle): """The recipients of a notification. 
Can be a combination of users, groups and roles.""" - user_ids: List[DatabaseIdT] = Field( + user_ids: list[DatabaseIdT] = Field( default=[], title="User IDs", description="The list of encoded user IDs of the users that should receive the notification.", ) - group_ids: List[DatabaseIdT] = Field( + group_ids: list[DatabaseIdT] = Field( default=[], title="Group IDs", description="The list of encoded group IDs of the groups that should receive the notification.", ) - role_ids: List[DatabaseIdT] = Field( + role_ids: list[DatabaseIdT] = Field( default=[], title="Role IDs", description="The list of encoded role IDs of the roles that should receive the notification.", @@ -401,7 +399,7 @@ class NotificationBroadcastUpdateRequest(NotificationUpdateRequest): class NotificationsBatchRequest(Model): - notification_ids: List[DecodedDatabaseIdField] = Field( + notification_ids: list[DecodedDatabaseIdField] = Field( ..., title="Notification IDs", description="The list of encoded notification IDs of the notifications that should be updated.", @@ -461,7 +459,7 @@ class NotificationCategorySettings(Model): ) -PersonalNotificationPreferences = Dict[PersonalNotificationCategory, NotificationCategorySettings] +PersonalNotificationPreferences = dict[PersonalNotificationCategory, NotificationCategorySettings] def get_default_personal_notification_preferences() -> PersonalNotificationPreferences: @@ -469,7 +467,7 @@ def get_default_personal_notification_preferences() -> PersonalNotificationPrefe return {category: NotificationCategorySettings() for category in PersonalNotificationCategory.__members__.values()} -def get_default_personal_notification_preferences_example() -> Dict[str, Any]: +def get_default_personal_notification_preferences_example() -> dict[str, Any]: return { category: NotificationCategorySettings().model_dump() for category in PersonalNotificationCategory.__members__.values() diff --git a/lib/galaxy/schema/remote_files.py b/lib/galaxy/schema/remote_files.py index 7f4c71a4b93e..74966c9ee021 100644 --- a/lib/galaxy/schema/remote_files.py +++ b/lib/galaxy/schema/remote_files.py @@ -1,7 +1,7 @@ from enum import Enum from typing import ( + Annotated, Any, - List, Optional, Union, ) @@ -11,7 +11,6 @@ RootModel, ) from typing_extensions import ( - Annotated, Literal, ) @@ -109,7 +108,7 @@ class BrowsableFilesSourcePlugin(FilesSourcePlugin): class FilesSourcePluginList(RootModel): - root: List[Union[BrowsableFilesSourcePlugin, FilesSourcePlugin]] = Field( + root: list[Union[BrowsableFilesSourcePlugin, FilesSourcePlugin]] = Field( default=[], title="List of files source plugins", examples=[ @@ -143,7 +142,7 @@ class RemoteFile(RemoteEntry): class ListJstreeResponse(RootModel): - root: List[Any] = Field( + root: list[Any] = Field( default=[], title="List of files", description="List of files in Jstree format.", @@ -159,7 +158,7 @@ class ListJstreeResponse(RootModel): class ListUriResponse(RootModel): - root: List[AnyRemoteEntry] = Field( + root: list[AnyRemoteEntry] = Field( default=[], title="List of remote entries", description="List of directories and files.", diff --git a/lib/galaxy/schema/schema.py b/lib/galaxy/schema/schema.py index d954726aa02f..2829533232f4 100644 --- a/lib/galaxy/schema/schema.py +++ b/lib/galaxy/schema/schema.py @@ -7,12 +7,9 @@ ) from enum import Enum from typing import ( + Annotated, Any, - Dict, - List, Optional, - Set, - Tuple, Union, ) from uuid import UUID @@ -31,7 +28,6 @@ ) from pydantic_core import core_schema from typing_extensions import ( - Annotated, Literal, ) @@ 
@@ -245,7 +241,7 @@ class DatasetSourceTransformActionType(str, Enum):
     ),
 ]
 
-ElementsField: List["DCESummary"] = Field(
+ElementsField: list["DCESummary"] = Field(
     [],
     title="Elements",
     description="The summary information of each of the elements inside the dataset collection.",
@@ -375,7 +371,7 @@ class AnonUserModel(DiskUsageUserModel):
 class DetailedUserModel(BaseUserModel, AnonUserModel):
     is_admin: bool = Field(default=..., title="Is admin", description="User is admin")
     purged: bool = Field(default=..., title="Purged", description="User is purged")
-    preferences: Dict[Any, Any] = Field(default=..., title="Preferences", description="Preferences of the user")
+    preferences: dict[Any, Any] = Field(default=..., title="Preferences", description="Preferences of the user")
     preferred_object_store_id: Optional[str] = PreferredObjectStoreIdField
     quota: str = Field(default=..., title="Quota", description="Quota applicable to the user")
     quota_bytes: Optional[int] = Field(
@@ -415,7 +411,7 @@ class FavoriteObject(Model):
 
 
 class FavoriteObjectsSummary(Model):
-    tools: List[str] = Field(default=..., title="Favorite tools", description="The name of the tools the user favored.")
+    tools: list[str] = Field(default=..., title="Favorite tools", description="The name of the tools the user favored.")
 
 
 class FavoriteObjectType(str, Enum):
@@ -470,7 +466,7 @@ class CustomBuildModel(CreatedCustomBuild):
 
 
 class CustomBuildsCollection(RootModel):
-    root: List[CustomBuildModel] = Field(
+    root: list[CustomBuildModel] = Field(
         default=..., title="Custom builds collection", description="The custom builds associated with the user."
     )
 
@@ -557,7 +553,7 @@ class HistoryContentSource(str, Enum):
 class TagCollection(RootModel):
     """Represents the collection of tags associated with an item."""
 
-    root: List[TagItem] = Field(
+    root: list[TagItem] = Field(
         default=...,
         title="Tags",
         description="The collection of tags associated with an item.",
@@ -579,12 +575,12 @@ class MetadataFile(Model):
 class DatasetPermissions(Model):
     """Role-based permissions for accessing and managing a dataset."""
 
-    manage: List[DecodedDatabaseIdField] = Field(
+    manage: list[DecodedDatabaseIdField] = Field(
         [],
         title="Management",
         description="The set of roles (encoded IDs) that can manage this dataset.",
     )
-    access: List[DecodedDatabaseIdField] = Field(
+    access: list[DecodedDatabaseIdField] = Field(
         [],
         title="Access",
         description="The set of roles (encoded IDs) that can access this dataset.",
@@ -613,7 +609,7 @@ class DisplayApp(Model):
         title="Label",
         description="The label or title of the Display Application.",
     )
-    links: List[Hyperlink] = Field(
+    links: list[Hyperlink] = Field(
         ...,
         title="Links",
         description="The collection of link details for this Display Application.",
@@ -787,7 +783,7 @@ class DatasetSource(Model):
         Optional[str], Field(None, title="Extra Files Path", description="The path to the extra files.")
     ]
     transform: Annotated[
-        Optional[List[DatasetSourceTransform]],
+        Optional[list[DatasetSourceTransform]],
         Field(
             None,
             title="Transform",
@@ -832,7 +828,7 @@ class HDADetailed(HDASummary, WithModelClass):
         title="Metadata",
         description="The metadata associated with this dataset.",
     )
-    meta_files: List[MetadataFile] = Field(
+    meta_files: list[MetadataFile] = Field(
         ...,
         title="Metadata Files",
         description="Collection of metadata files associated with this dataset.",
@@ -869,12 +865,12 @@ class HDADetailed(HDASummary, WithModelClass):
         title="File Name",
         description="The full path to the dataset file.",
     )
-    display_apps: List[DisplayApp] = Field(
+    display_apps: list[DisplayApp] = Field(
         ...,
         title="Display Applications",
         description="Contains new-style display app urls.",
     )
-    display_types: List[DisplayApp] = Field(
+    display_types: list[DisplayApp] = Field(
         ...,
         title="Legacy Display Applications",
         description="Contains old-style display app urls.",
@@ -916,7 +912,7 @@ class HDADetailed(HDASummary, WithModelClass):
         description="The basename of the output that produced this dataset.",  # TODO: is that correct?
     )
     hashes: Annotated[
-        List[DatasetHash],
+        list[DatasetHash],
         Field(
             ...,
             title="Hashes",
@@ -932,7 +928,7 @@
         ),
     ]
     sources: Annotated[
-        List[DatasetSource],
+        list[DatasetSource],
         Field(
             ...,
             title="Sources",
@@ -987,7 +983,7 @@ class HDAObject(Model, WithModelClass):
     state: DatasetStateField
     hda_ldda: DatasetSourceType = HdaLddaField
     history_id: HistoryID
-    tags: List[str]
+    tags: list[str]
     copied_from_ldda_id: Optional[EncodedDatabaseIdField] = None
     accessible: Optional[bool] = None
     purged: bool
@@ -1003,7 +999,7 @@ class DCObject(Model, WithModelClass):
     populated: PopulatedField
    element_count: ElementCountField
     contents_url: Optional[ContentsUrlField] = None
-    elements: List["DCESummary"] = ElementsField
+    elements: list["DCESummary"] = ElementsField
 
 
 class DCESummary(Model, WithModelClass):
@@ -1040,7 +1036,7 @@ class DCDetailed(DCSummary):
     """Dataset Collection detailed information."""
 
     populated: PopulatedField
-    elements: List[DCESummary] = ElementsField
+    elements: list[DCESummary] = ElementsField
 
 
 class HDCJobStateSummary(Model):
@@ -1144,7 +1140,7 @@ class HDCASummary(HDCACommon, WithModelClass):
     populated_state: DatasetCollectionPopulatedState = PopulatedStateField
     populated_state_message: Optional[str] = PopulatedStateMessageField
     element_count: ElementCountField
-    elements_datatypes: Set[str] = Field(
+    elements_datatypes: set[str] = Field(
         ..., description="A set containing all the different element datatypes in the collection."
     )
     job_source_id: Optional[EncodedDatabaseIdField] = Field(
@@ -1170,7 +1166,7 @@ class HDCADetailed(HDCASummary):
     """History Dataset Collection Association detailed information."""
 
     populated: PopulatedField
-    elements: List[DCESummary] = ElementsField
+    elements: list[DCESummary] = ElementsField
     implicit_collection_jobs_id: Optional[EncodedDatabaseIdField] = Field(
         None,
         description="Encoded ID for the ICJ object describing the collection of jobs corresponding to this collection",
@@ -1204,7 +1200,7 @@ class UpdateContentItem(HistoryContentItem):
 class UpdateHistoryContentsBatchPayload(Model):
     """Contains property values that will be updated for all the history `items` provided."""
 
-    items: List[UpdateContentItem] = Field(
+    items: list[UpdateContentItem] = Field(
         ...,
         title="Items",
         description="A list of content items to update with the changes.",
@@ -1248,7 +1244,7 @@ class ChangeDbkeyOperationParams(BulkOperationParams):
 
 class TagOperationParams(BulkOperationParams):
     type: Union[Literal["add_tags"], Literal["remove_tags"]]
-    tags: List[str]
+    tags: list[str]
 
 
 AnyBulkOperationParams = Union[
@@ -1260,7 +1256,7 @@ class TagOperationParams(BulkOperationParams):
 
 class HistoryContentBulkOperationPayload(Model):
     operation: HistoryContentItemOperation
-    items: Optional[List[HistoryContentItem]] = None
+    items: Optional[list[HistoryContentItem]] = None
     params: Optional[AnyBulkOperationParams] = None
 
 
@@ -1271,7 +1267,7 @@ class BulkOperationItemError(Model):
 
 class HistoryContentBulkOperationResult(Model):
     success_count: int
-    errors: List[BulkOperationItemError]
+    errors: list[BulkOperationItemError]
 
 
 class UpdateHistoryContentsPayload(Model):
@@ -1376,11 +1372,11 @@ class HistoryActiveContentCounts(Model):
 
 
 # TODO: https://github.com/galaxyproject/galaxy/issues/17785
-HistoryStateCounts = Dict[DatasetState, int]
-HistoryStateIds = Dict[DatasetState, List[DecodedDatabaseIdField]]
+HistoryStateCounts = dict[DatasetState, int]
+HistoryStateIds = dict[DatasetState, list[DecodedDatabaseIdField]]
 
 HistoryContentStates = Union[DatasetState, DatasetCollectionPopulatedState]
-HistoryContentStateCounts = Dict[HistoryContentStates, int]
+HistoryContentStateCounts = dict[HistoryContentStates, int]
 
 
 class HistoryDetailed(HistorySummary):  # Equivalent to 'dev-detailed' view, which seems the default
@@ -1554,11 +1550,11 @@ class JobIndexSortByEnum(str, Enum):
 
 
 class JobIndexQueryPayload(Model):
-    states: Optional[List[str]] = None
+    states: Optional[list[str]] = None
     user_details: bool = False
     user_id: Optional[DecodedDatabaseIdField] = None
-    tool_ids: Optional[List[str]] = None
-    tool_ids_like: Optional[List[str]] = None
+    tool_ids: Optional[list[str]] = None
+    tool_ids_like: Optional[list[str]] = None
     date_range_min: Optional[Union[OffsetNaiveDatetime, date]] = None
     date_range_max: Optional[Union[OffsetNaiveDatetime, date]] = None
     history_id: Optional[DecodedDatabaseIdField] = None
@@ -1669,12 +1665,12 @@ class CollectionElementIdentifier(Model):
         description="The encoded ID of the element.",
     )
     collection_type: Optional[CollectionType] = OptionalCollectionTypeField
-    element_identifiers: Optional[List["CollectionElementIdentifier"]] = Field(
+    element_identifiers: Optional[list["CollectionElementIdentifier"]] = Field(
         default=None,
         title="Element Identifiers",
         description="List of elements that should be in the new sub-collection.",
     )
-    tags: Optional[List[str]] = Field(
+    tags: Optional[list[str]] = Field(
         default=None,
         title="Tags",
         description="The list of tags associated with the element.",
@@ -1683,7 +1679,7 @@ class CollectionElementIdentifier(Model):
 
 class CreateNewCollectionPayload(Model):
     collection_type: Optional[CollectionType] = OptionalCollectionTypeField
-    element_identifiers: Optional[List[CollectionElementIdentifier]] = Field(
+    element_identifiers: Optional[list[CollectionElementIdentifier]] = Field(
         default=None,
         title="Element Identifiers",
         description="List of elements that should be in the new collection.",
@@ -1716,7 +1712,7 @@ class CreateNewCollectionPayload(Model):
         default=None,
         description="The ID of the library folder that will contain the collection. Required if `instance_type=library`.",
     )
-    fields_: Optional[Union[str, List[FieldDict]]] = Field(
+    fields_: Optional[Union[str, list[FieldDict]]] = Field(
         default=[],
         description="List of fields to create for this collection. Set to 'auto' to guess fields from identifiers.",
         alias="fields",
@@ -1746,7 +1742,7 @@ def is_bag(cls, value: "ModelStoreFormat"):
 
 class StoreContentSource(Model):
     store_content_uri: Optional[str] = None
-    store_dict: Optional[Dict[str, Any]] = None
+    store_dict: Optional[dict[str, Any]] = None
     model_store_format: Optional["ModelStoreFormat"] = None
 
@@ -1784,19 +1780,19 @@ class BcoGenerationParametersMixin(BaseModel):
     bco_merge_history_metadata: bool = Field(
         default=False, description="When reading tags/annotations to generate BCO object include history metadata."
     )
-    bco_override_environment_variables: Optional[Dict[str, str]] = Field(
+    bco_override_environment_variables: Optional[dict[str, str]] = Field(
         default=None,
         description="Override environment variables for 'execution_domain' when generating BioCompute object.",
     )
-    bco_override_empirical_error: Optional[Dict[str, str]] = Field(
+    bco_override_empirical_error: Optional[dict[str, str]] = Field(
         default=None,
         description="Override empirical error for 'error domain' when generating BioCompute object.",
     )
-    bco_override_algorithmic_error: Optional[Dict[str, str]] = Field(
+    bco_override_algorithmic_error: Optional[dict[str, str]] = Field(
         default=None,
         description="Override algorithmic error for 'error domain' when generating BioCompute object.",
     )
-    bco_override_xref: Optional[List[XrefItem]] = Field(
+    bco_override_xref: Optional[list[XrefItem]] = Field(
         default=None,
         description="Override xref for 'description domain' when generating BioCompute object.",
     )
@@ -1923,11 +1919,11 @@ class ObjectExportTaskResponse(ObjectExportResponseBase):
 
 
 class JobExportHistoryArchiveListResponse(RootModel):
-    root: List[JobExportHistoryArchiveModel]
+    root: list[JobExportHistoryArchiveModel]
 
 
 class ExportTaskListResponse(RootModel):
-    root: List[ObjectExportTaskResponse]
+    root: list[ObjectExportTaskResponse]
     __accept_type__ = "application/vnd.galaxy.task.export+json"
 
@@ -2008,12 +2004,12 @@ class LabelValuePair(Model):
 
 
 class CustomBuildsMetadataResponse(Model):
-    installed_builds: List[LabelValuePair] = Field(
+    installed_builds: list[LabelValuePair] = Field(
         ...,
         title="Installed Builds",
         description="TODO",
     )
-    fasta_hdas: List[LabelValuePair] = Field(
+    fasta_hdas: list[LabelValuePair] = Field(
         ...,
         title="Fasta HDAs",
         description=(
@@ -2074,7 +2070,7 @@ class JobImportHistoryResponse(JobBaseModel):
 class ItemStateSummary(Model):
     id: EncodedDatabaseIdField
     populated_state: DatasetCollectionPopulatedState = PopulatedStateField
-    states: Dict[JobState, int] = Field(
+    states: dict[JobState, int] = Field(
         {}, title="States", description=("A dictionary of job states and the number of jobs in that state.")
     )
 
@@ -2180,12 +2176,12 @@ class JobDetails(JobSummary):
             "The specific parameters depend on the tool itself."
         ),
     )
-    inputs: Dict[str, DatasetJobInfo] = Field(
+    inputs: dict[str, DatasetJobInfo] = Field(
         {},
         title="Inputs",
         description="Dictionary mapping all the tool inputs (by name) with the corresponding dataset information.",
     )
-    outputs: Dict[str, DatasetJobInfo] = Field(
+    outputs: dict[str, DatasetJobInfo] = Field(
         {},
         title="Outputs",
         description="Dictionary mapping all the tool outputs (by name) with the corresponding dataset information.",
@@ -2241,7 +2237,7 @@ class WorkflowJobMetric(JobMetric):
 class JobMetricCollection(RootModel):
     """Represents a collection of metrics associated with a Job."""
 
-    root: List[JobMetric] = Field(
+    root: list[JobMetric] = Field(
         [],
         title="Job Metrics",
         description="Collections of metrics provided by `JobInstrumenter` plugins on a particular job.",
@@ -2279,7 +2275,7 @@ class JobFullDetails(JobDetails):
         title="Standard Error",
         description="Combined tool and job standard error streams.",
     )
-    job_messages: List[str] = Field(
+    job_messages: list[str] = Field(
         ...,
         title="Job Messages",
         description="List with additional information and possible reasons for a failed job.",
@@ -2310,7 +2306,7 @@ class StoredWorkflowSummary(Model, WithModelClass):
         title="Published",
         description="Whether this workflow is currently publicly available to all users.",
     )
-    annotations: Optional[List[str]] = (
+    annotations: Optional[list[str]] = (
         Field(  # Inconsistency? Why workflows summaries use a list instead of an optional string?
             None,
             title="Annotations",
@@ -2406,7 +2402,7 @@ class WorkflowStepBase(Model):
         description="The identifier of the step. It matches the index order of the step inside the workflow.",
     )
     annotation: Optional[str] = AnnotationField
-    input_steps: Dict[str, InputStep] = Field(
+    input_steps: dict[str, InputStep] = Field(
         ...,
         title="Input Steps",
         description="A dictionary containing information about the inputs connected to this workflow step.",
@@ -2561,7 +2557,7 @@ class WorkflowStepLayoutPosition(Model):
     width: int = Field(..., title="Width", description="Width of the box in pixels.")
 
 
-InvocationsStateCounts = RootModel[Dict[str, int]]
+InvocationsStateCounts = RootModel[dict[str, int]]
 
 
 class WorkflowStepToExportBase(Model):
@@ -2585,17 +2581,17 @@ class WorkflowStepToExportBase(Model):
         None,
         title="Label",
     )
-    inputs: List[Input] = Field(
+    inputs: list[Input] = Field(
         ...,
         title="Inputs",
         description="TODO",
     )
-    outputs: List[Output] = Field(
+    outputs: list[Output] = Field(
         ...,
         title="Outputs",
         description="TODO",
     )
-    input_connections: Dict[str, InputConnection] = Field(
+    input_connections: dict[str, InputConnection] = Field(
         {},
         title="Input Connections",
         description="TODO",
@@ -2605,7 +2601,7 @@ class WorkflowStepToExportBase(Model):
         title="Position",
         description="Layout position of this step in the graph",
     )
-    workflow_outputs: List[WorkflowOutput] = Field(
+    workflow_outputs: list[WorkflowOutput] = Field(
         [], title="Workflow Outputs", description="Workflow outputs associated with this step."
     )
 
@@ -2663,7 +2659,7 @@ class PostJobAction(Model):
         title="Output Name",
         description="The name of the output that will be affected by the action.",
     )
-    action_arguments: Dict[str, Any] = Field(
+    action_arguments: dict[str, Any] = Field(
         ...,
         title="Action Arguments",
         description="Any additional arguments needed by the action.",
@@ -2674,7 +2670,7 @@ class WorkflowToolStepToExport(WorkflowStepToExportBase):
     tool_shed_repository: ToolShedRepositorySummary = Field(
         ..., title="Tool Shed Repository", description="Information about the origin repository of this tool."
     )
-    post_job_actions: Dict[str, PostJobAction] = Field(
+    post_job_actions: dict[str, PostJobAction] = Field(
         ..., title="Post-job Actions", description="Set of actions that will be run when the job finish."
     )
 
@@ -2703,7 +2699,7 @@ class WorkflowToExport(Model):
         title="UUID",
         description="Universal unique identifier of the workflow.",
     )
-    creator: Optional[List[Union[Person, Organization]]] = Field(
+    creator: Optional[list[Union[Person, Organization]]] = Field(
         None,
         title="Creator",
         description=("Additional information about the creator (or multiple creators) of this workflow."),
@@ -2714,7 +2710,7 @@ class WorkflowToExport(Model):
     version: int = Field(
         ..., title="Version", description="The version of the workflow represented by an incremental number."
     )
-    steps: Dict[int, Union[SubworkflowStepToExport, WorkflowToolStepToExport, WorkflowStepToExport]] = Field(
+    steps: dict[int, Union[SubworkflowStepToExport, WorkflowToolStepToExport, WorkflowStepToExport]] = Field(
         {}, title="Steps", description="A dictionary with information about all the steps of the workflow."
     )
 
@@ -2741,19 +2737,19 @@ class RoleModelResponse(BasicRoleModel, WithModelClass):
 class RoleDefinitionModel(Model):
     name: RoleNameField
     description: RoleDescriptionField
-    user_ids: Optional[List[DecodedDatabaseIdField]] = Field(title="User IDs", default=[])
-    group_ids: Optional[List[DecodedDatabaseIdField]] = Field(title="Group IDs", default=[])
+    user_ids: Optional[list[DecodedDatabaseIdField]] = Field(title="User IDs", default=[])
+    group_ids: Optional[list[DecodedDatabaseIdField]] = Field(title="Group IDs", default=[])
     role_type: Literal["admin", "user_tool_create", "user_tool_execute"] = "admin"
 
 
 class RoleListResponse(RootModel):
-    root: List[RoleModelResponse]
+    root: list[RoleModelResponse]
 
 
 # The tuple should probably be another proper model instead?
 # Keeping it as a Tuple for now for backward compatibility
 # TODO: Use Tuple again when `make update-client-api-schema` supports them
-RoleNameIdTuple = List[str]  # Tuple[str, DecodedDatabaseIdField]
+RoleNameIdTuple = list[str]  # Tuple[str, DecodedDatabaseIdField]
 
 
 # Group_Roles -----------------------------------------------------------------
@@ -2765,7 +2761,7 @@ class GroupRoleResponse(Model):
 
 
 class GroupRoleListResponse(RootModel):
-    root: List[GroupRoleResponse]
+    root: list[GroupRoleResponse]
 
 
 # Users -----------------------------------------------------------------------
@@ -2779,7 +2775,7 @@ class GroupUserResponse(Model):
 
 
 class GroupUserListResponse(RootModel):
-    root: List[GroupUserResponse]
+    root: list[GroupUserResponse]
 
 
 class ImportToolDataBundleUriSource(Model):
@@ -2858,7 +2854,7 @@ class InstalledToolShedRepository(Model, WithModelClass):
 
 
 class InstalledToolShedRepositories(RootModel):
-    root: List[InstalledToolShedRepository]
+    root: list[InstalledToolShedRepository]
 
 
 CheckForUpdatesResponseStatusT = Literal["ok", "error"]
@@ -2948,7 +2944,7 @@ class LibrarySummary(LibraryLegacySummary):
 
 
 class LibrarySummaryList(RootModel):
-    root: List[LibrarySummary] = Field(
+    root: list[LibrarySummary] = Field(
         default=[],
         title="List with summary information of Libraries.",
     )
@@ -3003,22 +2999,22 @@ class DeleteLibraryPayload(Model):
 
 
 class LibraryCurrentPermissions(Model):
-    access_library_role_list: List[RoleNameIdTuple] = Field(
+    access_library_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Access Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which have access to the Library.",
     )
-    modify_library_role_list: List[RoleNameIdTuple] = Field(
+    modify_library_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Modify Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can modify the Library.",
     )
-    manage_library_role_list: List[RoleNameIdTuple] = Field(
+    manage_library_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Manage Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can manage the Library.",
     )
-    add_library_item_role_list: List[RoleNameIdTuple] = Field(
+    add_library_item_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Add Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can add items to the Library.",
@@ -3026,7 +3022,7 @@ class LibraryCurrentPermissions(Model):
 
 
 RoleIdList = Union[
-    List[DecodedDatabaseIdField], DecodedDatabaseIdField
+    list[DecodedDatabaseIdField], DecodedDatabaseIdField
 ]  # Should we support just List[DecodedDatabaseIdField] in the future?
@@ -3157,7 +3153,7 @@ class LibraryFolderDetails(Model, WithModelClass):
         title="Deleted",
         description="Whether this folder is marked as deleted.",
     )
-    library_path: List[str] = Field(
+    library_path: list[str] = Field(
         [],
         title="Path",
         description="The list of folder names composing the path to this folder.",
@@ -3183,7 +3179,7 @@ class UpdateLibraryFolderPayload(Model):
 
 
 class LibraryAvailablePermissions(Model):
-    roles: List[BasicRoleModel] = Field(
+    roles: list[BasicRoleModel] = Field(
         ...,
         title="Roles",
         description="A list containing available roles that can be assigned to a particular permission.",
@@ -3206,17 +3202,17 @@ class LibraryAvailablePermissions(Model):
 
 
 class LibraryFolderCurrentPermissions(Model):
-    modify_folder_role_list: List[RoleNameIdTuple] = Field(
+    modify_folder_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Modify Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can modify the Library folder.",
     )
-    manage_folder_role_list: List[RoleNameIdTuple] = Field(
+    manage_folder_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Manage Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can manage the Library folder.",
     )
-    add_library_item_role_list: List[RoleNameIdTuple] = Field(
+    add_library_item_role_list: list[RoleNameIdTuple] = Field(
         ...,
         title="Add Role List",
         description="A list containing pairs of role names and corresponding encoded IDs which can add items to the Library folder.",
@@ -3276,12 +3272,12 @@ class LibraryFolderMetadata(Model):
     total_rows: int
     can_modify_folder: bool
     can_add_library_item: bool
-    full_path: List[Tuple[EncodedLibraryFolderDatabaseIdField, str]]
+    full_path: list[tuple[EncodedLibraryFolderDatabaseIdField, str]]
 
 
 class LibraryFolderContentsIndexResult(Model):
     metadata: LibraryFolderMetadata
-    folder_contents: List[AnyLibraryFolderItem]
+    folder_contents: list[AnyLibraryFolderItem]
 
 
 class CreateLibraryFilePayload(Model):
@@ -3306,7 +3302,7 @@ class CreateLibraryFilePayload(Model):
 
 
 class DatasetAssociationRoles(Model):
-    access_dataset_roles: List[RoleNameIdTuple] = Field(
+    access_dataset_roles: list[RoleNameIdTuple] = Field(
         default=[],
         title="Access Roles",
         description=(
@@ -3316,7 +3312,7 @@ class DatasetAssociationRoles(Model):
             "If there are no access roles set on the dataset it is considered **unrestricted**."
         ),
     )
-    manage_dataset_roles: List[RoleNameIdTuple] = Field(
+    manage_dataset_roles: list[RoleNameIdTuple] = Field(
         default=[],
         title="Manage Roles",
         description=(
@@ -3325,7 +3321,7 @@ class DatasetAssociationRoles(Model):
             "If you remove yourself you will lose the ability to manage this dataset unless you are an admin."
         ),
     )
-    modify_item_roles: List[RoleNameIdTuple] = Field(
+    modify_item_roles: list[RoleNameIdTuple] = Field(
         default=[],
         title="Modify Roles",
         description=(
@@ -3411,7 +3407,7 @@ class HDACustom(HDADetailed):
 
     # Add fields that are not part of any view here
     visualizations: Annotated[
-        Optional[List[Visualization]],
+        Optional[list[Visualization]],
         Field(
             None,
             title="Visualizations",
@@ -3507,7 +3503,7 @@ class HistoryContentsArchiveDryRunResult(RootModel):
     This is used for debugging purposes.
     """
 
-    root: List[Tuple[str, str]]
+    root: list[tuple[str, str]]
 
 
 class HistoryContentStats(Model):
@@ -3523,7 +3519,7 @@ class HistoryContentsResult(RootModel):
     Can contain different views and kinds of items.
     """
 
-    root: List[AnyHistoryContentItem]
+    root: list[AnyHistoryContentItem]
 
 
 class HistoryContentsWithStatsResult(Model):
@@ -3534,7 +3530,7 @@ class HistoryContentsWithStatsResult(Model):
         title="Stats",
         description=("Contains counting stats for the query."),
     )
-    contents: List[AnyHistoryContentItem] = Field(
+    contents: list[AnyHistoryContentItem] = Field(
         ...,
         title="Contents",
         description=(
@@ -3567,7 +3563,7 @@ class ShareWithExtra(Model):
 
 
 class ShareWithPayload(Model):
-    user_ids: List[UserIdentifier] = Field(
+    user_ids: list[UserIdentifier] = Field(
         ...,
         title="User Identifiers",
         description=(
@@ -3637,7 +3633,7 @@ class SharingStatus(Model):
         title="Published",
         description="Whether this resource is currently published.",
     )
-    users_shared_with: List[UserEmail] = Field(
+    users_shared_with: list[UserEmail] = Field(
         [],
         title="Users shared with",
         description="The list of encoded ids for users the resource has been shared.",
@@ -3665,7 +3661,7 @@ class HDABasicInfo(Model):
 
 
 class ShareHistoryExtra(ShareWithExtra):
-    can_change: List[HDABasicInfo] = Field(
+    can_change: list[HDABasicInfo] = Field(
         [],
         title="Can Change",
         description=(
@@ -3673,7 +3669,7 @@ class ShareHistoryExtra(ShareWithExtra):
             "and that can be made accessible for others by the user sharing the history."
         ),
     )
-    cannot_change: List[HDABasicInfo] = Field(
+    cannot_change: list[HDABasicInfo] = Field(
         [],
         title="Cannot Change",
         description=(
@@ -3689,7 +3685,7 @@ class ShareHistoryExtra(ShareWithExtra):
 
 
 class ShareWithStatus(SharingStatus):
-    errors: List[str] = Field(
+    errors: list[str] = Field(
         [],
         title="Errors",
         description="Collection of messages indicating that the resource was not shared with some (or all users) due to an error.",
@@ -3857,7 +3853,7 @@ class ToolRequestState(str, Enum):
 
 class ToolRequestModel(Model):
     id: EncodedDatabaseIdField = ToolRequestIdField
-    request: Dict[str, Any]
+    request: dict[str, Any]
     state: ToolRequestState
     state_message: Optional[str]
 
@@ -3909,7 +3905,7 @@ class PageSummary(PageSummaryBase, WithModelClass):
         title="Latest revision ID",
         description="The encoded ID of the last revision of this Page.",
     )
-    revision_ids: List[EncodedDatabaseIdField] = Field(
+    revision_ids: list[EncodedDatabaseIdField] = Field(
         ...,  # Required
         title="List of revisions",
         description="The history with the encoded ID of each revision of the Page.",
@@ -3960,7 +3956,7 @@ class ToolReportForDataset(BaseModel):
 
 
 class PageSummaryList(RootModel):
-    root: List[PageSummary] = Field(
+    root: list[PageSummary] = Field(
         default=[],
         title="List with summary information of Pages.",
     )
@@ -3978,7 +3974,7 @@ class LandingRequestState(str, Enum):
 
 class CreateToolLandingRequestPayload(Model):
     tool_id: str
     tool_version: Optional[str] = None
-    request_state: Optional[Dict[str, Any]] = None
+    request_state: Optional[dict[str, Any]] = None
     client_secret: Optional[str] = None
     public: bool = False
 
@@ -3986,7 +3982,7 @@ class CreateToolLandingRequestPayload(Model):
 class CreateWorkflowLandingRequestPayload(Model):
     workflow_id: str
     workflow_target_type: Literal["stored_workflow", "workflow", "trs_url"]
-    request_state: Optional[Dict[str, Any]] = None
+    request_state: Optional[dict[str, Any]] = None
     client_secret: Optional[str] = None
     public: bool = Field(
         False,
@@ -4002,7 +3998,7 @@ class ToolLandingRequest(Model):
     uuid: UuidField
     tool_id: str
     tool_version: Optional[str] = None
-    request_state: Optional[Dict[str, Any]] = None
+    request_state: Optional[dict[str, Any]] = None
     state: LandingRequestState
 
 
@@ -4010,7 +4006,7 @@ class WorkflowLandingRequest(Model):
     uuid: UuidField
     workflow_id: str
     workflow_target_type: Literal["stored_workflow", "workflow", "trs_url"]
-    request_state: Dict[str, Any]
+    request_state: dict[str, Any]
     state: LandingRequestState
diff --git a/lib/galaxy/schema/storage_cleaner.py b/lib/galaxy/schema/storage_cleaner.py
index 69fb82771cb8..d34f2a61b0c4 100644
--- a/lib/galaxy/schema/storage_cleaner.py
+++ b/lib/galaxy/schema/storage_cleaner.py
@@ -1,7 +1,6 @@
 from datetime import datetime
 from enum import Enum
 from typing import (
-    List,
     Union,
 )
 
@@ -59,11 +58,11 @@ class StorageItemCleanupError(Model):
 
 
 class CleanupStorageItemsRequest(Model):
-    item_ids: List[DecodedDatabaseIdField]
+    item_ids: list[DecodedDatabaseIdField]
 
 
 class StorageItemsCleanupResult(Model):
     total_item_count: int
     success_item_count: int
     total_free_bytes: int
-    errors: List[StorageItemCleanupError]
+    errors: list[StorageItemCleanupError]
diff --git a/lib/galaxy/schema/tasks.py b/lib/galaxy/schema/tasks.py
index b7f8958fe364..b4475ff5d4e2 100644
--- a/lib/galaxy/schema/tasks.py
+++ b/lib/galaxy/schema/tasks.py
@@ -1,6 +1,5 @@
 from enum import Enum
 from typing import (
-    List,
     Optional,
 )
 from uuid import UUID
@@ -119,7 +118,7 @@ class ComputeDatasetHashTaskRequest(Model):
 
 
 class PurgeDatasetsTaskRequest(Model):
-    dataset_ids: List[int]
+    dataset_ids: list[int]
 
 
 class TaskState(str, Enum):
diff --git a/lib/galaxy/schema/types.py b/lib/galaxy/schema/types.py
index 791cb88ace4d..7a8dae0b28a9 100644
--- a/lib/galaxy/schema/types.py
+++ b/lib/galaxy/schema/types.py
@@ -1,10 +1,12 @@
 from datetime import datetime
-from typing import Union
+from typing import (
+    Annotated,
+    Union,
+)
 
 from pydantic import ValidationInfo
 from pydantic.functional_validators import AfterValidator
 from typing_extensions import (
-    Annotated,
     Literal,
 )
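The types.py hunk above only changes the import source: Annotated has lived in typing itself since Python 3.9, so the typing_extensions fallback becomes unnecessary once older interpreters are dropped. Roughly, the Annotated + AfterValidator pattern that module relies on looks like this; the validator and model below are made up for illustration:

    from datetime import datetime, timezone
    from typing import Annotated

    from pydantic import BaseModel
    from pydantic.functional_validators import AfterValidator


    def ensure_utc(value: datetime) -> datetime:
        # Attach UTC to naive datetimes after normal parsing has run.
        return value if value.tzinfo else value.replace(tzinfo=timezone.utc)


    UtcDatetime = Annotated[datetime, AfterValidator(ensure_utc)]


    class Event(BaseModel):
        created: UtcDatetime


    print(Event(created="2024-01-01T00:00:00").created.tzinfo)  # UTC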
diff --git a/lib/galaxy/schema/visualization.py b/lib/galaxy/schema/visualization.py
index 45a693b21f8e..ae6f2ac396ff 100644
--- a/lib/galaxy/schema/visualization.py
+++ b/lib/galaxy/schema/visualization.py
@@ -1,7 +1,5 @@
 from datetime import datetime
 from typing import (
-    Dict,
-    List,
     Optional,
     Union,
 )
@@ -104,7 +102,7 @@ class VisualizationSummary(Model):
 
 
 class VisualizationSummaryList(RootModel):
-    root: List[VisualizationSummary] = Field(
+    root: list[VisualizationSummary] = Field(
         default=[],
         title="List with detailed information of Visualizations.",
     )
@@ -132,7 +130,7 @@ class VisualizationRevisionResponse(Model, WithModelClass):
         title="DbKey",
         description="The database key of the visualization.",
     )
-    config: Dict = Field(
+    config: dict = Field(
         ...,
         title="Config",
         description="The config of the visualization revision.",
@@ -175,22 +173,22 @@ class VisualizationPluginResponse(Model):
         title="Embeddable",
         description="Whether the plugin is embeddable.",
     )
-    entry_point: Dict = Field(
+    entry_point: dict = Field(
         ...,
         title="Entry Point",
         description="The entry point of the plugin.",
     )
-    settings: List[Dict] = Field(
+    settings: list[dict] = Field(
         ...,
         title="Settings",
         description="The settings of the plugin.",
     )
-    groups: Optional[List[Dict]] = Field(
+    groups: Optional[list[dict]] = Field(
         None,
         title="Groups",
         description="The groups of the plugin.",
     )
-    specs: Optional[Dict] = Field(
+    specs: Optional[dict] = Field(
         None,
         title="Specs",
         description="The specs of the plugin.",
@@ -239,7 +237,7 @@ class VisualizationShowResponse(Model, WithModelClass):
         title="Latest Revision",
         description="The latest revision of this Visualization.",
     )
-    revisions: List[EncodedDatabaseIdField] = Field(
+    revisions: list[EncodedDatabaseIdField] = Field(
         ...,
         title="Revisions",
         description="A list of encoded IDs of the revisions of this Visualization.",
diff --git a/lib/galaxy/schema/workflow/comments.py b/lib/galaxy/schema/workflow/comments.py
index 488df52d68c2..0565c60b5b80 100644
--- a/lib/galaxy/schema/workflow/comments.py
+++ b/lib/galaxy/schema/workflow/comments.py
@@ -1,7 +1,5 @@
 from typing import (
-    List,
     Optional,
-    Tuple,
     Union,
 )
 
@@ -18,8 +16,8 @@ class BaseComment(BaseModel):
     color: Literal["none", "black", "blue", "turquoise", "green", "lime", "orange", "yellow", "red", "pink"] = Field(
         ..., description="Color this comment is displayed as. The exact color hex is determined by the client"
     )
-    position: Tuple[float, float] = Field(..., description="[x, y] position of this comment in the Workflow")
-    size: Tuple[float, float] = Field(..., description="[width, height] size of this comment")
+    position: tuple[float, float] = Field(..., description="[x, y] position of this comment in the Workflow")
+    size: tuple[float, float] = Field(..., description="[width, height] size of this comment")
 
 
 class TextCommentData(BaseModel):
@@ -54,17 +52,17 @@ class FrameCommentData(BaseModel):
 class FrameComment(BaseComment):
     type: Literal["frame"]
     data: FrameCommentData
-    child_comments: Optional[List[int]] = Field(
+    child_comments: Optional[list[int]] = Field(
         default=None, description="A list of ids (see `id`) of all Comments which are encompassed by this Frame"
     )
-    child_steps: Optional[List[int]] = Field(
+    child_steps: Optional[list[int]] = Field(
         default=None, description="A list of ids of all Steps (see WorkflowStep.id) which are encompassed by this Frame"
     )
 
 
 class FreehandCommentData(BaseModel):
     thickness: int = Field(..., description="Width of the Line in pixels")
-    line: List[Tuple[float, float]] = Field(
+    line: list[tuple[float, float]] = Field(
         ...,
         description="List of [x, y] coordinates determining the unsmoothed line. Smoothing is done client-side using Catmull-Rom",
     )
diff --git a/lib/galaxy/schema/workflows.py b/lib/galaxy/schema/workflows.py
index a9ff094d178e..005a29ca21ec 100644
--- a/lib/galaxy/schema/workflows.py
+++ b/lib/galaxy/schema/workflows.py
@@ -1,8 +1,7 @@
 import json
 from typing import (
+    Annotated,
     Any,
-    Dict,
-    List,
     Optional,
     Union,
 )
@@ -12,7 +11,6 @@
     Field,
     field_validator,
 )
-from typing_extensions import Annotated
 
 from galaxy.schema.schema import (
     AnnotationField,
@@ -80,7 +78,7 @@ def validateInputsBy(inputsBy: Optional[str]) -> Optional[str]:
     if inputsBy is not None:
         if not isinstance(inputsBy, str):
             raise ValueError(f"Invalid type for inputsBy {inputsBy}")
-        inputsByArray: List[str] = inputsBy.split("|")
+        inputsByArray: list[str] = inputsBy.split("|")
         for inputsByItem in inputsByArray:
             if inputsByItem not in VALID_INPUTS_BY_ITEMS:
                 raise ValueError(f"Invalid inputsBy delineation {inputsByItem}")
@@ -162,24 +160,24 @@ def inputs_string_to_json(cls, v):
             return json.loads(v)
         return v
 
-    parameters: Optional[Dict[str, Any]] = Field(
+    parameters: Optional[dict[str, Any]] = Field(
         {},
         title=STEP_PARAMETERS_TITLE,
         description=STEP_PARAMETERS_DESCRIPTION,
     )
-    inputs: Optional[Dict[str, Any]] = Field(
+    inputs: Optional[dict[str, Any]] = Field(
         None,
         title="Inputs",
         description="Specify values for formal inputs to the workflow",
     )
-    ds_map: Optional[Dict[str, Dict[str, Any]]] = Field(
+    ds_map: Optional[dict[str, dict[str, Any]]] = Field(
         {},
         title="Legacy Dataset Map",
         description="An older alternative to specifying inputs using database IDs, do not use this and use inputs instead",
         deprecated=True,
     )
-    resource_params: Optional[Dict[str, Any]] = ResourceParametersField
-    replacement_params: Optional[Dict[str, Any]] = ReplacementParametersField
+    resource_params: Optional[dict[str, Any]] = ResourceParametersField
+    replacement_params: Optional[dict[str, Any]] = ReplacementParametersField
     no_add_to_history: Optional[bool] = Field(
         False,
         title="No Add to History",
@@ -215,10 +213,10 @@ class StoredWorkflowDetailed(StoredWorkflowSummary):
     version: int = Field(
         ..., title="Version", description="The version of the workflow represented by an incremental number."
     )
-    inputs: Dict[int, WorkflowInput] = Field(
+    inputs: dict[int, WorkflowInput] = Field(
         {}, title="Inputs", description="A dictionary containing information about all the inputs of the workflow."
     )
-    creator: Optional[List[Union[Person, Organization]]] = Field(
+    creator: Optional[list[Union[Person, Organization]]] = Field(
         None,
         title="Creator",
         description=("Additional information about the creator (or multiple creators) of this workflow."),
@@ -228,10 +226,10 @@ class StoredWorkflowDetailed(StoredWorkflowSummary):
         title="Creator deleted",
         description="Whether the creator of this Workflow has been deleted.",
     )
-    doi: Optional[List[str]] = Field(
+    doi: Optional[list[str]] = Field(
         None, title="DOI", description="A list of Digital Object Identifiers associated with this workflow."
     )
-    steps: Dict[
+    steps: dict[
         int,
         Annotated[
             Union[
@@ -274,7 +272,7 @@ class StoredWorkflowDetailed(StoredWorkflowSummary):
         title="Slug",
         description="The slug of the workflow.",
    )
-    source_metadata: Optional[Dict[str, Any]] = Field(
+    source_metadata: Optional[dict[str, Any]] = Field(
         ...,
         title="Source Metadata",
         description="The source metadata of the workflow.",
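In the comments schema above, tuple[float, float] is a fixed-arity annotation: Pydantic rejects anything that is not exactly two float-coercible values, which is what makes it safe for [x, y] positions and [width, height] sizes. A small sketch of that behavior; the Box model is hypothetical:

    from pydantic import BaseModel, ValidationError


    class Box(BaseModel):
        position: tuple[float, float]  # exactly two floats, as in BaseComment.position
        size: tuple[float, float]


    print(Box(position=(1, 2), size=(3.5, 4.5)))  # ints are coerced to floats
    try:
        Box(position=(1, 2, 3), size=(0, 0))  # wrong arity fails validation
    except ValidationError as err:
        print(err.error_count(), "validation error")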
diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py
index 94e8948042b4..9cd98a972d15 100644
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -4,7 +4,6 @@
 """
 
 from typing import (
-    List,
     Optional,
 )
 
@@ -63,7 +62,7 @@ def get_action(self, name: str, default: Optional[Action] = None) -> Optional[Ac
                 return v
         return default
 
-    def get_actions(self) -> List[Action]:
+    def get_actions(self) -> list[Action]:
         """Get all permitted actions as a list of Action objects"""
         return list(self.permitted_actions.__dict__.values())
 
diff --git a/lib/galaxy/security/validate_user_input.py b/lib/galaxy/security/validate_user_input.py
index 94c2dba9a8ac..8d5b683b9051 100644
--- a/lib/galaxy/security/validate_user_input.py
+++ b/lib/galaxy/security/validate_user_input.py
@@ -8,8 +8,6 @@
 import logging
 import re
 from typing import (
-    Dict,
-    List,
     Optional,
 )
 
@@ -181,7 +179,7 @@ def validate_preferred_object_store_id(
     return object_store.validate_selected_object_store_id(trans.user, preferred_object_store_id) or ""
 
 
-def is_email_banned(email: str, filepath: Optional[str], canonical_email_rules: Optional[Dict]) -> bool:
+def is_email_banned(email: str, filepath: Optional[str], canonical_email_rules: Optional[dict]) -> bool:
     if not filepath:
         return False
     normalizer = EmailAddressNormalizer(canonical_email_rules)
@@ -193,7 +191,7 @@ def is_email_banned(email: str, filepath: Optional[str], canonical_email_rules:
     return False
 
 
-def _read_email_ban_list(filepath: str) -> List[str]:
+def _read_email_ban_list(filepath: str) -> list[str]:
     with open(filepath) as f:
         return [line.strip() for line in f if not line.startswith("#")]
 
@@ -206,7 +204,7 @@ class EmailAddressNormalizer:
     SUB_ADDRESSING_DELIM_DEFAULT = "+"
     ALL = "all"
 
-    def __init__(self, canonical_email_rules: Optional[Dict]) -> None:
+    def __init__(self, canonical_email_rules: Optional[dict]) -> None:
         self.config = canonical_email_rules
 
     def normalize(self, email: str) -> str:
diff --git a/lib/galaxy/security/vault.py b/lib/galaxy/security/vault.py
index 587037173e0e..1f89e37377ab 100644
--- a/lib/galaxy/security/vault.py
+++ b/lib/galaxy/security/vault.py
@@ -3,7 +3,6 @@
 import os
 import re
 from typing import (
-    List,
     Optional,
 )
 
@@ -70,7 +69,7 @@ def write_secret(self, key: str, value: str) -> None:
         """
 
     @abc.abstractmethod
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         """
         Lists secrets at a given path.
 
@@ -105,7 +104,7 @@ def write_secret(self, key: str, value: str) -> None:
             "No vault configured. Make sure the vault_config_file setting is defined in galaxy.yml"
         )
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
 
@@ -130,7 +129,7 @@ def read_secret(self, key: str) -> Optional[str]:
     def write_secret(self, key: str, value: str) -> None:
         self.client.secrets.kv.v2.create_or_update_secret(path=key, secret={"value": value})
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
 
@@ -177,7 +176,7 @@ def delete_secret(self, key: str) -> None:
             self.sa_session.delete(vault_entry)
             self.sa_session.flush()
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
     def _get_vault_value(self, key):
@@ -209,7 +208,7 @@ def read_secret(self, key: str) -> Optional[str]:
     def write_secret(self, key: str, value: str) -> None:
         self.client.set_kv_credential(key=key, value=value)
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
 
@@ -227,7 +226,7 @@ def read_secret(self, key: str) -> Optional[str]:
     def write_secret(self, key: str, value: str) -> None:
         return self.vault.write_secret(f"user/{self.user.id}/{key}", value)
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
 
@@ -263,7 +262,7 @@ def write_secret(self, key: str, value: str) -> None:
         key = self.normalize_key(key)
         return self.vault.write_secret(key, value)
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
 
 
@@ -282,7 +281,7 @@ def read_secret(self, key: str) -> Optional[str]:
     def write_secret(self, key: str, value: str) -> None:
         return self.vault.write_secret(f"/{self.prefix}/{key}", value)
 
-    def list_secrets(self, key: str) -> List[str]:
+    def list_secrets(self, key: str) -> list[str]:
         raise NotImplementedError()
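The vault.py and validate_user_input.py hunks are signature-only rewrites, which is what a mechanical pass like pyupgrade --py39-plus produces: PEP 585 names are substituted inside annotations, and the typing imports they replace become removable. A rough before/after sketch; the function here is hypothetical, and removal of the newly unused imports is assumed to be handled by a separate tool such as autoflake:

    # Before: typing aliases, valid on any Python 3.
    from typing import Dict, List, Optional

    def secrets_with_prefix(secrets: Dict[str, str], prefix: Optional[str]) -> List[str]:
        return [key for key in secrets if prefix is None or key.startswith(prefix)]

    # After the --py39-plus rewrite (requires Python 3.9+ at runtime),
    # with the now-unused Dict/List imports dropped:
    from typing import Optional

    def secrets_with_prefix(secrets: dict[str, str], prefix: Optional[str]) -> list[str]:
        return [key for key in secrets if prefix is None or key.startswith(prefix)]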
diff --git a/lib/galaxy/selenium/axe_results.py b/lib/galaxy/selenium/axe_results.py
index fae862c61812..a0d67e5edfbe 100644
--- a/lib/galaxy/selenium/axe_results.py
+++ b/lib/galaxy/selenium/axe_results.py
@@ -1,7 +1,5 @@
 from typing import (
     Any,
-    Dict,
-    List,
     Optional,
 )
 
@@ -36,7 +34,7 @@
 
 
 class AxeResult:
-    def __init__(self, json: Dict[str, Any]):
+    def __init__(self, json: dict[str, Any]):
         self._json = json
 
     @property
@@ -83,21 +81,21 @@ def assert_passes(self, id: str) -> None:
     def assert_does_not_violate(self, id: str) -> None:
         """"""
 
-    def violations(self) -> List[Violation]:
+    def violations(self) -> list[Violation]:
         """"""
 
     # these next two could be refactored into a mixin...
-    def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
+    def violations_with_impact_of_at_least(self, impact: Impact) -> list[Violation]:
         """"""
 
     def assert_no_violations_with_impact_of_at_least(
-        self, impact: Impact, excludes: Optional[List[str]] = None
+        self, impact: Impact, excludes: Optional[list[str]] = None
     ) -> None:
         """"""
 
 
 class RealAxeResults(AxeResults):
-    def __init__(self, json: Dict[str, Any]):
+    def __init__(self, json: dict[str, Any]):
         self._json = json
 
     def assert_passes(self, id: str) -> None:
@@ -111,15 +109,15 @@ def assert_does_not_violate(self, id: str) -> None:
             violation = Violation(result)
             raise AssertionError(violation.message)
 
-    def violations(self) -> List[Violation]:
+    def violations(self) -> list[Violation]:
         violations = self._json["violations"]
         return [Violation(v) for v in violations]
 
-    def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
+    def violations_with_impact_of_at_least(self, impact: Impact) -> list[Violation]:
         return [v for v in self.violations() if v.is_impact_at_least(impact)]
 
     def assert_no_violations_with_impact_of_at_least(
-        self, impact: Impact, excludes: Optional[List[str]] = None
+        self, impact: Impact, excludes: Optional[list[str]] = None
     ) -> None:
         excludes = excludes or []
         violations = self.violations_with_impact_of_at_least(impact)
@@ -137,15 +135,15 @@ def assert_passes(self, id: str) -> None:
     def assert_does_not_violate(self, id: str) -> None:
         pass
 
-    def violations(self) -> List[Violation]:
+    def violations(self) -> list[Violation]:
         return []
 
     # these next two could be refactored into a mixin...
-    def violations_with_impact_of_at_least(self, impact: Impact) -> List[Violation]:
+    def violations_with_impact_of_at_least(self, impact: Impact) -> list[Violation]:
         return []
 
     def assert_no_violations_with_impact_of_at_least(
-        self, impact: Impact, excludes: Optional[List[str]] = None
+        self, impact: Impact, excludes: Optional[list[str]] = None
     ) -> None:
         pass
 
@@ -159,7 +157,7 @@ def assert_baseline_accessible(axe_results: AxeResults) -> None:
             raise AssertionError(violation.message)
 
 
-def _check_list_for_id(result_list: List[Dict[str, Any]], id) -> Optional[Dict[str, Any]]:
+def _check_list_for_id(result_list: list[dict[str, Any]], id) -> Optional[dict[str, Any]]:
     for result in result_list:
         if result.get("id") == id:
             return result
diff --git a/lib/galaxy/selenium/has_driver.py b/lib/galaxy/selenium/has_driver.py
index d0ab5ed98ca3..b3f2f9caebf3 100644
--- a/lib/galaxy/selenium/has_driver.py
+++ b/lib/galaxy/selenium/has_driver.py
@@ -7,10 +7,7 @@
 import abc
 import threading
 from typing import (
-    Dict,
-    List,
     Optional,
-    Type,
     Union,
 )
 
@@ -39,7 +36,7 @@
 HasFindElement = Union[WebDriver, WebElement]
 
 DEFAULT_AXE_SCRIPT_URL = "https://cdnjs.cloudflare.com/ajax/libs/axe-core/4.7.1/axe.min.js"
-AXE_SCRIPT_HASH: Dict[str, str] = {}
+AXE_SCRIPT_HASH: dict[str, str] = {}
 AXE_SCRIPT_HASH_LOCK = threading.Lock()
 
 
@@ -53,8 +50,8 @@ def get_axe_script(script_url: str) -> str:
 
 
 class HasDriver:
-    by: Type[By] = By
-    keys: Type[Keys] = Keys
+    by: type[By] = By
+    keys: type[Keys] = Keys
     driver: WebDriver
     axe_script_url: str = DEFAULT_AXE_SCRIPT_URL
     axe_skip: bool = False
@@ -100,7 +97,7 @@ def is_displayed(self, selector_template: Target) -> bool:
     def assert_selector_absent(self, selector: str):
         assert len(self.driver.find_elements(By.CSS_SELECTOR, selector)) == 0
 
-    def find_elements(self, selector_template: Target) -> List[WebElement]:
+    def find_elements(self, selector_template: Target) -> list[WebElement]:
         return self.driver.find_elements(*selector_template.element_locator)
 
     def assert_absent(self, selector_template: Target) -> None:
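PEP 585 also parameterizes the builtin type itself, so class attributes that hold classes rather than instances (like by: type[By] above) no longer need typing.Type. A small hypothetical example:

    class Parser:
        pass


    class JsonParser(Parser):
        pass


    class Driver:
        parser_cls: type[Parser] = JsonParser  # the attribute stores a class

        def make_parser(self) -> Parser:
            return self.parser_cls()


    print(type(Driver().make_parser()).__name__)  # JsonParser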
if "id" in (user_dict := self.api_get("users/current")): @@ -1725,7 +1722,7 @@ def workflow_run_with_name(self, name: str): self.components.workflows.run_button.wait_for_and_click() self.sleep_for(self.wait_types.UX_RENDER) - def workflow_run_specify_inputs(self, inputs: Dict[str, Any]): + def workflow_run_specify_inputs(self, inputs: dict[str, Any]): workflow_run = self.components.workflow_run for label, value in inputs.items(): input_div_element = workflow_run.input_data_div(label=label).wait_for_visible() @@ -2635,7 +2632,7 @@ def create_object_store_template(self, instance: ObjectStoreInstance) -> str: return object_store_id def _fill_configuration_template( - self, name: str, description: Optional[str], parameters: List[ConfigTemplateParameter] + self, name: str, description: Optional[str], parameters: list[ConfigTemplateParameter] ): self.components.tool_form.parameter_input(parameter="_meta_name").wait_for_and_send_keys( name, @@ -2665,7 +2662,7 @@ def mouse_drag( to_element: Optional[WebElement] = None, from_offset=(0, 0), to_offset=(0, 0), - via_offsets: Optional[List[Tuple[int, int]]] = None, + via_offsets: Optional[list[tuple[int, int]]] = None, ): chain = self.action_chains().move_to_element(from_element).move_by_offset(*from_offset) chain = chain.click_and_hold().pause(self.wait_length(self.wait_types.UX_RENDER)) diff --git a/lib/galaxy/selenium/smart_components.py b/lib/galaxy/selenium/smart_components.py index e8c6e9ff8a18..348d3addf95d 100644 --- a/lib/galaxy/selenium/smart_components.py +++ b/lib/galaxy/selenium/smart_components.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, TYPE_CHECKING, ) @@ -153,7 +152,7 @@ def axe_eval(self) -> AxeResults: return self._has_driver.axe_eval(context=self._target.element_locator[1]) def assert_no_axe_violations_with_impact_of_at_least( - self, impact: Impact, excludes: Optional[List[str]] = None + self, impact: Impact, excludes: Optional[list[str]] = None ) -> None: self.wait_for_visible() self.axe_eval().assert_no_violations_with_impact_of_at_least(impact, excludes=excludes) diff --git a/lib/galaxy/short_term_storage/__init__.py b/lib/galaxy/short_term_storage/__init__.py index 183120df98c4..54765c9579c2 100644 --- a/lib/galaxy/short_term_storage/__init__.py +++ b/lib/galaxy/short_term_storage/__init__.py @@ -17,7 +17,6 @@ from pathlib import Path from typing import ( Any, - Dict, Optional, Union, ) @@ -57,14 +56,14 @@ class ShortTermStorageTargetSecurity: user_id: Optional[int] = None session_id: Optional[int] = None - def to_dict(self) -> Dict[str, Optional[int]]: + def to_dict(self) -> dict[str, Optional[int]]: return { "user_id": self.user_id, "session_id": self.session_id, } @classmethod - def from_dict(self, as_dict: Dict[str, Optional[int]]) -> "ShortTermStorageTargetSecurity": + def from_dict(self, as_dict: dict[str, Optional[int]]) -> "ShortTermStorageTargetSecurity": return ShortTermStorageTargetSecurity( user_id=as_dict.get("user_id"), session_id=as_dict.get("session_id"), @@ -94,7 +93,7 @@ class ShortTermStorageServeCompletedInformation: class ShortTermStorageServeCancelledInformation: target: ShortTermStorageTarget status_code: int - exception: Optional[Dict[str, Any]] + exception: Optional[dict[str, Any]] @property def message_exception(self) -> MessageException: diff --git a/lib/galaxy/tool_shed/cache.py b/lib/galaxy/tool_shed/cache.py index 7380fcb7f307..e5d0cd9a297a 100644 --- a/lib/galaxy/tool_shed/cache.py +++ b/lib/galaxy/tool_shed/cache.py @@ -1,10 +1,5 @@ import logging from collections import defaultdict 
diff --git a/lib/galaxy/tool_shed/galaxy_install/client.py b/lib/galaxy/tool_shed/galaxy_install/client.py
index 51a59003e138..c55eeef35cf1 100644
--- a/lib/galaxy/tool_shed/galaxy_install/client.py
+++ b/lib/galaxy/tool_shed/galaxy_install/client.py
@@ -1,8 +1,6 @@
 import threading
 from typing import (
     Any,
-    Dict,
-    List,
     Optional,
     runtime_checkable,
     TYPE_CHECKING,
@@ -31,7 +29,7 @@ class DataManagerInterface(Protocol):
 
     def process_result(self, out_data): ...
 
-    def write_bundle(self, out: Dict[str, OutputDataset]) -> Dict[str, OutputDataset]: ...
+    def write_bundle(self, out: dict[str, OutputDataset]) -> dict[str, OutputDataset]: ...
 
 
 class DataManagersInterface(Protocol):
@@ -44,7 +42,7 @@ def load_manager_from_elem(
 
     def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: ...
 
-    def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: ...
+    def remove_manager(self, manager_ids: Union[str, list[str]]) -> None: ...
 
 
 ToolBoxType = TypeVar("ToolBoxType", bound="AbstractToolBox", contravariant=True)
diff --git a/lib/galaxy/tool_shed/galaxy_install/install_manager.py b/lib/galaxy/tool_shed/galaxy_install/install_manager.py
index 493b7cced5a7..7597d243fb32 100644
--- a/lib/galaxy/tool_shed/galaxy_install/install_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/install_manager.py
@@ -3,10 +3,7 @@
 import os
 from typing import (
     Any,
-    Dict,
-    List,
     Optional,
-    Tuple,
 )
 
 from sqlalchemy import or_
@@ -47,7 +44,7 @@ def get_install_info_from_tool_shed(
     tool_shed_url: str, tool_shed_registry: Registry, name: str, owner: str, changeset_revision: str
-) -> Tuple[RepositoryMetadataInstallInfoDict, ExtraRepoInfo]:
+) -> tuple[RepositoryMetadataInstallInfoDict, ExtraRepoInfo]:
     params = dict(name=name, owner=owner, changeset_revision=changeset_revision)
     pathspec = ["api", "repositories", "get_repository_revision_install_info"]
     try:
@@ -110,7 +107,7 @@ def _get_repository_components_for_installation(
 
     def __get_install_info_from_tool_shed(
         self, tool_shed_url: str, name: str, owner: str, changeset_revision: str
-    ) -> Tuple[RepositoryMetadataInstallInfoDict, List[ExtraRepoInfo]]:
+    ) -> tuple[RepositoryMetadataInstallInfoDict, list[ExtraRepoInfo]]:
         repository_revision_dict, repo_info_dict = get_install_info_from_tool_shed(
             tool_shed_url, self.app.tool_shed_registry, name, owner, changeset_revision
         )
@@ -331,7 +328,7 @@ def initiate_repository_installation(self, installation_dict):
         return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
 
     def install(
-        self, tool_shed_url: str, name: str, owner: str, changeset_revision: str, install_options: Dict[str, Any]
+        self, tool_shed_url: str, name: str, owner: str, changeset_revision: str, install_options: dict[str, Any]
     ):
         # Get all of the information necessary for installing the repository from the specified tool shed.
         repository_revision_dict, repo_info_dicts = self.__get_install_info_from_tool_shed(
@@ -358,8 +355,8 @@ def __initiate_and_install_repositories(
         self,
         tool_shed_url: str,
         repository_revision_dict: RepositoryMetadataInstallInfoDict,
-        repo_info_dicts: List[ExtraRepoInfo],
-        install_options: Dict[str, Any],
+        repo_info_dicts: list[ExtraRepoInfo],
+        install_options: dict[str, Any],
     ):
         try:
             has_repository_dependencies = repository_revision_dict["has_repository_dependencies"]
@@ -800,7 +797,7 @@ def update_tool_shed_repository(
             )
         return (None, None)
 
-    def order_components_for_installation(self, tsr_ids: List[str], repo_info_dicts, tool_panel_section_keys):
+    def order_components_for_installation(self, tsr_ids: list[str], repo_info_dicts, tool_panel_section_keys):
         """
         Some repositories may have repository dependencies that are required to be installed
         before the dependent repository.  This method will inspect the list of repositories
@@ -820,7 +817,7 @@ def order_components_for_installation(self, tsr_ids: List[str], repo_info_dicts,
         prior_install_required_dict = repository_util.get_prior_import_or_install_required_dict(
             self.app, tsr_ids, repo_info_dicts
         )
-        processed_tsr_ids: List[str] = []
+        processed_tsr_ids: list[str] = []
         while len(processed_tsr_ids) != len(prior_install_required_dict.keys()):
             tsr_id = suc.get_next_prior_import_or_install_required_dict_entry(
                 prior_install_required_dict, processed_tsr_ids
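RepositoryTupleT below is a plain-assignment type alias built from a PEP 585 generic; it needs no typing machinery and can be used directly in annotations. A hypothetical usage sketch:

    RepositoryTupleT = tuple[str, str, str, str]  # (tool_shed, name, owner, changeset_revision)


    def describe(repo: RepositoryTupleT) -> str:
        tool_shed, name, owner, changeset_revision = repo
        return f"{owner}/{name}@{changeset_revision} from {tool_shed}"


    print(describe(("example.org", "bwa", "devteam", "abc123")))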
repo_info_dict: Dict[str, repository_util.AnyRepositoryTupleT], + repo_info_dict: dict[str, repository_util.AnyRepositoryTupleT], includes_tool_dependencies, updating=False, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Return dictionaries containing the sets of installed and missing tool dependencies and repository dependencies associated with the repository defined by the received repo_info_dict. """ rdim = repository_dependency_manager.RepositoryDependencyInstallManager(self.app) repository = None - installed_rd: Dict[str, Any] = {} + installed_rd: dict[str, Any] = {} installed_td: repository_util.ToolDependenciesDictT = {} - missing_rd: Dict[str, Any] = {} + missing_rd: dict[str, Any] = {} missing_td: repository_util.ToolDependenciesDictT = {} name = next(iter(repo_info_dict)) repo_info_tuple = repo_info_dict[name] @@ -338,7 +335,7 @@ def get_dependencies_for_repository( def get_installed_and_missing_repository_dependencies( self, repository: ToolShedRepository - ) -> Tuple[Dict[str, Any], Dict[str, Any]]: + ) -> tuple[dict[str, Any], dict[str, Any]]: """ Return the installed and missing repository dependencies for a tool shed repository that has a record in the Galaxy database, but may or may not be installed. In this case, the repository dependencies are @@ -347,8 +344,8 @@ def get_installed_and_missing_repository_dependencies( dependencies are really a dependency of the dependent repository's contained tool dependency, and only if that tool dependency requires compilation. """ - missing_repository_dependencies: Dict[str, Any] = {} - installed_repository_dependencies: Dict[str, Any] = {} + missing_repository_dependencies: dict[str, Any] = {} + installed_repository_dependencies: dict[str, Any] = {} has_repository_dependencies = repository.has_repository_dependencies if has_repository_dependencies: # The repository dependencies container will include only the immediate repository @@ -408,7 +405,7 @@ def get_installed_and_missing_repository_dependencies( def get_installed_and_missing_repository_dependencies_for_new_or_updated_install( self, repo_info_tuple - ) -> Tuple[Dict[str, Any], Dict[str, Any]]: + ) -> tuple[dict[str, Any], dict[str, Any]]: """ Parse the received repository_dependencies dictionary that is associated with a repository being installed into Galaxy for the first time and attempt to determine repository dependencies that are @@ -508,7 +505,7 @@ def get_installed_and_missing_repository_dependencies_for_new_or_updated_install @no_type_check def get_installed_and_missing_tool_dependencies_for_repository( self, tool_dependencies_dict: repository_util.ToolDependenciesDictT - ) -> Tuple[repository_util.ToolDependenciesDictT, repository_util.ToolDependenciesDictT]: + ) -> tuple[repository_util.ToolDependenciesDictT, repository_util.ToolDependenciesDictT]: """ Return the lists of installed tool dependencies and missing tool dependencies for a set of repositories being installed into Galaxy. @@ -580,7 +577,7 @@ def get_installed_and_missing_tool_dependencies_for_repository( def get_repository_dependency_tups_for_installed_repository( self, repository, dependency_tups=None, status=None - ) -> List[RepositoryTupleT]: + ) -> list[RepositoryTupleT]: """ Return a list of of tuples defining tool_shed_repository objects (whose status can be anything) required by the received repository. The returned list defines the entire repository dependency tree. 
This method is called @@ -631,7 +628,7 @@ def get_repository_install_dir(self, tool_shed_repository: ToolShedRepository) - def handle_existing_tool_dependencies_that_changed_in_update( self, repository: ToolShedRepository, original_dependency_dict, new_dependency_dict - ) -> Tuple[List[str], List[str]]: + ) -> tuple[list[str], list[str]]: """ This method is called when a Galaxy admin is getting updates for an installed tool shed repository in order to cover the case where an existing tool dependency was changed (e.g., diff --git a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py index 60c4915a851a..f25bd46de486 100644 --- a/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/metadata/installed_repository_metadata_manager.py @@ -2,7 +2,6 @@ import os from typing import ( Any, - Dict, Optional, ) @@ -37,13 +36,13 @@ def __init__( repository: Optional[ToolShedRepository] = None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, - shed_config_dict: Optional[Dict[str, Any]] = None, + shed_config_dict: Optional[dict[str, Any]] = None, relative_install_dir: Optional[str] = None, repository_files_dir: Optional[str] = None, resetting_all_metadata_on_repository: bool = False, updating_installed_repository: bool = False, persist: bool = False, - metadata_dict: Optional[Dict[str, Any]] = None, + metadata_dict: Optional[dict[str, Any]] = None, ): super().__init__( app, diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py index c870829bf994..808dee9485ef 100644 --- a/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py +++ b/lib/galaxy/tool_shed/galaxy_install/tools/data_manager.py @@ -4,8 +4,6 @@ import time from typing import ( Any, - Dict, - List, Optional, ) @@ -47,7 +45,7 @@ def data_managers_path(self) -> Optional[str]: return root.get("tool_path", None) return None - def _data_manager_config_elems_to_xml_file(self, config_elems: List[Element], config_filename: StrPath) -> None: + def _data_manager_config_elems_to_xml_file(self, config_elems: list[Element], config_filename: StrPath) -> None: """ Persist the current in-memory list of config_elems to a file named by the value of config_filename. 
@@ -68,13 +66,13 @@ def _data_manager_config_elems_to_xml_file(self, config_elems: List[Element], co
     def install_data_managers(
         self,
         shed_data_manager_conf_filename: StrPath,
-        metadata_dict: Dict[str, Any],
-        shed_config_dict: Dict[str, Any],
+        metadata_dict: dict[str, Any],
+        shed_config_dict: dict[str, Any],
         relative_install_dir: StrPath,
         repository,
         repository_tools_tups,
-    ) -> List["DataManagerInterface"]:
-        rval: List[DataManagerInterface] = []
+    ) -> list["DataManagerInterface"]:
+        rval: list[DataManagerInterface] = []
         if "data_manager" in metadata_dict:
             tpm = tool_panel_manager.ToolPanelManager(self.app)
             repository_tools_by_guid = {}
diff --git a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py
index adf86824072b..a68036ab6065 100644
--- a/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py
+++ b/lib/galaxy/tool_shed/galaxy_install/tools/tool_panel_manager.py
@@ -2,8 +2,6 @@
 import logging
 from typing import (
     Any,
-    Dict,
-    List,
 )

 from galaxy.exceptions import RequestParameterInvalidException
@@ -30,7 +28,7 @@ class ToolPanelManager:
     def __init__(self, app: InstallationTarget):
         self.app = app

-    def add_to_shed_tool_config(self, shed_tool_conf_dict: Dict[str, Any], elem_list: list) -> None:
+    def add_to_shed_tool_config(self, shed_tool_conf_dict: dict[str, Any], elem_list: list) -> None:
         """A tool shed repository is being installed, so change the shed_tool_conf file.
         Parse the config file to generate the entire list of config_elems instead of using the in-memory list
@@ -184,7 +182,7 @@ def generate_tool_panel_dict_for_new_install(self, tool_dicts, tool_section=None
         currently be defined within the same tool section in the tool panel or outside of any
         sections.
         """
-        tool_panel_dict: Dict[str, List[Dict[str, Any]]] = {}
+        tool_panel_dict: dict[str, list[dict[str, Any]]] = {}
         if tool_section:
             section_id = tool_section.id
             section_name = tool_section.name
@@ -208,7 +206,7 @@ def generate_tool_panel_dict_for_new_install(self, tool_dicts, tool_section=None
     def generate_tool_panel_dict_for_tool_config(
         self, guid, tool_config, tool_sections=None
-    ) -> Dict[str, List[Dict[str, Any]]]:
+    ) -> dict[str, list[dict[str, Any]]]:
         """
         Create a dictionary of the following type for a single tool config file name.
         The intent is to call this method for every tool config in a repository and
@@ -224,13 +222,13 @@ def generate_tool_panel_dict_for_tool_config(
                            name : }]}
         """
-        tool_panel_dict: Dict[str, List[Dict[str, Any]]] = {}
+        tool_panel_dict: dict[str, list[dict[str, Any]]] = {}
         file_name = strip_path(tool_config)
         tool_section_dicts = self.generate_tool_section_dicts(tool_config=file_name, tool_sections=tool_sections)
         tool_panel_dict[guid] = tool_section_dicts
         return tool_panel_dict

-    def generate_tool_panel_dict_from_shed_tool_conf_entries(self, repository) -> Dict[str, List[Dict[str, Any]]]:
+    def generate_tool_panel_dict_from_shed_tool_conf_entries(self, repository) -> dict[str, list[dict[str, Any]]]:
         """
         Keep track of the section in the tool panel in which this repository's
         tools will be contained by parsing the shed_tool_conf in which the
@@ -239,7 +237,7 @@ def generate_tool_panel_dict_from_shed_tool_conf_entries(self, repository) -> Di
         repository is being deactivated or un-installed and allows for
         activation or re-installation using the original layout.
""" - tool_panel_dict: Dict[str, List[Dict[str, Any]]] = {} + tool_panel_dict: dict[str, list[dict[str, Any]]] = {} shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( self.app, repository ) @@ -293,11 +291,11 @@ def generate_tool_panel_elem_list( repository_clone_url: str, changeset_revision: str, tool_panel_dict: dict, - repository_tools_tups: List[tuple], + repository_tools_tups: list[tuple], owner="", ): """Generate a list of ElementTree Element objects for each section or tool.""" - elem_list: List[Element] = [] + elem_list: list[Element] = [] tool_elem = None cleaned_repository_clone_url = remove_protocol_and_user_from_clone_url(repository_clone_url) if not owner: @@ -344,8 +342,8 @@ def generate_tool_panel_elem_list( elem_list.append(tool_elem) return elem_list - def generate_tool_section_dicts(self, tool_config=None, tool_sections=None) -> List[Dict[str, Any]]: - tool_section_dicts: List[Dict[str, Any]] = [] + def generate_tool_section_dicts(self, tool_config=None, tool_sections=None) -> list[dict[str, Any]]: + tool_section_dicts: list[dict[str, Any]] = [] if tool_config is None: tool_config = "" if tool_sections: @@ -367,7 +365,7 @@ def generate_tool_section_dicts(self, tool_config=None, tool_sections=None) -> L tool_section_dicts.append(dict(tool_config=tool_config, id="", version="", name="")) return tool_section_dicts - def generate_tool_section_element_from_dict(self, tool_section_dict: Dict[str, str]) -> Element: + def generate_tool_section_element_from_dict(self, tool_section_dict: dict[str, str]) -> Element: # The value of tool_section_dict looks like the following. # { id: , version : , name : } tool_section = Element("section") diff --git a/lib/galaxy/tool_shed/metadata/metadata_generator.py b/lib/galaxy/tool_shed/metadata/metadata_generator.py index 596d1c1685c0..b0b7a6123a84 100644 --- a/lib/galaxy/tool_shed/metadata/metadata_generator.py +++ b/lib/galaxy/tool_shed/metadata/metadata_generator.py @@ -4,10 +4,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -56,8 +53,8 @@ log = logging.getLogger(__name__) -InvalidFileT = Tuple[str, str] -HandleResultT = Tuple[List, bool, str] +InvalidFileT = tuple[str, str] +HandleResultT = tuple[list, bool, str] NOT_TOOL_CONFIGS = [ suc.DATATYPES_CONFIG_FILENAME, @@ -92,24 +89,24 @@ def repo_path(self, app) -> Optional[str]: ... 
class BaseMetadataGenerator: app: Union["BasicSharedApp", InstallationTarget] repository: Optional[RepositoryProtocol] - invalid_file_tups: List[InvalidFileT] + invalid_file_tups: list[InvalidFileT] changeset_revision: Optional[str] repository_clone_url: Optional[str] - shed_config_dict: Dict[str, Any] - metadata_dict: Dict[str, Any] + shed_config_dict: dict[str, Any] + metadata_dict: dict[str, Any] relative_install_dir: Optional[str] repository_files_dir: Optional[str] persist: bool - def initial_metadata_dict(self) -> Dict[str, Any]: + def initial_metadata_dict(self) -> dict[str, Any]: raise NotImplementedError() def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td=False) -> HandleResultT: raise NotImplementedError() def _generate_data_manager_metadata( - self, repo_dir, data_manager_config_filename, metadata_dict: Dict[str, Any], shed_config_dict=None - ) -> Dict[str, Any]: + self, repo_dir, data_manager_config_filename, metadata_dict: dict[str, Any], shed_config_dict=None + ) -> dict[str, Any]: """ Update the received metadata_dict with information from the parsed data_manager_config_filename. """ @@ -128,8 +125,8 @@ def _generate_data_manager_metadata( rel_data_manager_config_filename = os.path.join( relative_data_manager_dir, os.path.split(data_manager_config_filename)[1] ) - data_managers: Dict[str, dict] = {} - invalid_data_managers: List[dict] = [] + data_managers: dict[str, dict] = {} + invalid_data_managers: list[dict] = [] data_manager_metadata = { "config_filename": rel_data_manager_config_filename, "data_managers": data_managers, @@ -509,9 +506,9 @@ def generate_repository_dependency_metadata(self, repository_dependencies_config root = tree.getroot() xml_is_valid = root.tag == "repositories" if xml_is_valid: - invalid_repository_dependencies_dict: Dict[str, Any] = dict(description=root.get("description")) + invalid_repository_dependencies_dict: dict[str, Any] = dict(description=root.get("description")) invalid_repository_dependency_tups = [] - valid_repository_dependencies_dict: Dict[str, Any] = dict(description=root.get("description")) + valid_repository_dependencies_dict: dict[str, Any] = dict(description=root.get("description")) valid_repository_dependency_tups = [] for repository_elem in root.findall("repository"): repository_dependency_tup, repository_dependency_is_valid, err_msg = self.handle_repository_elem( @@ -839,11 +836,11 @@ def set_repository_files_dir(self, repository_files_dir: Optional[str] = None): def _update_repository_dependencies_metadata( self, - metadata: Dict[str, Any], - repository_dependency_tups: List[tuple], + metadata: dict[str, Any], + repository_dependency_tups: list[tuple], is_valid: bool, description: Optional[str], - ) -> Dict[str, Any]: + ) -> dict[str, Any]: if is_valid: repository_dependencies_dict = metadata.get("repository_dependencies", None) else: @@ -879,7 +876,7 @@ def __init__( repository=None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, - shed_config_dict: Optional[Dict[str, Any]] = None, + shed_config_dict: Optional[dict[str, Any]] = None, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False, @@ -923,7 +920,7 @@ def __init__( self.persist = persist self.invalid_file_tups = [] - def initial_metadata_dict(self) -> Dict[str, Any]: + def initial_metadata_dict(self) -> dict[str, Any]: # Shed related tool panel configs are only relevant to Galaxy. 
metadata_dict = {"shed_config_filename": self.shed_config_dict.get("config_filename")} return metadata_dict @@ -1032,7 +1029,7 @@ def handle_repository_elem(self, repository_elem, only_if_compiling_contained_td return repository_dependency_tup, is_valid, error_message -def _get_readme_file_names(repository_name: str) -> List[str]: +def _get_readme_file_names(repository_name: str) -> list[str]: """Return a list of file names that will be categorized as README files for the received repository_name.""" readme_files = ["readme", "read_me", "install"] valid_filenames = [f"{f}.txt" for f in readme_files] diff --git a/lib/galaxy/tool_shed/tools/data_table_manager.py b/lib/galaxy/tool_shed/tools/data_table_manager.py index 462667a01894..61fbaafd4557 100644 --- a/lib/galaxy/tool_shed/tools/data_table_manager.py +++ b/lib/galaxy/tool_shed/tools/data_table_manager.py @@ -2,7 +2,6 @@ import os import shutil from typing import ( - List, TYPE_CHECKING, Union, ) @@ -97,7 +96,7 @@ def generate_repository_info_elem_from_repository(self, tool_shed_repository, pa **kwd, ) - def get_tool_index_sample_files(self, sample_files: List[str]) -> List[str]: + def get_tool_index_sample_files(self, sample_files: list[str]) -> list[str]: """ Try to return the list of all appropriate tool data sample files included in the repository. diff --git a/lib/galaxy/tool_shed/unittest_utils/__init__.py b/lib/galaxy/tool_shed/unittest_utils/__init__.py index a58c3d6f6561..d11e9c622d1e 100644 --- a/lib/galaxy/tool_shed/unittest_utils/__init__.py +++ b/lib/galaxy/tool_shed/unittest_utils/__init__.py @@ -2,8 +2,6 @@ from pathlib import Path from typing import ( Any, - Dict, - List, NamedTuple, Optional, Union, @@ -62,7 +60,7 @@ def as_str(self) -> str: class Config: tool_data_path: str install_database_connection: str - install_database_engine_options: Dict[str, Any] = {} + install_database_engine_options: dict[str, Any] = {} update_integrated_tool_panel: bool = True integrated_tool_panel_config: str shed_tool_config_file: str @@ -79,7 +77,7 @@ def get(self, key, default): class TestTool: - _macro_paths: List[str] = [] + _macro_paths: list[str] = [] params_with_missing_data_table_entry: list = [] params_with_missing_index_file: list = [] @@ -139,7 +137,7 @@ class DummyDataManager(DataManagerInterface): def process_result(self, out_data): return None - def write_bundle(self, out) -> Dict[str, OutputDataset]: + def write_bundle(self, out) -> dict[str, OutputDataset]: return {} @@ -154,7 +152,7 @@ def load_manager_from_elem( def get_manager(self, data_manager_id: str) -> Optional[DataManagerInterface]: return None - def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + def remove_manager(self, manager_ids: Union[str, list[str]]) -> None: return None @property diff --git a/lib/galaxy/tool_shed/util/hg_util.py b/lib/galaxy/tool_shed/util/hg_util.py index f8713434127b..9256b56b60f7 100644 --- a/lib/galaxy/tool_shed/util/hg_util.py +++ b/lib/galaxy/tool_shed/util/hg_util.py @@ -3,7 +3,6 @@ import subprocess from typing import ( Optional, - Tuple, ) from galaxy.tool_shed.util import basic_util @@ -14,7 +13,7 @@ INITIAL_CHANGELOG_HASH = "000000000000" -def clone_repository(repository_clone_url: str, repository_file_dir: str, ctx_rev=None) -> Tuple[bool, Optional[str]]: +def clone_repository(repository_clone_url: str, repository_file_dir: str, ctx_rev=None) -> tuple[bool, Optional[str]]: """ Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository. 
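[Editorial aside, not part of the patch] Every hunk in this section applies the same mechanical rewrite: with Python 3.9 as the minimum supported version, PEP 585 makes the builtin containers generic, so Dict/List/Tuple/Set/Type (and their typing imports) become dict/list/tuple/set/type, while Optional and Union still come from typing. A minimal sketch of the pattern under that assumption; clone_sketch is a hypothetical name that only mirrors the shape of signatures such as clone_repository above:

    from typing import Optional

    def clone_sketch(url: str, dest: str, ctx_rev: Optional[str] = None) -> tuple[bool, Optional[str]]:
        # was: -> Tuple[bool, Optional[str]], with "from typing import Tuple"
        errors: list[str] = []  # was: List[str]
        options: dict[str, bool] = {}  # was: Dict[str, bool]
        return (not errors, errors[0] if errors else None)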
diff --git a/lib/galaxy/tool_shed/util/repository_util.py b/lib/galaxy/tool_shed/util/repository_util.py index 315cd43426f1..b87f7bb7650c 100644 --- a/lib/galaxy/tool_shed/util/repository_util.py +++ b/lib/galaxy/tool_shed/util/repository_util.py @@ -4,10 +4,7 @@ import shutil from typing import ( Any, - Dict, - List, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -44,7 +41,7 @@ def check_for_updates( tool_shed_registry: Registry, install_model_context: install_model_scoped_session, repository_id: Optional[int] = None, -) -> Tuple[str, str]: +) -> tuple[str, str]: message = "" status = "ok" if repository_id is None: @@ -89,7 +86,7 @@ def check_for_updates( def _check_or_update_tool_shed_status_for_installed_repository( tool_shed_registry: Registry, install_model_context: install_model_scoped_session, repository: ToolShedRepository -) -> Tuple[bool, bool]: +) -> tuple[bool, bool]: updated = False tool_shed_status_dict = get_tool_shed_status_for(tool_shed_registry, repository) if tool_shed_status_dict: @@ -306,7 +303,7 @@ def get_installed_tool_shed_repository(app: "InstallationTarget", id): return rval[0] -def get_prior_import_or_install_required_dict(app: "InstallationTarget", tsr_ids: List[str], repo_info_dicts): +def get_prior_import_or_install_required_dict(app: "InstallationTarget", tsr_ids: list[str], repo_info_dicts): """ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy when a repository and its dependencies are being installed. Return a @@ -315,7 +312,7 @@ def get_prior_import_or_install_required_dict(app: "InstallationTarget", tsr_ids must be imported or installed prior to the repository associated with the tsr_id key. """ # Initialize the dictionary. - prior_import_or_install_required_dict: Dict[str, List[str]] = {} + prior_import_or_install_required_dict: dict[str, list[str]] = {} for tsr_id in tsr_ids: prior_import_or_install_required_dict[tsr_id] = [] # Inspect the repository dependencies for each repository about to be installed and populate the dictionary. @@ -335,9 +332,9 @@ def get_prior_import_or_install_required_dict(app: "InstallationTarget", tsr_ids return prior_import_or_install_required_dict -ToolDependenciesDictT = Dict[str, Union[Dict[str, Any], List[Dict[str, Any]]]] -OldRepositoryTupleT = Tuple[str, str, str, str, str, ToolDependenciesDictT] -RepositoryTupleT = Tuple[str, str, str, str, str, Optional[Any], ToolDependenciesDictT] +ToolDependenciesDictT = dict[str, Union[dict[str, Any], list[dict[str, Any]]]] +OldRepositoryTupleT = tuple[str, str, str, str, str, ToolDependenciesDictT] +RepositoryTupleT = tuple[str, str, str, str, str, Optional[Any], ToolDependenciesDictT] AnyRepositoryTupleT = Union[OldRepositoryTupleT, RepositoryTupleT] @@ -480,7 +477,7 @@ def get_repository_for_dependency_relationship(app: "InstallationTarget", tool_s def get_repository_ids_requiring_prior_import_or_install( - app: "InstallationTarget", tsr_ids: List[str], repository_dependencies + app: "InstallationTarget", tsr_ids: list[str], repository_dependencies ): """ This method is used in the Tool Shed when exporting a repository and its dependencies, @@ -492,7 +489,7 @@ def get_repository_ids_requiring_prior_import_or_install( and whose associated repositories must be imported / installed prior to the dependent repository associated with the received repository_dependencies. 
""" - prior_tsr_ids: List[str] = [] + prior_tsr_ids: list[str] = [] if repository_dependencies: for key, rd_tups in repository_dependencies.items(): if key in ["description", "root_key"]: @@ -558,7 +555,7 @@ def get_tool_shed_repository_by_id(app, repository_id) -> ToolShedRepository: def get_tool_shed_status_for(tool_shed_registry: Registry, repository: ToolShedRepository): tool_shed_url = tool_shed_registry.get_tool_shed_url(str(repository.tool_shed)) assert tool_shed_url - params: Dict[str, Any] = dict( + params: dict[str, Any] = dict( name=repository.name, owner=repository.owner, changeset_revision=repository.changeset_revision ) pathspec = ["repository", "status_for_installed_repository"] diff --git a/lib/galaxy/tool_shed/util/shed_util_common.py b/lib/galaxy/tool_shed/util/shed_util_common.py index 6f358183742b..648b9de4eaf3 100644 --- a/lib/galaxy/tool_shed/util/shed_util_common.py +++ b/lib/galaxy/tool_shed/util/shed_util_common.py @@ -1,9 +1,5 @@ import logging import re -from typing import ( - Dict, - List, -) from galaxy import util from galaxy.tool_shed.util import repository_util @@ -77,7 +73,7 @@ def get_ctx_rev(app, tool_shed_url, name, owner, changeset_revision): def get_next_prior_import_or_install_required_dict_entry( - prior_required_dict: Dict[str, List[str]], processed_tsr_ids: List[str] + prior_required_dict: dict[str, list[str]], processed_tsr_ids: list[str] ): """ This method is used in the Tool Shed when exporting a repository and its dependencies, and in Galaxy diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py index d6bc2d44d028..bfa798f77f6a 100644 --- a/lib/galaxy/tools/__init__.py +++ b/lib/galaxy/tools/__init__.py @@ -15,13 +15,8 @@ from typing import ( Any, cast, - Dict, - List, NamedTuple, Optional, - Set, - Tuple, - Type, TYPE_CHECKING, Union, ) @@ -328,7 +323,7 @@ REQUIRE_FULL_DIRECTORY = { "includes": [{"path": "**", "path_type": "glob"}], } -IMPLICITLY_REQUIRED_TOOL_FILES: Dict[str, Dict] = { +IMPLICITLY_REQUIRED_TOOL_FILES: dict[str, dict] = { "deseq2": { "version": parse_version("2.11.40.6"), "required": {"includes": [{"path": "*.R", "path_type": "glob"}]}, @@ -486,7 +481,7 @@ class ToolBox(AbstractToolBox): def __init__(self, config_filenames, tool_root_dir, app, save_integrated_tool_panel: bool = True): self._reload_count = 0 self.tool_location_fetcher = ToolLocationFetcher() - self.cache_regions: Dict[str, ToolDocumentCache] = {} + self.cache_regions: dict[str, ToolDocumentCache] = {} # This is here to deal with the old default value, which doesn't make # sense in an "installed Galaxy" world. 
# FIXME: ./ @@ -753,7 +748,7 @@ def _load_workflow(self, workflow_id): def __build_tool_version_select_field(self, tools, tool_id, set_selected): """Build a SelectField whose options are the ids for the received list of tools.""" - options: List[Tuple[str, str]] = [] + options: list[tuple[str, str]] = [] for tool in tools: options.insert(0, (tool.version, tool.id)) select_field = SelectField(name="tool_id") @@ -791,7 +786,7 @@ def encode(self, tool, app, nested=False): """ Convert the data to a string """ - value = cast(Dict[str, Any], params_to_strings(tool.inputs, self.inputs, app, nested=nested)) + value = cast(dict[str, Any], params_to_strings(tool.inputs, self.inputs, app, nested=nested)) value["__page__"] = self.page value["__rerun_remap_job_id__"] = self.rerun_remap_job_id return value @@ -908,7 +903,7 @@ def persist_object(self, obj): def flush(self): self.sa_session.commit() - def get_library_folder(self, destination: Dict[str, Any]): + def get_library_folder(self, destination: dict[str, Any]): folder_id = destination.get("library_folder_id") assert folder_id decoded_folder_id = self.app.security.decode_id(folder_id) if isinstance(folder_id, str) else folder_id @@ -1041,14 +1036,14 @@ def __init__( self.repository_id = repository_id self._allow_code_files = allow_code_files # setup initial attribute values - self.stdio_exit_codes: List = [] - self.stdio_regexes: List = [] - self.inputs_by_page: List[Dict] = [] - self.display_by_page: List = [] - self.action: Union[str, Tuple[str, str]] = "/tool_runner/index" + self.stdio_exit_codes: list = [] + self.stdio_regexes: list = [] + self.inputs_by_page: list[dict] = [] + self.display_by_page: list = [] + self.action: Union[str, tuple[str, str]] = "/tool_runner/index" self.target = "galaxy_main" self.method = "post" - self.labels: List = [] + self.labels: list = [] self.check_values = True self.nginx_upload = False self.input_required = False @@ -1063,7 +1058,7 @@ def __init__( # parameters like SelectField objects. This enables us to more # easily ensure that parameter dependencies like index files or # tool_data_table_conf.xml entries exist. - self.input_params: List[ToolParameter] = [] + self.input_params: list[ToolParameter] = [] # Attributes of tools installed from Galaxy tool sheds. 
self.tool_shed: Optional[str] = None self.repository_name: Optional[str] = None @@ -1078,7 +1073,7 @@ def __init__( self.old_id: Optional[str] = None self.python_template_version: Optional[Version] = None self._lineage = None - self.dependencies: List = [] + self.dependencies: list = [] # populate toolshed repository info, if available self.populate_tool_shed_info(tool_shed_repository) # add tool resource parameters @@ -1086,13 +1081,13 @@ def __init__( self.tool_errors = None # Parse XML element containing configuration self.tool_source = tool_source - self.outputs: Dict[str, ToolOutputBase] = {} - self.output_collections: Dict[str, ToolOutputCollection] = {} + self.outputs: dict[str, ToolOutputBase] = {} + self.output_collections: dict[str, ToolOutputCollection] = {} self.command: Optional[str] = None - self.base_command: Optional[List[str]] = None - self.arguments: Optional[List[str]] = [] + self.base_command: Optional[list[str]] = None + self.arguments: Optional[list[str]] = [] self.shell_command: Optional[str] = None - self.javascript_requirements: Optional[List[JavascriptRequirement]] = None + self.javascript_requirements: Optional[list[JavascriptRequirement]] = None self._is_workflow_compatible = None self.__help = None self.__tests: Optional[str] = None @@ -1508,9 +1503,9 @@ def parse(self, tool_source: ToolSource, guid: Optional[str] = None, dynamic: bo self._is_workflow_compatible = self.check_workflow_compatible(self.tool_source) def __parse_legacy_features(self, tool_source: ToolSource): - self.code_namespace: Dict[str, str] = {} - self.hook_map: Dict[str, str] = {} - self.uihints: Dict[str, str] = {} + self.code_namespace: dict[str, str] = {} + self.hook_map: dict[str, str] = {} + self.uihints: dict[str, str] = {} if not hasattr(tool_source, "root"): return @@ -1692,7 +1687,7 @@ def parse_inputs(self, tool_source: ToolSource): # Load parameters (optional) self.inputs: ToolInputsT = {} pages = tool_source.parse_input_pages() - enctypes: Set[str] = set() + enctypes: set[str] = set() if pages.inputs_defined: if hasattr(pages, "input_elem"): input_elem = pages.input_elem @@ -2088,9 +2083,9 @@ def visit_inputs(self, values, callback): def expand_incoming( self, request_context: WorkRequestContext, incoming: ToolRequestT, input_format: InputFormatT = "legacy" - ) -> Tuple[ - List[ToolStateJobInstancePopulatedT], - List[ToolStateJobInstancePopulatedT], + ) -> tuple[ + list[ToolStateJobInstancePopulatedT], + list[ToolStateJobInstancePopulatedT], Optional[int], Optional[MatchingCollections], ]: @@ -2099,7 +2094,7 @@ def expand_incoming( # Fixed set of input parameters may correspond to any number of jobs. # Expand these out to individual parameters for given jobs (tool executions). 
- expanded_incomings: List[ToolStateJobInstanceT] + expanded_incomings: list[ToolStateJobInstanceT] collection_info: Optional[MatchingCollections] expanded_incomings, collection_info = expand_meta_parameters( request_context, self, incoming, input_format=input_format @@ -2112,8 +2107,8 @@ def expand_incoming( "internals.galaxy.tools.validation", "Validated and populated state for tool request", ) - all_errors: List[ParameterValidationErrorsT] = [] - all_params: List[ToolStateJobInstancePopulatedT] = [] + all_errors: list[ParameterValidationErrorsT] = [] + all_params: list[ToolStateJobInstancePopulatedT] = [] for expanded_incoming in expanded_incomings: params, errors = self._populate(request_context, expanded_incoming, input_format) @@ -2125,7 +2120,7 @@ def expand_incoming( return all_params, all_errors, rerun_remap_job_id, collection_info def _ensure_expansion_is_valid( - self, expanded_incomings: List[ToolStateJobInstanceT], rerun_remap_job_id: Optional[int] + self, expanded_incomings: list[ToolStateJobInstanceT], rerun_remap_job_id: Optional[int] ) -> None: """If the request corresponds to multiple jobs but this doesn't work with request configuration - raise an error. @@ -2145,7 +2140,7 @@ def _ensure_expansion_is_valid( def _populate( self, request_context, expanded_incoming: ToolStateJobInstanceT, input_format: InputFormatT - ) -> Tuple[ToolStateJobInstancePopulatedT, ParameterValidationErrorsT]: + ) -> tuple[ToolStateJobInstancePopulatedT, ParameterValidationErrorsT]: """Validate expanded parameters for a job to replace references with model objects. So convert a ToolStateJobInstanceT to a ToolStateJobInstancePopulatedT. @@ -2178,8 +2173,7 @@ def _handle_validate_input_hook( self, request_context, params: ToolStateJobInstancePopulatedT, errors: ParameterValidationErrorsT ): # If the tool provides a `validate_input` hook, call it. - validate_input = self.get_hook("validate_input") - if validate_input: + if validate_input := self.get_hook("validate_input"): # hooks are so terrible ... 
this is specifically for https://github.com/galaxyproject/tools-devteam/blob/main/tool_collections/gops/basecoverage/operation_filter.py legacy_non_dce_params = { k: v.hda if isinstance(v, model.DatasetCollectionElement) and v.hda else v for k, v in params.items() @@ -2190,9 +2184,9 @@ def completed_jobs( self, trans, use_cached_job: bool, - all_params: List[ToolStateJobInstancePopulatedT], - ) -> Dict[int, Optional[Job]]: - completed_jobs: Dict[int, Optional[Job]] = {} + all_params: list[ToolStateJobInstancePopulatedT], + ) -> dict[int, Optional[Job]]: + completed_jobs: dict[int, Optional[Job]] = {} for i, param in enumerate(all_params): if use_cached_job and trans.user: tool_id = self.id @@ -2228,8 +2222,8 @@ def handle_input( """ request_context = proxy_work_context_for_history(trans, history=history) expanded = self.expand_incoming(request_context, incoming=incoming, input_format=input_format) - all_params: List[ToolStateJobInstancePopulatedT] = expanded[0] - all_errors: List[ParameterValidationErrorsT] = expanded[1] + all_params: list[ToolStateJobInstancePopulatedT] = expanded[0] + all_errors: list[ParameterValidationErrorsT] = expanded[1] rerun_remap_job_id: Optional[int] = expanded[2] collection_info: Optional[MatchingCollections] = expanded[3] @@ -2239,7 +2233,7 @@ def handle_input( mapping_params = MappingParameters(incoming, all_params) if use_cached_job: mapping_params.param_template["__use_cached_job__"] = use_cached_job - completed_jobs: Dict[int, Optional[Job]] = self.completed_jobs(trans, use_cached_job, all_params) + completed_jobs: dict[int, Optional[Job]] = self.completed_jobs(trans, use_cached_job, all_params) execution_tracker = execute_job( trans, self, @@ -2274,7 +2268,7 @@ def handle_input( implicit_collections=execution_tracker.implicit_collections, ) - def handle_incoming_errors(self, all_errors: List[ParameterValidationErrorsT]) -> None: + def handle_incoming_errors(self, all_errors: list[ParameterValidationErrorsT]) -> None: if any(all_errors): # simple param_key -> message string for tool form. err_data = {key: unicodify(value) for d in all_errors for (key, value) in d.items()} @@ -2470,10 +2464,10 @@ def _execute( def params_to_strings(self, params: ToolStateJobInstancePopulatedT, app, nested=False): return params_to_strings(self.inputs, params, app, nested) - def params_from_strings(self, params: Dict, ignore_errors: bool = False) -> Dict: + def params_from_strings(self, params: dict, ignore_errors: bool = False) -> dict: return params_from_strings(self.inputs, params, self.app, ignore_errors) - def get_param_values(self, job: Job, ignore_errors: bool = False) -> Dict: + def get_param_values(self, job: Job, ignore_errors: bool = False) -> dict: """ Read encoded parameter values from the database and turn back into a dict of tool parameter values. @@ -2924,7 +2918,7 @@ def to_json( set_dataset_matcher_factory(request_context, self) # create tool state - state_inputs: Dict[str, str] = {} + state_inputs: dict[str, str] = {} state_errors: ParameterValidationErrorsT = {} populate_state(request_context, self.inputs, params.__dict__, state_inputs, state_errors) @@ -2993,7 +2987,7 @@ def populate_model(self, request_context, inputs, state_inputs, group_inputs, ot other_values=other_values, ) - def _map_source_to_history(self, trans: WorkRequestContext, tool_inputs: "ToolInputsT", params: Dict) -> None: + def _map_source_to_history(self, trans: WorkRequestContext, tool_inputs: "ToolInputsT", params: dict) -> None: # Need to remap dataset parameters. 
Job parameters point to original
        # dataset used; parameter should be the analogous dataset in the
        # current history.
@@ -3001,13 +2995,13 @@ def _map_source_to_history(self, trans: WorkRequestContext, tool_inputs: "ToolIn
         assert history

         # Create index for hdas.
-        hda_source_dict: Dict[Union[int, str], HistoryDatasetAssociation] = {}
+        hda_source_dict: dict[Union[int, str], HistoryDatasetAssociation] = {}
         for hda in history.datasets:
             key = f"{hda.hid}_{hda.dataset.id}"
             hda_source_dict[hda.dataset.id] = hda_source_dict[key] = hda

         # Ditto for dataset collections.
-        hdca_source_dict: Dict[Union[int, str], HistoryDatasetCollectionAssociation] = {}
+        hdca_source_dict: dict[Union[int, str], HistoryDatasetCollectionAssociation] = {}
         for hdca in history.dataset_collections:
             key = f"{hdca.hid}_{hdca.collection.id}"
             hdca_source_dict[hdca.collection.id] = hdca_source_dict[key] = hdca
@@ -3017,8 +3011,8 @@ def map_to_history(value):
             if isinstance(value, HistoryDatasetAssociation):
                 id: int = value.dataset.id
                 source: Union[
-                    Dict[Union[int, str], HistoryDatasetAssociation],
-                    Dict[Union[int, str], HistoryDatasetCollectionAssociation],
+                    dict[Union[int, str], HistoryDatasetAssociation],
+                    dict[Union[int, str], HistoryDatasetCollectionAssociation],
                 ] = hda_source_dict
             elif isinstance(value, HistoryDatasetCollectionAssociation):
                 id = value.collection.id
@@ -3229,7 +3223,7 @@ def exec_before_job(self, app, inp_data, out_data, param_dict=None):
         if param_dict is None:
             raise Exception("Internal error - param_dict is empty.")

-        job: Dict[str, str] = {}
+        job: dict[str, str] = {}
         json_wrap(self.inputs, param_dict, self.profile, job, handle_files="OBJECT")
         expression_inputs = {
             "job": job,
@@ -3783,14 +3777,14 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
                 iter_elements_a[element_identifier_b] = hda_a_copy
                 iter_elements_b[element_identifier_b] = hda_b_copy

-            sub_collection_a: Dict[str, Any] = {}
+            sub_collection_a: dict[str, Any] = {}
             sub_collection_a["src"] = "new_collection"
             sub_collection_a["collection_type"] = "list"
             sub_collection_a["elements"] = iter_elements_a

             output_a[element_identifier_a] = sub_collection_a

-            sub_collection_b: Dict[str, Any] = {}
+            sub_collection_b: dict[str, Any] = {}
             sub_collection_b["src"] = "new_collection"
             sub_collection_b["collection_type"] = "list"
             sub_collection_b["elements"] = iter_elements_b
@@ -3959,7 +3953,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
         new_element_structure = {}

         # Which inputs does the identifier appear in.
-        identifiers_map: Dict[str, List[int]] = {}
+        identifiers_map: dict[str, list[int]] = {}
         for input_num, input_list in enumerate(input_lists):
             for dce in input_list.collection.elements:
                 element_identifier = dce.element_identifier
@@ -4533,7 +4527,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history

 # Populate tool_type to ToolClass mappings
-TOOL_CLASSES: List[Type[Tool]] = [
+TOOL_CLASSES: list[type[Tool]] = [
     Tool,
     SetMetadataTool,
     OutputParameterJSONTool,
diff --git a/lib/galaxy/tools/_types.py b/lib/galaxy/tools/_types.py
index 635a86cf459d..b2b87a28ce9f 100644
--- a/lib/galaxy/tools/_types.py
+++ b/lib/galaxy/tools/_types.py
@@ -20,7 +20,6 @@
 from typing import (
     Any,
-    Dict,
     Union,
 )

@@ -28,38 +27,38 @@
 # Input dictionary from the API, may include map/reduce instructions. Objects are referenced by "src"
 # dictionaries and encoded IDs.
-ToolRequestT = Dict[str, Any]
+ToolRequestT = dict[str, Any]

 # Input dictionary extracted from a tool request for running a tool individually as a single job. Objects are referenced
 # by "src" dictionaries with encoded IDs still but batch instructions have been pulled out. Parameters have not
 # been "checked" (check_param has not been called).
-ToolStateJobInstanceT = Dict[str, Any]
+ToolStateJobInstanceT = dict[str, Any]

 # Input dictionary for an individual job where objects are their model objects and parameters have been
 # "checked" (check_param has been called).
-ToolStateJobInstancePopulatedT = Dict[str, Any]
+ToolStateJobInstancePopulatedT = dict[str, Any]

 # Input dictionary for an individual job where the state has been validated and populated but then converted back down
 # to json. Object references are unified in the format of {"values": List["src" dictionary]} where the src dictionaries
 # are decoded ids (ints).
 # See comments on galaxy.tools.parameters.params_to_strings for more information.
-ToolStateDumpedToJsonInternalT = Dict[str, Any]
+ToolStateDumpedToJsonInternalT = dict[str, Any]

 # Input dictionary for an individual job where the state has been validated and populated but then converted back down
 # to json. Object references are unified in the format of {"values": List["src" dictionary]} where src dictionaries
 # are encoded (ids). See comments on galaxy.tools.parameters.params_to_strings for more information.
-ToolStateDumpedToJsonT = Dict[str, Any]
+ToolStateDumpedToJsonT = dict[str, Any]

 # Input dictionary for an individual job where the state has been validated and populated but then converted back down
 # to json. Object references are unified in the format of {"values": List["src" dictionary]} but dumped into
 # strings. See comments on galaxy.tools.parameters.params_to_strings for more information. This maybe should be
 # broken into separate types for encoded and decoded IDs in subsequent type refinements if both are used; if not,
 # this comment should be updated to indicate which is used exclusively.
-ToolStateDumpedToStringsT = Dict[str, str]
+ToolStateDumpedToStringsT = dict[str, str]

 # A dictionary of error messages that occur while attempting to validate a ToolStateJobInstanceT and transform it
 # into a ToolStateJobInstancePopulatedT with model objects populated. Tool errors indicate the job should not be
 # further processed.
-ParameterValidationErrorsT = Dict[str, Union["ParameterValidationErrorsT", str, Exception]] +ParameterValidationErrorsT = dict[str, Union["ParameterValidationErrorsT", str, Exception]] InputFormatT = Literal["legacy", "21.01"] diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py index 30e463f8ffb9..fabe27d6ba82 100644 --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -3,17 +3,15 @@ import os import re from abc import abstractmethod +from collections.abc import ( + Mapping, + MutableMapping, +) from json import dumps from typing import ( Any, cast, - Dict, - List, - Mapping, - MutableMapping, Optional, - Set, - Tuple, TYPE_CHECKING, Union, ) @@ -82,8 +80,8 @@ log = logging.getLogger(__name__) -OutputDatasetsT = Dict[str, "DatasetInstance"] -ToolActionExecuteResult = Union[Tuple[Job, OutputDatasetsT, Optional[History]], Tuple[Job, OutputDatasetsT]] +OutputDatasetsT = dict[str, "DatasetInstance"] +ToolActionExecuteResult = Union[tuple[Job, OutputDatasetsT, Optional[History]], tuple[Job, OutputDatasetsT]] class ToolAction: @@ -152,7 +150,7 @@ def _collect_input_datasets( if current_user_roles is None: current_user_roles = trans.get_current_user_roles() input_datasets = LegacyUnprefixedDict() - all_permissions: Dict[str, Set[str]] = {} + all_permissions: dict[str, set[str]] = {} def record_permission(action, role_id): if action not in all_permissions: @@ -307,7 +305,7 @@ def process_dataset(data, formats=None): if not datatype.matches_any(input.formats): conversion_required = True break - processed_dataset_dict: Dict[DatasetInstance, DatasetInstance] = {} + processed_dataset_dict: dict[DatasetInstance, DatasetInstance] = {} for i, v in enumerate(collection.dataset_instances): processed_dataset = None if conversion_required: @@ -513,7 +511,7 @@ def execute( # wrapped params are used by change_format action and by output.label; only perform this wrapping once, as needed wrapped_params = self._wrapped_params(trans, tool, incoming, inp_data) - out_data: Dict[str, DatasetInstance] = {} + out_data: dict[str, DatasetInstance] = {} input_collections = LegacyUnprefixedDict({k: v[0][0] for k, v in inp_dataset_collections.items()}) input_collections._legacy_mapping = inp_dataset_collections._legacy_mapping output_collections = OutputCollections( @@ -617,14 +615,14 @@ def handle_output(name, output, hidden=None): out_data[name] = data if output.actions: # Apply pre-job tool-output-dataset actions; e.g. setting metadata, changing format - output_action_params: Dict[str, Any] = dict(out_data) + output_action_params: dict[str, Any] = dict(out_data) output_action_params.update(wrapped_params.params) output_action_params["__python_template_version__"] = tool.python_template_version output.actions.apply_action(data, output_action_params) # Flush all datasets at once. 
return data - child_dataset_names: Set[str] = set() + child_dataset_names: set[str] = set() for name, output in tool.outputs.items(): if not filter_output(tool, output, incoming): @@ -634,7 +632,7 @@ def handle_output(name, output, hidden=None): # Output collection is mapped over and has already been copied from original job continue collections_manager = app.dataset_collection_manager - element_identifiers: List[Dict[str, Union[str, List[Dict[str, Union[str, List[Any]]]]]]] = [] + element_identifiers: list[dict[str, Union[str, list[dict[str, Union[str, list[Any]]]]]]] = [] # mypy doesn't yet support recursive type definitions known_outputs = output.known_outputs(input_collections, collections_manager.type_registry) # Just to echo TODO elsewhere - this should be restructured to allow @@ -666,10 +664,10 @@ def handle_output(name, output, hidden=None): else: index = name_to_index[parent_id] current_element_identifiers = cast( - List[ - Dict[ + list[ + dict[ str, - Union[str, List[Dict[str, Union[str, List[Any]]]]], + Union[str, list[dict[str, Union[str, list[Any]]]]], ] ], current_element_identifiers[index]["element_identifiers"], @@ -915,7 +913,7 @@ def _get_on_text(self, inp_data): def _new_job_for_session( self, trans, tool: "Tool", history: Optional[History] - ) -> Tuple[Job, Optional[model.GalaxySession]]: + ) -> tuple[Job, Optional[model.GalaxySession]]: job = Job() job.galaxy_version = trans.app.config.version_major galaxy_session = None @@ -944,7 +942,7 @@ def _record_inputs(self, trans, tool, job, incoming, inp_data, inp_dataset_colle # FIXME: Don't need all of incoming here, just the defined parameters # from the tool. We need to deal with tools that pass all post # parameters to the command as a special case. - reductions: Dict[str, List[str]] = {} + reductions: dict[str, list[str]] = {} for name, dataset_collection_info_pairs in inp_dataset_collections.items(): for dataset_collection, reduced in dataset_collection_info_pairs: if reduced: diff --git a/lib/galaxy/tools/actions/metadata.py b/lib/galaxy/tools/actions/metadata.py index 00b95e91b851..7faaa9e46fb3 100644 --- a/lib/galaxy/tools/actions/metadata.py +++ b/lib/galaxy/tools/actions/metadata.py @@ -3,7 +3,6 @@ from json import dumps from typing import ( Any, - Dict, Optional, ) @@ -80,10 +79,10 @@ def execute_via_trans( self, tool, trans, - incoming: Optional[Dict[str, Any]], + incoming: Optional[dict[str, Any]], overwrite: bool = True, history: Optional[History] = None, - job_params: Optional[Dict[str, Any]] = None, + job_params: Optional[dict[str, Any]] = None, ): trans.check_user_activation() session = trans.get_galaxy_session() @@ -109,10 +108,10 @@ def execute_via_app( session_id: Optional[int], history_id: Optional[int], user: Optional[User] = None, - incoming: Optional[Dict[str, Any]] = None, + incoming: Optional[dict[str, Any]] = None, overwrite: bool = True, history: Optional[History] = None, - job_params: Optional[Dict[str, Any]] = None, + job_params: Optional[dict[str, Any]] = None, ): """ Execute using application. 
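[Editorial aside, not part of the patch] Alongside the annotation changes, the patch also contains assignment-expression rewrites of the kind the auto-walrus tool automates, e.g. in _handle_validate_input_hook above: an assignment immediately followed by a truthiness test collapses into a single PEP 572 expression. A minimal sketch; pending_count is a hypothetical stand-in, not a Galaxy API:

    def pending_count() -> int:
        return 0  # stand-in value

    # Before:
    #     count = pending_count()
    #     if count:
    #         print(f"Successfully dispatched {count} notifications.")
    # After:
    if count := pending_count():
        print(f"Successfully dispatched {count} notifications.")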
diff --git a/lib/galaxy/tools/actions/upload_common.py b/lib/galaxy/tools/actions/upload_common.py index 79d90eddae4c..388ca7d47a19 100644 --- a/lib/galaxy/tools/actions/upload_common.py +++ b/lib/galaxy/tools/actions/upload_common.py @@ -8,8 +8,6 @@ dumps, ) from typing import ( - Dict, - List, Optional, ) @@ -82,10 +80,10 @@ def persist_uploads(params, trans): @dataclass class LibraryParams: - roles: List[Role] - tags: Optional[List[str]] + roles: list[Role] + tags: Optional[list[str]] template: Optional[FormDefinition] - template_field_contents: Dict[str, str] + template_field_contents: dict[str, str] folder: LibraryFolder message: str replace_dataset: Optional[LibraryDataset] @@ -114,7 +112,7 @@ def handle_library_params( if params.get(field_name, False): field_value = util.restore_text(params.get(field_name, "")) template_field_contents[field_name] = field_value - roles: List[Role] = [] + roles: list[Role] = [] for role_id in util.listify(params.get("roles", [])): role = session.get(Role, role_id) roles.append(role) diff --git a/lib/galaxy/tools/cache.py b/lib/galaxy/tools/cache.py index ca7f30456ce0..deee5bc0b396 100644 --- a/lib/galaxy/tools/cache.py +++ b/lib/galaxy/tools/cache.py @@ -7,7 +7,6 @@ import zlib from threading import Lock from typing import ( - Dict, Optional, ) @@ -138,7 +137,7 @@ class ToolCache: def __init__(self): self._lock = Lock() - self._hash_by_tool_paths: Dict[str, ToolHash] = {} + self._hash_by_tool_paths: dict[str, ToolHash] = {} self._tools_by_path = {} self._tool_paths_by_id = {} self._macro_paths_by_id = {} diff --git a/lib/galaxy/tools/data/__init__.py b/lib/galaxy/tools/data/__init__.py index 4b49e586b69e..871925628c0b 100644 --- a/lib/galaxy/tools/data/__init__.py +++ b/lib/galaxy/tools/data/__init__.py @@ -7,9 +7,6 @@ import logging from typing import ( Any, - Dict, - List, - Type, ) import refgenconf @@ -27,7 +24,7 @@ log = logging.getLogger(__name__) -def table_from_dict(d: Dict[str, Any]) -> ToolDataTable: +def table_from_dict(d: dict[str, Any]) -> ToolDataTable: data_table_class = globals()[d["model_class"]] data_table = data_table_class.__new__(data_table_class) for attr, val in d.items(): @@ -37,7 +34,7 @@ def table_from_dict(d: Dict[str, Any]) -> ToolDataTable: return data_table -def from_dict(d: Dict[str, Any]) -> "ToolDataTableManager": +def from_dict(d: dict[str, Any]) -> "ToolDataTableManager": tdtm = ToolDataTableManager.__new__(ToolDataTableManager) tdtm.data_tables = {name: table_from_dict(data) for name, data in d.items()} return tdtm @@ -82,7 +79,7 @@ def __init__( other_config_dict=other_config_dict, ) self.config_element = config_element - self.data: List[List[str]] = [] + self.data: list[list[str]] = [] self.configure_and_load(config_element, tool_data_path, from_shed_config) def configure_and_load(self, config_element, tool_data_path, from_shed_config=False, url_timeout=10): @@ -182,7 +179,7 @@ def _remove_entry(self, values): # Registry of tool data types by type_key -tool_data_table_types_list: List[Type[ToolDataTable]] = tool_util_tool_data_table_types_list + [RefgenieToolDataTable] +tool_data_table_types_list: list[type[ToolDataTable]] = tool_util_tool_data_table_types_list + [RefgenieToolDataTable] tool_data_table_types = {cls.type_key: cls for cls in tool_data_table_types_list} diff --git a/lib/galaxy/tools/data_fetch.py b/lib/galaxy/tools/data_fetch.py index 896a28405abb..511c40d00fc8 100644 --- a/lib/galaxy/tools/data_fetch.py +++ b/lib/galaxy/tools/data_fetch.py @@ -8,10 +8,7 @@ from io import StringIO from typing import 
( Any, - Dict, - List, Optional, - Tuple, ) import bdbag.bdbag_api @@ -55,7 +52,7 @@ def do_fetch( request_path: str, working_directory: str, registry: Registry, - file_sources_dict: Optional[Dict] = None, + file_sources_dict: Optional[dict] = None, ): assert os.path.exists(request_path) with open(request_path) as f: @@ -87,7 +84,7 @@ def _request_to_galaxy_json(upload_config: "UploadConfig", request): return {"__unnamed_outputs": fetched_targets} -def _fetch_target(upload_config: "UploadConfig", target: Dict[str, Any]): +def _fetch_target(upload_config: "UploadConfig", target: dict[str, Any]): destination = target.get("destination", None) assert destination, "No destination defined." @@ -194,7 +191,7 @@ def _resolve_item(item): ) extra_files_path = f"{primary_file}_extra" os.mkdir(extra_files_path) - rval: Dict[str, Any] = { + rval: dict[str, Any] = { "name": name, "filename": primary_file, "ext": requested_ext, @@ -445,7 +442,7 @@ def _bagit_to_items(directory): return items -def _decompress_target(upload_config: "UploadConfig", target: Dict[str, Any]): +def _decompress_target(upload_config: "UploadConfig", target: dict[str, Any]): elements_from_name, elements_from_path, _ = _has_src_to_path(upload_config, target, is_dataset=False) # by default Galaxy will check for a directory with a single file and interpret that # as the new root for expansion, this is a good user experience for uploading single @@ -471,8 +468,8 @@ def elements_tree_map(f, items): def _directory_to_items(directory): - items: List[Dict[str, Any]] = [] - dir_elements: Dict[str, Any] = {} + items: list[dict[str, Any]] = [] + dir_elements: dict[str, Any] = {} for root, dirs, files in os.walk(directory): if root in dir_elements: target = dir_elements[root] @@ -506,11 +503,11 @@ def _has_src_to_name(item) -> Optional[str]: def _has_src_to_path( upload_config: "UploadConfig", - item: Dict[str, Any], + item: dict[str, Any], is_dataset: bool = False, link_data_only: bool = False, link_data_only_explicitly_set: bool = False, -) -> Tuple[str, str, bool]: +) -> tuple[str, str, bool]: assert "src" in item, item src = item.get("src") name = item.get("name") @@ -591,11 +588,11 @@ def get_file_sources(working_directory, file_sources_as_dict=None): class UploadConfig: def __init__( self, - request: Dict[str, Any], + request: dict[str, Any], registry: Registry, working_directory: str, allow_failed_collections: bool, - file_sources_dict: Optional[Dict] = None, + file_sources_dict: Optional[dict] = None, ): self.registry = registry self.working_directory = working_directory @@ -657,7 +654,7 @@ def ensure_in_working_directory(self, path: str, purge_source, in_place) -> str: return new_path -def _link_data_only(has_config_dict) -> Tuple[bool, bool]: +def _link_data_only(has_config_dict) -> tuple[bool, bool]: if "link_data_only" in has_config_dict: link_data_only_raw = has_config_dict["link_data_only"] if not isinstance(link_data_only_raw, bool): diff --git a/lib/galaxy/tools/data_manager/manager.py b/lib/galaxy/tools/data_manager/manager.py index 8da9f8e97f9e..b48b84a1ce74 100644 --- a/lib/galaxy/tools/data_manager/manager.py +++ b/lib/galaxy/tools/data_manager/manager.py @@ -2,8 +2,6 @@ import logging import os from typing import ( - Dict, - List, Optional, Union, ) @@ -27,8 +25,8 @@ class DataManagers(DataManagersInterface): - data_managers: Dict[str, "DataManager"] - managed_data_tables: Dict[str, "DataManager"] + data_managers: dict[str, "DataManager"] + managed_data_tables: dict[str, "DataManager"] __reload_count: int def 
__init__(self, app: StructuredApp, xml_filename=None, reload_count: Optional[int] = None): @@ -110,7 +108,7 @@ def add_manager(self, data_manager): def get_manager(self, *args, **kwds): return self.data_managers.get(*args, **kwds) - def remove_manager(self, manager_ids: Union[str, List[str]]) -> None: + def remove_manager(self, manager_ids: Union[str, list[str]]) -> None: if not isinstance(manager_ids, list): manager_ids = [manager_ids] for manager_id in manager_ids: @@ -225,7 +223,7 @@ def _load_tool( self.tool = tool return tool - def process_result(self, out_data: Dict[str, OutputDataset]) -> None: + def process_result(self, out_data: dict[str, OutputDataset]) -> None: tool_data_tables = self.data_managers.app.tool_data_tables options = BundleProcessingOptions( what=f"data manager '{self.id}'", @@ -243,8 +241,8 @@ def process_result(self, out_data: Dict[str, OutputDataset]) -> None: def write_bundle( self, - out_data: Dict[str, OutputDataset], - ) -> Dict[str, OutputDataset]: + out_data: dict[str, OutputDataset], + ) -> dict[str, OutputDataset]: tool_data_tables = self.data_managers.app.tool_data_tables return tool_data_tables.write_bundle( out_data, diff --git a/lib/galaxy/tools/error_reports/plugins/base_git.py b/lib/galaxy/tools/error_reports/plugins/base_git.py index fa251e557244..d8ba8d0bb0c4 100644 --- a/lib/galaxy/tools/error_reports/plugins/base_git.py +++ b/lib/galaxy/tools/error_reports/plugins/base_git.py @@ -5,7 +5,6 @@ ABCMeta, abstractmethod, ) -from typing import Dict from galaxy.tools.errors import EmailErrorReporter from galaxy.util import ( @@ -21,12 +20,12 @@ class BaseGitPlugin(ErrorPlugin, metaclass=ABCMeta): """Base definition to send error reports to a Git repository provider""" - issue_cache: Dict[str, Dict] = {} - ts_urls: Dict[str, str] = {} - ts_repo_cache: Dict[str, Dict] = {} - git_project_cache: Dict[str, Dict] = {} - label_cache: Dict[str, Dict] = {} - git_username_id_cache: Dict[str, str] = {} + issue_cache: dict[str, dict] = {} + ts_urls: dict[str, str] = {} + ts_repo_cache: dict[str, dict] = {} + git_project_cache: dict[str, dict] = {} + label_cache: dict[str, dict] = {} + git_username_id_cache: dict[str, str] = {} # Git variables git_default_repo_owner = False diff --git a/lib/galaxy/tools/error_reports/plugins/sentry.py b/lib/galaxy/tools/error_reports/plugins/sentry.py index 127621e8803a..4a72b7708382 100644 --- a/lib/galaxy/tools/error_reports/plugins/sentry.py +++ b/lib/galaxy/tools/error_reports/plugins/sentry.py @@ -1,7 +1,6 @@ """The module describes the ``sentry`` error plugin.""" import logging -from typing import Dict try: import sentry_sdk @@ -38,7 +37,7 @@ def submit_report(self, dataset, job, tool, **kwargs): ) # strip the tool's version from its long id # Add contexts to the report. 
- contexts: Dict[str, dict] = {} + contexts: dict[str, dict] = {} # - "job" context contexts["job"] = { diff --git a/lib/galaxy/tools/error_reports/plugins/slack.py b/lib/galaxy/tools/error_reports/plugins/slack.py index 3c4f273ed034..169f9aeb4272 100644 --- a/lib/galaxy/tools/error_reports/plugins/slack.py +++ b/lib/galaxy/tools/error_reports/plugins/slack.py @@ -4,7 +4,6 @@ import uuid from typing import ( Any, - Dict, ) from galaxy.util import ( @@ -43,7 +42,7 @@ def submit_report(self, dataset, job, tool, **kwargs): error_report_id = str(uuid.uuid4())[0:13] title = self._generate_error_title(job) - blocks: Dict[str, Any] = { + blocks: dict[str, Any] = { "blocks": [ { "type": "section", diff --git a/lib/galaxy/tools/evaluation.py b/lib/galaxy/tools/evaluation.py index 12bcfe43d1ac..1f1a7d1cfb5f 100644 --- a/lib/galaxy/tools/evaluation.py +++ b/lib/galaxy/tools/evaluation.py @@ -9,8 +9,6 @@ from typing import ( Any, Callable, - Dict, - List, Optional, TYPE_CHECKING, ) @@ -135,13 +133,13 @@ def __init__(self, app: MinimalToolApp, tool: "Tool", job, local_working_directo self.job = job self.tool = tool self.local_working_directory = local_working_directory - self.file_sources_dict: Dict[str, Any] = {} - self.param_dict: Dict[str, Any] = {} - self.extra_filenames: List[str] = [] - self.environment_variables: List[Dict[str, str]] = [] + self.file_sources_dict: dict[str, Any] = {} + self.param_dict: dict[str, Any] = {} + self.extra_filenames: list[str] = [] + self.environment_variables: list[dict[str, str]] = [] self.version_command_line: Optional[str] = None self.command_line: Optional[str] = None - self.interactivetools: List[Dict[str, Any]] = [] + self.interactivetools: list[dict[str, Any]] = [] self.consumes_names = False self.use_cached_job = False @@ -266,12 +264,12 @@ def input(): return param_dict.clean_copy() def _materialize_objects( - self, deferred_objects: Dict[str, DeferrableObjectsT], job_working_directory: str - ) -> Dict[str, DeferrableObjectsT]: + self, deferred_objects: dict[str, DeferrableObjectsT], job_working_directory: str + ) -> dict[str, DeferrableObjectsT]: if not self.materialize_datasets: return {} - undeferred_objects: Dict[str, DeferrableObjectsT] = {} + undeferred_objects: dict[str, DeferrableObjectsT] = {} transient_directory = os.path.join(job_working_directory, "inputs") safe_makedirs(transient_directory) dataset_materializer = materializer_factory( @@ -296,8 +294,8 @@ def _materialize_objects( def _eval_format_source( self, job: model.Job, - inp_data: Dict[str, Optional[model.DatasetInstance]], - out_data: Dict[str, model.DatasetInstance], + inp_data: dict[str, Optional[model.DatasetInstance]], + out_data: dict[str, model.DatasetInstance], ): for output_name, output in out_data.items(): if ( @@ -313,9 +311,9 @@ def _eval_format_source( def _replaced_deferred_objects( self, - inp_data: Dict[str, Optional[model.DatasetInstance]], + inp_data: dict[str, Optional[model.DatasetInstance]], incoming: dict, - materalized_objects: Dict[str, DeferrableObjectsT], + materalized_objects: dict[str, DeferrableObjectsT], ): for key, value in materalized_objects.items(): if isinstance(value, model.DatasetInstance): @@ -340,14 +338,14 @@ def validate_inputs(input, value, context, **kwargs): def _deferred_objects( self, - input_datasets: Dict[str, Optional[model.DatasetInstance]], + input_datasets: dict[str, Optional[model.DatasetInstance]], incoming: dict, - ) -> Dict[str, DeferrableObjectsT]: + ) -> dict[str, DeferrableObjectsT]: """Collect deferred objects required for 
execution. Walk input datasets and collections and find inputs that need to be materialized. """ - deferred_objects: Dict[str, DeferrableObjectsT] = {} + deferred_objects: dict[str, DeferrableObjectsT] = {} for key, value in input_datasets.items(): if value is not None and value.state == model.Dataset.states.DEFERRED: if self._should_materialize_deferred_input(key, value): @@ -944,7 +942,7 @@ def build_param_dict(self, incoming, input_datasets, output_datasets, output_col job_working_directory = compute_environment.working_directory() from galaxy.workflow.modules import to_cwl - hda_references: List[model.HistoryDatasetAssociation] = [] + hda_references: list[model.HistoryDatasetAssociation] = [] cwl_style_inputs = to_cwl(incoming, hda_references=hda_references, compute_environment=compute_environment) return {"inputs": cwl_style_inputs, "outdir": job_working_directory} diff --git a/lib/galaxy/tools/execute.py b/lib/galaxy/tools/execute.py index 27033a804199..54fbf1956eff 100644 --- a/lib/galaxy/tools/execute.py +++ b/lib/galaxy/tools/execute.py @@ -11,11 +11,8 @@ from typing import ( Any, Callable, - Dict, - List, NamedTuple, Optional, - Tuple, Union, ) @@ -51,10 +48,10 @@ BATCH_EXECUTION_MESSAGE = "Created ${job_count} job(s) for tool ${tool_id} request" -CompletedJobsT = Dict[int, Optional[model.Job]] +CompletedJobsT = dict[int, Optional[model.Job]] JobCallbackT: TypeAlias = Callable -WorkflowResourceParametersT = Dict[str, Any] -DatasetCollectionElementsSliceT = Dict[str, model.DatasetCollectionElement] +WorkflowResourceParametersT = dict[str, Any] +DatasetCollectionElementsSliceT = dict[str, model.DatasetCollectionElement] DEFAULT_USE_CACHED_JOB = False DEFAULT_PREFERRED_OBJECT_STORE_ID: Optional[str] = None DEFAULT_RERUN_REMAP_JOB_ID: Optional[int] = None @@ -70,7 +67,7 @@ def __init__(self, execution_tracker: "ExecutionTracker"): class MappingParameters(NamedTuple): param_template: ToolRequestT - param_combinations: List[ToolStateJobInstancePopulatedT] + param_combinations: list[ToolStateJobInstancePopulatedT] def execute( @@ -183,7 +180,7 @@ def execute_single_job(execution_slice: "ExecutionSlice", completed_job: Optiona jobs_executed = 0 has_remaining_jobs = False execution_slice = None - job_datasets: Dict[str, List[model.DatasetInstance]] = {} # job: list of dataset instances created by job + job_datasets: dict[str, list[model.DatasetInstance]] = {} # job: list of dataset instances created by job for i, execution_slice in enumerate(execution_tracker.new_execution_slices()): if max_num_jobs is not None and jobs_executed >= max_num_jobs: @@ -264,11 +261,11 @@ def __init__( class ExecutionTracker: - execution_errors: List[ExecutionErrorsT] - successful_jobs: List[model.Job] - output_datasets: List[Tuple[str, model.HistoryDatasetAssociation]] - output_collections: List[Tuple[str, model.HistoryDatasetCollectionAssociation]] - implicit_collections: Dict[str, model.HistoryDatasetCollectionAssociation] + execution_errors: list[ExecutionErrorsT] + successful_jobs: list[model.Job] + output_datasets: list[tuple[str, model.HistoryDatasetAssociation]] + output_collections: list[tuple[str, model.HistoryDatasetCollectionAssociation]] + implicit_collections: dict[str, model.HistoryDatasetCollectionAssociation] def __init__( self, @@ -626,7 +623,7 @@ def __init__( # New to track these things for tool output API response in the tool case, # in the workflow case we just write stuff to the database and forget about # it. 
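The execute.py hunks above rewrite module-level type aliases the same way, and an annotated defaultdict still satisfies a plain dict[...] annotation, which the outputs_by_output_name hunk below relies on. A hedged, self-contained sketch (Job stands in for galaxy.model.Job):

import collections
from typing import Optional


class Job:
    pass


CompletedJobsT = dict[int, Optional[Job]]

completed_jobs: CompletedJobsT = {7: None, 8: Job()}
outputs_by_output_name: dict[str, list[int]] = collections.defaultdict(list)
outputs_by_output_name["out_file1"].append(1)
assert outputs_by_output_name["out_file1"] == [1]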
- self.outputs_by_output_name: Dict[str, List[Union[model.DatasetInstance, model.DatasetCollection]]] = ( + self.outputs_by_output_name: dict[str, list[Union[model.DatasetInstance, model.DatasetCollection]]] = ( collections.defaultdict(list) ) diff --git a/lib/galaxy/tools/execution_helpers.py b/lib/galaxy/tools/execution_helpers.py index 76413a5f6370..1f18141f2fa2 100644 --- a/lib/galaxy/tools/execution_helpers.py +++ b/lib/galaxy/tools/execution_helpers.py @@ -5,7 +5,7 @@ """ import logging -from typing import Collection +from collections.abc import Collection log = logging.getLogger(__name__) diff --git a/lib/galaxy/tools/expressions/evaluation.py b/lib/galaxy/tools/expressions/evaluation.py index f4adc9aac18b..f6f4786dd3b0 100644 --- a/lib/galaxy/tools/expressions/evaluation.py +++ b/lib/galaxy/tools/expressions/evaluation.py @@ -2,7 +2,6 @@ import os import subprocess from typing import ( - List, Optional, ) @@ -22,12 +21,12 @@ def do_eval( expression: str, jobinput: CWLObjectType, - javascript_requirements: Optional[List[JavascriptRequirement]] = None, + javascript_requirements: Optional[list[JavascriptRequirement]] = None, outdir: Optional[str] = None, tmpdir: Optional[str] = None, context: Optional["CWLOutputType"] = None, ): - requirements: List[CWLObjectType] = [] + requirements: list[CWLObjectType] = [] if javascript_requirements: for req in javascript_requirements: if expression_lib := req.expression_lib: diff --git a/lib/galaxy/tools/fetch/workbooks.py b/lib/galaxy/tools/fetch/workbooks.py index 66f2672bfa56..6186bc2ac46b 100644 --- a/lib/galaxy/tools/fetch/workbooks.py +++ b/lib/galaxy/tools/fetch/workbooks.py @@ -1,9 +1,6 @@ from dataclasses import dataclass from typing import ( - Dict, - List, Optional, - Tuple, Union, ) @@ -64,7 +61,7 @@ INSTRUCTION_ONCE_COMPLETE_COLLECTIONS = ( "Once data entry is complete, drop this file back into Galaxy to finish creating collections for your inputs." 
) -INSTRUCTIONS_BY_TYPE: Dict[FetchWorkbookType, List[str]] = { +INSTRUCTIONS_BY_TYPE: dict[FetchWorkbookType, list[str]] = { "datasets": [ INSTRUCTION_USE_THIS, INSTRUCTION_EXTRA_COLUMNS, @@ -149,8 +146,8 @@ def generate(request: GenerateFetchWorkbookRequest) -> Workbook: return workbook -ParsedRow = Dict[str, Optional[str]] -ParsedRows = List[ParsedRow] +ParsedRow = dict[str, Optional[str]] +ParsedRows = list[ParsedRow] class ParseLogEntry(BaseModel): @@ -166,15 +163,15 @@ class SplitUpPairedDataLogEntry(ParseLogEntry): class InferredCollectionTypeLogEntry(ParseLogEntry): message: str - from_columns: List[ParsedColumn] + from_columns: list[ParsedColumn] -ParseLog = List[ParseLogEntry] +ParseLog = list[ParseLogEntry] class BaseParsedFetchWorkbook(BaseModel): rows: ParsedRows - columns: List[ParsedColumn] + columns: list[ParsedColumn] workbook_type: FetchWorkbookType parse_log: ParseLog @@ -216,7 +213,7 @@ def parse(payload: ParseFetchWorkbook) -> ParsedFetchWorkbook: return ParsedFetchWorkbookForDatasets(rows=rows, columns=columns, parse_log=parse_log) -def _validate_parsed_column_headers(column_headers: List[HeaderColumn]) -> None: +def _validate_parsed_column_headers(column_headers: list[HeaderColumn]) -> None: uri_like_columns = _uri_like_columns(column_headers) if len(uri_like_columns) > 2: raise RequestParameterInvalidException( @@ -226,7 +223,7 @@ def _validate_parsed_column_headers(column_headers: List[HeaderColumn]) -> None: raise RequestParameterInvalidException(EXCEPTION_NO_URIS_FOUND) -def _request_to_columns(request: GenerateFetchWorkbookRequest) -> List[HeaderColumn]: +def _request_to_columns(request: GenerateFetchWorkbookRequest) -> list[HeaderColumn]: if request.type == "datasets": return [ HeaderColumn("url", "URI", 0), @@ -298,14 +295,14 @@ def _load_row_data(workbook: Workbook, payload: ParseFetchWorkbook) -> ParsedRow def _split_paired_data_if_needed( - rows: ParsedRows, column_headers: List[HeaderColumn] -) -> Tuple[ParsedRows, List[HeaderColumn], Optional[SplitUpPairedDataLogEntry]]: + rows: ParsedRows, column_headers: list[HeaderColumn] +) -> tuple[ParsedRows, list[HeaderColumn], Optional[SplitUpPairedDataLogEntry]]: split_rows: ParsedRows = [] uri_like_columns = _uri_like_columns(column_headers) if len(_uri_like_columns(column_headers)) != 2: return rows, column_headers, None - hash_columns_to_split: List[Tuple[HeaderColumn, HeaderColumn]] = [] + hash_columns_to_split: list[tuple[HeaderColumn, HeaderColumn]] = [] for column_type in ["hash_sha1", "hash_md5", "hash_sha256", "hash_sha512"]: hash_columns = [c for c in column_headers if c.type == column_type] if len(hash_columns) == 2: @@ -374,7 +371,7 @@ def _split_paired_data_if_needed( def _fill_in_identifier_column_if_needed( - rows: ParsedRows, columns: List[HeaderColumn], config: Optional[FillIdentifiers] + rows: ParsedRows, columns: list[HeaderColumn], config: Optional[FillIdentifiers] ) -> ParsedRows: list_identifiers_columns = [c for c in columns if c.type == "list_identifiers"] uri_columns = _uri_like_columns(columns) @@ -387,7 +384,7 @@ def _fill_in_identifier_column_if_needed( uri_column = uri_columns[0] inner_list_identifier_column = list_identifiers_columns[-1] - uris_to_identifiers: List[Tuple[str, Optional[str]]] = [] + uris_to_identifiers: list[tuple[str, Optional[str]]] = [] for row in rows: uri = row.get(uri_column.name) if not uri: @@ -400,16 +397,16 @@ def _fill_in_identifier_column_if_needed( return rows -def _read_column_headers(worksheet: Worksheet) -> List[HeaderColumn]: +def 
_read_column_headers(worksheet: Worksheet) -> list[HeaderColumn]: column_titles = read_column_header_titles(worksheet) return column_titles_to_headers(column_titles) -def _uri_like_columns(column_headers: List[HeaderColumn]) -> List[HeaderColumn]: +def _uri_like_columns(column_headers: list[HeaderColumn]) -> list[HeaderColumn]: return [c for c in column_headers if c.type == "url" or c.type == "url_deferred"] -def _index_of_fist_uri_column(column_headers: List[HeaderColumn]) -> int: +def _index_of_fist_uri_column(column_headers: list[HeaderColumn]) -> int: for index, column_header in enumerate(column_headers): if column_header.type == "url" or column_header.type == "url_deferred": return index @@ -419,15 +416,15 @@ def _index_of_fist_uri_column(column_headers: List[HeaderColumn]) -> int: def _infer_fetch_workbook_collection_type( - column_headers: List[HeaderColumn], -) -> Tuple[str, InferredCollectionTypeLogEntry]: + column_headers: list[HeaderColumn], +) -> tuple[str, InferredCollectionTypeLogEntry]: paired_identifier_columns = [c for c in column_headers if c.type == "paired_identifier"] paired_or_unpaired_identifier_columns = [c for c in column_headers if c.type == "paired_or_unpaired_identifier"] any_paired = len(paired_identifier_columns) > 0 uri_columns = _uri_like_columns(column_headers) num_uris = len(uri_columns) - inference_on_columns: List[ParsedColumn] = [] + inference_on_columns: list[ParsedColumn] = [] list_type: str = "" for column_header in column_headers: @@ -463,5 +460,5 @@ def _infer_fetch_workbook_collection_type( ) -def _is_fetch_workbook_for_collections(column_headers: List[HeaderColumn]) -> bool: +def _is_fetch_workbook_for_collections(column_headers: list[HeaderColumn]) -> bool: return _infer_fetch_workbook_collection_type(column_headers)[0] != "" diff --git a/lib/galaxy/tools/parameters/__init__.py b/lib/galaxy/tools/parameters/__init__.py index 809a974b6c9f..ec53430aff15 100644 --- a/lib/galaxy/tools/parameters/__init__.py +++ b/lib/galaxy/tools/parameters/__init__.py @@ -5,7 +5,6 @@ from json import dumps from typing import ( cast, - Dict, Optional, Union, ) @@ -51,7 +50,7 @@ # Some tools use the code tag and access the code base, expecting certain tool parameters to be available here. 
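Multi-value returns in workbooks.py above get the same treatment, with the builtin tuple spelled directly in the signature. A small sketch under assumed, simplified types:

from typing import Optional

ParsedRow = dict[str, Optional[str]]
ParsedRows = list[ParsedRow]


def split_rows(rows: ParsedRows) -> tuple[ParsedRows, int, Optional[str]]:
    # return the rows unchanged, their count, and an optional log message
    return rows, len(rows), None


rows, count, message = split_rows([{"url": "https://example.org/a.fastq"}])
assert count == 1 and message is None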
__all__ = ("DataCollectionToolParameter", "DataToolParameter", "SelectToolParameter") -ToolInputsT = Dict[str, Union[Group, ToolParameter]] +ToolInputsT = dict[str, Union[Group, ToolParameter]] def visit_input_values( @@ -345,7 +344,7 @@ def params_to_strings( return rval -def params_from_strings(params: Dict[str, Union[Group, ToolParameter]], param_values, app, ignore_errors=False) -> Dict: +def params_from_strings(params: dict[str, Union[Group, ToolParameter]], param_values, app, ignore_errors=False) -> dict: """ Convert a dictionary of strings as produced by `params_to_strings` back into parameter values (decode the json representation and then diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index f4c31732fef7..841e4898b2af 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -10,15 +10,14 @@ import re import typing import urllib.parse -from collections.abc import MutableMapping +from collections.abc import ( + MutableMapping, + Sequence, +) from typing import ( Any, cast, - Dict, - List, Optional, - Sequence, - Type, TYPE_CHECKING, Union, ) @@ -798,7 +797,7 @@ def to_json(self, value, app, use_security): def to_python(self, value, app, validate=False): if not isinstance(value, list): value = [value] - lst: List[str] = [] + lst: list[str] = [] for val in value: if val in [None, ""]: lst = [] @@ -1022,14 +1021,14 @@ def get_legal_values(self, trans, other_values, value): """ determine the set of values of legal options """ - options = cast(List[ParameterOption], self.get_options(trans, other_values)) + options = cast(list[ParameterOption], self.get_options(trans, other_values)) return {option.dataset or option.value for option in options} def get_legal_names(self, trans, other_values): """ determine the set of values of legal options """ - options = cast(List[ParameterOption], self.get_options(trans, other_values)) + options = cast(list[ParameterOption], self.get_options(trans, other_values)) return {option.name: option.value for option in options} def from_json(self, value, trans, other_values=None): @@ -1153,7 +1152,7 @@ def to_python(self, value, app): def get_initial_value(self, trans, other_values): try: - options = cast(List[ParameterOption], self.get_options(trans, other_values)) + options = cast(list[ParameterOption], self.get_options(trans, other_values)) except ImplicitConversionRequired: return None if not options: @@ -1163,7 +1162,7 @@ def get_initial_value(self, trans, other_values): if not self.optional and not self.multiple and options: # Nothing selected, but not optional and not a multiple select, with some values, # so we have to default to something (the HTML form will anyway) - value2: Optional[Union[str, List[str]]] = options[0].value + value2: Optional[Union[str, list[str]]] = options[0].value else: value2 = None elif len(value) == 1 or not self.multiple: @@ -1203,7 +1202,7 @@ def to_dict(self, trans, other_values=None): d = super().to_dict(trans, other_values) # Get options, value. 
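The basic.py import hunk above also moves abstract container types to collections.abc, since their typing aliases are deprecated from Python 3.9 on, while concrete helpers such as Optional and cast remain in typing. A minimal illustration:

from collections.abc import MutableMapping, Sequence
from typing import Optional


def first_or_none(values: Sequence[str]) -> Optional[str]:
    return values[0] if values else None


def count_keys(mapping: MutableMapping[str, int]) -> int:
    return len(mapping)


assert first_or_none(["a", "b"]) == "a"
assert count_keys({"x": 1}) == 1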
- options = cast(List[ParameterOption], self.get_options(trans, other_values)) + options = cast(list[ParameterOption], self.get_options(trans, other_values)) d["options"] = serialize_options(trans.security, options) d["display"] = self.display d["multiple"] = self.multiple @@ -1690,7 +1689,7 @@ def _get_options_from_code(self, trans=None, other_values=None): except Exception: return [] - def get_options(self, trans=None, other_values=None) -> List[DrillDownOptionsDict]: + def get_options(self, trans=None, other_values=None) -> list[DrillDownOptionsDict]: other_values = other_values or {} if self.is_dynamic: if self.dynamic_options: @@ -1700,12 +1699,12 @@ def get_options(self, trans=None, other_values=None) -> List[DrillDownOptionsDic return self.options def get_legal_values(self, trans, other_values, value): - def recurse_options(legal_values, options: List[DrillDownOptionsDict]): + def recurse_options(legal_values, options: list[DrillDownOptionsDict]): for option in options: legal_values.append(option["value"]) recurse_options(legal_values, option["options"]) - legal_values: List[str] = [] + legal_values: list[str] = [] recurse_options(legal_values, self.get_options(trans=trans, other_values=other_values)) return legal_values @@ -1746,7 +1745,7 @@ def to_param_dict_string(self, value, other_values=None): other_values = other_values or {} def get_options_list(value): - def get_base_option(value, options: List[DrillDownOptionsDict]): + def get_base_option(value, options: list[DrillDownOptionsDict]): for option in options: if value == option["value"]: return option @@ -1762,10 +1761,9 @@ def recurse_option(option_list, option: DrillDownOptionsDict): for opt in option["options"]: recurse_option(option_list, opt) - rval: List[str] = [] + rval: list[str] = [] options = self.get_options(other_values=other_values) - base_option = get_base_option(value, options) - if base_option: + if base_option := get_base_option(value, options): recurse_option(rval, base_option) return rval or [value] @@ -1792,7 +1790,7 @@ def recurse_option(option_list, option: DrillDownOptionsDict): return rval def get_initial_value(self, trans, other_values): - def recurse_options(initial_values, options: List[DrillDownOptionsDict]): + def recurse_options(initial_values, options: list[DrillDownOptionsDict]): for option in options: if option["selected"]: initial_values.append(option["value"]) @@ -1802,14 +1800,14 @@ def recurse_options(initial_values, options: List[DrillDownOptionsDict]): options = self.get_options(trans=trans, other_values=other_values) if not options: return None - initial_values: List[str] = [] + initial_values: list[str] = [] recurse_options(initial_values, options) if len(initial_values) == 0: return None return initial_values def to_text(self, value): - def get_option_display(value, options: List[DrillDownOptionsDict]): + def get_option_display(value, options: list[DrillDownOptionsDict]): for option in options: if value == option["value"]: return option["name"] @@ -2168,7 +2166,7 @@ def from_json(self, value, trans, other_values=None): value = self.to_python(value, trans.app) if isinstance(value, str) and value.find(",") > 0: value = [int(value_part) for value_part in value.split(",")] - rval: List[ + rval: list[ Union[ DatasetCollectionElement, HistoryDatasetAssociation, @@ -2416,7 +2414,7 @@ def append_ldda(ldda): # add datasets hda_list = util.listify(other_values.get(self.name)) # Prefetch all at once, big list of visible, non-deleted datasets. 
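Besides pyupgrade, auto-walrus folds an assignment immediately followed by a truthiness test into one assignment expression (PEP 572), as in the get_base_option hunk above. The rewrite in isolation, with stub data:

def get_base_option(value, options):
    for option in options:
        if option["value"] == value:
            return option
    return None


options = [{"value": "hg38", "name": "Human Dec. 2013"}]

# before: base_option = get_base_option("hg38", options)
#         if base_option: ...
if base_option := get_base_option("hg38", options):
    assert base_option["name"] == "Human Dec. 2013"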
- matches_by_hid: Dict[int, List] = {} + matches_by_hid: dict[int, list] = {} for hda in history.active_visible_datasets_and_roles: match = dataset_matcher.hda_match(hda) if match: @@ -2509,7 +2507,7 @@ def __init__(self, tool: Optional["Tool"], input_source, trans=None): ) @property - def collection_types(self) -> Optional[List[str]]: + def collection_types(self) -> Optional[list[str]]: return self._collection_types def _history_query(self, trans): @@ -2788,7 +2786,7 @@ def to_text(self, value): # Code from CWL branch to massage in order to be shared across tools and workflows, # and for CWL artifacts as well as Galaxy ones. def raw_to_galaxy( - app: "MinimalApp", history: "History", as_dict_value: Dict[str, Any], commit: bool = True + app: "MinimalApp", history: "History", as_dict_value: dict[str, Any], commit: bool = True ) -> "HistoryItem": object_class = as_dict_value["class"] if object_class == "File": @@ -2874,7 +2872,7 @@ def write_elements_to_collection(has_elements, collection_builder): return hdca -parameter_types: Dict[str, Type[ToolParameter]] = dict( +parameter_types: dict[str, type[ToolParameter]] = dict( text=TextToolParameter, integer=IntegerToolParameter, float=FloatToolParameter, diff --git a/lib/galaxy/tools/parameters/cancelable_request.py b/lib/galaxy/tools/parameters/cancelable_request.py index 17964b805108..385662379640 100644 --- a/lib/galaxy/tools/parameters/cancelable_request.py +++ b/lib/galaxy/tools/parameters/cancelable_request.py @@ -2,7 +2,6 @@ import logging from typing import ( Any, - Dict, Optional, ) @@ -17,9 +16,9 @@ async def fetch_url( session: aiohttp.ClientSession, url: str, - params: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + headers: Optional[dict[str, Any]] = None, method: REQUEST_METHOD = "GET", ): async with session.request(method=method, url=url, params=params, data=data, headers=headers) as response: @@ -28,9 +27,9 @@ async def fetch_url( async def async_request_with_timeout( url: str, - params: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + headers: Optional[dict[str, Any]] = None, method: REQUEST_METHOD = "GET", timeout: float = 1.0, ): @@ -49,9 +48,9 @@ async def async_request_with_timeout( def request( url: str, - params: Optional[Dict[str, Any]] = None, - data: Optional[Dict[str, Any]] = None, - headers: Optional[Dict[str, Any]] = None, + params: Optional[dict[str, Any]] = None, + data: Optional[dict[str, Any]] = None, + headers: Optional[dict[str, Any]] = None, method: REQUEST_METHOD = "GET", timeout: float = 1.0, ): diff --git a/lib/galaxy/tools/parameters/dynamic_options.py b/lib/galaxy/tools/parameters/dynamic_options.py index f5694b85c46b..ba4e7d15e3f3 100644 --- a/lib/galaxy/tools/parameters/dynamic_options.py +++ b/lib/galaxy/tools/parameters/dynamic_options.py @@ -8,17 +8,14 @@ import logging import os import re +from collections.abc import Sequence from dataclasses import dataclass from io import StringIO from typing import ( Any, cast, - Dict, get_args, - List, Optional, - Sequence, - Set, ) from typing_extensions import Literal @@ -234,7 +231,7 @@ def compare_meta_value(file_value, dataset_value): # - for data sets: the meta data value # in both cases only meta data that is set (i.e. 
differs from the no_value) # is considered - meta_value: Set[Any] = set() + meta_value: set[Any] = set() for r in ref: if not r.metadata.element_is_set(self.key): continue @@ -246,7 +243,7 @@ def compare_meta_value(file_value, dataset_value): return copy.deepcopy(options) if self.column is not None: - rval: List[ParameterOption] = [] + rval: list[ParameterOption] = [] for fields in options: if compare_meta_value(fields[self.column], meta_value): rval.append(fields) @@ -639,7 +636,7 @@ def load_from_parameter(from_parameter, transform_lines=None): return self.parse_file_fields(obj) self.tool_param = tool_param - self.columns: Dict[str, int] = {} + self.columns: dict[str, int] = {} self.filters = [] self.file_fields = None self.largest_index = 0 @@ -846,7 +843,7 @@ def get_fields(self, trans, other_values): @staticmethod def to_parameter_options(options): - rval: List[ParameterOption] = [] + rval: list[ParameterOption] = [] for option in options: if isinstance(option, ParameterOption): rval.append(option) @@ -937,7 +934,7 @@ def get_field_by_name_for_value(self, field_name, value, trans, other_values): def get_options(self, trans, other_values) -> Sequence[ParameterOption]: - rval: List[ParameterOption] = [] + rval: list[ParameterOption] = [] def to_option(values): if len(values) == 2: @@ -1048,7 +1045,7 @@ def parse_from_url_options(elem: Element) -> Optional[FromUrlOptions]: return None -def template_or_none(template: Optional[str], context: Dict[str, Any]) -> Optional[str]: +def template_or_none(template: Optional[str], context: dict[str, Any]) -> Optional[str]: if template: return fill_template(template, context=context) return None diff --git a/lib/galaxy/tools/parameters/grouping.py b/lib/galaxy/tools/parameters/grouping.py index 3696b364b818..b69ae48bde4d 100644 --- a/lib/galaxy/tools/parameters/grouping.py +++ b/lib/galaxy/tools/parameters/grouping.py @@ -6,13 +6,11 @@ import logging import os import unicodedata +from collections.abc import Mapping from math import inf from typing import ( Any, Callable, - Dict, - List, - Mapping, Optional, TYPE_CHECKING, ) @@ -235,7 +233,7 @@ def value_from_basic(self, value, app, ignore_errors=False): def get_initial_value(self, trans, context): if self.inputs is None: raise Exception("Must set 'inputs' attribute to use.") - rval: Dict[str, Any] = {} + rval: dict[str, Any] = {} child_context = ExpressionContext(rval, context) for child_input in self.inputs.values(): rval[child_input.name] = child_input.get_initial_value(trans, child_context) @@ -258,9 +256,9 @@ class Dataset(Bunch): file_type: str dbkey: str datatype: data.Data - warnings: List[str] - metadata: Dict[str, str] - composite_files: Dict[str, Optional[str]] + warnings: list[str] + metadata: dict[str, str] + composite_files: dict[str, Optional[str]] uuid: Optional[str] tag_using_filenames: Optional[str] tags: Optional[str] @@ -639,7 +637,7 @@ def get_filenames(context): force_composite = asbool(context.get("force_composite", "False")) writable_files = d_type.writable_files writable_files_offset = 0 - groups_incoming: List = [None for _ in range(file_count)] + groups_incoming: list = [None for _ in range(file_count)] for i, group_incoming in enumerate(context.get(self.name, [])): i = int(group_incoming.get("__index__", i)) groups_incoming[i] = group_incoming @@ -745,7 +743,7 @@ def get_filenames(context): class Conditional(Group): type = "conditional" value_from: Callable[[ExpressionContext, "Conditional", "Tool"], Mapping[str, str]] - cases: List["ConditionalWhen"] + cases: 
list["ConditionalWhen"] def __init__(self, name: str): Group.__init__(self, name) @@ -772,7 +770,7 @@ def get_current_case(self, value): def value_to_basic(self, value, app, use_security=False): if self.test_param is None: raise Exception("Must set 'test_param' attribute to use.") - rval: Dict[str, Any] = {} + rval: dict[str, Any] = {} rval[self.test_param.name] = self.test_param.value_to_basic(value[self.test_param.name], app) current_case = rval["__current_case__"] = self.get_current_case(value[self.test_param.name]) for input in self.cases[current_case].inputs.values(): diff --git a/lib/galaxy/tools/parameters/meta.py b/lib/galaxy/tools/parameters/meta.py index e637677f0fba..8129353e1ef5 100644 --- a/lib/galaxy/tools/parameters/meta.py +++ b/lib/galaxy/tools/parameters/meta.py @@ -4,10 +4,7 @@ from collections import namedtuple from typing import ( Any, - Dict, - List, Optional, - Tuple, ) from galaxy import ( @@ -171,11 +168,11 @@ def is_batch(value): return WorkflowParameterExpansion(param_combinations, params_keys, input_combinations) -ExpandedT = Tuple[List[ToolStateJobInstanceT], Optional[matching.MatchingCollections]] +ExpandedT = tuple[list[ToolStateJobInstanceT], Optional[matching.MatchingCollections]] -def expand_flat_parameters_to_nested(incoming_copy: ToolRequestT) -> Dict[str, Any]: - nested_dict: Dict[str, Any] = {} +def expand_flat_parameters_to_nested(incoming_copy: ToolRequestT) -> dict[str, Any]: + nested_dict: dict[str, Any] = {} for incoming_key, incoming_value in incoming_copy.items(): if not incoming_key.startswith("__"): process_key(incoming_key, incoming_value=incoming_value, d=nested_dict) @@ -290,10 +287,10 @@ def merge_into(from_object, into_object): return reordered_incoming -def split_inputs_flat(inputs: Dict[str, Any], classifier): - single_inputs: Dict[str, Any] = {} - matched_multi_inputs: Dict[str, Any] = {} - multiplied_multi_inputs: Dict[str, Any] = {} +def split_inputs_flat(inputs: dict[str, Any], classifier): + single_inputs: dict[str, Any] = {} + matched_multi_inputs: dict[str, Any] = {} + multiplied_multi_inputs: dict[str, Any] = {} for input_key in inputs: input_type, expanded_val = classifier(input_key) @@ -308,9 +305,9 @@ def split_inputs_flat(inputs: Dict[str, Any], classifier): def split_inputs_nested(inputs, nested_dict, classifier): - single_inputs: Dict[str, Any] = {} - matched_multi_inputs: Dict[str, Any] = {} - multiplied_multi_inputs: Dict[str, Any] = {} + single_inputs: dict[str, Any] = {} + matched_multi_inputs: dict[str, Any] = {} + multiplied_multi_inputs: dict[str, Any] = {} unset_value = object() def visitor(input, value, prefix, prefixed_name, prefixed_label, error, **kwargs): diff --git a/lib/galaxy/tools/parameters/populate_model.py b/lib/galaxy/tools/parameters/populate_model.py index 41950bc3a03f..c0b238ba8a46 100644 --- a/lib/galaxy/tools/parameters/populate_model.py +++ b/lib/galaxy/tools/parameters/populate_model.py @@ -1,8 +1,6 @@ import logging from typing import ( Any, - Dict, - List, ) from galaxy.util.expressions import ExpressionContext @@ -11,7 +9,7 @@ log = logging.getLogger(__name__) -def populate_model(request_context, inputs, state_inputs, group_inputs: List[Dict[str, Any]], other_values=None): +def populate_model(request_context, inputs, state_inputs, group_inputs: list[dict[str, Any]], other_values=None): """ Populates the tool model consumed by the client form builder. 
""" @@ -23,7 +21,7 @@ def populate_model(request_context, inputs, state_inputs, group_inputs: List[Dic tool_dict = input.to_dict(request_context) group_size = len(group_state) tool_dict["cache"] = [None] * group_size - group_cache: List[List[Dict[str, Any]]] = tool_dict["cache"] + group_cache: list[list[dict[str, Any]]] = tool_dict["cache"] for i in range(group_size): group_cache[i] = [] populate_model(request_context, input.inputs, group_state[i], group_cache[i], other_values) diff --git a/lib/galaxy/tools/parameters/sanitize.py b/lib/galaxy/tools/parameters/sanitize.py index ccca3f3a8e23..b9f9eace8587 100644 --- a/lib/galaxy/tools/parameters/sanitize.py +++ b/lib/galaxy/tools/parameters/sanitize.py @@ -5,7 +5,6 @@ import logging import string from typing import ( - List, overload, ) @@ -161,7 +160,7 @@ def sanitize_text(self, text: str): def sanitize_param(self, value: str) -> str: ... @overload - def sanitize_param(self, value: List[str]) -> List[str]: ... + def sanitize_param(self, value: list[str]) -> list[str]: ... def sanitize_param(self, value): """Clean incoming parameters (strings or lists)""" diff --git a/lib/galaxy/tools/parameters/validation.py b/lib/galaxy/tools/parameters/validation.py index 39b4dc783d52..36a6d9cb4336 100644 --- a/lib/galaxy/tools/parameters/validation.py +++ b/lib/galaxy/tools/parameters/validation.py @@ -8,7 +8,6 @@ from typing import ( Any, cast, - List, Optional, Union, ) @@ -206,8 +205,8 @@ class MetadataValidator(Validator): def __init__( self, message: str, - check: Optional[List[str]] = None, - skip: Optional[List[str]] = None, + check: Optional[list[str]] = None, + skip: Optional[list[str]] = None, negate: bool = False, ): super().__init__(message, negate) @@ -347,7 +346,7 @@ def __init__( negate: bool = False, ): super().__init__(message, negate) - self.valid_values: List[Any] = [] + self.valid_values: list[Any] = [] self._data_table_content_version = None self._tool_data_table = tool_data_table if isinstance(metadata_column, str): @@ -512,11 +511,11 @@ def validate(self, value, trans=None): validator_types.update(deprecated_validator_types) -def parse_xml_validators(app, xml_el: util.Element) -> List[Validator]: +def parse_xml_validators(app, xml_el: util.Element) -> list[Validator]: return to_validators(app, parse_xml_validators_models(xml_el)) -def to_validators(app, validator_models: List[AnyValidatorModel]) -> List[Validator]: +def to_validators(app, validator_models: list[AnyValidatorModel]) -> list[Validator]: validators = [] for validator_model in validator_models: validators.append(_to_validator(app, validator_model)) diff --git a/lib/galaxy/tools/parameters/wrapped.py b/lib/galaxy/tools/parameters/wrapped.py index 92898b8a129e..0853c3c8f2dc 100644 --- a/lib/galaxy/tools/parameters/wrapped.py +++ b/lib/galaxy/tools/parameters/wrapped.py @@ -1,10 +1,8 @@ from collections import UserDict +from collections.abc import Sequence from typing import ( Any, - Dict, - List, Optional, - Sequence, TYPE_CHECKING, Union, ) @@ -48,7 +46,7 @@ class LegacyUnprefixedDict(UserDict[str, Any]): # This dict provides a fallback when dict lookup fails using those old rules def __init__(self, initialdata=None, **kwargs): - self._legacy_mapping: Dict[str, str] = {} + self._legacy_mapping: dict[str, str] = {} super().__init__(initialdata, **kwargs) def set_legacy_alias(self, new_key: str, old_key: str): @@ -183,7 +181,7 @@ def make_list_copy(from_list: list): return new_list -def process_key(incoming_key: str, incoming_value: Any, d: Dict[str, Any]): +def 
process_key(incoming_key: str, incoming_value: Any, d: dict[str, Any]): key_parts = incoming_key.split("|") if len(key_parts) == 1: # Regular parameter @@ -195,7 +193,7 @@ def process_key(incoming_key: str, incoming_value: Any, d: Dict[str, Any]): # Repeat input_name, index = split_flattened_repeat_key(key_parts[0]) d.setdefault(input_name, []) - newlist: List[Dict[Any, Any]] = [{} for _ in range(index + 1)] + newlist: list[dict[Any, Any]] = [{} for _ in range(index + 1)] d[input_name].extend(newlist[len(d[input_name]) :]) subdict = d[input_name][index] process_key("|".join(key_parts[1:]), incoming_value=incoming_value, d=subdict) @@ -213,7 +211,7 @@ def nested_key_to_path(key: str) -> Sequence[Union[str, int]]: E.g. "cond|repeat_0|paramA" -> ["cond", "repeat", 0, "paramA"]. Return value can be used with `boltons.iterutils.get_path`. """ - path: List[Union[str, int]] = [] + path: list[Union[str, int]] = [] key_parts = key.split("|") if len(key_parts) == 1: return key_parts @@ -227,8 +225,8 @@ def nested_key_to_path(key: str) -> Sequence[Union[str, int]]: return path -def flat_to_nested_state(incoming: Dict[str, Any]): - nested_state: Dict[str, Any] = {} +def flat_to_nested_state(incoming: dict[str, Any]): + nested_state: dict[str, Any] = {} for key, value in incoming.items(): process_key(key, value, nested_state) return nested_state diff --git a/lib/galaxy/tools/parameters/wrapped_json.py b/lib/galaxy/tools/parameters/wrapped_json.py index 30d77b940075..3e017de44aa0 100644 --- a/lib/galaxy/tools/parameters/wrapped_json.py +++ b/lib/galaxy/tools/parameters/wrapped_json.py @@ -1,10 +1,8 @@ import json import logging +from collections.abc import Sequence from typing import ( Any, - Dict, - List, - Sequence, TYPE_CHECKING, ) @@ -48,7 +46,7 @@ def data_collection_input_to_path(v): def data_collection_input_to_staging_path_and_source_path( v: "DatasetCollectionWrapper", invalid_chars: Sequence[str] = ("/",), include_collection_name: bool = False -) -> List[Dict[str, Any]]: +) -> list[dict[str, Any]]: staging_paths = v.get_all_staging_paths( invalid_chars=invalid_chars, include_collection_name=include_collection_name ) @@ -76,7 +74,7 @@ def data_collection_input_to_staging_path_and_source_path( def data_input_to_staging_path_and_source_path( v: "DatasetFilenameWrapper", invalid_chars: Sequence[str] = ("/",) -) -> Dict[str, Any]: +) -> dict[str, Any]: staging_path = v.get_staging_path(invalid_chars=invalid_chars) return { "element_identifier": v.element_identifier, diff --git a/lib/galaxy/tools/search/__init__.py b/lib/galaxy/tools/search/__init__.py index 9fa0c24baff4..2dfc9f38e942 100644 --- a/lib/galaxy/tools/search/__init__.py +++ b/lib/galaxy/tools/search/__init__.py @@ -30,8 +30,6 @@ import re import shutil from typing import ( - Dict, - List, Union, ) @@ -113,7 +111,7 @@ def build_index(self, tool_cache, toolbox, index_help: bool = True) -> None: for panel_search in self.panel_searches.values(): panel_search.build_index(tool_cache, toolbox, index_help=index_help) - def search(self, *args, **kwd) -> List[str]: + def search(self, *args, **kwd) -> list[str]: panel_view = kwd.pop("panel_view") if panel_view not in self.panel_searches: raise KeyError(f"Unknown panel_view specified {panel_view}") @@ -277,7 +275,7 @@ def _create_doc( self, tool, index_help: bool = True, - ) -> Dict[str, str]: + ) -> dict[str, str]: def clean(string): """Remove hyphens as they are Whoosh wildcards.""" if "-" in string: @@ -326,7 +324,7 @@ def search( self, q: str, config: GalaxyAppConfiguration, - ) -> List[str]: + 
) -> list[str]: """Perform search on the in-memory index.""" # Change field boosts for searcher self.searcher = self.index.searcher( diff --git a/lib/galaxy/tools/wrappers.py b/lib/galaxy/tools/wrappers.py index cc4b6289ee92..8d4b1b1ad64a 100644 --- a/lib/galaxy/tools/wrappers.py +++ b/lib/galaxy/tools/wrappers.py @@ -3,18 +3,17 @@ import os import shlex import tempfile +from collections.abc import ( + Iterable, + Iterator, + KeysView, + Sequence, +) from functools import total_ordering from typing import ( Any, cast, - Dict, - Iterable, - Iterator, - KeysView, - List, Optional, - Sequence, - Tuple, TYPE_CHECKING, Union, ) @@ -73,7 +72,7 @@ class ToolParameterValueWrapper: Base class for object that Wraps a Tool Parameter and Value. """ - value: Optional[Union[str, List[str]]] + value: Optional[Union[str, list[str]]] input: "ToolParameter" def __bool__(self) -> bool: @@ -128,7 +127,7 @@ def __init__( self, input: "ToolParameter", value: Optional[str], - other_values: Optional[Dict[str, str]] = None, + other_values: Optional[dict[str, str]] = None, profile: Optional[float] = None, ) -> None: self.input = input @@ -136,9 +135,9 @@ def __init__( # Tools with old profile versions may treat an optional text parameter as `""` value = cast(TextToolParameter, input).wrapper_default self.value = value - self._other_values: Dict[str, str] = other_values or {} + self._other_values: dict[str, str] = other_values or {} - def _get_cast_values(self, other: Any) -> Tuple[Union[str, int, float, bool, None], Any]: + def _get_cast_values(self, other: Any) -> tuple[Union[str, int, float, bool, None], Any]: if isinstance(self.input, BooleanToolParameter) and isinstance(other, str): if other in (self.input.truevalue, self.input.falsevalue): return str(self), other @@ -210,14 +209,14 @@ class SelectToolParameterFieldWrapper: def __init__( self, input: "SelectToolParameter", - value: Union[str, List[str]], - other_values: Optional[Dict[str, str]], + value: Union[str, list[str]], + other_values: Optional[dict[str, str]], compute_environment: Optional["ComputeEnvironment"], ) -> None: self._input = input self._value = value self._other_values = other_values - self._fields: Dict[str, List[str]] = {} + self._fields: dict[str, list[str]] = {} self._compute_environment = compute_environment def __getattr__(self, name: str) -> Any: @@ -245,12 +244,12 @@ def __getattr__(self, name: str) -> Any: def __init__( self, input: "SelectToolParameter", - value: Union[str, List[str]], - other_values: Optional[Dict[str, str]] = None, + value: Union[str, list[str]], + other_values: Optional[dict[str, str]] = None, compute_environment: Optional["ComputeEnvironment"] = None, ): self.input = input - self.value: Union[str, List[str]] = value + self.value: Union[str, list[str]] = value self.input.value_label = input.value_to_display_text(value) self._other_values = other_values or {} self.compute_environment = compute_environment @@ -347,7 +346,7 @@ def get(self, key: str, default: Any = None) -> Any: except Exception: return default - def items(self) -> Iterator[Tuple[str, Any]]: + def items(self) -> Iterator[tuple[str, Any]]: return iter((k, self.get(k)) for k, v in self.metadata.items()) def __init__( @@ -359,7 +358,7 @@ def __init__( compute_environment: Optional["ComputeEnvironment"] = None, identifier: Optional[str] = None, io_type: str = "input", - formats: Optional[List[str]] = None, + formats: Optional[list[str]] = None, tool_evaluator: Optional["ToolEvaluator"] = None, ) -> None: dataset_instance: Optional[DatasetInstance] = 
None @@ -452,10 +451,10 @@ def get_staging_path(self, invalid_chars: Sequence[str] = ("/",)) -> str: return f"{safe_element_identifier}.{self.file_ext}" @property - def all_metadata_files(self) -> List[Tuple[str, str]]: + def all_metadata_files(self) -> list[tuple[str, str]]: return self.unsanitized.get_metadata_file_paths_and_extensions() if self else [] - def serialize(self, invalid_chars: Sequence[str] = ("/",)) -> Dict[str, Any]: + def serialize(self, invalid_chars: Sequence[str] = ("/",)) -> dict[str, Any]: return data_input_to_staging_path_and_source_path(self, invalid_chars=invalid_chars) if self else {} @property @@ -549,7 +548,7 @@ def paths_as_file(self, sep: str = "\n") -> str: return filepath -class DatasetListWrapper(List[DatasetFilenameWrapper], ToolParameterValueWrapper, HasDatasets): +class DatasetListWrapper(list[DatasetFilenameWrapper], ToolParameterValueWrapper, HasDatasets): """ """ def __init__( @@ -568,7 +567,7 @@ def __init__( ], **kwargs: Any, ) -> None: - self._dataset_elements_cache: Dict[str, List[DatasetFilenameWrapper]] = {} + self._dataset_elements_cache: dict[str, list[DatasetFilenameWrapper]] = {} if not isinstance(datasets, Sequence): datasets = [datasets] @@ -593,8 +592,8 @@ def to_wrapper( @staticmethod def to_dataset_instances( dataset_instance_sources: Any, - ) -> List[Union[None, DatasetInstance]]: - dataset_instances: List[Optional[DatasetInstance]] = [] + ) -> list[Union[None, DatasetInstance]]: + dataset_instances: list[Optional[DatasetInstance]] = [] if not isinstance(dataset_instance_sources, list): dataset_instance_sources = [dataset_instance_sources] for dataset_instance_source in dataset_instance_sources: @@ -610,7 +609,7 @@ def to_dataset_instances( dataset_instances.extend(dataset_instance_source.collection.dataset_elements) return dataset_instances - def get_datasets_for_group(self, group: str) -> List[DatasetFilenameWrapper]: + def get_datasets_for_group(self, group: str) -> list[DatasetFilenameWrapper]: group = str(group).lower() if not self._dataset_elements_cache.get(group): wrappers = [] @@ -620,7 +619,7 @@ def get_datasets_for_group(self, group: str) -> List[DatasetFilenameWrapper]: self._dataset_elements_cache[group] = wrappers return self._dataset_elements_cache[group] - def serialize(self, invalid_chars: Sequence[str] = ("/",)) -> List[Dict[str, Any]]: + def serialize(self, invalid_chars: Sequence[str] = ("/",)) -> list[dict[str, Any]]: return [v.serialize(invalid_chars) for v in self] def __str__(self) -> str: @@ -650,8 +649,8 @@ def __init__( ) -> None: super().__init__() self.job_working_directory = job_working_directory - self._dataset_elements_cache: Dict[str, List[DatasetFilenameWrapper]] = {} - self._element_identifiers_extensions_paths_and_metadata_files: Optional[List[List[Any]]] = None + self._dataset_elements_cache: dict[str, list[DatasetFilenameWrapper]] = {} + self._element_identifiers_extensions_paths_and_metadata_files: Optional[list[list[Any]]] = None self.datatypes_registry = datatypes_registry kwargs["datatypes_registry"] = datatypes_registry self.tool_evaluator = tool_evaluator @@ -676,9 +675,9 @@ def __init__( self.collection = collection elements = collection.elements - element_instances: Dict[str, DatasetCollectionElementWrapper] = {} + element_instances: dict[str, DatasetCollectionElementWrapper] = {} - element_instance_list: List[DatasetCollectionElementWrapper] = [] + element_instance_list: list[DatasetCollectionElementWrapper] = [] for dataset_collection_element in elements: element_object = 
dataset_collection_element.element_object element_identifier = dataset_collection_element.element_identifier @@ -699,7 +698,7 @@ def __init__( self.__element_instances = element_instances self.__element_instance_list = element_instance_list - def get_datasets_for_group(self, group: str) -> List[DatasetFilenameWrapper]: + def get_datasets_for_group(self, group: str) -> list[DatasetFilenameWrapper]: group = str(group).lower() if not self._dataset_elements_cache.get(group): wrappers = [] @@ -717,7 +716,7 @@ def get_datasets_for_group(self, group: str) -> List[DatasetFilenameWrapper]: self._dataset_elements_cache[group] = wrappers return self._dataset_elements_cache[group] - def keys(self) -> Union[List[str], KeysView[Any]]: + def keys(self) -> Union[list[str], KeysView[Any]]: if not self.__input_supplied: return [] return self.__element_instances.keys() @@ -731,11 +730,11 @@ def element_identifier(self) -> Optional[str]: return self.name @property - def all_paths(self) -> List[str]: + def all_paths(self) -> list[str]: return [path for _, _, path, _ in self.element_identifiers_extensions_paths_and_metadata_files] @property - def all_metadata_files(self) -> List[List[str]]: + def all_metadata_files(self) -> list[list[str]]: return [ metadata_files for _, _, _, metadata_files in self.element_identifiers_extensions_paths_and_metadata_files ] @@ -743,7 +742,7 @@ def all_metadata_files(self) -> List[List[str]]: @property def element_identifiers_extensions_paths_and_metadata_files( self, - ) -> List[List[Any]]: + ) -> list[list[Any]]: if self._element_identifiers_extensions_paths_and_metadata_files is None: if self.collection: result = self.collection.element_identifiers_extensions_paths_and_metadata_files @@ -757,7 +756,7 @@ def get_all_staging_paths( self, invalid_chars: Sequence[str] = ("/",), include_collection_name: bool = False, - ) -> List[str]: + ) -> list[str]: safe_element_identifiers = [] for element_identifiers, extension, *_ in self.element_identifiers_extensions_paths_and_metadata_files: datatype = self.datatypes_registry.get_datatype_by_extension(extension) @@ -783,7 +782,7 @@ def serialize( self, invalid_chars: Sequence[str] = ("/",), include_collection_name: bool = False, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: return data_collection_input_to_staging_path_and_source_path( self, invalid_chars=invalid_chars, @@ -836,13 +835,13 @@ def __bool__(self) -> bool: class ElementIdentifierMapper: """Track mapping of dataset collection elements datasets to element identifiers.""" - def __init__(self, input_datasets: Optional[Dict[str, Any]] = None) -> None: + def __init__(self, input_datasets: Optional[dict[str, Any]] = None) -> None: if input_datasets is not None: self.identifier_key_dict = {v: f"{k}|__identifier__" for k, v in input_datasets.items()} else: self.identifier_key_dict = {} - def identifier(self, dataset_value: str, input_values: Dict[str, str]) -> Optional[str]: + def identifier(self, dataset_value: str, input_values: dict[str, str]) -> Optional[str]: if isinstance(dataset_value, list): raise TypeError(f"Expected {dataset_value} to be hashable") element_identifier = None diff --git a/lib/galaxy/tours/_impl.py b/lib/galaxy/tours/_impl.py index ab6f280f45e5..2dab0a403ffe 100644 --- a/lib/galaxy/tours/_impl.py +++ b/lib/galaxy/tours/_impl.py @@ -4,7 +4,6 @@ import logging import os -from typing import List import yaml from pydantic import parse_obj_as @@ -78,7 +77,7 @@ def is_yaml(filename: str) -> bool: return False -def tour_paths(target_path: StrPath) -> 
List[str]: +def tour_paths(target_path: StrPath) -> list[str]: paths = [] if os.path.isdir(target_path): for filename in os.listdir(target_path): diff --git a/lib/galaxy/tours/_schema.py b/lib/galaxy/tours/_schema.py index 45020d700a02..ee96b0f32515 100644 --- a/lib/galaxy/tours/_schema.py +++ b/lib/galaxy/tours/_schema.py @@ -1,6 +1,5 @@ from enum import Enum from typing import ( - List, Optional, Union, ) @@ -24,8 +23,8 @@ class Requirement(str, Enum): class TourCore(BaseModel): name: str = Field(title="Name", description="Name of tour") description: str = Field(title="Description", description="Tour description") - tags: List[str] = Field(title="Tags", description="Topic topic tags") - requirements: List[Requirement] = Field(title="Requirements", description="Requirements to run the tour.") + tags: list[str] = Field(title="Tags", description="Topic topic tags") + requirements: list[Requirement] = Field(title="Requirements", description="Requirements to run the tour.") model_config = ConfigDict(use_enum_values=True) @@ -34,7 +33,7 @@ class Tour(TourCore): class TourList(RootModel): - root: List[Tour] = Field(title="List of tours", default=[]) + root: list[Tour] = Field(title="List of tours", default=[]) class TourStep(BaseModel): @@ -46,10 +45,10 @@ class TourStep(BaseModel): placement: Optional[str] = Field( None, title="Placement", description="Placement of the text box relative to the selected element" ) - preclick: Optional[Union[bool, List[str]]] = Field( + preclick: Optional[Union[bool, list[str]]] = Field( None, title="Pre-click", description="Elements that receive a click() event before the step is shown" ) - postclick: Optional[Union[bool, List[str]]] = Field( + postclick: Optional[Union[bool, list[str]]] = Field( None, title="Post-click", description="Elements that receive a click() event after the step is shown" ) textinsert: Optional[str] = Field( @@ -59,4 +58,4 @@ class TourStep(BaseModel): class TourDetails(TourCore): title_default: Optional[str] = Field(None, title="Default title", description="Default title for each step") - steps: List[TourStep] = Field(title="Steps", description="Tour steps") + steps: list[TourStep] = Field(title="Steps", description="Tour steps") diff --git a/lib/galaxy/visualization/data_providers/genome.py b/lib/galaxy/visualization/data_providers/genome.py index 8108b8b69bc9..c1c0569740f8 100644 --- a/lib/galaxy/visualization/data_providers/genome.py +++ b/lib/galaxy/visualization/data_providers/genome.py @@ -10,16 +10,13 @@ import random import re import sys +from collections.abc import Iterator from contextlib import contextmanager from json import loads from typing import ( Any, - Dict, IO, - Iterator, - List, Optional, - Tuple, Union, ) @@ -57,7 +54,7 @@ # Can be removed once https://github.com/pysam-developers/pysam/issues/939 is resolved. pysam.set_verbosity(0) -PAYLOAD_LIST_TYPE = List[Optional[Union[str, int, float, List[Tuple[int, int]]]]] +PAYLOAD_LIST_TYPE = list[Optional[Union[str, int, float, list[tuple[int, int]]]]] def float_nan(n): @@ -177,7 +174,7 @@ class GenomeDataProvider(BaseDataProvider): # filters. Key is column name, value is a dict with mandatory key 'index' # and optional key 'name'. E.g. 
this defines column 4: # col_name_data_attr_mapping = {4 : { index: 5, name: 'Score' } } - col_name_data_attr_mapping: Dict[Union[str, int], Dict] = {} + col_name_data_attr_mapping: dict[Union[str, int], dict] = {} def __init__( self, @@ -361,7 +358,7 @@ class TabixDataProvider(GenomeDataProvider, FilterableMixin): dataset_type = "tabix" - col_name_data_attr_mapping: Dict[Union[str, int], Dict] = {4: {"index": 4, "name": "Score"}} + col_name_data_attr_mapping: dict[Union[str, int], dict] = {4: {"index": 4, "name": "Score"}} @contextmanager def open_data_file(self): @@ -625,7 +622,7 @@ class VcfDataProvider(GenomeDataProvider): """ - col_name_data_attr_mapping: Dict[Union[str, int], Dict] = {"Qual": {"index": 6, "name": "Qual"}} + col_name_data_attr_mapping: dict[Union[str, int], dict] = {"Qual": {"index": 6, "name": "Qual"}} dataset_type = "variant" @@ -703,7 +700,7 @@ def get_mapping(ref, alt): if samples_data: # Process and pack samples' genotype and count alleles across samples. - alleles_seen: Dict[int, bool] = {} + alleles_seen: dict[int, bool] = {} has_alleles = False for sample in samples_data: @@ -946,7 +943,7 @@ def _nth_read_iterator(read_iterator, threshold): # Encode reads as list of lists. # results = [] - paired_pending: Dict[str, Dict[str, Any]] = {} + paired_pending: dict[str, dict[str, Any]] = {} unmapped = 0 message = None count = 0 @@ -1110,7 +1107,7 @@ class BBIDataProvider(GenomeDataProvider): dataset_type = "bigwig" @abc.abstractmethod - def _get_dataset(self) -> Tuple[IO[bytes], Union[BigBedFile, BigWigFile]]: ... + def _get_dataset(self) -> tuple[IO[bytes], Union[BigBedFile, BigWigFile]]: ... def valid_chroms(self): # No way to return this info as of now @@ -1386,12 +1383,12 @@ def process_data(self, iterator, start_val=0, max_vals=sys.maxsize, **kwargs): # TODO: extend this code or use code in gff_util to process GFF/3 as well # and then create a generic GFFDataProvider that can be used with both # raw and tabix datasets. 
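PEP 585 generics nest, so the PAYLOAD_LIST_TYPE alias rewritten in genome.py above needs no typing imports beyond Optional and Union. A sketch:

from typing import Optional, Union

PAYLOAD_LIST_TYPE = list[Optional[Union[str, int, float, list[tuple[int, int]]]]]

payload: PAYLOAD_LIST_TYPE = ["chr1", 100, 0.95, [(100, 200), (300, 400)], None]
assert payload[3] == [(100, 200), (300, 400)]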
- features: Dict[str, List[GFFInterval]] = {} + features: dict[str, list[GFFInterval]] = {} for line in iterator: line_attrs = parse_gff_attributes(line.split("\t")[8]) transcript_id = line_attrs["transcript_id"] - feature_list: List[GFFInterval] + feature_list: list[GFFInterval] if transcript_id in features: feature_list = features[transcript_id] else: diff --git a/lib/galaxy/visualization/data_providers/phyloviz/__init__.py b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py index cf55a775d646..99a9b1a931b8 100644 --- a/lib/galaxy/visualization/data_providers/phyloviz/__init__.py +++ b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py @@ -2,7 +2,6 @@ from typing import ( Any, - Dict, ) from galaxy.visualization.data_providers.basic import BaseDataProvider @@ -27,7 +26,7 @@ def get_data(self, tree_index=0): file_name = self.original_dataset.get_file_name() parseMsg = None jsonDicts = [] - rval: Dict[str, Any] = {"dataset_type": self.dataset_type} + rval: dict[str, Any] = {"dataset_type": self.dataset_type} if file_ext in ["newick", "nhx"]: # parses newick files newickParser = Newick_Parser() diff --git a/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py index a552e0921b4b..a8201a63f376 100644 --- a/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py +++ b/lib/galaxy/visualization/data_providers/phyloviz/baseparser.py @@ -1,7 +1,6 @@ import json from typing import ( Any, - Dict, ) @@ -34,7 +33,7 @@ def addChildNode(self, child): def __str__(self): return f"{self.name} id:{str(self.id)}, depth: {str(self.depth)}" - def toJson(self) -> Dict[str, Any]: + def toJson(self) -> dict[str, Any]: """Converts the data in the node to a dict representation of json""" thisJson = {"name": self.name, "id": self.id, "depth": self.depth, "dist": self.length} thisJson = self.addChildrenToJson(thisJson) @@ -89,7 +88,7 @@ def addRoot(self, root: Node): def generateJsonableDict(self): """Changes itself into a dictonary by recurssively calling the tojson on all its nodes. Think of it as a dict in an array of dict in an array of dict and so on...""" - jsonTree: Dict[str, Any] + jsonTree: dict[str, Any] if self.root: assert isinstance(self.root, Node) jsonTree = self.root.toJson() diff --git a/lib/galaxy/visualization/data_providers/registry.py b/lib/galaxy/visualization/data_providers/registry.py index 21ce1a6a9701..ba802629d3c6 100644 --- a/lib/galaxy/visualization/data_providers/registry.py +++ b/lib/galaxy/visualization/data_providers/registry.py @@ -1,7 +1,5 @@ from typing import ( - Dict, Optional, - Type, Union, ) @@ -34,8 +32,8 @@ from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider # a dict keyed on datatype with a 'default' string key. 
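The registry aliases below likewise swap typing.Type for the builtin type[...], so classes themselves can key and populate the provider mapping. A hedged sketch with stand-in classes:

from typing import Literal, Union


class Data: ...


class Bed(Data): ...


class BaseDataProvider: ...


PROVIDER_BY_DATATYPE_CLASS_DICT = dict[Union[Literal["default"], type[Data]], type[BaseDataProvider]]

providers: PROVIDER_BY_DATATYPE_CLASS_DICT = {"default": BaseDataProvider, Bed: BaseDataProvider}
assert providers[Bed] is BaseDataProvider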
-PROVIDER_BY_DATATYPE_CLASS_DICT = Dict[Union[Literal["default"], Type[Data]], Type[BaseDataProvider]] -DATA_PROVIDER_BY_TYPE_NAME_DICT = Dict[str, Union[Type[BaseDataProvider], PROVIDER_BY_DATATYPE_CLASS_DICT]] +PROVIDER_BY_DATATYPE_CLASS_DICT = dict[Union[Literal["default"], type[Data]], type[BaseDataProvider]] +DATA_PROVIDER_BY_TYPE_NAME_DICT = dict[str, Union[type[BaseDataProvider], PROVIDER_BY_DATATYPE_CLASS_DICT]] class DataProviderRegistry: @@ -72,7 +70,7 @@ def get_data_provider(self, trans, name=None, source="data", raw=False, original """ data_provider: Optional[BaseDataProvider] - data_provider_class: Type[BaseDataProvider] + data_provider_class: type[BaseDataProvider] # any datatype class that is a subclass of another needs to be # checked before the parent in this conditional. diff --git a/lib/galaxy/visualization/genomes.py b/lib/galaxy/visualization/genomes.py index d38996c52c32..f7ae75be5828 100644 --- a/lib/galaxy/visualization/genomes.py +++ b/lib/galaxy/visualization/genomes.py @@ -4,7 +4,6 @@ import sys from json import loads from typing import ( - Dict, Optional, ) @@ -204,7 +203,7 @@ class Genomes: def __init__(self, app: StructuredApp): self.app = app # Create list of genomes from app.genome_builds - self.genomes: Dict[str, Genome] = {} + self.genomes: dict[str, Genome] = {} # Store internal versions of data tables for twobit and __dbkey__ self._table_versions = {"twobit": None, "__dbkeys__": None} self.reload_genomes() diff --git a/lib/galaxy/visualization/plugins/config_parser.py b/lib/galaxy/visualization/plugins/config_parser.py index 0cde3e765c92..ce90464e6f8e 100644 --- a/lib/galaxy/visualization/plugins/config_parser.py +++ b/lib/galaxy/visualization/plugins/config_parser.py @@ -1,8 +1,6 @@ import logging from typing import ( Any, - Dict, - List, ) from galaxy.util import ( @@ -123,7 +121,7 @@ def parse_visualization(self, xml_tree): # param modifiers provide extra information for other params (e.g. hda_ldda='hda' -> dataset_id is an hda id) # store these modifiers in a 2-level dictionary { target_param: { param_modifier_key: { param_mod_data } # ugh - wish we didn't need these - param_modifiers: Dict[str, Any] = {} + param_modifiers: dict[str, Any] = {} param_modifier_elements = param_confs.findall("param_modifier") if param_confs is not None else [] for param_modifier_conf in param_modifier_elements: param_modifier = self.param_modifier_parser.parse(param_modifier_conf) @@ -271,7 +269,7 @@ def parse_tests(self, xml_tree_list): # tests should NOT include expensive operations: reading file data, running jobs, etc. # do as much here as possible to reduce the overhead of seeing if a visualization is applicable # currently tests are or'd only (could be and'd or made into compound boolean tests) - tests: List[Dict[str, Any]] = [] + tests: list[dict[str, Any]] = [] if not xml_tree_list: return tests @@ -320,7 +318,7 @@ def parse_to_params(self, xml_tree_list): the registry to convert the data_source into one or more appropriate params for the visualization. 
""" - to_param_dict: Dict[str, Any] = {} + to_param_dict: dict[str, Any] = {} if not xml_tree_list: return to_param_dict diff --git a/lib/galaxy/visualization/plugins/datasource_testing.py b/lib/galaxy/visualization/plugins/datasource_testing.py index c3dbd7a430e3..df42c42340c0 100644 --- a/lib/galaxy/visualization/plugins/datasource_testing.py +++ b/lib/galaxy/visualization/plugins/datasource_testing.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -27,7 +26,7 @@ def _build_getattr_lambda(attr_name_list): return lambda o: getattr(_build_getattr_lambda(attr_name_list[:-1])(o), next_attr_name) -def _check_uri_support(target_object, supported_protocols: List[str]) -> bool: +def _check_uri_support(target_object, supported_protocols: list[str]) -> bool: """Test if the target object is deferred and has a supported protocol.""" if not _is_deferred(target_object): @@ -39,8 +38,7 @@ def _check_uri_support(target_object, supported_protocols: List[str]) -> bool: if "*" in supported_protocols: return True # wildcard support for all protocols - deferred_source_uri = _deferred_source_uri(target_object) - if deferred_source_uri: + if deferred_source_uri := _deferred_source_uri(target_object): protocol = deferred_source_uri.split("://")[0] return protocol in supported_protocols return False diff --git a/lib/galaxy/visualization/plugins/plugin.py b/lib/galaxy/visualization/plugins/plugin.py index 20e4a2329b07..99cd8d8746c0 100644 --- a/lib/galaxy/visualization/plugins/plugin.py +++ b/lib/galaxy/visualization/plugins/plugin.py @@ -8,7 +8,6 @@ import os from typing import ( Any, - Dict, ) import mako.lookup @@ -93,7 +92,7 @@ def render_saved(self, visualization, trans=None, embedded=None, **kwargs): Render and return the text of the plugin webpage/fragment using the config/data of a saved visualization. """ - config: Dict[str, Any] = self._get_saved_visualization_config(visualization, **kwargs) + config: dict[str, Any] = self._get_saved_visualization_config(visualization, **kwargs) # pass the saved visualization config for parsing into render vars render_vars = self._build_render_vars(config, trans=trans, **kwargs) # update any values that were loaded from the saved Visualization @@ -131,7 +130,7 @@ def _get_url(self): return url_for(controller="visualization", action=self.name) return url_for("visualization_plugin", visualization_name=self.name) - def _get_saved_visualization_config(self, visualization, revision=None, **kwargs) -> Dict[str, Any]: + def _get_saved_visualization_config(self, visualization, revision=None, **kwargs) -> dict[str, Any]: """ Return the config of a saved visualization and revision. @@ -141,11 +140,11 @@ def _get_saved_visualization_config(self, visualization, revision=None, **kwargs return copy.copy(visualization.latest_revision.config) # ---- non-public - def _build_render_vars(self, config: Dict[str, Any], trans=None, **kwargs) -> Dict[str, Any]: + def _build_render_vars(self, config: dict[str, Any], trans=None, **kwargs) -> dict[str, Any]: """ Build all the variables that will be passed into the renderer. """ - render_vars: Dict[str, Any] = {} + render_vars: dict[str, Any] = {} # Meta variables passed to the template/renderer to describe the visualization being rendered. 
render_vars.update( visualization_name=self.name, diff --git a/lib/galaxy/visualization/plugins/resource_parser.py b/lib/galaxy/visualization/plugins/resource_parser.py index 01ca8e78b237..173da37e2817 100644 --- a/lib/galaxy/visualization/plugins/resource_parser.py +++ b/lib/galaxy/visualization/plugins/resource_parser.py @@ -8,7 +8,6 @@ import weakref from typing import ( Callable, - Dict, Optional, Union, ) @@ -47,7 +46,7 @@ class ResourceParser: new keys (e.g. dataset_id="NNN" -> hda=). """ - primitive_parsers: Dict[str, Callable[[str], ParameterPrimitiveType]] = { + primitive_parsers: dict[str, Callable[[str], ParameterPrimitiveType]] = { "str": lambda param: galaxy.util.sanitize_html.sanitize_html(param), "bool": lambda param: galaxy.util.string_as_bool(param), "int": int, @@ -157,20 +156,20 @@ def parse_config(self, trans, param_config_dict, query_params): # TODO: I would LOVE to rip modifiers out completely def parse_parameter_modifiers( self, trans, param_modifiers, query_params - ) -> Dict[str, Dict[str, Optional[ParameterType]]]: + ) -> dict[str, dict[str, Optional[ParameterType]]]: """ Parse and return parameters that are meant to modify other parameters, be grouped with them, or are needed to successfully parse other parameters. """ # only one level of modification - down that road lies madness # parse the modifiers out of query_params first since they modify the other params coming next - parsed_modifiers: Dict[str, Dict[str, Optional[ParameterType]]] = {} + parsed_modifiers: dict[str, dict[str, Optional[ParameterType]]] = {} if not param_modifiers: return parsed_modifiers # precondition: expects a two level dictionary # { target_param_name -> { param_modifier_name -> { param_modifier_data }}} for target_param_name, modifier_dict in param_modifiers.items(): - target_modifiers: Dict[str, Optional[ParameterType]] = {} + target_modifiers: dict[str, Optional[ParameterType]] = {} parsed_modifiers[target_param_name] = target_modifiers for modifier_name, modifier_config in modifier_dict.items(): diff --git a/lib/galaxy/web/framework/helpers/grids.py b/lib/galaxy/web/framework/helpers/grids.py index c839f7eff8dd..bc9364751dd4 100644 --- a/lib/galaxy/web/framework/helpers/grids.py +++ b/lib/galaxy/web/framework/helpers/grids.py @@ -1,6 +1,5 @@ import logging from typing import ( - List, Optional, ) @@ -66,7 +65,7 @@ class GridData: """ model_class: Optional[type] = None - columns: List[GridColumn] = [] + columns: list[GridColumn] = [] default_limit: int = 1000 def __init__(self): diff --git a/lib/galaxy/web/framework/middleware/static.py b/lib/galaxy/web/framework/middleware/static.py index 03e636e186bf..432c6084189b 100644 --- a/lib/galaxy/web/framework/middleware/static.py +++ b/lib/galaxy/web/framework/middleware/static.py @@ -1,8 +1,4 @@ import os -from typing import ( - List, - Tuple, -) from paste import request from paste.fileapp import FileApp @@ -55,7 +51,7 @@ def __call__(self, environ, start_response): if if_none_match := environ.get("HTTP_IF_NONE_MATCH"): mytime = os.stat(full).st_mtime if str(mytime) == if_none_match: - headers: List[Tuple[str, str]] = [] + headers: list[tuple[str, str]] = [] ETAG.update(headers, mytime) start_response("304 Not Modified", headers) return [""] # empty body diff --git a/lib/galaxy/web/legacy_framework/grids.py b/lib/galaxy/web/legacy_framework/grids.py index 204dabd2b842..c9306a1b36d8 100644 --- a/lib/galaxy/web/legacy_framework/grids.py +++ b/lib/galaxy/web/legacy_framework/grids.py @@ -5,8 +5,6 @@ loads, ) from typing import ( - Dict, - 
List, Optional, ) @@ -633,12 +631,12 @@ class Grid: async_template = "legacy/grid_base_async.mako" use_async = False use_hide_message = True - global_actions: List[GridAction] = [] - columns: List[GridColumn] = [] - operations: List[GridOperation] = [] - standard_filters: List[GridColumnFilter] = [] + global_actions: list[GridAction] = [] + columns: list[GridColumn] = [] + operations: list[GridOperation] = [] + standard_filters: list[GridColumnFilter] = [] # Any columns that are filterable (either standard or advanced) should have a default value set in the default filter. - default_filter: Dict[str, str] = {} + default_filter: dict[str, str] = {} default_sort_key: Optional[str] = None use_paging = False num_rows_per_page = 25 diff --git a/lib/galaxy/web/statsd_client.py b/lib/galaxy/web/statsd_client.py index cf83b299bc52..d27a7cf6ccd3 100644 --- a/lib/galaxy/web/statsd_client.py +++ b/lib/galaxy/web/statsd_client.py @@ -1,6 +1,5 @@ import sys from typing import ( - Dict, Optional, ) @@ -49,7 +48,7 @@ def _effective_infix(self, path, tags): CURRENT_TEST: Optional[str] = None -CURRENT_TEST_METRICS: Optional[Dict[str, Dict]] = None +CURRENT_TEST_METRICS: Optional[dict[str, dict]] = None class PyTestGalaxyStatsdClient(VanillaGalaxyStatsdClient): diff --git a/lib/galaxy/web_stack/__init__.py b/lib/galaxy/web_stack/__init__.py index 1f202b9d84e2..55555d566b13 100644 --- a/lib/galaxy/web_stack/__init__.py +++ b/lib/galaxy/web_stack/__init__.py @@ -7,10 +7,7 @@ import threading from typing import ( Callable, - FrozenSet, - List, Optional, - Type, ) from galaxy.model import database_utils @@ -27,8 +24,8 @@ def filter(self, record): class ApplicationStack: name: Optional[str] = None - prohibited_middleware: FrozenSet[str] = frozenset() - log_filter_class: Type[logging.Filter] = ApplicationStackLogFilter + prohibited_middleware: frozenset[str] = frozenset() + log_filter_class: type[logging.Filter] = ApplicationStackLogFilter log_format = "%(name)s %(levelname)s %(asctime)s [pN:%(processName)s,p:%(process)d,tN:%(threadName)s] %(message)s" # TODO: this belongs in the pool configuration server_name_template = "{server_name}" @@ -186,7 +183,7 @@ class WebApplicationStack(ApplicationStack): class GunicornApplicationStack(ApplicationStack): name = "Gunicorn" do_post_fork = "--preload" in os.environ.get("GUNICORN_CMD_ARGS", "") or "--preload" in sys.argv - postfork_functions: List[Callable] = [] + postfork_functions: list[Callable] = [] # Will be set to True by external hook late_postfork_event = threading.Event() late_postfork_thread: threading.Thread @@ -277,7 +274,7 @@ def pool_members(self, pool_name): return (self.config.server_name,) if self.in_pool(pool_name) else None -def application_stack_class() -> Type[ApplicationStack]: +def application_stack_class() -> type[ApplicationStack]: """Returns the correct ApplicationStack class for the stack under which this Galaxy process is running. """ diff --git a/lib/galaxy/web_stack/handlers.py b/lib/galaxy/web_stack/handlers.py index bd3376220f86..1d1f2fd84e66 100644 --- a/lib/galaxy/web_stack/handlers.py +++ b/lib/galaxy/web_stack/handlers.py @@ -8,7 +8,6 @@ import os import random from enum import Enum -from typing import Tuple from sqlalchemy.orm import object_session @@ -39,7 +38,7 @@ class HandlerAssignmentSkip(Exception): class ConfiguresHandlers: DEFAULT_HANDLER_TAG = "_default_" - DEFAULT_BASE_HANDLER_POOLS: Tuple[str, ...] = () + DEFAULT_BASE_HANDLER_POOLS: tuple[str, ...] 
= () def add_handler(self, handler_id, tags): if handler_id not in self.handlers: diff --git a/lib/galaxy/web_stack/message.py b/lib/galaxy/web_stack/message.py index 15259b4cc50e..b3af8c034989 100644 --- a/lib/galaxy/web_stack/message.py +++ b/lib/galaxy/web_stack/message.py @@ -5,7 +5,6 @@ import types from typing import ( Optional, - Tuple, ) log = logging.getLogger(__name__) @@ -118,8 +117,8 @@ def set_target(self, target): class ParamMessage(ApplicationStackMessage): _validate_kwargs = ("params",) - _validate_params: Tuple[str, ...] = () - _exclude_params: Tuple[str, ...] = () + _validate_params: tuple[str, ...] = () + _exclude_params: tuple[str, ...] = () def __init__(self, target=None, params=None, **kwargs): super().__init__(target=target) diff --git a/lib/galaxy/webapps/base/api.py b/lib/galaxy/webapps/base/api.py index 765bba28af70..1cc03d05141f 100644 --- a/lib/galaxy/webapps/base/api.py +++ b/lib/galaxy/webapps/base/api.py @@ -1,13 +1,11 @@ import os import stat import uuid +from collections.abc import Mapping from logging import getLogger from typing import ( Any, - Dict, - Mapping, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -54,7 +52,7 @@ # Copied from https://github.com/tiangolo/fastapi/issues/1240#issuecomment-1055396884 -def _get_range_header(range_header: str, file_size: int) -> Tuple[int, int]: +def _get_range_header(range_header: str, file_size: int) -> tuple[int, int]: def _invalid_range(): return HTTPException( status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, @@ -196,7 +194,7 @@ def get_error_response_for_request(request: Request, exc: MessageException) -> J content = error_dict retry_after: Optional[int] = getattr(exc, "retry_after", None) - headers: Dict[str, str] = {} + headers: dict[str, str] = {} if retry_after: headers["Retry-After"] = str(retry_after) return JSONResponse(status_code=status_code, content=content, headers=headers) @@ -248,7 +246,7 @@ def add_request_id_middleware(app: FastAPI): def include_all_package_routers(app: FastAPI, package_name: str): - responses: Dict[Union[int, str], Dict[str, Any]] = { + responses: dict[Union[int, str], dict[str, Any]] = { "4XX": { "description": "Request Error", "model": MessageExceptionModel, diff --git a/lib/galaxy/webapps/base/controller.py b/lib/galaxy/webapps/base/controller.py index 4defc3ed2c2f..c4cdcb28cb6e 100644 --- a/lib/galaxy/webapps/base/controller.py +++ b/lib/galaxy/webapps/base/controller.py @@ -659,8 +659,7 @@ def get_visualization_config(self, trans, visualization): bookmarks = latest_revision.config.get("bookmarks", []) def pack_track(track_dict): - unencoded_id = track_dict.get("dataset_id") - if unencoded_id: + if unencoded_id := track_dict.get("dataset_id"): encoded_id = trans.security.encode_id(unencoded_id) else: encoded_id = track_dict["dataset"]["id"] diff --git a/lib/galaxy/webapps/base/webapp.py b/lib/galaxy/webapps/base/webapp.py index f995c39054e0..96bfb2d2dab0 100644 --- a/lib/galaxy/webapps/base/webapp.py +++ b/lib/galaxy/webapps/base/webapp.py @@ -11,9 +11,7 @@ from http.cookies import CookieError from typing import ( Any, - Dict, Optional, - Tuple, ) from urllib.parse import urlparse @@ -312,7 +310,7 @@ class GalaxyWebTransaction(base.DefaultWebTransaction, context.ProvidesHistoryCo """ def __init__( - self, environ: Dict[str, Any], app: BasicSharedApp, webapp: WebApplication, session_cookie: Optional[str] = None + self, environ: dict[str, Any], app: BasicSharedApp, webapp: WebApplication, session_cookie: Optional[str] = None ) -> None: self._app = app self.webapp = webapp @@ 
-331,7 +329,7 @@ def __init__( self.galaxy_session = None self.error_message = None self.host = self.request.host - self._short_term_cache: Dict[Tuple[str, ...], Any] = {} + self._short_term_cache: dict[tuple[str, ...], Any] = {} # set any cross origin resource sharing headers if configured to do so self.set_cors_headers() @@ -939,8 +937,7 @@ def get_or_create_default_history(self): # Look for an existing history that has the default name, is not # deleted, and is empty. If this exists, we associate it with the # current session and return it. - user = self.galaxy_session.user - if user: + if user := self.galaxy_session.user: stmt = select(History).filter_by(user=user, name=History.default_name, deleted=False) unnamed_histories = self.sa_session.scalars(stmt) for history in unnamed_histories: diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py index 51066fed89c1..3cba1d7cc2e9 100644 --- a/lib/galaxy/webapps/galaxy/api/__init__.py +++ b/lib/galaxy/webapps/galaxy/api/__init__.py @@ -3,17 +3,15 @@ """ import inspect +from collections.abc import AsyncGenerator from enum import Enum from string import Template from typing import ( Any, - AsyncGenerator, Callable, cast, NamedTuple, Optional, - Tuple, - Type, TypeVar, ) from urllib.parse import ( @@ -126,7 +124,7 @@ def __init__(self, callable, dep_type): self.galaxy_type_depends = dep_type -def depends(dep_type: Type[T], app=get_app_with_request_session) -> T: +def depends(dep_type: type[T], app=get_app_with_request_session) -> T: async def _do_resolve(request: Request): async for _dep in app(): yield _dep.resolve(dep_type) @@ -565,7 +563,7 @@ class APIContentTypeRoute(APIRoute): match_content_type: str - def accept_matches(self, scope: Scope) -> Tuple[Match, Scope]: + def accept_matches(self, scope: Scope) -> tuple[Match, Scope]: content_type_header = Headers(scope=scope).get("content-type", None) if not content_type_header: return Match.PARTIAL, scope @@ -573,7 +571,7 @@ def accept_matches(self, scope: Scope) -> Tuple[Match, Scope]: return Match.NONE, scope return Match.FULL, scope - def matches(self, scope: Scope) -> Tuple[Match, Scope]: + def matches(self, scope: Scope) -> tuple[Match, Scope]: accept_match, accept_scope = self.accept_matches(scope) if accept_match == Match.NONE: return accept_match, accept_scope @@ -584,7 +582,7 @@ def matches(self, scope: Scope) -> Tuple[Match, Scope]: ) -def as_form(cls: Type[BaseModel]): +def as_form(cls: type[BaseModel]): """ Adds an as_form class method to decorated models. The as_form class method can be used with FastAPI endpoints. 
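The hunks in this section repeat two mechanical rewrites: PEP 585 builtin generics (subscriptable since Python 3.9) replace the typing.Dict/List/Tuple/Set/Type aliases, and Annotated is imported straight from typing (where it landed in Python 3.9 via PEP 593) rather than from typing_extensions. A minimal before/after sketch of the pattern, using illustrative names that are not taken from this diff:

# Before: Python 3.8-compatible spellings.
from typing import Dict, List
from typing_extensions import Annotated

Label = Annotated[str, "a display label"]

def tally(items: List[Label]) -> Dict[Label, int]:
    counts: Dict[Label, int] = {}
    for item in items:
        counts[item] = counts.get(item, 0) + 1
    return counts

# After: Python 3.9+ spellings. pyupgrade rewrites the generics, and
# Annotated can come straight from typing on 3.9+. Runtime behavior is
# identical; only the annotation syntax changes, which is why these hunks
# touch imports and signatures but no logic.
from typing import Annotated

def tally(items: list[Label]) -> dict[Label, int]:
    counts: dict[Label, int] = {}
    for item in items:
        counts[item] = counts.get(item, 0) + 1
    return counts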
diff --git a/lib/galaxy/webapps/galaxy/api/chat.py b/lib/galaxy/webapps/galaxy/api/chat.py index 5f85b6f9f429..bec9f93eeef9 100644 --- a/lib/galaxy/webapps/galaxy/api/chat.py +++ b/lib/galaxy/webapps/galaxy/api/chat.py @@ -4,12 +4,12 @@ import logging from typing import ( + Annotated, Optional, Union, ) from fastapi import Path -from typing_extensions import Annotated from galaxy.config import GalaxyAppConfiguration from galaxy.exceptions import ConfigurationError diff --git a/lib/galaxy/webapps/galaxy/api/common.py b/lib/galaxy/webapps/galaxy/api/common.py index 228d68871ec7..1635d7e551db 100644 --- a/lib/galaxy/webapps/galaxy/api/common.py +++ b/lib/galaxy/webapps/galaxy/api/common.py @@ -2,10 +2,9 @@ from io import BytesIO from typing import ( + Annotated, Any, - List, Optional, - Set, ) from fastapi import ( @@ -15,7 +14,6 @@ Request, ) from starlette.responses import StreamingResponse -from typing_extensions import Annotated from galaxy.schema import ( FilterQueryParams, @@ -133,14 +131,14 @@ description="Comma-separated list of keys to be passed to the serializer", ) -FilterQueryQueryParam: Optional[List[str]] = Query( +FilterQueryQueryParam: Optional[list[str]] = Query( default=None, title="Filter Query", description="Generally a property name to filter by followed by an (often optional) hyphen and operator string.", examples=["create_time-gt"], ) -FilterValueQueryParam: Optional[List[str]] = Query( +FilterValueQueryParam: Optional[list[str]] = Query( default=None, title="Filter Value", description="The value to filter by.", @@ -192,8 +190,8 @@ def query_serialization_params( def get_value_filter_query_params( - q: Optional[List[str]] = FilterQueryQueryParam, - qv: Optional[List[str]] = FilterValueQueryParam, + q: Optional[list[str]] = FilterQueryQueryParam, + qv: Optional[list[str]] = FilterValueQueryParam, ) -> ValueFilterQueryParams: """ This function is meant to be used as a Dependency. @@ -206,8 +204,8 @@ def get_value_filter_query_params( def get_filter_query_params( - q: Optional[List[str]] = FilterQueryQueryParam, - qv: Optional[List[str]] = FilterValueQueryParam, + q: Optional[list[str]] = FilterQueryQueryParam, + qv: Optional[list[str]] = FilterValueQueryParam, offset: Optional[int] = OffsetQueryParam, limit: Optional[int] = LimitQueryParam, order: Optional[str] = OrderQueryParam, @@ -245,7 +243,7 @@ def normalize_permission_payload( return update_payload -def get_query_parameters_from_request_excluding(request: Request, exclude: Set[str]) -> dict: +def get_query_parameters_from_request_excluding(request: Request, exclude: set[str]) -> dict: """Gets all the request query parameters excluding the given parameters names in `exclude` set. 
This is useful when an endpoint uses arbitrary or dynamic query parameters that @@ -287,8 +285,8 @@ def index( """ def parse_elements( - elements: Optional[List[str]] = query, - ) -> Optional[List[Any]]: + elements: Optional[list[str]] = query, + ) -> Optional[list[Any]]: if query.default != Ellipsis and not elements: return query.default if elements and len(elements) == 1: diff --git a/lib/galaxy/webapps/galaxy/api/configuration.py b/lib/galaxy/webapps/galaxy/api/configuration.py index 51c23813a007..1c685cad72c6 100644 --- a/lib/galaxy/webapps/galaxy/api/configuration.py +++ b/lib/galaxy/webapps/galaxy/api/configuration.py @@ -6,8 +6,6 @@ import logging from typing import ( Any, - Dict, - List, Optional, ) @@ -69,7 +67,7 @@ def index( trans: ProvidesUserContext = DependsOnTrans, view: SerializationViewQueryParam = None, keys: Optional[str] = SerializationKeysQueryParam, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """ Return an object containing exposable configuration settings. @@ -85,7 +83,7 @@ def index( summary="Return Galaxy version information: major/minor version, optional extra info", response_description="Galaxy version information: major/minor version, optional extra info", ) - def version(self) -> Dict[str, Any]: + def version(self) -> dict[str, Any]: """Return Galaxy version information: major/minor version, optional extra info.""" return self.configuration_manager.version() @@ -95,7 +93,7 @@ def version(self) -> Dict[str, Any]: summary="Return dynamic tool configuration files", response_description="Dynamic tool configuration files", ) - def dynamic_tool_confs(self) -> List[Dict[str, str]]: + def dynamic_tool_confs(self) -> list[dict[str, str]]: """Return dynamic tool configuration files.""" return self.configuration_manager.dynamic_tool_confs() @@ -105,7 +103,7 @@ def dynamic_tool_confs(self) -> List[Dict[str, str]]: summary="Decode a given id", response_description="Decoded id", ) - def decode_id(self, encoded_id: str = EncodedIdPathParam) -> Dict[str, int]: + def decode_id(self, encoded_id: str = EncodedIdPathParam) -> dict[str, int]: """Decode a given id.""" return self.configuration_manager.decode_id(encoded_id) @@ -115,7 +113,7 @@ def decode_id(self, encoded_id: str = EncodedIdPathParam) -> Dict[str, int]: summary="Encode a given id", response_description="Encoded id", ) - def encode_id(self, decoded_id: int = DecodedIdPathParam) -> Dict[str, str]: + def encode_id(self, decoded_id: int = DecodedIdPathParam) -> dict[str, str]: """Encode a given id.""" return self.configuration_manager.encode_id(decoded_id) @@ -125,7 +123,7 @@ def encode_id(self, decoded_id: int = DecodedIdPathParam) -> Dict[str, str]: summary="Return tool lineages for tools that have them", response_description="Tool lineages for tools that have them", ) - def tool_lineages(self) -> List[Dict[str, Dict]]: + def tool_lineages(self) -> list[dict[str, dict]]: """Return tool lineages for tools that have them.""" return self.configuration_manager.tool_lineages() diff --git a/lib/galaxy/webapps/galaxy/api/dataset_collections.py b/lib/galaxy/webapps/galaxy/api/dataset_collections.py index c91ed7232725..d5cd3871ed11 100644 --- a/lib/galaxy/webapps/galaxy/api/dataset_collections.py +++ b/lib/galaxy/webapps/galaxy/api/dataset_collections.py @@ -1,5 +1,8 @@ from logging import getLogger -from typing import Optional +from typing import ( + Annotated, + Optional, +) from fastapi import ( Body, @@ -8,7 +11,6 @@ Response, status, ) -from typing_extensions import Annotated from galaxy.managers.context import
ProvidesHistoryContext from galaxy.schema.fields import DecodedDatabaseIdField diff --git a/lib/galaxy/webapps/galaxy/api/datasets.py b/lib/galaxy/webapps/galaxy/api/datasets.py index 1e84e89e70cc..b207a3ca3504 100644 --- a/lib/galaxy/webapps/galaxy/api/datasets.py +++ b/lib/galaxy/webapps/galaxy/api/datasets.py @@ -9,8 +9,8 @@ StringIO, ) from typing import ( + Annotated, cast, - List, Optional, ) @@ -25,7 +25,6 @@ Response, StreamingResponse, ) -from typing_extensions import Annotated from galaxy.datatypes.dataproviders.base import MAX_LIMIT from galaxy.schema import ( @@ -153,7 +152,7 @@ def index( ), serialization_params: SerializationParams = Depends(query_serialization_params), filter_query_params: FilterQueryParams = Depends(get_filter_query_params), - ) -> List[AnyHistoryContentItem]: + ) -> list[AnyHistoryContentItem]: entries, total_matches = self.service.index(trans, history_id, serialization_params, filter_query_params) response.headers["total_matches"] = str(total_matches) return entries diff --git a/lib/galaxy/webapps/galaxy/api/datatypes.py b/lib/galaxy/webapps/galaxy/api/datatypes.py index 87fffccf999b..470d292c8a59 100644 --- a/lib/galaxy/webapps/galaxy/api/datatypes.py +++ b/lib/galaxy/webapps/galaxy/api/datatypes.py @@ -5,8 +5,6 @@ import logging from typing import ( cast, - Dict, - List, Optional, Union, ) @@ -80,7 +78,7 @@ async def index( self, extension_only: Optional[bool] = ExtensionOnlyQueryParam, upload_only: Optional[bool] = UploadOnlyQueryParam, - ) -> Union[List[DatatypeDetails], List[str]]: + ) -> Union[list[DatatypeDetails], list[str]]: """Gets the list of all available data types.""" return view_index(self.datatypes_registry, extension_only, upload_only) @@ -119,7 +117,7 @@ async def types_and_mapping( summary="Returns the list of all installed sniffers", response_description="List of datatype sniffers", ) - async def sniffers(self) -> List[str]: + async def sniffers(self) -> list[str]: """Gets the list of all installed data type sniffers.""" return view_sniffers(self.datatypes_registry) @@ -139,9 +137,9 @@ async def converters(self) -> DatatypeConverterList: summary="Returns a dictionary/map of datatypes and EDAM formats", response_description="Dictionary/map of datatypes and EDAM formats", ) - async def edam_formats(self) -> Dict[str, str]: + async def edam_formats(self) -> dict[str, str]: """Gets a map of datatypes and their corresponding EDAM formats.""" - return cast(Dict[str, str], view_edam_formats(self.datatypes_registry)) + return cast(dict[str, str], view_edam_formats(self.datatypes_registry)) @router.get( "/api/datatypes/edam_formats/detailed", @@ -161,9 +159,9 @@ async def edam_formats_detailed(self): summary="Returns a dictionary/map of datatypes and EDAM data", response_description="Dictionary/map of datatypes and EDAM data", ) - async def edam_data(self) -> Dict[str, str]: + async def edam_data(self) -> dict[str, str]: """Gets a map of datatypes and their corresponding EDAM data.""" - return cast(Dict[str, str], view_edam_data(self.datatypes_registry)) + return cast(dict[str, str], view_edam_data(self.datatypes_registry)) @router.get( "/api/datatypes/edam_data/detailed", diff --git a/lib/galaxy/webapps/galaxy/api/display_applications.py b/lib/galaxy/webapps/galaxy/api/display_applications.py index 93e028686351..b8028a54766c 100644 --- a/lib/galaxy/webapps/galaxy/api/display_applications.py +++ b/lib/galaxy/webapps/galaxy/api/display_applications.py @@ -4,8 +4,6 @@ import logging from typing import ( - Dict, - List, Optional, ) @@ -38,7 +36,7 
@@ class FastAPIDisplay: ) def index( self, - ) -> List[DisplayApplication]: + ) -> list[DisplayApplication]: """ Returns the list of display applications. """ @@ -52,7 +50,7 @@ def index( ) def reload( self, - payload: Optional[Dict[str, List[str]]] = Body(default=None), + payload: Optional[dict[str, list[str]]] = Body(default=None), ) -> ReloadFeedback: """ Reloads the list of display applications. diff --git a/lib/galaxy/webapps/galaxy/api/dynamic_tools.py b/lib/galaxy/webapps/galaxy/api/dynamic_tools.py index 78906252bf42..bc675471b5f4 100644 --- a/lib/galaxy/webapps/galaxy/api/dynamic_tools.py +++ b/lib/galaxy/webapps/galaxy/api/dynamic_tools.py @@ -1,7 +1,6 @@ import logging from datetime import datetime from typing import ( - List, Optional, Union, ) @@ -66,7 +65,7 @@ class UnprivilegedToolsApi: dynamic_tools_manager: DynamicToolManager = depends(DynamicToolManager) @router.get("/api/unprivileged_tools", response_model_exclude_defaults=True) - def index(self, active: bool = True, trans: ProvidesUserContext = DependsOnTrans) -> List[UnprivilegedToolResponse]: + def index(self, active: bool = True, trans: ProvidesUserContext = DependsOnTrans) -> list[UnprivilegedToolResponse]: if not trans.user: return [] return [t.to_dict() for t in self.dynamic_tools_manager.list_unprivileged_tools(trans.user, active=active)] diff --git a/lib/galaxy/webapps/galaxy/api/file_sources.py b/lib/galaxy/webapps/galaxy/api/file_sources.py index 4546e7b88ebc..f68ef74f6967 100644 --- a/lib/galaxy/webapps/galaxy/api/file_sources.py +++ b/lib/galaxy/webapps/galaxy/api/file_sources.py @@ -1,5 +1,4 @@ import logging -from typing import List from fastapi import ( Body, @@ -111,7 +110,7 @@ def test_instance_configuration( def instance_index( self, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[UserFileSourceModel]: + ) -> list[UserFileSourceModel]: return self.file_source_instances_manager.index(trans) @router.get( diff --git a/lib/galaxy/webapps/galaxy/api/folders.py b/lib/galaxy/webapps/galaxy/api/folders.py index 1420cb373a60..ffdebf931eea 100644 --- a/lib/galaxy/webapps/galaxy/api/folders.py +++ b/lib/galaxy/webapps/galaxy/api/folders.py @@ -4,6 +4,7 @@ import logging from typing import ( + Annotated, Optional, Union, ) @@ -12,7 +13,6 @@ Body, Query, ) -from typing_extensions import Annotated from galaxy.managers.context import ProvidesUserContext from galaxy.schema.schema import ( diff --git a/lib/galaxy/webapps/galaxy/api/forms.py b/lib/galaxy/webapps/galaxy/api/forms.py index 85d84fbd0e4b..8c019e0252f9 100644 --- a/lib/galaxy/webapps/galaxy/api/forms.py +++ b/lib/galaxy/webapps/galaxy/api/forms.py @@ -3,10 +3,10 @@ """ import logging +from typing import Annotated from fastapi import Path from sqlalchemy import select -from typing_extensions import Annotated from galaxy import web from galaxy.forms.forms import form_factory diff --git a/lib/galaxy/webapps/galaxy/api/genomes.py b/lib/galaxy/webapps/galaxy/api/genomes.py index e13550b01163..ae72b4294fe4 100644 --- a/lib/galaxy/webapps/galaxy/api/genomes.py +++ b/lib/galaxy/webapps/galaxy/api/genomes.py @@ -1,6 +1,5 @@ from typing import ( Any, - List, ) from fastapi import ( @@ -71,7 +70,7 @@ class FastAPIGenomes: @router.get("/api/genomes", summary="Return a list of installed genomes", response_description="Installed genomes") def index( self, trans: ProvidesUserContext = DependsOnTrans, chrom_info: bool = ChromInfoQueryParam - ) -> List[List[str]]: + ) -> list[list[str]]: return self.manager.get_dbkeys(trans.user, chrom_info) @router.get( diff --git 
a/lib/galaxy/webapps/galaxy/api/groups.py b/lib/galaxy/webapps/galaxy/api/groups.py index c5e064b3998d..126d8e65e8f6 100644 --- a/lib/galaxy/webapps/galaxy/api/groups.py +++ b/lib/galaxy/webapps/galaxy/api/groups.py @@ -3,9 +3,9 @@ """ import logging +from typing import Annotated from fastapi import Body -from typing_extensions import Annotated from galaxy.managers.context import ProvidesAppContext from galaxy.managers.groups import GroupsManager diff --git a/lib/galaxy/webapps/galaxy/api/help.py b/lib/galaxy/webapps/galaxy/api/help.py index 8486ce350f8d..4388ef8597a3 100644 --- a/lib/galaxy/webapps/galaxy/api/help.py +++ b/lib/galaxy/webapps/galaxy/api/help.py @@ -1,7 +1,7 @@ import logging +from typing import Annotated from fastapi import Query -from typing_extensions import Annotated from galaxy.managers.context import ProvidesUserContext from galaxy.schema.help import HelpForumSearchResponse diff --git a/lib/galaxy/webapps/galaxy/api/histories.py b/lib/galaxy/webapps/galaxy/api/histories.py index af72e09c8678..00d4e91956bd 100644 --- a/lib/galaxy/webapps/galaxy/api/histories.py +++ b/lib/galaxy/webapps/galaxy/api/histories.py @@ -6,8 +6,8 @@ import logging from typing import ( + Annotated, Any, - List, Literal, Optional, Union, @@ -24,7 +24,6 @@ ) from pydantic.fields import Field from pydantic.main import BaseModel -from typing_extensions import Annotated from galaxy.managers.context import ( ProvidesHistoryContext, @@ -152,14 +151,14 @@ class DeleteHistoryPayload(BaseModel): class DeleteHistoriesPayload(BaseModel): - ids: Annotated[List[DecodedDatabaseIdField], Field(title="IDs", description="List of history IDs to be deleted.")] + ids: Annotated[list[DecodedDatabaseIdField], Field(title="IDs", description="List of history IDs to be deleted.")] purge: Annotated[ bool, Field(default=False, title="Purge", description="Whether to definitely remove this history from disk.") ] class UndeleteHistoriesPayload(BaseModel): - ids: Annotated[List[DecodedDatabaseIdField], Field(title="IDs", description="List of history IDs to be undeleted.")] + ids: Annotated[list[DecodedDatabaseIdField], Field(title="IDs", description="List of history IDs to be undeleted.")] @as_form @@ -208,7 +207,7 @@ def index( description="Whether to return only deleted items.", deprecated=True, # Marked as deprecated as it seems just like '/api/histories/deleted' ), - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: if search is None: return self.service.index( trans, serialization_params, filter_query_params, deleted_only=deleted, all_histories=all @@ -252,7 +251,7 @@ def index_deleted( filter_query_params: FilterQueryParams = Depends(get_filter_query_params), serialization_params: SerializationParams = Depends(query_serialization_params), all: Optional[bool] = AllHistoriesQueryParam, - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: return self.service.index( trans, serialization_params, filter_query_params, deleted_only=True, all_histories=all ) @@ -267,7 +266,7 @@ def published( trans: ProvidesHistoryContext = DependsOnTrans, filter_query_params: FilterQueryParams = Depends(get_filter_query_params), serialization_params: SerializationParams = Depends(query_serialization_params), - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: return self.service.published(trans, serialization_params, filter_query_params) @router.get( @@ -280,7 +279,7 @@ def shared_with_me( trans: ProvidesHistoryContext = DependsOnTrans, filter_query_params: FilterQueryParams = Depends(get_filter_query_params), 
serialization_params: SerializationParams = Depends(query_serialization_params), - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: return self.service.shared_with_me(trans, serialization_params, filter_query_params) @@ -294,7 +293,7 @@ def get_archived_histories( trans: ProvidesHistoryContext = DependsOnTrans, serialization_params: SerializationParams = Depends(query_serialization_params), filter_query_params: FilterQueryParams = Depends(get_filter_query_params), - ) -> List[AnyArchivedHistoryView]: + ) -> list[AnyArchivedHistoryView]: """Get a list of all archived histories for the current user. Archived histories are histories that are not part of the active histories of the user but they can be accessed using this endpoint. @@ -371,7 +370,7 @@ def citations( self, history_id: HistoryIDPathParam, trans: ProvidesHistoryContext = DependsOnTrans, - ) -> List[Any]: + ) -> list[Any]: return self.service.citations(trans, history_id) @@ -425,7 +424,7 @@ def batch_delete( serialization_params: SerializationParams = Depends(query_serialization_params), purge: bool = Query(default=False), payload: DeleteHistoriesPayload = Body(...), - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: if payload: purge = payload.purge results = [] @@ -457,7 +456,7 @@ def batch_undelete( trans: ProvidesHistoryContext = DependsOnTrans, serialization_params: SerializationParams = Depends(query_serialization_params), payload: UndeleteHistoriesPayload = Body(...), - ) -> List[AnyHistoryView]: + ) -> list[AnyHistoryView]: results = [] for history_id in payload.ids: result = self.service.undelete(trans, history_id, serialization_params) diff --git a/lib/galaxy/webapps/galaxy/api/history_contents.py b/lib/galaxy/webapps/galaxy/api/history_contents.py index 829d11f886c9..de7206ac2c0a 100644 --- a/lib/galaxy/webapps/galaxy/api/history_contents.py +++ b/lib/galaxy/webapps/galaxy/api/history_contents.py @@ -4,10 +4,9 @@ import logging from typing import ( - List, + Annotated, Literal, Optional, - Set, Union, ) @@ -24,7 +23,6 @@ Response, StreamingResponse, ) -from typing_extensions import Annotated from galaxy import util from galaxy.exceptions.utils import validation_error_to_message_exception @@ -270,7 +268,7 @@ def parse_index_query_params( def get_legacy_index_query_params( ids: Optional[str] = LegacyIdsQueryParam, - types: Optional[List[str]] = LegacyTypesQueryParam, + types: Optional[list[str]] = LegacyTypesQueryParam, details: Optional[str] = LegacyDetailsQueryParam, deleted: Optional[bool] = LegacyDeletedQueryParam, visible: Optional[bool] = LegacyVisibleQueryParam, @@ -290,7 +288,7 @@ def get_legacy_index_query_params( def parse_legacy_index_query_params( ids: Optional[str] = None, - types: Optional[Union[List[str], str]] = None, + types: Optional[Union[list[str], str]] = None, details: Optional[str] = None, deleted: Optional[bool] = None, visible: Optional[bool] = None, @@ -325,7 +323,7 @@ def parse_legacy_index_query_params( raise validation_error_to_message_exception(e) -def parse_content_types(types: Union[List[str], str]) -> List[HistoryContentType]: +def parse_content_types(types: Union[list[str], str]) -> list[HistoryContentType]: if isinstance(types, list) and len(types) == 1: # Support ?types=dataset,dataset_collection content_types = util.listify(types[0]) else: # Support ?types=dataset&types=dataset_collection @@ -337,7 +335,7 @@ def parse_dataset_details(details: Optional[str]): """Parses the different values that the `dataset_details` parameter can have from a
string.""" if details is not None and details != "all": - dataset_details: Union[None, Set[str], str] = set(util.listify(details)) + dataset_details: Union[None, set[str], str] = set(util.listify(details)) else: # either None or 'all' dataset_details = details return dataset_details @@ -613,7 +611,7 @@ def index_jobs_summary( history_id: HistoryIDPathParam, trans: ProvidesHistoryContext = DependsOnTrans, params: HistoryContentsIndexJobsSummaryParams = Depends(get_index_jobs_summary_params), - ) -> List[AnyJobStateSummary]: + ) -> list[AnyJobStateSummary]: """Return job state summary info for jobs, implicit groups jobs for collections or workflow invocations. **Warning**: We allow anyone to fetch job state information about any object they @@ -699,7 +697,7 @@ def create_typed( type: HistoryContentType = ContentTypePathParam, serialization_params: SerializationParams = Depends(query_serialization_params), payload: CreateHistoryContentPayload = Body(...), - ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]: + ) -> Union[AnyHistoryContentItem, list[AnyHistoryContentItem]]: """Create a new `HDA` or `HDCA` in the given History.""" return self._create(trans, history_id, type, serialization_params, payload) @@ -717,7 +715,7 @@ def create( type: Optional[HistoryContentType] = ContentTypeQueryParam(default=None), serialization_params: SerializationParams = Depends(query_serialization_params), payload: CreateHistoryContentPayload = Body(...), - ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]: + ) -> Union[AnyHistoryContentItem, list[AnyHistoryContentItem]]: """Create a new `HDA` or `HDCA` in the given History.""" return self._create(trans, history_id, type, serialization_params, payload) @@ -728,7 +726,7 @@ def _create( type: Optional[HistoryContentType], serialization_params: SerializationParams, payload: CreateHistoryContentPayload, - ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]: + ) -> Union[AnyHistoryContentItem, list[AnyHistoryContentItem]]: """Create a new `HDA` or `HDCA` in the given History.""" payload.type = type or payload.type return self.service.create(trans, history_id, payload, serialization_params) @@ -1076,7 +1074,7 @@ def create_from_store( trans: ProvidesHistoryContext = DependsOnTrans, serialization_params: SerializationParams = Depends(query_serialization_params), create_payload: CreateHistoryContentFromStore = Body(...), - ) -> List[AnyHistoryContentItem]: + ) -> list[AnyHistoryContentItem]: """ Create history contents from model store. 
Input can be a tarfile created with build_objects script distributed diff --git a/lib/galaxy/webapps/galaxy/api/item_tags.py b/lib/galaxy/webapps/galaxy/api/item_tags.py index cd84954d5b6c..436532ba6183 100644 --- a/lib/galaxy/webapps/galaxy/api/item_tags.py +++ b/lib/galaxy/webapps/galaxy/api/item_tags.py @@ -3,12 +3,12 @@ """ import logging +from typing import Annotated from fastapi import ( Body, Path, ) -from typing_extensions import Annotated from galaxy.managers.context import ProvidesAppContext from galaxy.managers.item_tags import ItemTagsManager diff --git a/lib/galaxy/webapps/galaxy/api/jobs.py b/lib/galaxy/webapps/galaxy/api/jobs.py index 755d7d47a6e7..5e442cb21908 100644 --- a/lib/galaxy/webapps/galaxy/api/jobs.py +++ b/lib/galaxy/webapps/galaxy/api/jobs.py @@ -10,8 +10,8 @@ datetime, ) from typing import ( + Annotated, Any, - List, Optional, Union, ) @@ -23,7 +23,6 @@ Query, ) from pydantic import Field -from typing_extensions import Annotated from galaxy import exceptions from galaxy.managers.context import ( @@ -238,12 +237,12 @@ class ShowFullJobResponse(EncodedJobDetails): title="Standard Error", description="Combined tool and job standard error streams.", ) - job_messages: Optional[List[AnyJobMessage]] = Field( + job_messages: Optional[list[AnyJobMessage]] = Field( default=None, title="Job Messages", description="List with additional information and possible reasons for a failed job.", ) - dependencies: Optional[List[Any]] = Field( + dependencies: Optional[list[Any]] = Field( default=None, title="Job dependencies", description="The dependencies of the job.", @@ -266,12 +265,12 @@ class FastAPIJobs: def index( self, trans: ProvidesUserContext = DependsOnTrans, - states: Optional[List[str]] = Depends(query_parameter_as_list(StateQueryParam)), + states: Optional[list[str]] = Depends(query_parameter_as_list(StateQueryParam)), user_details: bool = UserDetailsQueryParam, user_id: Optional[DecodedDatabaseIdField] = UserIdQueryParam, view: JobIndexViewEnum = ViewQueryParam, - tool_ids: Optional[List[str]] = Depends(query_parameter_as_list(ToolIdQueryParam)), - tool_ids_like: Optional[List[str]] = Depends(query_parameter_as_list(ToolIdLikeQueryParam)), + tool_ids: Optional[list[str]] = Depends(query_parameter_as_list(ToolIdQueryParam)), + tool_ids_like: Optional[list[str]] = Depends(query_parameter_as_list(ToolIdLikeQueryParam)), date_range_min: Optional[Union[datetime, date]] = DateRangeMinQueryParam, date_range_max: Optional[Union[datetime, date]] = DateRangeMaxQueryParam, history_id: Optional[DecodedDatabaseIdField] = HistoryIdQueryParam, @@ -282,7 +281,7 @@ def index( search: Optional[str] = SearchQueryParam, limit: int = LimitQueryParam, offset: int = OffsetQueryParam, - ) -> List[Union[ShowFullJobResponse, EncodedJobDetails, JobSummary]]: + ) -> list[Union[ShowFullJobResponse, EncodedJobDetails, JobSummary]]: payload = JobIndexPayload.model_construct( states=states, user_details=user_details, @@ -342,7 +341,7 @@ def resume( self, job_id: JobIdPathParam, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[JobOutputAssociation]: + ) -> list[JobOutputAssociation]: job = self.service.get_job(trans, job_id=job_id) if not job: raise exceptions.ObjectNotFound("Could not access job with the given id") @@ -399,7 +398,7 @@ def inputs( self, job_id: JobIdPathParam, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[JobInputAssociation]: + ) -> list[JobInputAssociation]: job = self.service.get_job(trans=trans, job_id=job_id) associations = 
self.service.dictify_associations(trans, job.input_datasets, job.input_library_datasets) input_associations = [] @@ -416,7 +415,7 @@ def outputs( self, job_id: JobIdPathParam, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[JobOutputAssociation]: + ) -> list[JobOutputAssociation]: job = self.service.get_job(trans=trans, job_id=job_id) associations = self.service.dictify_associations(trans, job.output_datasets, job.output_library_datasets) output_associations = [] @@ -504,7 +503,7 @@ def metrics_by_job( job_id: JobIdPathParam, hda_ldda: Annotated[Optional[DatasetSourceType], DeprecatedHdaLddaQueryParam] = DatasetSourceType.hda, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[Optional[JobMetric]]: + ) -> list[Optional[JobMetric]]: hda_ldda_str = hda_ldda or "hda" job = self.service.get_job(trans, job_id=job_id, hda_ldda=hda_ldda_str) return [JobMetric(**metric) for metric in summarize_job_metrics(trans, job)] @@ -520,7 +519,7 @@ def metrics_by_dataset( dataset_id: DatasetIdPathParam, hda_ldda: Annotated[DatasetSourceType, HdaLddaQueryParam] = DatasetSourceType.hda, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[Optional[JobMetric]]: + ) -> list[Optional[JobMetric]]: job = self.service.get_job(trans, dataset_id=dataset_id, hda_ldda=hda_ldda) return [JobMetric(**metric) for metric in summarize_job_metrics(trans, job)] @@ -547,7 +546,7 @@ def search( self, payload: Annotated[SearchJobsPayload, SearchJobBody], trans: ProvidesHistoryContext = DependsOnTrans, - ) -> List[EncodedJobDetails]: + ) -> list[EncodedJobDetails]: """ This method is designed to scan the list of previously run jobs and find records of jobs that had the exact same input parameters and datasets. This can be used to minimize the amount of repeated work, and simply diff --git a/lib/galaxy/webapps/galaxy/api/libraries.py b/lib/galaxy/webapps/galaxy/api/libraries.py index 7a814ee09f2a..4436bca293da 100644 --- a/lib/galaxy/webapps/galaxy/api/libraries.py +++ b/lib/galaxy/webapps/galaxy/api/libraries.py @@ -4,7 +4,6 @@ import logging from typing import ( - List, Optional, Union, ) @@ -112,7 +111,7 @@ def create_from_store( self, trans: ProvidesUserContext = DependsOnTrans, payload: CreateLibrariesFromStore = Body(...), - ) -> List[LibrarySummary]: + ) -> list[LibrarySummary]: return self.service.create_from_store(trans, payload) @router.patch( diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 14652554a3ba..b9a512816d5c 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -5,7 +5,6 @@ import logging from typing import ( cast, - List, Optional, ) @@ -64,9 +63,9 @@ class JsonApiRoute(APIContentTypeRoute): LibraryContentsCreateForm = as_form(LibraryContentsFileCreatePayload) -async def get_files(request: Request, files: Optional[List[UploadFile]] = None): +async def get_files(request: Request, files: Optional[list[UploadFile]] = None): # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile - files2: List[StarletteUploadFile] = cast(List[StarletteUploadFile], files or []) + files2: list[StarletteUploadFile] = cast(list[StarletteUploadFile], files or []) if not files2: data = await request.form() for value in data.values(): @@ -132,7 +131,7 @@ def create_form( self, library_id: LibraryIdPathParam, payload: LibraryContentsFileCreatePayload = Depends(LibraryContentsCreateForm.as_form), - files: List[StarletteUploadFile] = Depends(get_files), + files:
list[StarletteUploadFile] = Depends(get_files), trans: ProvidesHistoryContext = DependsOnTrans, ) -> AnyLibraryContentsCreateResponse: """This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead.""" diff --git a/lib/galaxy/webapps/galaxy/api/licenses.py b/lib/galaxy/webapps/galaxy/api/licenses.py index cc64c372af40..7b6098ecdc28 100644 --- a/lib/galaxy/webapps/galaxy/api/licenses.py +++ b/lib/galaxy/webapps/galaxy/api/licenses.py @@ -1,5 +1,3 @@ -from typing import List - from fastapi import Path from galaxy.managers.licenses import ( @@ -31,7 +29,7 @@ class FastAPILicenses: summary="Lists all available SPDX licenses", response_description="List of SPDX licenses", ) - async def index(self) -> List[LicenseMetadataModel]: + async def index(self) -> list[LicenseMetadataModel]: """Returns an index with all the available [SPDX licenses](https://spdx.org/licenses/).""" return self.licenses_manager.get_licenses() diff --git a/lib/galaxy/webapps/galaxy/api/object_store.py b/lib/galaxy/webapps/galaxy/api/object_store.py index 0963530e5818..2cddea67b305 100644 --- a/lib/galaxy/webapps/galaxy/api/object_store.py +++ b/lib/galaxy/webapps/galaxy/api/object_store.py @@ -4,7 +4,6 @@ import logging from typing import ( - List, Union, ) @@ -78,7 +77,7 @@ def index( self, trans: ProvidesUserContext = DependsOnTrans, selectable: bool = SelectableQueryParam, - ) -> List[Union[ConcreteObjectStoreModel, UserConcreteObjectStoreModel]]: + ) -> list[Union[ConcreteObjectStoreModel, UserConcreteObjectStoreModel]]: if not selectable: raise RequestParameterInvalidException( "The object store index query currently needs to be called with selectable=true" @@ -126,7 +125,7 @@ def instance_index( self, trans: ProvidesUserContext = DependsOnTrans, user: User = DependsOnUser, - ) -> List[UserConcreteObjectStoreModel]: + ) -> list[UserConcreteObjectStoreModel]: return self.object_store_instance_manager.index(trans) @router.get( diff --git a/lib/galaxy/webapps/galaxy/api/remote_files.py b/lib/galaxy/webapps/galaxy/api/remote_files.py index c5a708d748bf..b8e8e22ebba0 100644 --- a/lib/galaxy/webapps/galaxy/api/remote_files.py +++ b/lib/galaxy/webapps/galaxy/api/remote_files.py @@ -4,7 +4,7 @@ import logging from typing import ( - List, + Annotated, Optional, ) @@ -13,7 +13,6 @@ Response, ) from fastapi.param_functions import Query -from typing_extensions import Annotated from galaxy.files.sources import PluginKind from galaxy.managers.context import ProvidesUserContext @@ -161,8 +160,8 @@ def plugins( self, user_ctx: ProvidesUserContext = DependsOnTrans, browsable_only: Annotated[Optional[bool], BrowsableQueryParam] = True, - include_kind: Annotated[Optional[List[PluginKind]], IncludeKindQueryParam] = None, - exclude_kind: Annotated[Optional[List[PluginKind]], ExcludeKindQueryParam] = None, + include_kind: Annotated[Optional[list[PluginKind]], IncludeKindQueryParam] = None, + exclude_kind: Annotated[Optional[list[PluginKind]], ExcludeKindQueryParam] = None, ) -> FilesSourcePluginList: """Display plugin information for each of the gxfiles:// URI targets available.""" return self.manager.get_files_source_plugins( diff --git a/lib/galaxy/webapps/galaxy/api/sanitize_allow.py b/lib/galaxy/webapps/galaxy/api/sanitize_allow.py index 278f562bb5bf..1101462299c7 100755 --- a/lib/galaxy/webapps/galaxy/api/sanitize_allow.py +++ b/lib/galaxy/webapps/galaxy/api/sanitize_allow.py @@ -5,7 +5,6 @@ import logging from typing import ( Any, - Dict, ) from galaxy import web @@ -55,7 
+54,7 @@ def _save_allowlist(self, trans): trans.app.queue_worker.send_control_task("reload_sanitize_allowlist", noop_self=True) def _generate_allowlist(self, trans): - sanitize_dict: Dict[str, Any] = dict( + sanitize_dict: dict[str, Any] = dict( blocked_toolshed=[], allowed_toolshed=[], blocked_local=[], allowed_local=[] ) ids = None diff --git a/lib/galaxy/webapps/galaxy/api/storage_cleaner.py b/lib/galaxy/webapps/galaxy/api/storage_cleaner.py index aec1b069d953..1d75b46891a2 100644 --- a/lib/galaxy/webapps/galaxy/api/storage_cleaner.py +++ b/lib/galaxy/webapps/galaxy/api/storage_cleaner.py @@ -4,7 +4,6 @@ import logging from typing import ( - List, Optional, ) @@ -71,7 +70,7 @@ def discarded_histories( offset: Optional[int] = OffsetQueryParam, limit: Optional[int] = LimitQueryParam, order: Optional[StoredItemOrderBy] = OrderQueryParam, - ) -> List[StoredItem]: + ) -> list[StoredItem]: return self.service.get_discarded(trans, "history", offset, limit, order) @router.delete( @@ -106,7 +105,7 @@ def discarded_datasets( offset: Optional[int] = OffsetQueryParam, limit: Optional[int] = LimitQueryParam, order: Optional[StoredItemOrderBy] = OrderQueryParam, - ) -> List[StoredItem]: + ) -> list[StoredItem]: return self.service.get_discarded(trans, "dataset", offset, limit, order) @router.delete( @@ -141,5 +140,5 @@ def archived_histories( offset: Optional[int] = OffsetQueryParam, limit: Optional[int] = LimitQueryParam, order: Optional[StoredItemOrderBy] = OrderQueryParam, - ) -> List[StoredItem]: + ) -> list[StoredItem]: return self.service.get_archived(trans, "history", offset, limit, order) diff --git a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py index 09424bceb0a7..dadf67048d1e 100644 --- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py @@ -2,7 +2,7 @@ import logging from time import strftime from typing import ( - List, + Annotated, Optional, ) @@ -14,7 +14,6 @@ HTTPBadRequest, HTTPForbidden, ) -from typing_extensions import Annotated from galaxy import ( exceptions, @@ -423,7 +422,7 @@ def index( changeset: Optional[str] = ChangesetQueryParam, deleted: Optional[bool] = DeletedQueryParam, uninstalled: Optional[bool] = UninstalledQueryParam, - ) -> List[InstalledToolShedRepository]: + ) -> list[InstalledToolShedRepository]: request = InstalledToolShedRepositoryIndexRequest( name=name, owner=owner, diff --git a/lib/galaxy/webapps/galaxy/api/tools.py b/lib/galaxy/webapps/galaxy/api/tools.py index c5c0b9c923d7..8f0bbec1bb85 100644 --- a/lib/galaxy/webapps/galaxy/api/tools.py +++ b/lib/galaxy/webapps/galaxy/api/tools.py @@ -8,8 +8,6 @@ from typing import ( Any, cast, - Dict, - List, Optional, ) @@ -120,9 +118,9 @@ class PNGIconResponse(FileResponse): FetchDataForm = as_form(FetchDataFormPayload) -async def get_files(request: Request, files: Optional[List[UploadFile]] = None): +async def get_files(request: Request, files: Optional[list[UploadFile]] = None): # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile - files2: List[StarletteUploadFile] = cast(List[StarletteUploadFile], files or []) + files2: list[StarletteUploadFile] = cast(list[StarletteUploadFile], files or []) if not files2: data = await request.form() for value in data.values(): @@ -148,7 +146,7 @@ def fetch_form( self, payload: FetchDataFormPayload = Depends(FetchDataForm.as_form), trans: ProvidesHistoryContext = DependsOnTrans, - files: List[StarletteUploadFile] = 
Depends(get_files), + files: list[StarletteUploadFile] = Depends(get_files), ): return self.service.create_fetch(trans, payload, files) @@ -438,7 +436,7 @@ def tests_summary(self, trans: GalaxyWebTransaction, **kwd): Fetch complete test data for each tool with /api/tools/{tool_id}/test_data?tool_version= """ - test_counts_by_tool: Dict[str, Dict] = {} + test_counts_by_tool: dict[str, dict] = {} for _id, tool in self.app.toolbox.tools(): if not tool.is_datatype_converter: tests = tool.tests @@ -453,7 +451,7 @@ def tests_summary(self, trans: GalaxyWebTransaction, **kwd): return test_counts_by_tool @expose_api_anonymous_and_sessionless - def test_data(self, trans: GalaxyWebTransaction, id, **kwd) -> List[ToolTestDescriptionDict]: + def test_data(self, trans: GalaxyWebTransaction, id, **kwd) -> list[ToolTestDescriptionDict]: """ GET /api/tools/{tool_id}/test_data?tool_version={tool_version} @@ -739,7 +737,7 @@ def create(self, trans: GalaxyWebTransaction, payload, **kwd): return self.service._create(trans, payload, **kwd) -def _kwd_or_payload(kwd: Dict[str, Any]) -> Dict[str, Any]: +def _kwd_or_payload(kwd: dict[str, Any]) -> dict[str, Any]: if "payload" in kwd: - kwd = cast(Dict[str, Any], kwd.get("payload")) + kwd = cast(dict[str, Any], kwd.get("payload")) return kwd diff --git a/lib/galaxy/webapps/galaxy/api/users.py b/lib/galaxy/webapps/galaxy/api/users.py index 905d0f925416..1bc59db7e710 100644 --- a/lib/galaxy/webapps/galaxy/api/users.py +++ b/lib/galaxy/webapps/galaxy/api/users.py @@ -7,9 +7,8 @@ import logging import re from typing import ( + Annotated, Any, - Dict, - List, Optional, Union, ) @@ -22,7 +21,6 @@ status, ) from markupsafe import escape -from typing_extensions import Annotated from galaxy import ( exceptions, @@ -207,7 +205,7 @@ def index_deleted( f_email: Optional[str] = FilterEmailQueryParam, f_name: Optional[str] = FilterNameQueryParam, f_any: Optional[str] = FilterAnyQueryParam, - ) -> List[MaybeLimitedUserModel]: + ) -> list[MaybeLimitedUserModel]: return self.service.get_index(trans=trans, deleted=True, f_email=f_email, f_name=f_name, f_any=f_any) @router.post( @@ -302,7 +300,7 @@ def usage( self, trans: ProvidesUserContext = DependsOnTrans, user_id: FlexibleUserIdType = FlexibleUserIdPathParam, - ) -> List[UserQuotaUsage]: + ) -> list[UserQuotaUsage]: if user := self.service.get_user_full(trans, user_id, False): rval = self.user_serializer.serialize_disk_usage(user) return rval @@ -318,7 +316,7 @@ def objectstore_usage( self, trans: ProvidesUserContext = DependsOnTrans, user_id: FlexibleUserIdType = FlexibleUserIdPathParam, - ) -> List[UserObjectstoreUsage]: + ) -> list[UserObjectstoreUsage]: if user := self.service.get_user_full(trans, user_id, False): return user.dictify_objectstore_usage() else: @@ -481,7 +479,7 @@ def add_custom_builds( ) else: # Have everything needed; create new build. 
- build_dict: Dict[str, Any] = {"name": name} + build_dict: dict[str, Any] = {"name": name} if len_type in ["text", "file"]: # Create new len file new_len = HistoryDatasetAssociation(extension="len", create_dataset=True, sa_session=trans.sa_session) @@ -642,7 +640,7 @@ def index( f_email: Optional[str] = FilterEmailQueryParam, f_name: Optional[str] = FilterNameQueryParam, f_any: Optional[str] = FilterAnyQueryParam, - ) -> List[MaybeLimitedUserModel]: + ) -> list[MaybeLimitedUserModel]: return self.service.get_index(trans=trans, deleted=deleted, f_email=f_email, f_name=f_name, f_any=f_any) @router.get( diff --git a/lib/galaxy/webapps/galaxy/api/visualizations.py b/lib/galaxy/webapps/galaxy/api/visualizations.py index 3315a71e8f05..eadce18344a8 100644 --- a/lib/galaxy/webapps/galaxy/api/visualizations.py +++ b/lib/galaxy/webapps/galaxy/api/visualizations.py @@ -6,7 +6,10 @@ """ import logging -from typing import Optional +from typing import ( + Annotated, + Optional, +) from fastapi import ( Body, @@ -15,7 +18,6 @@ Response, status, ) -from typing_extensions import Annotated from galaxy.managers.context import ProvidesUserContext from galaxy.model import User diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py index 778dd4b27f0c..1ed1a1b00154 100644 --- a/lib/galaxy/webapps/galaxy/api/workflows.py +++ b/lib/galaxy/webapps/galaxy/api/workflows.py @@ -7,9 +7,8 @@ import os from io import BytesIO from typing import ( + Annotated, Any, - Dict, - List, Optional, Union, ) @@ -27,7 +26,6 @@ UUID4, ) from starlette.responses import StreamingResponse -from typing_extensions import Annotated from galaxy import ( exceptions, @@ -537,7 +535,7 @@ def build_module(self, trans: GalaxyWebTransaction, payload=None): # tool state not sent, use the manually constructed inputs payload["tool_state"] = payload["inputs"] module = module_factory.from_dict(trans, payload, from_tool_form=from_tool_form) - module_state: Dict[str, Any] = {} + module_state: dict[str, Any] = {} errors: ParameterValidationErrorsT = {} if from_tool_form: populate_state(trans, module.get_inputs(), inputs, module_state, errors=errors, check=True) @@ -933,7 +931,7 @@ def index( offset: Optional[int] = OffsetQueryParam, search: Optional[str] = SearchQueryParam, skip_step_counts: bool = SkipStepCountsQueryParam, - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: """Lists stored workflows viewable by the user.""" payload = WorkflowIndexPayload.model_construct( show_published=show_published, @@ -1093,7 +1091,7 @@ def invoke( payload: InvokeWorkflowBody, workflow_id: MultiTypeWorkflowIDPathParam, trans: ProvidesHistoryContext = DependsOnTrans, - ) -> Union[WorkflowInvocationResponse, List[WorkflowInvocationResponse]]: + ) -> Union[WorkflowInvocationResponse, list[WorkflowInvocationResponse]]: return self.service.invoke_workflow(trans, workflow_id, payload) @router.get( @@ -1311,7 +1309,7 @@ def create_invocations_from_store( self, payload: CreateInvocationsFromStoreBody, trans: ProvidesHistoryContext = DependsOnTrans, - ) -> List[WorkflowInvocationResponse]: + ) -> list[WorkflowInvocationResponse]: """ Input can be an archive describing a Galaxy model store containing a workflow invocation - for instance one created with write_store @@ -1343,7 +1341,7 @@ def index_invocations( step_details: StepDetailQueryParam = False, include_nested_invocations: bool = True, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[WorkflowInvocationResponse]: + ) -> list[WorkflowInvocationResponse]:
if not trans.user: # Anon users don't have accessible invocations (currently, though published invocations should be a thing) response.headers["total_matches"] = "0" @@ -1396,7 +1394,7 @@ def index_workflow_invocations( view: SerializationViewQueryParam = None, step_details: StepDetailQueryParam = False, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[WorkflowInvocationResponse]: + ) -> list[WorkflowInvocationResponse]: invocations = self.index_invocations( response=response, workflow_id=workflow_id, @@ -1682,7 +1680,7 @@ def invocation_step_jobs_summary( self, invocation_id: InvocationIDPathParam, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[ + ) -> list[ Union[ InvocationStepJobsResponseStepModel, InvocationStepJobsResponseJobModel, @@ -1723,7 +1721,7 @@ def workflow_invocation_step_jobs_summary( workflow_id: StoredWorkflowIDPathParam, invocation_id: InvocationIDPathParam, trans: ProvidesUserContext = DependsOnTrans, - ) -> List[ + ) -> list[ Union[ InvocationStepJobsResponseStepModel, InvocationStepJobsResponseJobModel, @@ -1774,5 +1772,5 @@ def get_invocation_metrics( self, invocation_id: InvocationIDPathParam, trans: ProvidesHistoryContext = DependsOnTrans, - ) -> List[WorkflowJobMetric]: + ) -> list[WorkflowJobMetric]: return self.invocations_service.show_invocation_metrics(trans=trans, invocation_id=invocation_id) diff --git a/lib/galaxy/webapps/galaxy/controllers/admin.py b/lib/galaxy/webapps/galaxy/controllers/admin.py index 090fe6386c61..c66a9388c284 100644 --- a/lib/galaxy/webapps/galaxy/controllers/admin.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin.py @@ -1,5 +1,4 @@ import logging -from typing import Set from sqlalchemy import ( false, @@ -399,7 +398,7 @@ def data_tables_list(self, trans, **kwd): @web.require_admin def data_types_list(self, trans, **kwd) -> DatatypesEntryT: datatypes = [] - keys: Set[str] = set() + keys: set[str] = set() message = kwd.get("message", "") status = kwd.get("status", "done") for dtype in sorted(trans.app.datatypes_registry.datatype_elems, key=lambda dt: dt.get("extension")): diff --git a/lib/galaxy/webapps/galaxy/controllers/root.py b/lib/galaxy/webapps/galaxy/controllers/root.py index 40c72204fea5..8a32497cfa30 100644 --- a/lib/galaxy/webapps/galaxy/controllers/root.py +++ b/lib/galaxy/webapps/galaxy/controllers/root.py @@ -100,9 +100,8 @@ def display_as(self, trans: GalaxyWebTransaction, id=None, display_app=None, **k if the file could not be returned, returns a message as a string. """ # TODO: unencoded id - data = trans.sa_session.query(HistoryDatasetAssociation).get(id) authz_method = kwd.get("authz_method", "rbac") - if data: + if data := trans.sa_session.query(HistoryDatasetAssociation).get(id): if authz_method == "rbac" and trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ): diff --git a/lib/galaxy/webapps/galaxy/fast_app.py b/lib/galaxy/webapps/galaxy/fast_app.py index 143586156e8f..fa8ddd6aa072 100644 --- a/lib/galaxy/webapps/galaxy/fast_app.py +++ b/lib/galaxy/webapps/galaxy/fast_app.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, ) from a2wsgi import WSGIMiddleware @@ -149,7 +148,7 @@ def get_fastapi_instance(root_path="") -> FastAPI: ) -def get_openapi_schema() -> Dict[str, Any]: +def get_openapi_schema() -> dict[str, Any]: """ Dumps openAPI schema without starting a full app and webserver. 
""" diff --git a/lib/galaxy/webapps/galaxy/services/authenticate.py b/lib/galaxy/webapps/galaxy/services/authenticate.py index f71caf88cdbd..a33ffd5e4537 100644 --- a/lib/galaxy/webapps/galaxy/services/authenticate.py +++ b/lib/galaxy/webapps/galaxy/services/authenticate.py @@ -1,9 +1,7 @@ from base64 import b64decode from typing import ( Any, - Dict, Optional, - Tuple, Union, ) from urllib.parse import unquote @@ -34,7 +32,7 @@ def __init__(self, user_manager: UserManager, auth_manager: AuthManager, api_key self._auth_manager = auth_manager self._api_keys_manager = api_keys_manager - def get_api_key(self, environ: Dict[str, Any], request: Request) -> APIKeyResponse: + def get_api_key(self, environ: dict[str, Any], request: Request) -> APIKeyResponse: auth_header = environ.get("HTTP_AUTHORIZATION") identity, password = self._decode_baseauth(auth_header) # check if this is an email address or username @@ -48,7 +46,7 @@ def get_api_key(self, environ: Dict[str, Any], request: Request) -> APIKeyRespon else: raise exceptions.AuthenticationFailed("Invalid password.") - def _decode_baseauth(self, encoded_str: Optional[Any]) -> Tuple[str, str]: + def _decode_baseauth(self, encoded_str: Optional[Any]) -> tuple[str, str]: """ Decode an encrypted HTTP basic authentication string. Returns a tuple of the form (email, password), and raises a HTTPBadRequest exception if diff --git a/lib/galaxy/webapps/galaxy/services/base.py b/lib/galaxy/webapps/galaxy/services/base.py index dcf91e80f2f2..9e320540d5e7 100644 --- a/lib/galaxy/webapps/galaxy/services/base.py +++ b/lib/galaxy/webapps/galaxy/services/base.py @@ -3,7 +3,6 @@ from typing import ( Any, cast, - List, NamedTuple, Optional, ) @@ -80,7 +79,7 @@ def encode_id(self, id: int, kind: Optional[str] = None) -> EncodedDatabaseIdFie """Encodes a raw database ID.""" return encode_with_security(self.security, id, kind=kind) - def decode_ids(self, ids: List[EncodedDatabaseIdField]) -> List[int]: + def decode_ids(self, ids: list[EncodedDatabaseIdField]) -> list[int]: """ Decodes all encoded IDs in the given list. """ diff --git a/lib/galaxy/webapps/galaxy/services/dataset_collections.py b/lib/galaxy/webapps/galaxy/services/dataset_collections.py index c237d04f84f1..923f775d6ab3 100644 --- a/lib/galaxy/webapps/galaxy/services/dataset_collections.py +++ b/lib/galaxy/webapps/galaxy/services/dataset_collections.py @@ -1,8 +1,6 @@ from logging import getLogger from typing import ( - List, Optional, - Set, TYPE_CHECKING, Union, ) @@ -68,8 +66,8 @@ class DatasetCollectionAttributesResult(Model): # Are the following fields really used/needed? 
extension: str = Field(..., description="The dataset file extension.", examples=["txt"]) model_class: Literal["HistoryDatasetCollectionAssociation"] = ModelClassField("HistoryDatasetCollectionAssociation") - dbkeys: Optional[Set[str]] - extensions: Optional[Set[str]] + dbkeys: Optional[set[str]] + extensions: Optional[set[str]] tags: TagCollection @@ -83,13 +81,13 @@ class SuitableConverter(Model): class SuitableConverters(RootModel): """Collection of converters that can be used on a particular dataset collection.""" - root: List[SuitableConverter] + root: list[SuitableConverter] class DatasetCollectionContentElements(RootModel): """Represents a collection of elements contained in the dataset collection.""" - root: List[DCESummary] + root: list[DCESummary] class DatasetCollectionsService(ServiceBase, UsesLibraryMixinItems): diff --git a/lib/galaxy/webapps/galaxy/services/datasets.py b/lib/galaxy/webapps/galaxy/services/datasets.py index 86500c10612a..bc6982906e06 100644 --- a/lib/galaxy/webapps/galaxy/services/datasets.py +++ b/lib/galaxy/webapps/galaxy/services/datasets.py @@ -7,10 +7,7 @@ from enum import Enum from typing import ( Any, - Dict, - List, Optional, - Tuple, Union, ) @@ -145,15 +142,15 @@ class DatasetStorageDetails(Model): dataset_state: str = Field( description="The model state of the supplied dataset instance.", ) - hashes: List[dict] = Field(description="The file contents hashes associated with the supplied dataset instance.") - sources: List[dict] = Field(description="The file sources associated with the supplied dataset instance.") + hashes: list[dict] = Field(description="The file contents hashes associated with the supplied dataset instance.") + sources: list[dict] = Field(description="The file sources associated with the supplied dataset instance.") shareable: bool = Field( description="Is this dataset shareable.", ) quota: ConcreteObjectStoreQuotaSourceDetails = Field( description="Information about quota sources around dataset storage." ) - badges: List[BadgeDict] = Field( + badges: list[BadgeDict] = Field( description="A list of badges describing object store properties for concrete object store dataset is stored in." ) relocatable: bool = Field( @@ -174,7 +171,7 @@ class DatasetInheritanceChainEntry(Model): class DatasetInheritanceChain(RootModel): - root: List[DatasetInheritanceChainEntry] = Field( + root: list[DatasetInheritanceChainEntry] = Field( default=[], title="Dataset inheritance chain", ) @@ -198,7 +195,7 @@ class ExtraFileEntry(Model): class DatasetExtraFiles(RootModel): """A list of extra files associated with a dataset.""" - root: List[ExtraFileEntry] + root: list[ExtraFileEntry] class DatasetTextContentDetails(Model): @@ -216,7 +213,7 @@ class DatasetTextContentDetails(Model): class ConvertedDatasetsMap(RootModel): """Map of `file extension` -> `converted dataset encoded id`""" - root: Dict[str, DecodedDatabaseIdField] # extension -> dataset ID + root: dict[str, DecodedDatabaseIdField] # extension -> dataset ID model_config = ConfigDict( json_schema_extra={ "example": { @@ -232,7 +229,7 @@ class DataMode(str, Enum): class DataResult(Model): - data: List[Any] + data: list[Any] dataset_type: Optional[str] = None message: Optional[str] = None extra_info: Optional[Any] = None # Seems to be always None, deprecate? 
@@ -244,7 +241,7 @@ class BamDataResult(DataResult): class DeleteDatasetBatchPayload(Model): - datasets: List[DatasetSourceId] = Field( + datasets: list[DatasetSourceId] = Field( description="The list of datasets IDs with their sources to be deleted/purged.", ) purge: Optional[bool] = Field( @@ -284,7 +281,7 @@ class DeleteDatasetBatchResult(Model): success_count: int = Field( description="The number of datasets successfully processed.", ) - errors: Optional[List[DatasetErrorMessage]] = Field( + errors: Optional[list[DatasetErrorMessage]] = Field( default=None, description=( "A list of dataset IDs and the corresponding error message if something " @@ -319,11 +316,11 @@ def __init__( self.dataset_manager = dataset_manager @property - def serializer_by_type(self) -> Dict[str, ModelSerializer]: + def serializer_by_type(self) -> dict[str, ModelSerializer]: return {"dataset": self.hda_serializer, "dataset_collection": self.hdca_serializer} @property - def dataset_manager_by_type(self) -> Dict[str, DatasetAssociationManager]: + def dataset_manager_by_type(self) -> dict[str, DatasetAssociationManager]: return {"hda": self.hda_manager, "ldda": self.ldda_manager} def index( @@ -332,7 +329,7 @@ def index( history_id: Optional[DecodedDatabaseIdField], serialization_params: SerializationParams, filter_query_params: FilterQueryParams, - ) -> Tuple[List[AnyHistoryContentItem], int]: + ) -> tuple[list[AnyHistoryContentItem], int]: """ Search datasets or collections using a query system and returns a list containing summary of dataset or dataset_collection information. @@ -527,7 +524,7 @@ def report(self, trans: ProvidesHistoryContext, dataset_id: DecodedDatabaseIdFie **extra_attributes, ) - def drs_dataset_instance(self, object_id: str) -> Tuple[int, DatasetSourceType]: + def drs_dataset_instance(self, object_id: str) -> tuple[int, DatasetSourceType]: if object_id.startswith("hda-"): decoded_object_id = self.decode_id(object_id[len("hda-") :], kind="drs") hda_ldda = DatasetSourceType.hda @@ -549,7 +546,7 @@ def get_drs_object(self, trans: ProvidesHistoryContext, object_id: str, request_ # TODO: issue warning if not being served on HTTPS @ 443 - required by the spec. self_uri = f"drs://drs.{request_url.components.netloc}/{object_id}" - checksums: List[Checksum] = [] + checksums: list[Checksum] = [] for dataset_hash in dataset_instance.dataset.hashes: if dataset_hash.extra_files_path: continue @@ -770,7 +767,7 @@ def delete_batch( Warning: only the ownership of the dataset and upload state for HDAs is checked, no other checks or restrictions are made. """ success_count = 0 - errors: List[DatasetErrorMessage] = [] + errors: list[DatasetErrorMessage] = [] for dataset in payload.datasets: try: manager = self.dataset_manager_by_type[dataset.src] @@ -896,7 +893,7 @@ def _search_features( trans, dataset: model.DatasetInstance, query: Optional[str], - ) -> List[List[str]]: + ) -> list[list[str]]: """ Returns features, locations in dataset that match query. 
Format is a list of features; each feature is a list itself: [name, location] diff --git a/lib/galaxy/webapps/galaxy/services/histories.py b/lib/galaxy/webapps/galaxy/services/histories.py index 59552e3a60fa..8dfdb16b93e5 100644 --- a/lib/galaxy/webapps/galaxy/services/histories.py +++ b/lib/galaxy/webapps/galaxy/services/histories.py @@ -9,9 +9,7 @@ ) from typing import ( cast, - List, Optional, - Tuple, Union, ) @@ -188,7 +186,7 @@ def index( ] return rval - def _get_deleted_filter(self, deleted: Optional[bool], filter_params: List[Tuple[str, str, str]]): + def _get_deleted_filter(self, deleted: Optional[bool], filter_params: list[tuple[str, str, str]]): # TODO: this should all be removed (along with the default) in v2 # support the old default of not-returning/filtering-out deleted histories try: @@ -218,7 +216,7 @@ def index_query( payload: HistoryIndexQueryPayload, serialization_params: SerializationParams, include_total_count: bool = False, - ) -> Tuple[List[AnyHistoryView], Union[int, None]]: + ) -> tuple[list[AnyHistoryView], Union[int, None]]: """Return a list of History accessible by the user :rtype: list @@ -572,7 +570,7 @@ def archive_export( trans, history_id: DecodedDatabaseIdField, payload: Optional[ExportHistoryArchivePayload] = None, - ) -> Tuple[HistoryArchiveExportResult, bool]: + ) -> tuple[HistoryArchiveExportResult, bool]: """ start job (if needed) to create history export for corresponding history. @@ -783,7 +781,7 @@ def get_archived_histories( serialization_params: SerializationParams, filter_query_params: FilterQueryParams, include_total_matches: bool = False, - ) -> Tuple[List[AnyArchivedHistoryView], Optional[int]]: + ) -> tuple[list[AnyArchivedHistoryView], Optional[int]]: if trans.anonymous: raise glx_exceptions.AuthenticationRequired("Only registered users can have or access archived histories.") diff --git a/lib/galaxy/webapps/galaxy/services/history_contents.py b/lib/galaxy/webapps/galaxy/services/history_contents.py index 4a7b765da3e3..66f8f0852453 100644 --- a/lib/galaxy/webapps/galaxy/services/history_contents.py +++ b/lib/galaxy/webapps/galaxy/services/history_contents.py @@ -1,14 +1,11 @@ import logging import os import re +from collections.abc import Iterable from typing import ( Any, cast, - Dict, - Iterable, - List, Optional, - Set, TYPE_CHECKING, Union, ) @@ -132,7 +129,7 @@ log = logging.getLogger(__name__) -DatasetDetailsType = Union[Set[DecodedDatabaseIdField], Literal["all"]] +DatasetDetailsType = Union[set[DecodedDatabaseIdField], Literal["all"]] class HistoryContentsIndexParams(Model): @@ -145,8 +142,8 @@ class HistoryContentsIndexParams(Model): class LegacyHistoryContentsIndexParams(Model): """Query parameters exclusively used by the *legacy version* of `index` operation.""" - ids: Optional[List[DecodedDatabaseIdField]] - types: List[HistoryContentType] + ids: Optional[list[DecodedDatabaseIdField]] + types: list[HistoryContentType] dataset_details: Optional[DatasetDetailsType] deleted: Optional[bool] visible: Optional[bool] @@ -160,8 +157,8 @@ class LegacyHistoryContentsIndexParams(Model): class HistoryContentsIndexJobsSummaryParams(Model): """Query parameters exclusively used by the `index_jobs_summary` operation.""" - ids: List[DecodedDatabaseIdField] = [] - types: List[JobSourceType] = [] + ids: list[DecodedDatabaseIdField] = [] + types: list[JobSourceType] = [] class CreateHistoryContentPayloadBase(Model): @@ -207,12 +204,12 @@ class CollectionElementIdentifier(Model): title="ID", description="The encoded ID of the element.", ) - tags: 
List[str] = Field( + tags: list[str] = Field( default=[], title="Tags", description="The list of tags associated with the element.", ) - element_identifiers: Optional[List["CollectionElementIdentifier"]] = Field( + element_identifiers: Optional[list["CollectionElementIdentifier"]] = Field( default=None, title="Element Identifiers", description="List of elements that should be in the new nested collection.", @@ -417,7 +414,7 @@ def index_jobs_summary( self, trans, params: HistoryContentsIndexJobsSummaryParams, - ) -> List[AnyJobStateSummary]: + ) -> list[AnyJobStateSummary]: """ Return job state summary info for jobs, implicit groups jobs for collections or workflow invocations @@ -510,7 +507,7 @@ def create( history_id: DecodedDatabaseIdField, payload: CreateHistoryContentPayload, serialization_params: SerializationParams, - ) -> Union[AnyHistoryContentItem, List[AnyHistoryContentItem]]: + ) -> Union[AnyHistoryContentItem, list[AnyHistoryContentItem]]: """ Create a new HDA or HDCA. @@ -537,14 +534,14 @@ def create_from_store( history_id: DecodedDatabaseIdField, payload: CreateHistoryContentFromStore, serialization_params: SerializationParams, - ) -> List[AnyHistoryContentItem]: + ) -> list[AnyHistoryContentItem]: history = self.history_manager.get_mutable(history_id, trans.user, current_history=trans.history) object_tracker = self.create_objects_from_store( trans, payload, history=history, ) - rval: List[AnyHistoryContentItem] = [] + rval: list[AnyHistoryContentItem] = [] serialization_params.default_view = "detailed" for hda in object_tracker.hdas_by_key.values(): if hda.visible: @@ -611,7 +608,7 @@ def update( trans, history_id: Optional[DecodedDatabaseIdField], id: DecodedDatabaseIdField, - payload: Dict[str, Any], + payload: dict[str, Any], serialization_params: SerializationParams, contents_type: HistoryContentType, ): @@ -650,7 +647,7 @@ def update_batch( history_id: DecodedDatabaseIdField, payload: UpdateHistoryContentsBatchPayload, serialization_params: SerializationParams, - ) -> List[AnyHistoryContentItem]: + ) -> list[AnyHistoryContentItem]: """ PUT /api/histories/{history_id}/contents @@ -667,8 +664,8 @@ def update_batch( """ history = self.history_manager.get_mutable(history_id, trans.user, current_history=trans.history) items = payload.items - hda_ids: List[DecodedDatabaseIdField] = [] - hdca_ids: List[DecodedDatabaseIdField] = [] + hda_ids: list[DecodedDatabaseIdField] = [] + hdca_ids: list[DecodedDatabaseIdField] = [] for item in items: contents_type = item.history_content_type if contents_type == HistoryContentType.dataset: @@ -702,7 +699,7 @@ def bulk_operation( history = self.history_manager.get_mutable(history_id, trans.user, current_history=trans.history) filters = self.history_contents_filters.parse_query_filters(filter_query_params) self._validate_bulk_operation_params(payload, trans.user, trans) - contents: List[HistoryItem] + contents: list[HistoryItem] if payload.items: contents = self._get_contents_by_item_list( trans, @@ -888,7 +885,7 @@ def __delete_dataset( rval["async_result"] = async_result is not None return rval - def __update_dataset_collection(self, trans, id: DecodedDatabaseIdField, payload: Dict[str, Any]): + def __update_dataset_collection(self, trans, id: DecodedDatabaseIdField, payload: dict[str, Any]): return self.dataset_collection_manager.update(trans, "history", id, payload) def __update_dataset( @@ -896,7 +893,7 @@ def __update_dataset( trans, history: History, id: DecodedDatabaseIdField, - payload: Dict[str, Any], + payload: dict[str, Any], 
serialization_params: SerializationParams, ): # anon user: ensure that history ids match up and the history is the current, @@ -910,7 +907,7 @@ def __update_dataset( return {} def __datasets_for_update( - self, trans, history: History, hda_ids: List[DecodedDatabaseIdField], payload: Dict[str, Any] + self, trans, history: History, hda_ids: list[DecodedDatabaseIdField], payload: dict[str, Any] ): anonymous_user = not trans.user_is_admin and trans.user is None if anonymous_user: @@ -931,7 +928,7 @@ def __datasets_for_update( return hdas - def __deserialize_dataset(self, trans, hda, payload: Dict[str, Any]): + def __deserialize_dataset(self, trans, hda, payload: dict[str, Any]): # TODO: when used in batch it would be a lot faster if we set flush=false # and the caller flushes only at the end or when a given chunk size is reached. self.hda_deserializer.deserialize(hda, payload, user=trans.user, trans=trans, flush=True) @@ -1347,8 +1344,8 @@ def _apply_bulk_operation( operation: HistoryContentItemOperation, params: Optional[AnyBulkOperationParams], trans: ProvidesHistoryContext, - ) -> List[BulkOperationItemError]: - errors: List[BulkOperationItemError] = [] + ) -> list[BulkOperationItemError]: + errors: list[BulkOperationItemError] = [] for item in contents: error = self._apply_operation_to_item(operation, item, params, trans) if error: @@ -1372,9 +1369,9 @@ def _apply_operation_to_item( ) def _get_contents_by_item_list( - self, trans, history: History, items: List[HistoryContentItem] - ) -> List["HistoryItem"]: - contents: List[HistoryItem] = [] + self, trans, history: History, items: list[HistoryContentItem] + ) -> list["HistoryItem"]: + contents: list[HistoryItem] = [] dataset_items = filter(lambda item: item.history_content_type == HistoryContentType.dataset, items) datasets_ids = (dataset.id for dataset in dataset_items) @@ -1412,7 +1409,7 @@ def __init__( self.hdca_manager = hdca_manager self.dataset_collection_manager = dataset_collection_manager self.flush = False - self._operation_map: Dict[HistoryContentItemOperation, ItemOperation] = { + self._operation_map: dict[HistoryContentItemOperation, ItemOperation] = { HistoryContentItemOperation.hide: lambda item, params, trans: self._hide(item), HistoryContentItemOperation.unhide: lambda item, params, trans: self._unhide(item), HistoryContentItemOperation.delete: lambda item, params, trans: self._delete(item, trans), diff --git a/lib/galaxy/webapps/galaxy/services/invocations.py b/lib/galaxy/webapps/galaxy/services/invocations.py index 9b7ef61cc1ce..7dfec9de8376 100644 --- a/lib/galaxy/webapps/galaxy/services/invocations.py +++ b/lib/galaxy/webapps/galaxy/services/invocations.py @@ -2,9 +2,6 @@ import logging from typing import ( Any, - Dict, - List, - Tuple, ) from pydantic import Field @@ -98,7 +95,7 @@ def __init__( def index( self, trans, invocation_payload: InvocationIndexPayload, serialization_params: InvocationSerializationParams - ) -> Tuple[List[WorkflowInvocationResponse], int]: + ) -> tuple[list[WorkflowInvocationResponse], int]: workflow_id = invocation_payload.workflow_id if invocation_payload.instance: instance = invocation_payload.instance @@ -191,7 +188,7 @@ def update_invocation_step(self, trans, step_id, action): wfi_step = self._workflows_manager.update_invocation_step(trans, step_id, action) return self.serialize_workflow_invocation_step(wfi_step) - def show_invocation_step_jobs_summary(self, trans, invocation_id) -> List[Dict[str, Any]]: + def show_invocation_step_jobs_summary(self, trans, invocation_id) -> 
list[dict[str, Any]]: ids = [] types = [] for job_source_type, job_source_id, _ in invocation_job_source_iter(trans.sa_session, invocation_id): @@ -199,7 +196,7 @@ def show_invocation_step_jobs_summary(self, trans, invocation_id) -> List[Dict[s types.append(job_source_type) return fetch_job_states(trans.sa_session, ids, types) - def show_invocation_jobs_summary(self, trans, invocation_id) -> Dict[str, Any]: + def show_invocation_jobs_summary(self, trans, invocation_id) -> dict[str, Any]: ids = [invocation_id] types = ["WorkflowInvocation"] return fetch_job_states(trans.sa_session, ids, types)[0] @@ -317,7 +314,7 @@ def serialize_workflow_invocation_to_request( preferred_object_store_id = None preferred_intermediate_object_store_id = None preferred_outputs_object_store_id = None - step_param_map: Dict[str, Dict] = {} + step_param_map: dict[str, dict] = {} for parameter in invocation.input_parameters: parameter_type = parameter.type diff --git a/lib/galaxy/webapps/galaxy/services/jobs.py b/lib/galaxy/webapps/galaxy/services/jobs.py index 8b18b54aec2f..9e3960ed4fdc 100644 --- a/lib/galaxy/webapps/galaxy/services/jobs.py +++ b/lib/galaxy/webapps/galaxy/services/jobs.py @@ -1,8 +1,6 @@ from enum import Enum from typing import ( Any, - Dict, - List, Optional, TYPE_CHECKING, Union, @@ -65,7 +63,7 @@ def show( trans: ProvidesUserContext, id: DecodedDatabaseIdField, full: bool = False, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: job = self.job_manager.get_accessible_job( trans, id, @@ -92,7 +90,7 @@ def index( or payload.history_id is not None ) jobs = self.job_manager.index_query(trans, payload) - out: List[Dict[str, Any]] = [] + out: list[dict[str, Any]] = [] for job in jobs.yield_per(model.YIELD_PER_ROWS): # TODO: optimize if this crucial if check_security_of_jobs and not security_check(trans, job.history, check_accessible=True): @@ -146,8 +144,8 @@ def get_job( # Raise an exception if neither job_id nor dataset_id is provided raise ValueError("Either job_id or dataset_id must be provided.") - def dictify_associations(self, trans, *association_lists) -> List[JobAssociation]: - rval: List[JobAssociation] = [] + def dictify_associations(self, trans, *association_lists) -> list[JobAssociation]: + rval: list[JobAssociation] = [] for association_list in association_lists: rval.extend(self.__dictify_association(trans, a) for a in association_list) return rval diff --git a/lib/galaxy/webapps/galaxy/services/libraries.py b/lib/galaxy/webapps/galaxy/services/libraries.py index 4f728ddd0b96..b16358dbb326 100644 --- a/lib/galaxy/webapps/galaxy/services/libraries.py +++ b/lib/galaxy/webapps/galaxy/services/libraries.py @@ -1,8 +1,6 @@ import logging from typing import ( Any, - Dict, - List, Optional, Union, ) @@ -94,7 +92,7 @@ def create(self, trans, payload: CreateLibraryPayload) -> LibrarySummary: library = self.library_manager.create(trans, payload.name, payload.description, payload.synopsis) return self._to_summary(trans, library) - def create_from_store(self, trans, payload: CreateLibrariesFromStore) -> List[LibrarySummary]: + def create_from_store(self, trans, payload: CreateLibrariesFromStore) -> list[LibrarySummary]: object_tracker = self.create_objects_from_store( trans, payload, @@ -188,7 +186,7 @@ def get_permissions( ) def set_permissions( - self, trans, id: DecodedDatabaseIdField, payload: Dict[str, Any] + self, trans, id: DecodedDatabaseIdField, payload: dict[str, Any] ) -> Union[LibraryLegacySummary, LibraryCurrentPermissions]: # Old legacy response """Set permissions of the given library 
to the given role ids. @@ -318,7 +316,7 @@ def set_permissions( roles = self.library_manager.get_current_roles(trans, library) return LibraryCurrentPermissions.model_construct(**roles) - def set_permissions_old(self, trans, library, payload: Dict[str, Any]) -> LibraryLegacySummary: + def set_permissions_old(self, trans, library, payload: dict[str, Any]) -> LibraryLegacySummary: """ *** old implementation for backward compatibility *** diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index bc88c14a5475..0c17a6eb463f 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -2,16 +2,14 @@ import shutil import tempfile from typing import ( + Annotated, cast, - List, Optional, - Tuple, Union, ) from fastapi import Path from starlette.datastructures import UploadFile as StarletteUploadFile -from typing_extensions import Annotated from galaxy import exceptions from galaxy.actions.library import LibraryActions @@ -87,7 +85,7 @@ def index( library_id: DecodedDatabaseIdField, ) -> LibraryContentsIndexListResponse: """Return a list of library files and folders.""" - rval: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = [] + rval: list[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = [] current_user_roles = trans.get_current_user_roles() library = trans.sa_session.get(Library, library_id) if not library: @@ -132,7 +130,7 @@ def create( trans: ProvidesHistoryContext, library_id: DecodedDatabaseIdField, payload: AnyLibraryContentsCreatePayload, - files: Optional[List[StarletteUploadFile]] = None, + files: Optional[list[StarletteUploadFile]] = None, ) -> AnyLibraryContentsCreateResponse: """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: @@ -242,7 +240,7 @@ def delete( def _decode_library_content_id( self, content_id: MaybeLibraryFolderOrDatasetID, - ) -> Tuple: + ) -> tuple: if len(content_id) % 16 == 0: return "LibraryDataset", content_id elif content_id.startswith("F"): diff --git a/lib/galaxy/webapps/galaxy/services/library_folder_contents.py b/lib/galaxy/webapps/galaxy/services/library_folder_contents.py index 6629559140fa..689bc7de3697 100644 --- a/lib/galaxy/webapps/galaxy/services/library_folder_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_folder_contents.py @@ -1,6 +1,5 @@ import logging from dataclasses import dataclass -from typing import List from galaxy import ( exceptions, @@ -72,7 +71,7 @@ def index( user_permissions = self._retrieve_user_permissions_on_folder(trans, current_user_roles, folder) tag_manager = tags.GalaxyTagHandler(trans.sa_session) - folder_contents: List[AnyLibraryFolderItem] = [] + folder_contents: list[AnyLibraryFolderItem] = [] contents, total_rows = self.folder_manager.get_contents(trans, folder, payload) for content_item in contents: if isinstance(content_item, model.LibraryFolder): @@ -123,7 +122,7 @@ def create( raise exc def _retrieve_user_permissions_on_folder( - self, trans: ProvidesUserContext, current_user_roles: List[model.Role], folder: model.LibraryFolder + self, trans: ProvidesUserContext, current_user_roles: list[model.Role], folder: model.LibraryFolder ) -> UserFolderPermissions: """Returns the permissions of the user for the given folder. 
@@ -153,7 +152,7 @@ def _retrieve_user_permissions_on_folder( def _serialize_library_dataset( self, trans: ProvidesUserContext, - current_user_roles: List[model.Role], + current_user_roles: list[model.Role], tag_manager: tags.GalaxyTagHandler, library_dataset: model.LibraryDataset, ) -> FileLibraryFolderItem: diff --git a/lib/galaxy/webapps/galaxy/services/notifications.py b/lib/galaxy/webapps/galaxy/services/notifications.py index a9b4302a8db3..ae3e44947197 100644 --- a/lib/galaxy/webapps/galaxy/services/notifications.py +++ b/lib/galaxy/webapps/galaxy/services/notifications.py @@ -1,9 +1,7 @@ from datetime import datetime from typing import ( - List, NoReturn, Optional, - Set, Union, ) @@ -186,7 +184,7 @@ def update_broadcasted_notification( self._raise_notification_not_found(notification_id) def update_user_notifications( - self, user_context: ProvidesUserContext, notification_ids: Set[int], request: UserNotificationUpdateRequest + self, user_context: ProvidesUserContext, notification_ids: set[int], request: UserNotificationUpdateRequest ) -> NotificationsBatchUpdateResponse: """Updates a batch of notifications received by the user with the requested values.""" self.notification_manager.ensure_notifications_enabled() @@ -242,7 +240,7 @@ def _ensure_there_are_changes(self, request: NotificationUpdateRequest): def _get_all_broadcasted( self, since: Optional[datetime] = None, active_only: Optional[bool] = True - ) -> List[BroadcastNotificationResponse]: + ) -> list[BroadcastNotificationResponse]: notifications = self.notification_manager.get_all_broadcasted_notifications(since, active_only) broadcasted_notifications = [ BroadcastNotificationResponse.model_validate(notification) for notification in notifications @@ -255,7 +253,7 @@ def _get_user_notifications( limit: Optional[int] = None, offset: Optional[int] = None, since: Optional[datetime] = None, - ) -> List[UserNotificationResponse]: + ) -> list[UserNotificationResponse]: notifications = self.notification_manager.get_user_notifications(user_context.user, limit, offset, since) user_notifications = [UserNotificationResponse.model_validate(notification) for notification in notifications] return user_notifications diff --git a/lib/galaxy/webapps/galaxy/services/pages.py b/lib/galaxy/webapps/galaxy/services/pages.py index 3fb75432b265..d77560efd272 100644 --- a/lib/galaxy/webapps/galaxy/services/pages.py +++ b/lib/galaxy/webapps/galaxy/services/pages.py @@ -1,6 +1,5 @@ import logging from typing import ( - Tuple, Union, ) @@ -68,7 +67,7 @@ def __init__( def index( self, trans, payload: PageIndexQueryPayload, include_total_count: bool = False - ) -> Tuple[PageSummaryList, Union[int, None]]: + ) -> tuple[PageSummaryList, Union[int, None]]: """Return a list of Pages viewable by the user :rtype: list diff --git a/lib/galaxy/webapps/galaxy/services/sharable.py b/lib/galaxy/webapps/galaxy/services/sharable.py index 1ac68d7ae2c4..57cce95213ad 100644 --- a/lib/galaxy/webapps/galaxy/services/sharable.py +++ b/lib/galaxy/webapps/galaxy/services/sharable.py @@ -1,11 +1,6 @@ import logging from typing import ( - Dict, - List, Optional, - Set, - Tuple, - Type, Union, ) @@ -61,7 +56,7 @@ class ShareableService: and have a compatible SharableModelSerializer implementation. 
""" - share_with_status_cls: Type[ShareWithStatus] = ShareWithStatus + share_with_status_cls: type[ShareWithStatus] = ShareWithStatus def __init__( self, @@ -125,8 +120,8 @@ def _share_with_options( self, trans, item, - users: Set[User], - errors: Set[str], + users: set[User], + errors: set[str], share_option: Optional[SharingOptions] = None, ): new_users = None @@ -148,9 +143,9 @@ def _get_sharing_status(self, trans, item): status["users_shared_with"] = [{"id": a.user.id, "email": a.user.email} for a in item.users_shared_with] return SharingStatus(**status) - def _get_users(self, trans, emails_or_ids: List[UserIdentifier]) -> Tuple[Set[User], Set[str]]: - send_to_users: Set[User] = set() - send_to_err: Set[str] = set() + def _get_users(self, trans, emails_or_ids: list[UserIdentifier]) -> tuple[set[User], set[str]]: + send_to_users: set[User] = set() + send_to_err: set[str] = set() for email_or_id in set(emails_or_ids): send_to_user = None if isinstance(email_or_id, int): @@ -175,7 +170,7 @@ def _get_users(self, trans, emails_or_ids: List[UserIdentifier]) -> Tuple[Set[Us return send_to_users, send_to_err def _send_notification_to_users( - self, users_to_notify: Set[User], item: SharableItem, status: ShareWithStatus, galaxy_url: Optional[str] = None + self, users_to_notify: set[User], item: SharableItem, status: ShareWithStatus, galaxy_url: Optional[str] = None ): if ( self.notification_service.notification_manager.notifications_enabled @@ -193,7 +188,7 @@ def _send_notification_to_users( class SharedItemNotificationFactory: source = "galaxy_sharing_system" - type_map: Dict[Type[SharableItem], SharableItemType] = { + type_map: dict[type[SharableItem], SharableItemType] = { History: "history", StoredWorkflow: "workflow", Visualization: "visualization", @@ -202,7 +197,7 @@ class SharedItemNotificationFactory: @staticmethod def build_notification_request( - item: SharableItem, users_to_notify: Set[User], status: ShareWithStatus, galaxy_url: Optional[str] = None + item: SharableItem, users_to_notify: set[User], status: ShareWithStatus, galaxy_url: Optional[str] = None ) -> NotificationCreateRequest: user_ids = [user.id for user in users_to_notify] request = NotificationCreateRequest( diff --git a/lib/galaxy/webapps/galaxy/services/storage_cleaner.py b/lib/galaxy/webapps/galaxy/services/storage_cleaner.py index a671d94db2a2..a9c79f2ba10d 100644 --- a/lib/galaxy/webapps/galaxy/services/storage_cleaner.py +++ b/lib/galaxy/webapps/galaxy/services/storage_cleaner.py @@ -1,8 +1,6 @@ import logging from typing import ( - Dict, Optional, - Set, ) from galaxy.managers.base import StorageCleanerManager @@ -31,7 +29,7 @@ def __init__( self.user_manager = user_manager self.history_cleaner = history_cleaner self.hda_cleaner = hda_cleaner - self.storage_cleaner_map: Dict[StoredItemType, StorageCleanerManager] = { + self.storage_cleaner_map: dict[StoredItemType, StorageCleanerManager] = { "history": self.history_cleaner, "dataset": self.hda_cleaner, } @@ -66,6 +64,6 @@ def get_archived( user = self.get_authenticated_user(trans) return self.storage_cleaner_map[stored_item_type].get_archived(user, offset, limit, order) - def cleanup_items(self, trans: ProvidesHistoryContext, stored_item_type: StoredItemType, item_ids: Set[int]): + def cleanup_items(self, trans: ProvidesHistoryContext, stored_item_type: StoredItemType, item_ids: set[int]): user = self.get_authenticated_user(trans) return self.storage_cleaner_map[stored_item_type].cleanup_items(user, item_ids) diff --git 
a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py index 594015ca4528..f02c51977cce 100644 --- a/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py +++ b/lib/galaxy/webapps/galaxy/services/tool_shed_repositories.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -42,7 +41,7 @@ def __init__( self._install_model_context = install_model_context self._tool_shed_registry = tool_shed_registry - def index(self, request: InstalledToolShedRepositoryIndexRequest) -> List[InstalledToolShedRepository]: + def index(self, request: InstalledToolShedRepositoryIndexRequest) -> list[InstalledToolShedRepository]: repositories = self._get_tool_shed_repositories( name=request.name, owner=request.owner, diff --git a/lib/galaxy/webapps/galaxy/services/tools.py b/lib/galaxy/webapps/galaxy/services/tools.py index c4adc9dd7742..ee34fa16330b 100644 --- a/lib/galaxy/webapps/galaxy/services/tools.py +++ b/lib/galaxy/webapps/galaxy/services/tools.py @@ -6,8 +6,6 @@ from typing import ( Any, cast, - Dict, - List, Literal, Optional, Union, @@ -64,7 +62,7 @@ def create_fetch( self, trans: ProvidesHistoryContext, fetch_payload: Union[FetchDataFormPayload, FetchDataPayload], - files: Optional[List[UploadFile]] = None, + files: Optional[list[UploadFile]] = None, ): payload = fetch_payload.model_dump(exclude_unset=True) request_version = "1" @@ -210,7 +208,7 @@ def _create(self, trans: ProvidesHistoryContext, payload, **kwd): def _handle_inputs_output_to_api_response(self, trans, tool, target_history, vars): # TODO: check for errors and ensure that output dataset(s) are available. output_datasets = vars.get("out_data", []) - rval: Dict[str, Any] = {"outputs": [], "output_collections": [], "jobs": [], "implicit_collections": []} + rval: dict[str, Any] = {"outputs": [], "output_collections": [], "jobs": [], "implicit_collections": []} rval["produces_entry_points"] = tool.produces_entry_points if job_errors := vars.get("job_errors", []): # If we are here - some jobs were successfully executed but some failed. 
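Two mechanical rewrites of the kind pyupgrade and auto-walrus produce run through every hunk in this diff: typing.Dict/List/Tuple/Set/Type give way to the built-in generics that became subscriptable in Python 3.9 (PEP 585), and assign-then-test statement pairs collapse into assignment expressions, as in the services/tools.py job_errors hunk just above. A minimal standalone sketch of both patterns follows; the names (report, counts, labels) are hypothetical and appear nowhere in this diff.

from typing import Optional


def report(counts: dict[str, int], labels: Optional[list[str]] = None) -> tuple[int, set[str]]:
    # PEP 585 style: dict/list/tuple/set are subscripted directly,
    # so the typing.Dict/List/Tuple/Set aliases are no longer imported.
    seen: set[str] = set()
    # Walrus style: `total = sum(...)` followed by `if total:` becomes
    # a single assignment expression, mirroring the job_errors rewrite.
    if total := sum(counts.values()):
        seen.update(labels or counts)
    return total, seen

Both rewrites are behavior-preserving, which is why they can be applied tree-wide by tooling rather than by hand.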
diff --git a/lib/galaxy/webapps/galaxy/services/users.py b/lib/galaxy/webapps/galaxy/services/users.py index 6bcd08a393ad..431506d567da 100644 --- a/lib/galaxy/webapps/galaxy/services/users.py +++ b/lib/galaxy/webapps/galaxy/services/users.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, Union, ) @@ -201,7 +200,7 @@ def get_index( f_email: Optional[str], f_name: Optional[str], f_any: Optional[str], - ) -> List[MaybeLimitedUserModel]: + ) -> list[MaybeLimitedUserModel]: # never give any info to non-authenticated users if not trans.user: raise glx_exceptions.AuthenticationRequired("Only registered users can view the list of users") @@ -232,7 +231,7 @@ def get_index( trans.app.config.expose_user_email, trans.app.config.expose_user_name, ) - rval: List[MaybeLimitedUserModel] = [] + rval: list[MaybeLimitedUserModel] = [] for user in users: user_dict = user.to_dict() # If NOT configured to expose_email, do not expose email UNLESS the user is self, or diff --git a/lib/galaxy/webapps/galaxy/services/visualizations.py b/lib/galaxy/webapps/galaxy/services/visualizations.py index e44a2e17218b..9eec6478f09a 100644 --- a/lib/galaxy/webapps/galaxy/services/visualizations.py +++ b/lib/galaxy/webapps/galaxy/services/visualizations.py @@ -3,7 +3,6 @@ from typing import ( cast, Optional, - Tuple, Union, ) @@ -76,7 +75,7 @@ def index( trans: ProvidesUserContext, payload: VisualizationIndexQueryPayload, include_total_count: bool = False, - ) -> Tuple[VisualizationSummaryList, Union[int, None]]: + ) -> tuple[VisualizationSummaryList, Union[int, None]]: """Return a list of Visualizations viewable by the user :rtype: list diff --git a/lib/galaxy/webapps/galaxy/services/workflows.py b/lib/galaxy/webapps/galaxy/services/workflows.py index c5d4aa573a60..b1223eec77f5 100644 --- a/lib/galaxy/webapps/galaxy/services/workflows.py +++ b/lib/galaxy/webapps/galaxy/services/workflows.py @@ -1,10 +1,7 @@ import logging from typing import ( Any, - Dict, - List, Optional, - Tuple, Union, ) @@ -63,7 +60,7 @@ def index( trans: ProvidesUserContext, payload: WorkflowIndexPayload, include_total_count: bool = False, - ) -> Tuple[List[Dict[str, Any]], Optional[int]]: + ) -> tuple[list[dict[str, Any]], Optional[int]]: user = trans.user missing_tools = payload.missing_tools query, total_matches = self._workflows_manager.index_query(trans, payload, include_total_count) @@ -124,7 +121,7 @@ def invoke_workflow( trans, workflow_id, payload: InvokeWorkflowPayload, - ) -> Union[WorkflowInvocationResponse, List[WorkflowInvocationResponse]]: + ) -> Union[WorkflowInvocationResponse, list[WorkflowInvocationResponse]]: if trans.anonymous: raise exceptions.AuthenticationRequired("You need to be logged in to run workflows.") trans.check_user_activation() diff --git a/lib/galaxy/webapps/openapi/utils.py b/lib/galaxy/webapps/openapi/utils.py index 88ae97e43c0b..c362b40c49a6 100644 --- a/lib/galaxy/webapps/openapi/utils.py +++ b/lib/galaxy/webapps/openapi/utils.py @@ -2,13 +2,10 @@ Copy of https://github.com/tiangolo/fastapi/blob/master/fastapi/openapi/utils.py with changes from https://github.com/tiangolo/fastapi/pull/10903 """ +from collections.abc import Sequence from typing import ( Any, - Dict, - List, Optional, - Sequence, - Set, Union, ) @@ -37,15 +34,15 @@ def get_openapi( description: Optional[str] = None, routes: Sequence[BaseRoute], webhooks: Optional[Sequence[BaseRoute]] = None, - tags: Optional[List[Dict[str, Any]]] = None, - servers: Optional[List[Dict[str, Union[str, Any]]]] = None, + tags: Optional[list[dict[str, Any]]] = 
None, + servers: Optional[list[dict[str, Union[str, Any]]]] = None, terms_of_service: Optional[str] = None, - contact: Optional[Dict[str, Union[str, Any]]] = None, - license_info: Optional[Dict[str, Union[str, Any]]] = None, + contact: Optional[dict[str, Union[str, Any]]] = None, + license_info: Optional[dict[str, Union[str, Any]]] = None, separate_input_output_schemas: bool = True, schema_generator: Optional[GenerateJsonSchema] = None, -) -> Dict[str, Any]: - info: Dict[str, Any] = {"title": title, "version": version} +) -> dict[str, Any]: + info: dict[str, Any] = {"title": title, "version": version} if summary: info["summary"] = summary if description: @@ -56,13 +53,13 @@ def get_openapi( info["contact"] = contact if license_info: info["license"] = license_info - output: Dict[str, Any] = {"openapi": openapi_version, "info": info} + output: dict[str, Any] = {"openapi": openapi_version, "info": info} if servers: output["servers"] = servers - components: Dict[str, Dict[str, Any]] = {} - paths: Dict[str, Dict[str, Any]] = {} - webhook_paths: Dict[str, Dict[str, Any]] = {} - operation_ids: Set[str] = set() + components: dict[str, dict[str, Any]] = {} + paths: dict[str, dict[str, Any]] = {} + webhook_paths: dict[str, dict[str, Any]] = {} + operation_ids: set[str] = set() all_fields = get_fields_from_routes(list(routes or []) + list(webhooks or [])) model_name_map = get_compat_model_name_map(all_fields) schema_generator = schema_generator or GenerateJsonSchema(ref_template=REF_TEMPLATE) diff --git a/lib/galaxy/work/context.py b/lib/galaxy/work/context.py index 038ba1a82019..67aae7c9f2fb 100644 --- a/lib/galaxy/work/context.py +++ b/lib/galaxy/work/context.py @@ -1,10 +1,7 @@ import abc from typing import ( Any, - Dict, - List, Optional, - Tuple, TYPE_CHECKING, ) @@ -44,10 +41,10 @@ def __init__( ): self._app = app self.__user = user - self.__user_current_roles: Optional[List[Role]] = None + self.__user_current_roles: Optional[list[Role]] = None self.__history = history self._url_builder = url_builder - self._short_term_cache: Dict[Tuple[str, ...], Any] = {} + self._short_term_cache: dict[tuple[str, ...], Any] = {} self.workflow_building_mode = workflow_building_mode self.galaxy_session = galaxy_session diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py index fa1c7a9b747b..1b472be74f3f 100644 --- a/lib/galaxy/workflow/modules.py +++ b/lib/galaxy/workflow/modules.py @@ -7,16 +7,12 @@ import math import re from collections import defaultdict +from collections.abc import Iterable from typing import ( Any, cast, - Dict, get_args, - Iterable, - List, Optional, - Tuple, - Type, TYPE_CHECKING, Union, ) @@ -137,7 +133,7 @@ # ones. 
RUNTIME_POST_JOB_ACTIONS_KEY = "__POST_JOB_ACTIONS__" -POSSIBLE_PARAMETER_TYPES: Tuple[INPUT_PARAMETER_TYPES] = get_args(INPUT_PARAMETER_TYPES) +POSSIBLE_PARAMETER_TYPES: tuple[INPUT_PARAMETER_TYPES] = get_args(INPUT_PARAMETER_TYPES) class OptionDict(TypedDict): @@ -250,7 +246,7 @@ def evaluate_value_from_expressions(progress, step, execution_state, extra_step_ if not value_from_expressions and when_expression is None: return {} - hda_references: List[model.HistoryDatasetAssociation] = [] + hda_references: list[model.HistoryDatasetAssociation] = [] step_state = {} for key, value in extra_step_state.items(): @@ -550,7 +546,7 @@ def recover_mapping(self, invocation_step, progress): progress.set_step_outputs(invocation_step, outputs, already_persisted=True) - def get_informal_replacement_parameters(self, step) -> List[str]: + def get_informal_replacement_parameters(self, step) -> list[str]: """Return a list of informal replacement parameters. If replacement is handled via formal workflow inputs - do not include it in this list. @@ -667,12 +663,12 @@ class SubWorkflowModule(WorkflowModule): # - Second pass actually turn RuntimeInputs into inputs if possible. type = "subworkflow" name = "Subworkflow" - _modules: Optional[List[Any]] = None + _modules: Optional[list[Any]] = None subworkflow: Workflow def __init__(self, trans, content_id=None, **kwds): super().__init__(trans, content_id, **kwds) - self.post_job_actions: Optional[Dict[str, Any]] = None + self.post_job_actions: Optional[dict[str, Any]] = None @classmethod def from_dict(Class, trans, d, **kwds): @@ -830,7 +826,7 @@ def execute( assert len(progress.when_values) == 1, "Got more than 1 when value, this shouldn't be possible" iteration_elements_iter = [(None, progress.when_values[0] if progress.when_values else None)] - when_values: List[Union[bool, None]] = [] + when_values: list[Union[bool, None]] = [] for iteration_elements, when_value in iteration_elements_iter: if when_value is False or not step.when_expression: # We're skipping this step (when==False) or we keep @@ -915,7 +911,7 @@ def callback(input, prefixed_name, prefixed_label, value=None, **kwds): return inputs - def get_informal_replacement_parameters(self, step) -> List[str]: + def get_informal_replacement_parameters(self, step) -> list[str]: """Return a list of replacement parameters.""" replacement_parameters = set() @@ -980,7 +976,7 @@ def format_param(trans, formats): class InputModuleState(TypedDict, total=False): optional: bool - format: List[str] + format: list[str] tag: str @@ -1051,7 +1047,7 @@ def _parse_state_into_dict(self): optional = self.default_optional rval["optional"] = optional if "format" in inputs: - formats: Optional[List[str]] = listify(inputs["format"]) + formats: Optional[list[str]] = listify(inputs["format"]) else: formats = None if formats: @@ -1335,7 +1331,7 @@ def regex_validator_definition(): **when_this_type.inputs, } - restrict_how_source: Dict[str, Union[str, List[Dict[str, Union[str, bool]]]]] = dict( + restrict_how_source: dict[str, Union[str, list[dict[str, Union[str, bool]]]]] = dict( name="how", label="Restrict Text Values?", type="select" ) restrict_how_source["options"] = [ @@ -1459,13 +1455,13 @@ def regex_validator_definition(): def get_config_form(self, step=None): """Serializes input parameters of a module into input dictionaries.""" - group_inputs: List[Dict[str, Any]] = [] + group_inputs: list[dict[str, Any]] = [] populate_model(self.trans, self.get_inputs(), self.state.inputs, group_inputs) return {"title": self.name, "inputs": 
group_inputs} def restrict_options(self, step, connections: Iterable[WorkflowStepConnection], default_value): try: - static_options: List[List[ParameterOption]] = [] + static_options: list[list[ParameterOption]] = [] # Retrieve possible runtime options for 'select' type inputs for connection in connections: # Well this isn't a great assumption... @@ -1491,7 +1487,7 @@ def callback(input, prefixed_name, context, **kwargs): ].static_options ) - options: Optional[List[OptionDict]] = None + options: Optional[list[OptionDict]] = None if static_options and len(static_options) == 1: # If we are connected to a single option, just use it as is so order is preserved cleanly and such. options = [ @@ -1529,7 +1525,7 @@ def get_runtime_inputs(self, step, connections: Optional[Iterable[WorkflowStepCo raise ValueError("Invalid parameter type for workflow parameters encountered.") # Optional parameters for tool input source definition. - parameter_kwds: Dict[str, Union[str, List[Dict[str, Any]]]] = {} + parameter_kwds: dict[str, Union[str, list[dict[str, Any]]]] = {} if "multiple" in parameter_def: parameter_kwds["multiple"] = parameter_def["multiple"] @@ -1904,10 +1900,10 @@ def __init__( f"Exact tool specified during workflow module creation for [{tool_id}] but couldn't find correct version [{tool_version}]." ) self.tool = None - self.post_job_actions: Dict[str, Any] = {} - self.runtime_post_job_actions: Dict[str, Any] = {} - self.workflow_outputs: List[Dict[str, Any]] = [] - self.version_changes: List[str] = [] + self.post_job_actions: dict[str, Any] = {} + self.runtime_post_job_actions: dict[str, Any] = {} + self.workflow_outputs: list[dict[str, Any]] = [] + self.version_changes: list[str] = [] # ---- Creating modules from various representations --------------------- @@ -2102,7 +2098,7 @@ def get_all_outputs(self, data_only=False): for name, tool_output in self.tool.outputs.items(): if filter_output(self.tool, tool_output, self.state.inputs): continue - extra_kwds: Dict[str, Any] = {} + extra_kwds: dict[str, Any] = {} if isinstance(tool_output, ToolExpressionOutput): extra_kwds["parameter"] = True if isinstance(tool_output, ToolOutputCollection): @@ -2117,7 +2113,7 @@ def get_all_outputs(self, data_only=False): collection_type = rule_set.collection_type extra_kwds["collection_type"] = collection_type extra_kwds["collection_type_source"] = tool_output.structure.collection_type_source - formats: List[Optional[str]] = ["input"] # TODO: fix + formats: list[Optional[str]] = ["input"] # TODO: fix elif ( isinstance(tool_output, (ToolOutput, ToolExpressionOutput, ToolOutputCollection)) and tool_output.format_source is not None @@ -2150,7 +2146,7 @@ def get_all_outputs(self, data_only=False): def get_config_form(self, step=None): if self.tool: self.add_dummy_datasets(connections=step and step.input_connections) - incoming: Dict[str, str] = {} + incoming: dict[str, str] = {} params_to_incoming(incoming, self.tool.inputs, self.state.inputs, self.trans.app) return self.tool.to_json(self.trans, incoming, workflow_building_mode=True) @@ -2442,7 +2438,7 @@ def callback(input, prefixed_name: str, **kwargs): param_combinations.append(execution_state.inputs) complete = False - completed_jobs: Dict[int, Optional[Job]] = tool.completed_jobs( + completed_jobs: dict[int, Optional[Job]] = tool.completed_jobs( trans, use_cached_job, param_combinations, @@ -2483,7 +2479,7 @@ def callback(input, prefixed_name: str, **kwargs): raise DelayedWorkflowEvaluation(why=delayed_why) 
progress.record_executed_job_count(len(execution_tracker.successful_jobs)) - step_outputs: Dict[str, Union[model.HistoryDatasetCollectionAssociation, model.HistoryDatasetAssociation]] = {} + step_outputs: dict[str, Union[model.HistoryDatasetCollectionAssociation, model.HistoryDatasetAssociation]] = {} if collection_info: step_outputs.update(execution_tracker.implicit_collections) else: @@ -2576,7 +2572,7 @@ def __to_pja(self, key, value, step): action_arguments = None return PostJobAction(value["action_type"], step, output_name, action_arguments) - def get_informal_replacement_parameters(self, step: WorkflowStep) -> List[str]: + def get_informal_replacement_parameters(self, step: WorkflowStep) -> list[str]: """Return a list of replacement parameters.""" replacement_parameters = set() for pja in step.post_job_actions: @@ -2589,7 +2585,7 @@ def get_informal_replacement_parameters(self, step: WorkflowStep) -> List[str]: class WorkflowModuleFactory: - def __init__(self, module_types: Dict[str, Type[WorkflowModule]]): + def __init__(self, module_types: dict[str, type[WorkflowModule]]): self.module_types = module_types def from_dict(self, trans, d, **kwargs) -> WorkflowModule: diff --git a/lib/galaxy/workflow/refactor/execute.py b/lib/galaxy/workflow/refactor/execute.py index da967dcbcf12..c6b9395b0288 100644 --- a/lib/galaxy/workflow/refactor/execute.py +++ b/lib/galaxy/workflow/refactor/execute.py @@ -1,7 +1,6 @@ import logging from typing import ( Any, - Dict, ) from galaxy.exceptions import RequestParameterInvalidException @@ -136,7 +135,7 @@ def _apply_add_input(self, action: AddInputAction, execution: RefactorActionExec input_type = action.type module_type = None - tool_state: Dict[str, Any] = {} + tool_state: dict[str, Any] = {} if input_type in ["data", "dataset"]: module_type = "data_input" elif input_type in ["data_collection", "dataset_collection"]: diff --git a/lib/galaxy/workflow/refactor/schema.py b/lib/galaxy/workflow/refactor/schema.py index 76f4335ce01c..1ed82e902ec8 100644 --- a/lib/galaxy/workflow/refactor/schema.py +++ b/lib/galaxy/workflow/refactor/schema.py @@ -1,8 +1,7 @@ from enum import Enum from typing import ( + Annotated, Any, - Dict, - List, Optional, Union, ) @@ -12,7 +11,6 @@ Field, ) from typing_extensions import ( - Annotated, Literal, ) @@ -118,7 +116,7 @@ class AddStepAction(BaseAction): action_type: Literal["add_step"] type: str = Field(description="Module type of the step to add, see galaxy.workflow.modules for available types.") - tool_state: Optional[Dict[str, Any]] = None + tool_state: Optional[dict[str, Any]] = None label: Optional[str] = Field( None, description="A unique label for the step being added, must be distinct from the labels already present in the workflow.", @@ -144,9 +142,9 @@ class AddInputAction(BaseAction): label: Optional[str] = None position: Optional[Position] = None collection_type: Optional[str] = None - restrictions: Optional[List[str]] = None + restrictions: Optional[list[str]] = None restrict_on_connections: Optional[bool] = None - suggestions: Optional[List[str]] = None + suggestions: Optional[list[str]] = None optional: Optional[bool] = False default: Optional[Any] = None # this probably needs to be revisited when we have more complex field types @@ -269,7 +267,7 @@ class UpgradeAllStepsAction(BaseAction): class RefactorActions(BaseModel): - actions: List[Annotated[union_action_classes, Field(discriminator="action_type")]] + actions: list[Annotated[union_action_classes, Field(discriminator="action_type")]] dry_run: bool = False 
@@ -328,4 +326,4 @@ class RefactorActionExecutionMessage(BaseModel): class RefactorActionExecution(BaseModel): action: union_action_classes - messages: List[RefactorActionExecutionMessage] + messages: list[RefactorActionExecutionMessage] diff --git a/lib/galaxy/workflow/run.py b/lib/galaxy/workflow/run.py index 1712bb2e72b6..a3203e8f0281 100644 --- a/lib/galaxy/workflow/run.py +++ b/lib/galaxy/workflow/run.py @@ -3,10 +3,7 @@ from collections.abc import MutableMapping from typing import ( Any, - Dict, - List, Optional, - Tuple, TYPE_CHECKING, Union, ) @@ -62,7 +59,7 @@ log = logging.getLogger(__name__) -WorkflowOutputsType = Dict[int, Any] +WorkflowOutputsType = dict[int, Any] # Entry point for core workflow scheduler. @@ -71,7 +68,7 @@ def schedule( workflow: "Workflow", workflow_run_config: WorkflowRunConfig, workflow_invocation: WorkflowInvocation, -) -> Tuple[WorkflowOutputsType, WorkflowInvocation]: +) -> tuple[WorkflowOutputsType, WorkflowInvocation]: return __invoke(trans, workflow, workflow_run_config, workflow_invocation) @@ -81,7 +78,7 @@ def __invoke( workflow_run_config: WorkflowRunConfig, workflow_invocation: Optional[WorkflowInvocation] = None, populate_state: bool = False, -) -> Tuple[WorkflowOutputsType, WorkflowInvocation]: +) -> tuple[WorkflowOutputsType, WorkflowInvocation]: """Run the supplied workflow in the supplied target_history.""" if populate_state: modules.populate_module_and_state( @@ -132,7 +129,7 @@ def queue_invoke( trans: "GalaxyWebTransaction", workflow: "Workflow", workflow_run_config: WorkflowRunConfig, - request_params: Optional[Dict[str, Any]] = None, + request_params: Optional[dict[str, Any]] = None, populate_state: bool = True, flush: bool = True, ) -> WorkflowInvocation: @@ -201,7 +198,7 @@ def __init__( ) self.progress = progress - def invoke(self) -> Dict[int, Any]: + def invoke(self) -> dict[int, Any]: workflow_invocation = self.workflow_invocation config = self.trans.app.config maximum_duration = getattr(config, "maximum_workflow_invocation_duration", -1) @@ -364,17 +361,17 @@ class WorkflowProgress: def __init__( self, workflow_invocation: WorkflowInvocation, - inputs_by_step_id: Dict[int, Any], + inputs_by_step_id: dict[int, Any], module_injector: ModuleInjector, - param_map: Dict[int, Dict[str, Any]], + param_map: dict[int, dict[str, Any]], jobs_per_scheduling_iteration: int = -1, copy_inputs_to_history: bool = False, use_cached_job: bool = False, - replacement_dict: Optional[Dict[str, str]] = None, + replacement_dict: Optional[dict[str, str]] = None, subworkflow_collection_info=None, when_values=None, ) -> None: - self.outputs: Dict[int, Any] = {} + self.outputs: dict[int, Any] = {} self.module_injector = module_injector self.workflow_invocation = workflow_invocation self.inputs_by_step_id = inputs_by_step_id @@ -384,7 +381,7 @@ def __init__( self.copy_inputs_to_history = copy_inputs_to_history self.use_cached_job = use_cached_job self.replacement_dict = replacement_dict or {} - self.runtime_replacements: Dict[str, str] = {} + self.runtime_replacements: dict[str, str] = {} self.subworkflow_collection_info = subworkflow_collection_info self.subworkflow_structure = subworkflow_collection_info.structure if subworkflow_collection_info else None self.when_values = when_values @@ -401,7 +398,7 @@ def record_executed_job_count(self, job_count: int) -> None: def remaining_steps( self, - ) -> List[Tuple["WorkflowStep", Optional[WorkflowInvocationStep]]]: + ) -> list[tuple["WorkflowStep", Optional[WorkflowInvocationStep]]]: # Previously computed and 
persisted step states. step_states = self.workflow_invocation.step_states_by_step_id() steps = self.workflow_invocation.workflow.steps @@ -431,11 +428,11 @@ def remaining_steps( remaining_steps.append((step, invocation_step)) return remaining_steps - def replacement_for_input(self, trans, step: "WorkflowStep", input_dict: Dict[str, Any]): + def replacement_for_input(self, trans, step: "WorkflowStep", input_dict: dict[str, Any]): replacement: Union[ NoReplacement, model.DatasetCollectionInstance, - List[model.DatasetCollectionInstance], + list[model.DatasetCollectionInstance], HistoryItem, ] = NO_REPLACEMENT prefixed_name = input_dict["name"] @@ -569,7 +566,7 @@ def get_replacement_workflow_output(self, workflow_output: "WorkflowOutput"): def set_outputs_for_input( self, invocation_step: WorkflowInvocationStep, - outputs: Optional[Dict[str, Any]] = None, + outputs: Optional[dict[str, Any]] = None, already_persisted: bool = False, ) -> None: step = invocation_step.workflow_step @@ -600,7 +597,7 @@ def effective_replacement_dict(self): return replacement_dict def set_step_outputs( - self, invocation_step: WorkflowInvocationStep, outputs: Dict[str, Any], already_persisted: bool = False + self, invocation_step: WorkflowInvocationStep, outputs: dict[str, Any], already_persisted: bool = False ) -> None: step = invocation_step.workflow_step if invocation_step.output_value: @@ -689,7 +686,7 @@ def subworkflow_progress( self, subworkflow_invocation: WorkflowInvocation, step: "WorkflowStep", - param_map: Dict, + param_map: dict, subworkflow_collection_info=None, when_values=None, ) -> "WorkflowProgress": diff --git a/lib/galaxy/workflow/run_request.py b/lib/galaxy/workflow/run_request.py index 0a361700d2ac..43b235a09931 100644 --- a/lib/galaxy/workflow/run_request.py +++ b/lib/galaxy/workflow/run_request.py @@ -3,8 +3,6 @@ import uuid from typing import ( Any, - Dict, - List, Optional, TYPE_CHECKING, Union, @@ -81,18 +79,18 @@ class WorkflowRunConfig: def __init__( self, target_history: "History", - replacement_dict: Optional[Dict[str, Any]] = None, - inputs: Optional[Dict[int, Any]] = None, - param_map: Optional[Dict[int, Any]] = None, + replacement_dict: Optional[dict[str, Any]] = None, + inputs: Optional[dict[int, Any]] = None, + param_map: Optional[dict[int, Any]] = None, allow_tool_state_corrections: bool = False, copy_inputs_to_history: bool = False, use_cached_job: bool = False, - resource_params: Optional[Dict[int, Any]] = None, + resource_params: Optional[dict[int, Any]] = None, requires_materialization: bool = False, preferred_object_store_id: Optional[str] = None, preferred_outputs_object_store_id: Optional[str] = None, preferred_intermediate_object_store_id: Optional[str] = None, - effective_outputs: Optional[List[EffectiveOutput]] = None, + effective_outputs: Optional[list[EffectiveOutput]] = None, ) -> None: self.target_history = target_history self.replacement_dict = replacement_dict or {} @@ -110,8 +108,8 @@ def __init__( def _normalize_inputs( - steps: List["WorkflowStep"], inputs: Dict[str, Dict[str, Any]], inputs_by: str -) -> Dict[int, Dict[str, Any]]: + steps: list["WorkflowStep"], inputs: dict[str, dict[str, Any]], inputs_by: str +) -> dict[int, dict[str, Any]]: normalized_inputs = {} for step in steps: if step.type not in INPUT_STEP_TYPES: @@ -156,8 +154,8 @@ def _normalize_inputs( def _normalize_step_parameters( - steps: List["WorkflowStep"], param_map: Dict, legacy: bool = False, already_normalized: bool = False -) -> Dict: + steps: list["WorkflowStep"], param_map: dict, 
legacy: bool = False, already_normalized: bool = False +) -> dict: """Take a complex param_map that can reference parameters by step_id in the new flexible way or in the old one-parameter per step fashion or by tool id and normalize the parameters so @@ -174,7 +172,7 @@ def _normalize_step_parameters( raise exceptions.RequestParameterInvalidException( "Specifying subworkflow step parameters requires already_normalized to be specified as true." ) - subworkflow_param_dict: Dict[str, Dict[str, str]] = {} + subworkflow_param_dict: dict[str, dict[str, str]] = {} for key, value in param_dict.items(): step_index, param_name = key.split("|", 1) if step_index not in subworkflow_param_dict: @@ -189,7 +187,7 @@ def _normalize_step_parameters( return normalized_param_map -def _step_parameters(step: "WorkflowStep", param_map: Dict, legacy: bool = False) -> Dict: +def _step_parameters(step: "WorkflowStep", param_map: dict, legacy: bool = False) -> dict: """ Update ``step`` parameters based on the user-provided ``param_map`` dict. @@ -233,7 +231,7 @@ def _step_parameters(step: "WorkflowStep", param_map: Dict, legacy: bool = False return new_params -def _flatten_step_params(param_dict: Dict, prefix: str = "") -> Dict: +def _flatten_step_params(param_dict: dict, prefix: str = "") -> dict: # TODO: Temporary work around until tool code can process nested data # structures. This should really happen in there so the tools API gets # this functionality for free and so that repeats can be handled @@ -257,8 +255,8 @@ def _flatten_step_params(param_dict: Dict, prefix: str = "") -> Dict: def _get_target_history( trans: "GalaxyWebTransaction", workflow: "Workflow", - payload: Dict[str, Any], - param_keys: Optional[List[List]] = None, + payload: dict[str, Any], + param_keys: Optional[list[list]] = None, index: int = 0, ) -> History: param_keys = param_keys or [] @@ -300,8 +298,8 @@ def _get_target_history( def build_workflow_run_configs( - trans: "GalaxyWebTransaction", workflow: "Workflow", payload: Dict[str, Any] -) -> List[WorkflowRunConfig]: + trans: "GalaxyWebTransaction", workflow: "Workflow", payload: dict[str, Any] +) -> list[WorkflowRunConfig]: app = trans.app allow_tool_state_corrections = payload.get("allow_tool_state_corrections", False) use_cached_job = payload.get("use_cached_job", False) @@ -549,7 +547,7 @@ def add_parameter(name: str, value: str, type: WorkflowRequestInputParameter.typ if step.type == "subworkflow": subworkflow = step.subworkflow assert subworkflow - effective_outputs: Optional[List[EffectiveOutput]] = None + effective_outputs: Optional[list[EffectiveOutput]] = None if run_config.preferred_intermediate_object_store_id or run_config.preferred_outputs_object_store_id: step_outputs = step.workflow_outputs effective_outputs = [] @@ -629,7 +627,7 @@ def workflow_request_to_run_config( param_types = WorkflowRequestInputParameter.types history = workflow_invocation.history replacement_dict = {} - inputs: Dict[ + inputs: dict[ int, Union[HistoryDatasetAssociation, HistoryDatasetCollectionAssociation, str, int, float, bool, None] ] = {} param_map = {} diff --git a/lib/galaxy/workflow/trs_proxy.py b/lib/galaxy/workflow/trs_proxy.py index 5651632518c1..e00a3bf3316b 100644 --- a/lib/galaxy/workflow/trs_proxy.py +++ b/lib/galaxy/workflow/trs_proxy.py @@ -2,7 +2,6 @@ import os import re import urllib.parse -from typing import List import yaml @@ -91,7 +90,7 @@ def get_version_from_trs_url(self, trs_url): server, trs_tool_id, trs_version_id = self.get_trs_id_and_version_from_trs_url(trs_url=trs_url) 
return server.get_version_descriptor(trs_tool_id, trs_version_id) - def match_url(self, url, ip_allowlist: List[IpAllowedListEntryT]): + def match_url(self, url, ip_allowlist: list[IpAllowedListEntryT]): if url.lstrip().startswith("file://"): # requests doesn't know what to do with file:// anyway, but just in case we swap # out the implementation diff --git a/lib/galaxy/workflow/workflow_parameter_input_definitions.py b/lib/galaxy/workflow/workflow_parameter_input_definitions.py index ee59a87d42a9..5b3b9f170e5c 100644 --- a/lib/galaxy/workflow/workflow_parameter_input_definitions.py +++ b/lib/galaxy/workflow/workflow_parameter_input_definitions.py @@ -1,5 +1,4 @@ from typing import ( - Dict, Literal, Union, ) @@ -14,7 +13,7 @@ ) INPUT_PARAMETER_TYPES = Literal["text", "integer", "float", "boolean", "color", "directory_uri"] -default_source_type = Dict[str, Union[int, float, bool, str]] +default_source_type = dict[str, Union[int, float, bool, str]] tool_param_type = Union[ TextToolParameter, IntegerToolParameter, diff --git a/lib/galaxy_test/api/_framework.py b/lib/galaxy_test/api/_framework.py index 642f56412bd8..3668418a7052 100644 --- a/lib/galaxy_test/api/_framework.py +++ b/lib/galaxy_test/api/_framework.py @@ -1,5 +1,5 @@ +from collections.abc import Iterator from typing import ( - Iterator, Optional, ) from unittest import SkipTest diff --git a/lib/galaxy_test/api/conftest.py b/lib/galaxy_test/api/conftest.py index 74d8958b9158..5c9460410bca 100644 --- a/lib/galaxy_test/api/conftest.py +++ b/lib/galaxy_test/api/conftest.py @@ -1,11 +1,10 @@ """Fixtures for a version of API testing that relies more heavily on pytest injection.""" import os +from collections.abc import Iterator from dataclasses import dataclass from typing import ( Any, - Iterator, - List, Optional, ) @@ -130,8 +129,8 @@ def target_history( @pytest.fixture def required_tools( - dataset_populator: DatasetPopulator, history_id: str, required_tool_ids: List[str] -) -> List[RequiredTool]: + dataset_populator: DatasetPopulator, history_id: str, required_tool_ids: list[str] +) -> list[RequiredTool]: tools = [] for tool_id in required_tool_ids: tool = RequiredTool(dataset_populator, tool_id, history_id) @@ -140,7 +139,7 @@ def required_tools( @pytest.fixture -def required_tool(dataset_populator: DatasetPopulator, history_id: str, required_tool_ids: List[str]) -> RequiredTool: +def required_tool(dataset_populator: DatasetPopulator, history_id: str, required_tool_ids: list[str]) -> RequiredTool: if len(required_tool_ids) != 1: raise AssertionError("required_tool fixture must only be used on methods that require a single tool") tool_id = required_tool_ids[0] @@ -162,7 +161,7 @@ def check_required_tools(anonymous_galaxy_interactor, request): @pytest.fixture -def required_tool_ids(anonymous_galaxy_interactor, request) -> List[str]: +def required_tool_ids(anonymous_galaxy_interactor, request) -> list[str]: tool_ids = [] for marker in request.node.iter_markers(): if marker.name == "requires_tool_id": diff --git a/lib/galaxy_test/api/sharable.py b/lib/galaxy_test/api/sharable.py index 75cf272dbd09..9c8a623b83a4 100644 --- a/lib/galaxy_test/api/sharable.py +++ b/lib/galaxy_test/api/sharable.py @@ -1,7 +1,5 @@ from typing import ( Any, - Dict, - List, ) from unittest import SkipTest from uuid import uuid4 @@ -63,7 +61,7 @@ def test_sharing_without_user(self): sharing_response = self._get_resource_sharing_status(resource_id) assert not sharing_response["users_shared_with"] - payload: Dict[str, List[str]] = {"user_ids": []} + payload: 
dict[str, list[str]] = {"user_ids": []} sharing_response = self._set_resource_sharing(resource_id, action="share_with_users", payload=payload) assert not sharing_response["users_shared_with"] diff --git a/lib/galaxy_test/api/test_dataset_collections.py b/lib/galaxy_test/api/test_dataset_collections.py index 4501efcbb3e1..05f230a94357 100644 --- a/lib/galaxy_test/api/test_dataset_collections.py +++ b/lib/galaxy_test/api/test_dataset_collections.py @@ -1,7 +1,6 @@ import json import zipfile from io import BytesIO -from typing import List from urllib.parse import quote from galaxy.util.unittest_utils import skip_if_github_down @@ -728,7 +727,7 @@ def test_get_suitable_converters_different_datatypes_no_matches(self, history_id self._assert_status_code_is(response, 200) hdca_list_id = response.json()["outputs"][0]["id"] converters = self._get("dataset_collections/" + hdca_list_id + "/suitable_converters") - actual: List[str] = [] + actual: list[str] = [] for converter in converters.json(): actual.append(converter["tool_id"]) assert actual == [] diff --git a/lib/galaxy_test/api/test_datasets.py b/lib/galaxy_test/api/test_datasets.py index a6e67c46c98c..358e6115e1e2 100644 --- a/lib/galaxy_test/api/test_datasets.py +++ b/lib/galaxy_test/api/test_datasets.py @@ -2,10 +2,6 @@ import urllib import zipfile from io import BytesIO -from typing import ( - Dict, - List, -) from urllib.parse import quote from galaxy.model.unittest_utils.store_fixtures import ( @@ -85,7 +81,7 @@ def test_index_order_by_size(self): history_id, order_by="size-asc", expected_ids_order=dataset_ids_ordered_by_size_asc ) - def _assert_history_datasets_ordered(self, history_id, order_by: str, expected_ids_order: List[str]): + def _assert_history_datasets_ordered(self, history_id, order_by: str, expected_ids_order: list[str]): datasets_response = self._get(f"histories/{history_id}/contents?v=dev&keys=size&order={order_by}") self._assert_status_code_is(datasets_response, 200) datasets = datasets_response.json() @@ -698,7 +694,7 @@ def test_purge_does_not_reset_file_size(self): def test_delete_batch(self): num_datasets = 4 - dataset_map: Dict[int, str] = {} + dataset_map: dict[int, str] = {} history_id = self.dataset_populator.new_history() for index in range(num_datasets): hda = self.dataset_populator.new_dataset(history_id) @@ -747,7 +743,7 @@ def test_delete_batch_lddas(self): def test_delete_batch_error(self): num_datasets = 4 - dataset_map: Dict[int, str] = {} + dataset_map: dict[int, str] = {} with self._different_user(): history_id = self.dataset_populator.new_history() diff --git a/lib/galaxy_test/api/test_display_applications.py b/lib/galaxy_test/api/test_display_applications.py index 3fd5484fc833..03b86af7b8e4 100644 --- a/lib/galaxy_test/api/test_display_applications.py +++ b/lib/galaxy_test/api/test_display_applications.py @@ -1,5 +1,4 @@ import random -from typing import List from galaxy.util import UNKNOWN from galaxy_test.base.decorators import requires_admin @@ -51,7 +50,7 @@ def test_reload_as_non_admin_returns_403(self): response = self._post("display_applications/reload") self._assert_status_code_is(response, 403) - def _get_half_random_items(self, collection: List[str]) -> List[str]: + def _get_half_random_items(self, collection: list[str]) -> list[str]: half_num_items = int(len(collection) / 2) rval = random.sample(collection, half_num_items) return rval diff --git a/lib/galaxy_test/api/test_drs.py b/lib/galaxy_test/api/test_drs.py index 038a1c25c7e8..5859e2ecbe09 100644 --- a/lib/galaxy_test/api/test_drs.py +++ 
b/lib/galaxy_test/api/test_drs.py @@ -1,7 +1,6 @@ import tempfile from typing import ( Callable, - List, ) from unittest import SkipTest from urllib.parse import ( @@ -27,7 +26,7 @@ CONTENT = "My Cool DRS Data\n" # DRS expects identical get/post for two main API entrypoints. -HTTP_METHODS: List[Callable[[str], requests.Response]] = [requests.get, requests.post] +HTTP_METHODS: list[Callable[[str], requests.Response]] = [requests.get, requests.post] CHECKSUM_TEST_SLEEP_TIME = 3.0 diff --git a/lib/galaxy_test/api/test_folder_contents.py b/lib/galaxy_test/api/test_folder_contents.py index 051e665fb1ae..c491102ab66b 100644 --- a/lib/galaxy_test/api/test_folder_contents.py +++ b/lib/galaxy_test/api/test_folder_contents.py @@ -1,8 +1,6 @@ from typing import ( Any, - List, Optional, - Tuple, ) from galaxy_test.base.decorators import requires_new_library @@ -207,8 +205,8 @@ def test_index_permissions_include_deleted(self, history_id) -> None: folder_id = self._create_folder_in_library(folder_name) num_subfolders = 5 - subfolder_ids: List[str] = [] - deleted_subfolder_ids: List[str] = [] + subfolder_ids: list[str] = [] + deleted_subfolder_ids: list[str] = [] for index in range(num_subfolders): ldda_id = self._create_subfolder_in(folder_id, name=f"Folder_{index}") subfolder_ids.append(ldda_id) @@ -219,8 +217,8 @@ def test_index_permissions_include_deleted(self, history_id) -> None: deleted_subfolder_ids.append(subfolder_id) num_datasets = 5 - ldda_ids: List[str] = [] - deleted_ldda_ids: List[str] = [] + ldda_ids: list[str] = [] + deleted_ldda_ids: list[str] = [] for _ in range(num_datasets): ldda_id, _ = self._create_dataset_in_folder(history_id, folder_id) ldda_ids.append(ldda_id) @@ -328,7 +326,7 @@ def test_index_order_by(self, history_id): self._assert_folder_order_by_is_expected(folder_id, order_by, sort_desc, expected_order_by_name) def _assert_folder_order_by_is_expected( - self, folder_id: str, order_by: str, sort_desc: str, expected_order_by_name: List[str] + self, folder_id: str, order_by: str, sort_desc: str, expected_order_by_name: list[str] ): response = self._get(f"folders/{folder_id}/contents?order_by={order_by}&sort_desc={sort_desc}") index_response = self._assert_index_count_is_correct( @@ -374,7 +372,7 @@ def _create_dataset_in_folder( content: Optional[str] = None, ldda_message: Optional[str] = None, **kwds, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Returns a tuple with the LDDA ID and the underlying HDA ID""" hda_id = self._create_hda(history_id, name, content, **kwds) data = { @@ -394,7 +392,7 @@ def _create_hda(self, history_id: str, name: Optional[str] = None, content: Opti hda_id = hda["id"] return hda_id - def _create_hdca_with_contents(self, history_id: str, contents: List[str]) -> str: + def _create_hdca_with_contents(self, history_id: str, contents: list[str]) -> str: hdca = self.dataset_collection_populator.create_list_in_history( history_id, contents=contents, direct_upload=True, wait=True ).json()["outputs"][0] diff --git a/lib/galaxy_test/api/test_group_roles.py b/lib/galaxy_test/api/test_group_roles.py index f0f92801dbbf..728fe4884dad 100644 --- a/lib/galaxy_test/api/test_group_roles.py +++ b/lib/galaxy_test/api/test_group_roles.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -126,7 +125,7 @@ def test_delete_unknown_raises_400(self): delete_response = self._delete(f"groups/{encoded_group_id}/roles/{encoded_role_id}", admin=True) self._assert_status_code_is(delete_response, 400) - def _create_group(self, group_name: str, encoded_role_ids: 
Optional[List[str]] = None): + def _create_group(self, group_name: str, encoded_role_ids: Optional[list[str]] = None): if encoded_role_ids is None: encoded_role_ids = [self.dataset_populator.user_private_role_id()] role_ids = encoded_role_ids diff --git a/lib/galaxy_test/api/test_group_users.py b/lib/galaxy_test/api/test_group_users.py index 699eeba998d9..c1deee8d2522 100644 --- a/lib/galaxy_test/api/test_group_users.py +++ b/lib/galaxy_test/api/test_group_users.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -125,7 +124,7 @@ def test_delete_unknown_raises_400(self): delete_response = self._delete(f"groups/{encoded_group_id}/users/{encoded_user_id}", admin=True) self._assert_status_code_is(delete_response, 400) - def _create_group(self, group_name: str, encoded_user_ids: Optional[List[str]] = None): + def _create_group(self, group_name: str, encoded_user_ids: Optional[list[str]] = None): if encoded_user_ids is None: encoded_user_ids = [self.dataset_populator.user_id()] user_ids = encoded_user_ids diff --git a/lib/galaxy_test/api/test_groups.py b/lib/galaxy_test/api/test_groups.py index 0176bde0d21c..11f4c33cb85a 100644 --- a/lib/galaxy_test/api/test_groups.py +++ b/lib/galaxy_test/api/test_groups.py @@ -1,5 +1,4 @@ from typing import ( - List, Optional, ) @@ -210,7 +209,7 @@ def _assert_valid_group(self, group, assert_id=None): if assert_id is not None: assert group["id"] == assert_id - def _assert_group_has_expected_values(self, group_id: str, name: str, user_ids: List[str], role_ids: List[str]): + def _assert_group_has_expected_values(self, group_id: str, name: str, user_ids: list[str], role_ids: list[str]): group = self._get(f"groups/{group_id}", admin=True).json() assert group["name"] == name users = self._get(f"groups/{group_id}/users", admin=True).json() diff --git a/lib/galaxy_test/api/test_history_contents.py b/lib/galaxy_test/api/test_history_contents.py index 07e8a9997114..98d40b3d3626 100644 --- a/lib/galaxy_test/api/test_history_contents.py +++ b/lib/galaxy_test/api/test_history_contents.py @@ -1,9 +1,7 @@ import urllib.parse from typing import ( Any, - List, Optional, - Tuple, Union, ) @@ -339,7 +337,7 @@ def _get_history_item_with_custom_serialization( content_id: str, item_type: str, expected_view: Optional[str] = None, - expected_keys: Optional[List[str]] = None, + expected_keys: Optional[list[str]] = None, ): view = f"&view={expected_view}" if expected_view else "" keys = f"&keys={','.join(expected_keys)}" if expected_keys else "" @@ -980,7 +978,7 @@ def test_elements_datatypes_field(self, history_id): self._upload_collection_list_with_elements(history_id, collection_name, elements) self._assert_collection_has_expected_elements_datatypes(history_id, collection_name, expected_datatypes) - def _upload_collection_list_with_elements(self, history_id: str, collection_name: str, elements: List[Any]): + def _upload_collection_list_with_elements(self, history_id: str, collection_name: str, elements: list[Any]): create_homogeneous_response = self.dataset_collection_populator.upload_collection( history_id, "list", elements=elements, name=collection_name, wait=True ) @@ -1605,7 +1603,7 @@ def _assert_total_matches_is(self, response, expected_total_matches: int): assert response["stats"]["total_matches"] assert response["stats"]["total_matches"] == expected_total_matches - def _create_test_history_contents(self, history_id) -> Tuple[List[str], List[str], List[Any]]: + def _create_test_history_contents(self, history_id) -> tuple[list[str], list[str], list[Any]]: 
"""Creates 3 collections (pairs) and their corresponding datasets (6 in total) Returns a tuple with the list of ids for the datasets and the collections and the @@ -1627,7 +1625,7 @@ def _create_test_history_contents(self, history_id) -> Tuple[List[str], List[str assert item["visible"] return datasets_ids, collection_ids, history_contents - def _create_collection_in_history(self, history_id, num_collections=1) -> List[str]: + def _create_collection_in_history(self, history_id, num_collections=1) -> list[str]: collection_ids = [] for _ in range(num_collections): collection_id = self.dataset_collection_populator.create_pair_in_history( @@ -1639,7 +1637,7 @@ def _create_collection_in_history(self, history_id, num_collections=1) -> List[s def _get_history_contents(self, history_id: str, query: str = ""): return self._get(f"histories/{history_id}/contents{query}").json() - def _get_hidden_items_from_history_contents(self, history_contents) -> List[Any]: + def _get_hidden_items_from_history_contents(self, history_contents) -> list[Any]: return [content for content in history_contents if not content["visible"]] def _get_collection_with_id_from_history_contents(self, history_contents, collection_id: str) -> Optional[Any]: diff --git a/lib/galaxy_test/api/test_item_tags.py b/lib/galaxy_test/api/test_item_tags.py index 025c3504c012..f6e356546334 100644 --- a/lib/galaxy_test/api/test_item_tags.py +++ b/lib/galaxy_test/api/test_item_tags.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, ) from galaxy_test.base.populators import ( @@ -130,7 +129,7 @@ def _test_delete_tag(self, prefix): def _create_valid_tag(self, prefix: str): url = f"{prefix}/tags/awesometagname" - tag_data: Dict[str, Any] = {} # Can also be dict(value="awesometagvalue") + tag_data: dict[str, Any] = {} # Can also be dict(value="awesometagvalue") response = self._post(url, data=tag_data, json=True) return response diff --git a/lib/galaxy_test/api/test_landing.py b/lib/galaxy_test/api/test_landing.py index 89e2c6680e30..a298240d2f5b 100644 --- a/lib/galaxy_test/api/test_landing.py +++ b/lib/galaxy_test/api/test_landing.py @@ -1,7 +1,6 @@ from base64 import b64encode from typing import ( Any, - Dict, ) from galaxy.schema.schema import ( @@ -121,7 +120,7 @@ def test_landing_claim_preserves_source_metadata(self): assert workflow["source_metadata"]["trs_version_id"] == "v0.12" -def _workflow_request_state() -> Dict[str, Any]: +def _workflow_request_state() -> dict[str, Any]: deferred = False input_b64_1 = b64encode(b"1 2 3").decode("utf-8") input_b64_2 = b64encode(b"4 5 6").decode("utf-8") diff --git a/lib/galaxy_test/api/test_pages.py b/lib/galaxy_test/api/test_pages.py index 582281af36b3..49bb1ce96bd2 100644 --- a/lib/galaxy_test/api/test_pages.py +++ b/lib/galaxy_test/api/test_pages.py @@ -1,7 +1,5 @@ from typing import ( Any, - Dict, - List, Optional, Union, ) @@ -30,7 +28,7 @@ def setUp(self): self.dataset_populator = DatasetPopulator(self.galaxy_interactor) self.workflow_populator = WorkflowPopulator(self.galaxy_interactor) - def _create_valid_page_with_slug(self, slug, **kwd) -> Dict[str, Any]: + def _create_valid_page_with_slug(self, slug, **kwd) -> dict[str, Any]: return self.dataset_populator.new_page(slug=slug, **kwd) def _create_valid_page_as(self, other_email, slug): @@ -452,7 +450,7 @@ def test_400_on_download_pdf_when_unsupported_content_format(self): pdf_response = self._get(f"pages/{page_id}.pdf") self._assert_status_code_is(pdf_response, 400) - def _create_published_page_with_slug(self, slug, **kwd) -> Dict[str, Any]: + 
def _create_published_page_with_slug(self, slug, **kwd) -> dict[str, Any]: response = self.dataset_populator.new_page(slug=slug, **kwd) response = self._make_public(response["id"]) return response @@ -465,20 +463,20 @@ def _share_with_user(self, page_id: str, user_id_or_email: str): response = self._put(f"pages/{page_id}/share_with_users", data, json=True) api_asserts.assert_status_code_is_ok(response) - def _index_raw(self, params: Optional[Dict[str, Any]] = None) -> Response: + def _index_raw(self, params: Optional[dict[str, Any]] = None) -> Response: index_response = self._get("pages", data=params or {}) return index_response - def _index(self, params: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]: + def _index(self, params: Optional[dict[str, Any]] = None) -> list[dict[str, Any]]: index_response = self._index_raw(params) self._assert_status_code_is(index_response, 200) return index_response.json() - def _index_ids(self, params: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]: + def _index_ids(self, params: Optional[dict[str, Any]] = None) -> list[dict[str, Any]]: return [p["id"] for p in self._index(params)] def _users_index_has_page_with_id( - self, has_id: Union[Dict[str, Any], str], params: Optional[Dict[str, Any]] = None + self, has_id: Union[dict[str, Any], str], params: Optional[dict[str, Any]] = None ): pages = self._index(params) if isinstance(has_id, dict): diff --git a/lib/galaxy_test/api/test_roles.py b/lib/galaxy_test/api/test_roles.py index cee1805ed727..1f8638ad3829 100644 --- a/lib/galaxy_test/api/test_roles.py +++ b/lib/galaxy_test/api/test_roles.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, Optional, ) @@ -192,7 +191,7 @@ def test_purge_can_reuse_name(self): response = self._post("roles", payload, admin=True, json=True) self._assert_status_code_is(response, 200) - def _create_role(self, name: Optional[str] = None, description: Optional[str] = None) -> Dict[str, Any]: + def _create_role(self, name: Optional[str] = None, description: Optional[str] = None) -> dict[str, Any]: payload = self._build_valid_role_payload(name=name, description=description) response = self._post("roles", payload, admin=True, json=True) assert_status_code_is(response, 200) @@ -211,7 +210,7 @@ def _build_valid_role_payload(self, name: Optional[str] = None, description: Opt return payload @staticmethod - def check_role_dict(role_dict: Dict[str, Any], assert_id: Optional[str] = None) -> None: + def check_role_dict(role_dict: dict[str, Any], assert_id: Optional[str] = None) -> None: assert_has_keys(role_dict, "id", "name", "model_class", "url") assert role_dict["model_class"] == "Role" if assert_id is not None: diff --git a/lib/galaxy_test/api/test_tags.py b/lib/galaxy_test/api/test_tags.py index cdce0a0705af..fa009494434c 100644 --- a/lib/galaxy_test/api/test_tags.py +++ b/lib/galaxy_test/api/test_tags.py @@ -1,5 +1,4 @@ import json -from typing import List from unittest import SkipTest from uuid import uuid4 @@ -51,7 +50,7 @@ def _get_item(self, item_id: str): item = item_response.json() return item - def _assert_tags_in_item(self, item_id, expected_tags: List[str]): + def _assert_tags_in_item(self, item_id, expected_tags: list[str]): item = self._get_item(item_id) assert "tags" in item assert item["tags"] == expected_tags diff --git a/lib/galaxy_test/api/test_tool_execute.py b/lib/galaxy_test/api/test_tool_execute.py index a7486c867ad2..d69c9fecf131 100644 --- a/lib/galaxy_test/api/test_tool_execute.py +++ b/lib/galaxy_test/api/test_tool_execute.py @@ -8,7 +8,6 @@ """ 
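# Aside: an illustrative note (not part of this diff) on the signature idiom in
# the page and role test helpers above. Optional mutable arguments are typed
# Optional[dict[...]] / Optional[list[...]] with a None default and replaced
# inside the function, because a literal {} or [] default would be evaluated
# once and shared between calls. The function below is hypothetical.
from typing import Any, Optional


def example_index(params: Optional[dict[str, Any]] = None) -> list[str]:
    # Fresh dict per call; mirrors the `params or {}` fallback used above.
    params = params or {}
    return sorted(params)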
from dataclasses import dataclass -from typing import List import pytest @@ -231,7 +230,7 @@ def test_map_over_empty_collection(target_history: TargetHistory, required_tool: @dataclass class MultiRunInRepeatFixtures: - repeat_datasets: List[SrcDict] + repeat_datasets: list[SrcDict] common_dataset: SrcDict @@ -302,8 +301,8 @@ def _check_multi_run_in_repeat(execute: DescribeToolExecution): @dataclass class TwoMultiRunsFixture: - first_two_datasets: List[SrcDict] - second_two_datasets: List[SrcDict] + first_two_datasets: list[SrcDict] + second_two_datasets: list[SrcDict] @pytest.fixture @@ -520,7 +519,7 @@ def test_optional_repeats_with_mins_filled_id(target_history: TargetHistory, req @requires_tool_id("gx_select") @requires_tool_id("gx_select_no_options_validation") -def test_select_first_by_default(required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs): +def test_select_first_by_default(required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs): empty = tool_input_format.when.any({}) for required_tool in required_tools: required_tool.execute.with_inputs(empty).assert_has_single_job.with_output("output").with_contents_stripped( @@ -532,7 +531,7 @@ def test_select_first_by_default(required_tools: List[RequiredTool], tool_input_ @requires_tool_id("gx_select_no_options_validation") @requires_tool_id("gx_select_dynamic_empty") @requires_tool_id("gx_select_dynamic_empty_validated") -def test_select_on_null_errors(required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs): +def test_select_on_null_errors(required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs): # test_select_first_by_default verifies the first option will just be selected, despite that if an explicit null # is passed, an error (rightfully) occurs. This test verifies that. null_parameter = tool_input_format.when.any({"parameter": None}) @@ -543,7 +542,7 @@ def test_select_on_null_errors(required_tools: List[RequiredTool], tool_input_fo @requires_tool_id("gx_select_dynamic_empty") @requires_tool_id("gx_select_dynamic_empty_validated") def test_select_empty_causes_error_regardless( - required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs + required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs ): # despite selects otherwise selecting defaults - nothing can be done if the select option list is empty empty = tool_input_format.when.any({}) @@ -553,7 +552,7 @@ def test_select_empty_causes_error_regardless( @requires_tool_id("gx_select_optional") @requires_tool_id("gx_select_optional_no_options_validation") -def test_select_optional_null_by_default(required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs): +def test_select_optional_null_by_default(required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs): # test_select_first_by_default shows that required select values will pick an option by default, # this test verifies that doesn't occur for optional selects. 
empty = tool_input_format.when.any({}) @@ -570,7 +569,7 @@ def test_select_optional_null_by_default(required_tools: List[RequiredTool], too @requires_tool_id("gx_select_multiple") @requires_tool_id("gx_select_multiple_optional") def test_select_multiple_does_not_select_first_by_default( - required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs + required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs ): # unlike single selects - no selection is forced and these serve as optional by default empty = tool_input_format.when.any({}) @@ -586,7 +585,7 @@ def test_select_multiple_does_not_select_first_by_default( @requires_tool_id("gx_text") @requires_tool_id("gx_text_optional_false") -def test_null_to_text_tools(required_tools: List[RequiredTool], tool_input_format: DescribeToolInputs): +def test_null_to_text_tools(required_tools: list[RequiredTool], tool_input_format: DescribeToolInputs): for required_tool in required_tools: execute = required_tool.execute.with_inputs(tool_input_format.when.any({})) execute.assert_has_single_job.with_output("output").with_contents_stripped("") diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py index 64d0d4326c96..5861bc45a76b 100644 --- a/lib/galaxy_test/api/test_tools.py +++ b/lib/galaxy_test/api/test_tools.py @@ -6,8 +6,6 @@ from io import BytesIO from typing import ( Any, - Dict, - List, Optional, ) from uuid import uuid4 @@ -869,7 +867,7 @@ def _run_filter(self, history_id, failed_hdca_id, batch=False): filtered_hdca = self.dataset_populator.get_history_collection_details(history_id, hid=filtered_hid, wait=False) return filtered_hdca - def _apply_rules_and_check(self, example: Dict[str, Any]) -> None: + def _apply_rules_and_check(self, example: dict[str, Any]) -> None: with self.dataset_populator.test_history(require_new=False) as history_id: inputs = stage_rules_example(self.galaxy_interactor, history_id, example) hdca = inputs["input"] @@ -984,7 +982,7 @@ def test_drill_down_first_by_default(self): # tool test framework filling in a default. Creating a raw request here # verifies that currently select parameters don't require a selection. 
with self.dataset_populator.test_history(require_new=False) as history_id: - inputs: Dict[str, Any] = {} + inputs: dict[str, Any] = {} response = self._run("gx_drill_down_exact", history_id, inputs, assert_ok=False) self._assert_status_code_is(response, 400) assert "an invalid option" in response.text @@ -1934,7 +1932,7 @@ def test_dynamic_parameter_error_handling(self): def register_job_data(job_data): job_data_list.append(job_data) - def tool_test_case_list(inputs, required_files) -> List[ValidToolTestDict]: + def tool_test_case_list(inputs, required_files) -> list[ValidToolTestDict]: return [ { "inputs": inputs, diff --git a/lib/galaxy_test/api/test_workflows.py b/lib/galaxy_test/api/test_workflows.py index b5555c9bf688..112709698133 100644 --- a/lib/galaxy_test/api/test_workflows.py +++ b/lib/galaxy_test/api/test_workflows.py @@ -9,9 +9,7 @@ from typing import ( Any, cast, - Dict, Optional, - Tuple, Union, ) from uuid import uuid4 @@ -201,11 +199,11 @@ def _upload_yaml_workflow(self, has_yaml, **kwds) -> str: def _setup_workflow_run( self, - workflow: Optional[Dict[str, Any]] = None, + workflow: Optional[dict[str, Any]] = None, inputs_by: str = "step_id", history_id: Optional[str] = None, workflow_id: Optional[str] = None, - ) -> Tuple[Dict[str, Any], str, str]: + ) -> tuple[dict[str, Any], str, str]: return self.workflow_populator.setup_workflow_run(workflow, inputs_by, history_id, workflow_id) def _ds_entry(self, history_content): @@ -217,7 +215,7 @@ def _invocation_details(self, workflow_id: Optional[str], invocation_id: str, ** invocation_details = invocation_details_response.json() return invocation_details - def _run_jobs(self, has_workflow, history_id: str, **kwds) -> Union[Dict[str, Any], RunJobsSummary]: + def _run_jobs(self, has_workflow, history_id: str, **kwds) -> Union[dict[str, Any], RunJobsSummary]: return self.workflow_populator.run_workflow(has_workflow, history_id=history_id, **kwds) def _run_workflow(self, has_workflow, history_id: str, **kwds) -> RunJobsSummary: diff --git a/lib/galaxy_test/base/api.py b/lib/galaxy_test/base/api.py index b1881f917b63..07c0f1c4c456 100644 --- a/lib/galaxy_test/base/api.py +++ b/lib/galaxy_test/base/api.py @@ -2,7 +2,6 @@ from contextlib import contextmanager from typing import ( Any, - Dict, Optional, ) from urllib.parse import ( @@ -48,10 +47,10 @@ def celery_config(): class UsesCeleryTasks: @classmethod - def handle_galaxy_config_kwds(cls, config: Dict[str, Any]) -> None: + def handle_galaxy_config_kwds(cls, config: dict[str, Any]) -> None: config["enable_celery_tasks"] = True config["metadata_strategy"] = f'{config.get("metadata_strategy", "directory")}_celery' - celery_conf: Dict[str, Any] = config.get("celery_conf", {}) + celery_conf: dict[str, Any] = config.get("celery_conf", {}) celery_conf.update(DEFAULT_CELERY_CONFIG) config["celery_conf"] = celery_conf diff --git a/lib/galaxy_test/base/api_asserts.py b/lib/galaxy_test/base/api_asserts.py index fcc3c5c83444..6d5cdfc28b45 100644 --- a/lib/galaxy_test/base/api_asserts.py +++ b/lib/galaxy_test/base/api_asserts.py @@ -3,7 +3,6 @@ from typing import ( Any, cast, - Dict, Optional, Union, ) @@ -103,8 +102,8 @@ def assert_error_message_contains(response: Union[Response, dict], expected_cont assert expected_contains in err_msg, f"Expected error message [{err_msg}] to contain [{expected_contains}]." 
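# Aside: a standalone sketch (not part of this diff) of why these mechanical
# rewrites are safe. PEP 585 (Python 3.9) made the builtin containers
# subscriptable at runtime, so dict[str, Any] replaces typing.Dict with no
# behavior change; Optional and Union are still imported from typing because
# the X | Y spelling only arrived with PEP 604 in Python 3.10. The function
# below is hypothetical but mirrors the isinstance narrowing in _as_dict,
# which follows immediately after.
from typing import Any, Union


def example_as_dict(response: Union[str, dict[str, Any]]) -> dict[str, Any]:
    if isinstance(response, dict):  # narrowing works with the plain builtin
        return response
    return {"raw": response}


# At runtime the subscripted builtin is a types.GenericAlias:
assert repr(dict[str, int]) == "dict[str, int]"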
-def _as_dict(response: Union[Response, dict]) -> Dict[str, Any]: - as_dict: Dict[str, Any] +def _as_dict(response: Union[Response, dict]) -> dict[str, Any]: + as_dict: dict[str, Any] if isinstance(response, Response): as_dict = cast(dict, response.json()) else: diff --git a/lib/galaxy_test/base/api_util.py b/lib/galaxy_test/base/api_util.py index 7bfe607ebf53..c6a5552926ed 100644 --- a/lib/galaxy_test/base/api_util.py +++ b/lib/galaxy_test/base/api_util.py @@ -3,7 +3,6 @@ import random import string from typing import ( - Dict, Optional, ) @@ -41,7 +40,7 @@ def get_user_api_key() -> Optional[str]: return os.environ.get("GALAXY_TEST_USER_API_KEY", DEFAULT_GALAXY_USER_API_KEY) -def baseauth_headers(username: str, password: str) -> Dict[str, str]: +def baseauth_headers(username: str, password: str) -> dict[str, str]: unencoded_credentials = f"{username}:{password}" authorization = base64.b64encode(unencoded_credentials.encode("utf-8")).decode("utf-8") headers = { diff --git a/lib/galaxy_test/base/env.py b/lib/galaxy_test/base/env.py index b611639c4ed9..a116c22b0e6c 100644 --- a/lib/galaxy_test/base/env.py +++ b/lib/galaxy_test/base/env.py @@ -6,7 +6,6 @@ import struct from typing import ( Optional, - Tuple, ) from galaxy.util import asbool @@ -14,7 +13,7 @@ DEFAULT_WEB_HOST = socket.gethostbyname("localhost") REQUIRE_ALL_NEEDED_TOOLS = asbool(os.environ.get("GALAXY_TEST_REQUIRE_ALL_NEEDED_TOOLS", "0")) -GalaxyTarget = Tuple[str, Optional[str], str] +GalaxyTarget = tuple[str, Optional[str], str] def setup_keep_outdir() -> str: diff --git a/lib/galaxy_test/base/json_schema_utils.py b/lib/galaxy_test/base/json_schema_utils.py index a1571ba8feb4..d20a307ae02b 100644 --- a/lib/galaxy_test/base/json_schema_utils.py +++ b/lib/galaxy_test/base/json_schema_utils.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, ) import jsonschema @@ -8,7 +7,7 @@ from galaxy.util import requests from galaxy_test.base import api_asserts -schema_store: Dict[str, Any] = {} +schema_store: dict[str, Any] = {} class JsonSchemaValidator: diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index c6d6f178e00e..2587ced25c37 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -51,6 +51,7 @@ ABCMeta, abstractmethod, ) +from collections.abc import Generator from functools import wraps from io import StringIO from operator import itemgetter @@ -58,13 +59,8 @@ Any, Callable, cast, - Dict, - Generator, - List, NamedTuple, Optional, - Set, - Tuple, Union, ) from uuid import UUID @@ -443,7 +439,7 @@ def new_dataset( to_posix_lines=True, auto_decompress=True, **kwds, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Create a new history dataset instance (HDA). 
:returns: a dictionary describing the new HDA @@ -547,7 +543,7 @@ def fetch( return tool_response - def fetch_hdas(self, history_id: str, items: List[Dict[str, Any]], wait: bool = True) -> List[Dict[str, Any]]: + def fetch_hdas(self, history_id: str, items: list[dict[str, Any]], wait: bool = True) -> list[dict[str, Any]]: destination = {"type": "hdas"} targets = [ { @@ -564,12 +560,12 @@ def fetch_hdas(self, history_id: str, items: List[Dict[str, Any]], wait: bool = outputs = fetch_response.json()["outputs"] return outputs - def fetch_hda(self, history_id: str, item: Dict[str, Any], wait: bool = True) -> Dict[str, Any]: + def fetch_hda(self, history_id: str, item: dict[str, Any], wait: bool = True) -> dict[str, Any]: hdas = self.fetch_hdas(history_id, [item], wait=wait) assert len(hdas) == 1 return hdas[0] - def create_deferred_hda(self, history_id, uri: str, ext: Optional[str] = None) -> Dict[str, Any]: + def create_deferred_hda(self, history_id, uri: str, ext: Optional[str] = None) -> dict[str, Any]: item = { "src": "url", "url": uri, @@ -601,20 +597,20 @@ def tag_dataset(self, history_id, hda_id, tags, raise_on_error=True): response.raise_for_status() return response.json() - def create_from_store_raw(self, payload: Dict[str, Any]) -> Response: + def create_from_store_raw(self, payload: dict[str, Any]) -> Response: create_response = self._post("histories/from_store", payload, json=True) return create_response - def create_from_store_raw_async(self, payload: Dict[str, Any]) -> Response: + def create_from_store_raw_async(self, payload: dict[str, Any]) -> Response: create_response = self._post("histories/from_store_async", payload, json=True) return create_response def create_from_store( self, - store_dict: Optional[Dict[str, Any]] = None, + store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None, model_store_format: Optional[str] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: payload = _store_payload(store_dict=store_dict, store_path=store_path, model_store_format=model_store_format) create_response = self.create_from_store_raw(payload) api_asserts.assert_status_code_is_ok(create_response) @@ -622,22 +618,22 @@ def create_from_store( def create_from_store_async( self, - store_dict: Optional[Dict[str, Any]] = None, + store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None, model_store_format: Optional[str] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: payload = _store_payload(store_dict=store_dict, store_path=store_path, model_store_format=model_store_format) create_response = self.create_from_store_raw_async(payload) create_response.raise_for_status() return create_response.json() - def create_contents_from_store_raw(self, history_id: str, payload: Dict[str, Any]) -> Response: + def create_contents_from_store_raw(self, history_id: str, payload: dict[str, Any]) -> Response: create_response = self._post(f"histories/{history_id}/contents_from_store", payload, json=True) return create_response def create_contents_from_store( - self, history_id: str, store_dict: Optional[Dict[str, Any]] = None, store_path: Optional[str] = None - ) -> List[Dict[str, Any]]: + self, history_id: str, store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None + ) -> list[dict[str, Any]]: if store_dict is not None: assert isinstance(store_dict, dict) if store_path is not None: @@ -647,14 +643,14 @@ def create_contents_from_store( create_response.raise_for_status() return create_response.json() - def download_contents_to_store(self, 
history_id: str, history_content: Dict[str, Any], extension=".tgz") -> str: + def download_contents_to_store(self, history_id: str, history_content: dict[str, Any], extension=".tgz") -> str: url = f"histories/{history_id}/contents/{history_content['history_content_type']}s/{history_content['id']}/prepare_store_download" download_response = self._post(url, dict(include_files=False, model_store_format=extension), json=True) storage_request_id = self.assert_download_request_ok(download_response) self.wait_for_download_ready(storage_request_id) return self._get_to_tempfile(f"short_term_storage/{storage_request_id}") - def reupload_contents(self, history_content: Dict[str, Any]): + def reupload_contents(self, history_content: dict[str, Any]): history_id = history_content["history_id"] temp_tar = self.download_contents_to_store(history_id, history_content, "tgz") with tarfile.open(name=temp_tar) as tf: @@ -722,7 +718,7 @@ def has_active_jobs(): def wait_for_jobs( self, - jobs: Union[List[dict], List[str]], + jobs: Union[list[dict], list[str]], assert_ok: bool = False, timeout: timeout_type = DEFAULT_TIMEOUT, ok_states=None, @@ -748,7 +744,7 @@ def wait_for_job( def get_job_details(self, job_id: str, full: bool = False) -> Response: return self._get(f"jobs/{job_id}", {"full": full}) - def job_outputs(self, job_id: str) -> List[Dict[str, Any]]: + def job_outputs(self, job_id: str) -> list[dict[str, Any]]: outputs = self._get(f"jobs/{job_id}/outputs") outputs.raise_for_status() return outputs.json() @@ -760,7 +756,7 @@ def compute_hash( extra_files_path: Optional[str] = None, wait: bool = True, ) -> Response: - data: Dict[str, Any] = {} + data: dict[str, Any] = {} if hash_function: data["hash_function"] = hash_function if extra_files_path: @@ -776,17 +772,17 @@ def cancel_history_jobs(self, history_id: str, wait=True) -> None: for active_job in active_jobs: self.cancel_job(active_job["id"]) - def history_jobs(self, history_id: str) -> List[Dict[str, Any]]: + def history_jobs(self, history_id: str) -> list[dict[str, Any]]: query_params = {"history_id": history_id, "order_by": "create_time"} jobs_response = self._get("jobs", query_params) assert jobs_response.status_code == 200 return jobs_response.json() - def history_jobs_for_tool(self, history_id: str, tool_id: str) -> List[Dict[str, Any]]: + def history_jobs_for_tool(self, history_id: str, tool_id: str) -> list[dict[str, Any]]: jobs = self.history_jobs(history_id) return [j for j in jobs if j["tool_id"] == tool_id] - def invocation_jobs(self, invocation_id: str) -> List[Dict[str, Any]]: + def invocation_jobs(self, invocation_id: str) -> list[dict[str, Any]]: query_params = {"invocation_id": invocation_id, "order_by": "create_time"} jobs_response = self._get("jobs", query_params) assert jobs_response.status_code == 200 @@ -886,7 +882,7 @@ def use_workflow_landing(self, uuid: UUID4) -> WorkflowLandingRequest: api_asserts.assert_status_code_is(landing_reponse, 200) return WorkflowLandingRequest.model_validate(landing_reponse.json()) - def create_tool_from_path(self, tool_path: str) -> Dict[str, Any]: + def create_tool_from_path(self, tool_path: str) -> dict[str, Any]: tool_directory = os.path.dirname(os.path.abspath(tool_path)) payload = dict( src="from_path", @@ -957,14 +953,14 @@ def show_unprivileged_tool(self, uuid: str, assert_ok=True): assert response.status_code == 200, response.text return response.json() - def create_tool(self, representation, tool_directory: Optional[str] = None) -> Dict[str, Any]: + def create_tool(self, representation, 
tool_directory: Optional[str] = None) -> dict[str, Any]: payload = dict( representation=representation, tool_directory=tool_directory, ) return self._create_tool_raw(payload) - def _create_tool_raw(self, payload) -> Dict[str, Any]: + def _create_tool_raw(self, payload) -> dict[str, Any]: using_requirement("admin") try: create_response = self._post("dynamic_tools", data=payload, admin=True, json=True) @@ -1151,7 +1147,7 @@ def describe_tool_execution(self, tool_id: str) -> "DescribeToolExecution": return DescribeToolExecution(self, tool_id) def materialize_dataset_instance(self, history_id: str, id: str, source: str = "hda"): - payload: Dict[str, Any] + payload: dict[str, Any] if source == "ldda": url = f"histories/{history_id}/materialize" payload = { @@ -1185,7 +1181,7 @@ def get_history_dataset_content( else: return display_response.content - def display_chunk(self, dataset_id: str, offset: int = 0, ck_size: Optional[int] = None) -> Dict[str, Any]: + def display_chunk(self, dataset_id: str, offset: int = 0, ck_size: Optional[int] = None) -> dict[str, Any]: # use the dataset display API endpoint with the offset parameter to enable chunking # of the target dataset for certain datatypes kwds = { @@ -1198,7 +1194,7 @@ def display_chunk(self, dataset_id: str, offset: int = 0, ck_size: Optional[int] print(display_response.content) return display_response.json() - def get_history_dataset_source_transform_actions(self, history_id: str, **kwd) -> Set[str]: + def get_history_dataset_source_transform_actions(self, history_id: str, **kwd) -> set[str]: details = self.get_history_dataset_details(history_id, **kwd) if "sources" not in details: return set() @@ -1215,7 +1211,7 @@ def get_history_dataset_source_transform_actions(self, history_id: str, **kwd) - assert isinstance(transform, list) return {t["action"] for t in transform} - def get_history_dataset_details(self, history_id: str, keys: Optional[str] = None, **kwds) -> Dict[str, Any]: + def get_history_dataset_details(self, history_id: str, keys: Optional[str] = None, **kwds) -> dict[str, Any]: dataset_id = self.__history_content_id(history_id, **kwds) details_response = self.get_history_dataset_details_raw(history_id, dataset_id, keys=keys) details_response.raise_for_status() @@ -1341,7 +1337,7 @@ def __history_content_id(self, history_id: str, wait=True, **kwds) -> str: history_content_id = history_contents[-1]["id"] return history_content_id - def get_history_contents(self, history_id: str, data=None) -> List[Dict[str, Any]]: + def get_history_contents(self, history_id: str, data=None) -> list[dict[str, Any]]: contents_response = self._get_contents_request(history_id, data=data) contents_response.raise_for_status() return contents_response.json() @@ -1363,7 +1359,7 @@ def ds_entry(self, history_content: dict) -> dict: src = "hdca" return dict(src=src, id=history_content["id"]) - def dataset_storage_info(self, dataset_id: str) -> Dict[str, Any]: + def dataset_storage_info(self, dataset_id: str) -> dict[str, Any]: response = self.dataset_storage_info_raw(dataset_id) response.raise_for_status() return response.json() @@ -1379,7 +1375,7 @@ def get_roles(self) -> list: assert roles_response.status_code == 200 return roles_response.json() - def get_configuration(self, admin=False) -> Dict[str, Any]: + def get_configuration(self, admin=False) -> dict[str, Any]: if admin: using_requirement("admin") response = self._get("configuration", admin=admin) @@ -1410,18 +1406,18 @@ def user_private_role_id(self) -> str: assert "id" in role, role return 
role["id"] - def get_usage(self) -> List[Dict[str, Any]]: + def get_usage(self) -> list[dict[str, Any]]: usage_response = self.galaxy_interactor.get("users/current/usage") usage_response.raise_for_status() return usage_response.json() - def get_usage_for(self, label: Optional[str]) -> Dict[str, Any]: + def get_usage_for(self, label: Optional[str]) -> dict[str, Any]: label_as_str = label if label is not None else "__null__" usage_response = self.galaxy_interactor.get(f"users/current/usage/{label_as_str}") usage_response.raise_for_status() return usage_response.json() - def update_user(self, properties: Dict[str, Any]) -> Dict[str, Any]: + def update_user(self, properties: dict[str, Any]) -> dict[str, Any]: update_response = self.update_user_raw(properties) api_asserts.assert_status_code_is_ok(update_response) return update_response.json() @@ -1430,7 +1426,7 @@ def set_user_preferred_object_store_id(self, store_id: Optional[str]) -> None: user_properties = self.update_user({"preferred_object_store_id": store_id}) assert user_properties["preferred_object_store_id"] == store_id - def update_user_raw(self, properties: Dict[str, Any]) -> Response: + def update_user_raw(self, properties: dict[str, Any]) -> Response: update_response = self.galaxy_interactor.put("users/current", properties, json=True) return update_response @@ -1514,7 +1510,7 @@ def make_public(self, history_id: str) -> dict: assert sharing_response.status_code == 200 return sharing_response.json() - def validate_dataset(self, history_id: str, dataset_id: str) -> Dict[str, Any]: + def validate_dataset(self, history_id: str, dataset_id: str) -> dict[str, Any]: url = f"histories/{history_id}/contents/{dataset_id}/validate" update_response = self._put(url) assert update_response.status_code == 200, update_response.content @@ -1598,7 +1594,7 @@ def import_history(self, import_data): api_asserts.assert_status_code_is(import_response, 200) return import_response.json()["id"] - def wait_for_history_with_name(self, history_name: str, desc: str) -> Dict[str, Any]: + def wait_for_history_with_name(self, history_name: str, desc: str) -> dict[str, Any]: def has_history_with_name(): histories = self.history_names() return histories.get(history_name, None) @@ -1618,24 +1614,24 @@ def import_history_and_wait_for_name(self, import_data, history_name): self.wait_for_history(imported_history_id) return imported_history_id - def history_names(self) -> Dict[str, Dict]: + def history_names(self) -> dict[str, dict]: return {h["name"]: h for h in self.get_histories()} def rename_history(self, history_id: str, new_name: str): self.update_history(history_id, {"name": new_name}) - def update_history(self, history_id: str, payload: Dict[str, Any]) -> Response: + def update_history(self, history_id: str, payload: dict[str, Any]) -> Response: update_url = f"histories/{history_id}" put_response = self._put(update_url, payload, json=True) return put_response - def update_dataset(self, dataset_id: str, update_payload: Dict[str, Any]): + def update_dataset(self, dataset_id: str, update_payload: dict[str, Any]): update_url = f"datasets/{dataset_id}" put_response = self._put(update_url, update_payload, json=True) api_asserts.assert_status_code_is_ok(put_response) return put_response.json() - def update_dataset_collection(self, dataset_collection_id: str, update_payload: Dict[str, Any]): + def update_dataset_collection(self, dataset_collection_id: str, update_payload: dict[str, Any]): update_url = f"dataset_collections/{dataset_collection_id}" put_response = 
self._put(update_url, update_payload, json=True) api_asserts.assert_status_code_is_ok(put_response) @@ -1762,7 +1758,7 @@ def wait_for_dataset( timeout=timeout, ) - def create_object_store_raw(self, payload: Dict[str, Any]) -> Response: + def create_object_store_raw(self, payload: dict[str, Any]) -> Response: response = self._post( "/api/object_store_instances", payload, @@ -1770,12 +1766,12 @@ def create_object_store_raw(self, payload: Dict[str, Any]) -> Response: ) return response - def create_object_store(self, payload: Dict[str, Any]) -> Dict[str, Any]: + def create_object_store(self, payload: dict[str, Any]) -> dict[str, Any]: response = self.create_object_store_raw(payload) api_asserts.assert_status_code_is_ok(response) return response.json() - def upgrade_object_store_raw(self, id: str, payload: Dict[str, Any]) -> Response: + def upgrade_object_store_raw(self, id: str, payload: dict[str, Any]) -> Response: response = self._put( f"/api/object_store_instances/{id}", payload, @@ -1783,7 +1779,7 @@ def upgrade_object_store_raw(self, id: str, payload: Dict[str, Any]) -> Response ) return response - def upgrade_object_store(self, id: str, payload: Dict[str, Any]) -> Dict[str, Any]: + def upgrade_object_store(self, id: str, payload: dict[str, Any]) -> dict[str, Any]: response = self.upgrade_object_store_raw(id, payload) api_asserts.assert_status_code_is_ok(response) return response.json() @@ -1792,20 +1788,20 @@ def upgrade_object_store(self, id: str, payload: Dict[str, Any]) -> Dict[str, An update_object_store_raw = upgrade_object_store_raw update_object_store = upgrade_object_store - def selectable_object_stores(self) -> List[Dict[str, Any]]: + def selectable_object_stores(self) -> list[dict[str, Any]]: selectable_object_stores_response = self._get("object_stores?selectable=true") selectable_object_stores_response.raise_for_status() selectable_object_stores = selectable_object_stores_response.json() return selectable_object_stores - def selectable_object_store_ids(self) -> List[str]: + def selectable_object_store_ids(self) -> list[str]: selectable_object_stores = self.selectable_object_stores() selectable_object_store_ids = [s["object_store_id"] for s in selectable_object_stores] return selectable_object_store_ids def new_page( self, slug: str = "mypage", title: str = "MY PAGE", content_format: str = "html", content: Optional[str] = None - ) -> Dict[str, Any]: + ) -> dict[str, Any]: page_response = self.new_page_raw(slug=slug, title=title, content_format=content_format, content=content) api_asserts.assert_status_code_is(page_response, 200) return page_response.json() @@ -1819,7 +1815,7 @@ def new_page_raw( def new_page_payload( self, slug: str = "mypage", title: str = "MY PAGE", content_format: str = "html", content: Optional[str] = None - ) -> Dict[str, str]: + ) -> dict[str, str]: if content is None: if content_format == "html": content = "
<p>Page!</p>
" @@ -1870,7 +1866,7 @@ def get_history_export_tasks(self, history_id: str): api_asserts.assert_status_code_is_ok(response) return response.json() - def make_page_public(self, page_id: str) -> Dict[str, Any]: + def make_page_public(self, page_id: str) -> dict[str, Any]: sharing_response = self._put(f"pages/{page_id}/publish") assert sharing_response.status_code == 200 return sharing_response.json() @@ -1898,7 +1894,7 @@ def restore_archived_history(self, history_id: str, force: Optional[bool] = None restore_response = self._put(f"histories/{history_id}/archive/restore{f'?force={force}' if force else ''}") return restore_response - def get_archived_histories(self, query: Optional[str] = None) -> List[Dict[str, Any]]: + def get_archived_histories(self, query: Optional[str] = None) -> list[dict[str, Any]]: if query: query = f"?{query}" index_response = self._get(f"histories/archived{query if query else ''}") @@ -2000,12 +1996,12 @@ def import_workflow_from_path(self, from_path: str, object_id: Optional[str] = N api_asserts.assert_status_code_is(import_response, 200) return import_response.json()["id"] - def create_workflow(self, workflow: Dict[str, Any], **create_kwds) -> str: + def create_workflow(self, workflow: dict[str, Any], **create_kwds) -> str: upload_response = self.create_workflow_response(workflow, **create_kwds) uploaded_workflow_id = upload_response.json()["id"] return uploaded_workflow_id - def create_workflow_response(self, workflow: Dict[str, Any], **create_kwds) -> Response: + def create_workflow_response(self, workflow: dict[str, Any], **create_kwds) -> Response: data = dict(workflow=json.dumps(workflow), **create_kwds) upload_response = self._post("workflows/upload", data=data) return upload_response @@ -2040,7 +2036,7 @@ def workflow_state(): return wait_on_state(workflow_state, desc="workflow invocation state", timeout=timeout, assert_ok=assert_ok) - def workflow_invocations(self, workflow_id: str, include_nested_invocations=True) -> List[Dict[str, Any]]: + def workflow_invocations(self, workflow_id: str, include_nested_invocations=True) -> list[dict[str, Any]]: response = self._get(f"workflows/{workflow_id}/invocations") api_asserts.assert_status_code_is(response, 200) return response.json() @@ -2050,7 +2046,7 @@ def cancel_invocation(self, invocation_id: str): api_asserts.assert_status_code_is(response, 200) return response.json() - def history_invocations(self, history_id: str, include_nested_invocations: bool = True) -> List[Dict[str, Any]]: + def history_invocations(self, history_id: str, include_nested_invocations: bool = True) -> list[dict[str, Any]]: history_invocations_response = self._get( "invocations", {"history_id": history_id, "include_nested_invocations": include_nested_invocations} ) @@ -2119,7 +2115,7 @@ def download_invocation_to_uri(self, invocation_id, target_uri, extension="tgz") def create_invocation_from_store_raw( self, history_id: str, - store_dict: Optional[Dict[str, Any]] = None, + store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None, model_store_format: Optional[str] = None, ) -> Response: @@ -2132,7 +2128,7 @@ def create_invocation_from_store_raw( def create_invocation_from_store( self, history_id: str, - store_dict: Optional[Dict[str, Any]] = None, + store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None, model_store_format: Optional[str] = None, ) -> Response: @@ -2246,7 +2242,7 @@ def download_workflow( history_id: Optional[str] = None, instance: Optional[bool] = None, ) -> dict: - params: 
Dict[str, Any] = {} + params: dict[str, Any] = {} if style is not None: params["style"] = style if history_id is not None: @@ -2265,7 +2261,7 @@ def invocation_to_request(self, invocation_id: str): api_asserts.assert_status_code_is_ok(request_response) return request_response.json() - def set_tags(self, workflow_id: str, tags: List[str]) -> None: + def set_tags(self, workflow_id: str, tags: list[str]) -> None: update_payload = {"tags": tags} response = self.update_workflow(workflow_id, update_payload) response.raise_for_status() @@ -2279,7 +2275,7 @@ def update_workflow(self, workflow_id: str, workflow_object: dict) -> Response: def refactor_workflow( self, workflow_id: str, actions: list, dry_run: Optional[bool] = None, style: Optional[str] = None ) -> Response: - data: Dict[str, Any] = dict( + data: dict[str, Any] = dict( actions=actions, ) if style is not None: @@ -2308,7 +2304,7 @@ def run_workflow( expected_response: int = 200, assert_ok: bool = True, client_convert: Optional[bool] = None, - extra_invocation_kwds: Optional[Dict[str, Any]] = None, + extra_invocation_kwds: Optional[dict[str, Any]] = None, round_trip_format_conversion: bool = False, invocations: int = 1, raw_yaml: bool = False, @@ -2350,7 +2346,7 @@ def run_workflow( inputs, label_map, has_uploads = load_data_dict( history_id, test_data_dict, self.dataset_populator, self.dataset_collection_populator ) - workflow_request: Dict[str, Any] = dict( + workflow_request: dict[str, Any] = dict( history=f"hist_id={history_id}", workflow_id=workflow_id, ) @@ -2404,7 +2400,7 @@ def _request_to_summary( self, history_id: str, workflow_id: str, - workflow_request: Dict[str, Any], + workflow_request: dict[str, Any], inputs, wait: bool, assert_ok: bool, @@ -2447,13 +2443,13 @@ def dump_workflow(self, workflow_id, style=None): else: print(json.dumps(raw_workflow, sort_keys=True, indent=2)) - def workflow_inputs(self, workflow_id: str) -> Dict[str, Dict[str, Any]]: + def workflow_inputs(self, workflow_id: str) -> dict[str, dict[str, Any]]: workflow_show_response = self._get(f"workflows/{workflow_id}") api_asserts.assert_status_code_is_ok(workflow_show_response) workflow_inputs = workflow_show_response.json()["inputs"] return workflow_inputs - def build_ds_map(self, workflow_id: str, label_map: Dict[str, Any]) -> str: + def build_ds_map(self, workflow_id: str, label_map: dict[str, Any]) -> str: workflow_inputs = self.workflow_inputs(workflow_id) ds_map = {} for key, value in workflow_inputs.items(): @@ -2464,20 +2460,20 @@ def build_ds_map(self, workflow_id: str, label_map: Dict[str, Any]) -> str: def setup_workflow_run( self, - workflow: Optional[Dict[str, Any]] = None, + workflow: Optional[dict[str, Any]] = None, inputs_by: str = "step_id", history_id: Optional[str] = None, workflow_id: Optional[str] = None, - ) -> Tuple[Dict[str, Any], str, str]: + ) -> tuple[dict[str, Any], str, str]: ds_entry = self.dataset_populator.ds_entry if not workflow_id: assert workflow, "If workflow_id not specified, must specify a workflow dictionary to load" workflow_id = self.create_workflow(workflow) if not history_id: history_id = self.dataset_populator.new_history() - hda1: Optional[Dict[str, Any]] = None - hda2: Optional[Dict[str, Any]] = None - label_map: Optional[Dict[str, Any]] = None + hda1: Optional[dict[str, Any]] = None + hda2: Optional[dict[str, Any]] = None + label_map: Optional[dict[str, Any]] = None if inputs_by != "url": hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3", wait=True) hda2 = 
self.dataset_populator.new_dataset(history_id, content="4 5 6", wait=True) @@ -2523,7 +2519,7 @@ def setup_workflow_run( return workflow_request, history_id, workflow_id - def get_invocation_jobs(self, invocation_id: str) -> List[Dict[str, Any]]: + def get_invocation_jobs(self, invocation_id: str) -> list[dict[str, Any]]: jobs_response = self._get("jobs", data={"invocation_id": invocation_id}) api_asserts.assert_status_code_is(jobs_response, 200) jobs = jobs_response.json() @@ -2620,7 +2616,7 @@ def __init__(self, galaxy_interactor): # Required for ImporterGalaxyInterface interface - so we can recursively import # nested workflows. - def import_workflow(self, workflow, **kwds) -> Dict[str, Any]: + def import_workflow(self, workflow, **kwds) -> dict[str, Any]: workflow_str = json.dumps(workflow, indent=4) data = { "workflow": workflow_str, @@ -2630,14 +2626,14 @@ def import_workflow(self, workflow, **kwds) -> Dict[str, Any]: assert upload_response.status_code == 200, upload_response.text return upload_response.json() - def import_tool(self, tool) -> Dict[str, Any]: + def import_tool(self, tool) -> dict[str, Any]: """Import a new dynamically defined tool Required to implement ImporterGalaxyInterface. """ return self.dataset_populator.create_tool(tool) - def build_module(self, step_type: str, content_id: Optional[str] = None, inputs: Optional[Dict[str, Any]] = None): + def build_module(self, step_type: str, content_id: Optional[str] = None, inputs: Optional[dict[str, Any]] = None): payload = {"inputs": inputs or {}, "type": step_type, "content_id": content_id} response = self._post("workflows/build_module", data=payload, json=True) assert response.status_code == 200, response @@ -2648,7 +2644,7 @@ def scaling_workflow_yaml(self, **kwd): has_workflow = yaml.dump(workflow_dict) return has_workflow - def _scale_workflow_dict(self, workflow_type="simple", **kwd) -> Dict[str, Any]: + def _scale_workflow_dict(self, workflow_type="simple", **kwd) -> dict[str, Any]: if workflow_type == "two_outputs": return self._scale_workflow_dict_two_outputs(**kwd) elif workflow_type == "wave_simple": @@ -2656,7 +2652,7 @@ def _scale_workflow_dict(self, workflow_type="simple", **kwd) -> Dict[str, Any]: else: return self._scale_workflow_dict_simple(**kwd) - def _scale_workflow_dict_simple(self, **kwd) -> Dict[str, Any]: + def _scale_workflow_dict_simple(self, **kwd) -> dict[str, Any]: collection_size = kwd.get("collection_size", 2) workflow_depth = kwd.get("workflow_depth", 3) @@ -2678,7 +2674,7 @@ def _scale_workflow_dict_simple(self, **kwd) -> Dict[str, Any]: } return workflow_dict - def _scale_workflow_dict_two_outputs(self, **kwd) -> Dict[str, Any]: + def _scale_workflow_dict_two_outputs(self, **kwd) -> dict[str, Any]: collection_size = kwd.get("collection_size", 10) workflow_depth = kwd.get("workflow_depth", 10) @@ -2704,7 +2700,7 @@ def _scale_workflow_dict_two_outputs(self, **kwd) -> Dict[str, Any]: } return workflow_dict - def _scale_workflow_dict_wave(self, **kwd) -> Dict[str, Any]: + def _scale_workflow_dict_wave(self, **kwd) -> dict[str, Any]: collection_size = kwd.get("collection_size", 10) workflow_depth = kwd.get("workflow_depth", 10) @@ -2730,7 +2726,7 @@ def _scale_workflow_dict_wave(self, **kwd) -> Dict[str, Any]: return workflow_dict @staticmethod - def _link(link: str, output_name: Optional[str] = None) -> Dict[str, Any]: + def _link(link: str, output_name: Optional[str] = None) -> dict[str, Any]: if output_name is not None: link = f"{str(link)}/{output_name}" return {"$link": link} @@ -2807,7 
+2803,7 @@ def run_cwl_job( self, artifact: str, job_path: Optional[str] = None, - job: Optional[Dict] = None, + job: Optional[dict] = None, test_data_directory: Optional[str] = None, history_id: Optional[str] = None, assert_ok: bool = True, @@ -2913,15 +2909,15 @@ def new_private_library(self, name): self.set_permissions(library_id, role_id) return library - def create_from_store_raw(self, payload: Dict[str, Any]) -> Response: + def create_from_store_raw(self, payload: dict[str, Any]) -> Response: using_requirement("admin") using_requirement("new_library") create_response = self.galaxy_interactor.post("libraries/from_store", payload, json=True, admin=True) return create_response def create_from_store( - self, store_dict: Optional[Dict[str, Any]] = None, store_path: Optional[str] = None - ) -> List[Dict[str, Any]]: + self, store_dict: Optional[dict[str, Any]] = None, store_path: Optional[str] = None + ) -> list[dict[str, Any]]: payload = _store_payload(store_dict=store_dict, store_path=store_path) create_response = self.create_from_store_raw(payload) api_asserts.assert_status_code_is_ok(create_response) @@ -3082,7 +3078,7 @@ def show_ld_raw(self, library_id: str, library_dataset_id: str) -> Response: response = self.galaxy_interactor.get(f"libraries/{library_id}/contents/{library_dataset_id}") return response - def show_ld(self, library_id: str, library_dataset_id: str) -> Dict[str, Any]: + def show_ld(self, library_id: str, library_dataset_id: str) -> dict[str, Any]: response = self.show_ld_raw(library_id, library_dataset_id) response.raise_for_status() return response.json() @@ -3105,13 +3101,13 @@ def new_library_dataset_in_private_library(self, library_name="private_dataset", return library, library_dataset - def get_library_contents(self, library_id: str) -> List[Dict[str, Any]]: + def get_library_contents(self, library_id: str) -> list[dict[str, Any]]: all_contents_response = self.galaxy_interactor.get(f"libraries/{library_id}/contents") api_asserts.assert_status_code_is(all_contents_response, 200) all_contents = all_contents_response.json() return all_contents - def get_library_contents_with_path(self, library_id: str, path: str) -> Dict[str, Any]: + def get_library_contents_with_path(self, library_id: str, path: str) -> dict[str, Any]: all_contents = self.get_library_contents(library_id) matching = [c for c in all_contents if c["name"] == path] if len(matching) == 0: @@ -3359,8 +3355,8 @@ def __create_payload_fetch(self, history_id: str, collection_type, ext="txt", ** payload["__files"] = kwds.pop("__files") return payload - def wait_for_fetched_collection(self, fetch_response: Union[Dict[str, Any], Response]): - fetch_response_dict: Dict[str, Any] + def wait_for_fetched_collection(self, fetch_response: Union[dict[str, Any], Response]): + fetch_response_dict: dict[str, Any] if isinstance(fetch_response, Response): fetch_response_dict = fetch_response.json() else: @@ -3464,12 +3460,12 @@ def _create_collection(self, payload: dict) -> Response: return create_response -LoadDataDictResponseT = Tuple[Dict[str, Any], Dict[str, Any], bool] +LoadDataDictResponseT = tuple[dict[str, Any], dict[str, Any], bool] def load_data_dict( history_id: str, - test_data: Dict[str, Any], + test_data: dict[str, Any], dataset_populator: BaseDatasetPopulator, dataset_collection_populator: BaseDatasetCollectionPopulator, ) -> LoadDataDictResponseT: @@ -3493,7 +3489,7 @@ def read_test_data(test_dict): if is_dict and ("elements" in value or value.get("collection_type")): elements_data = value.get("elements", 
[]) elements = [] - new_collection_kwds: Dict[str, Any] = {} + new_collection_kwds: dict[str, Any] = {} for i, element_data in enumerate(elements_data): # Adapt differences between test_data dict and fetch API description. if "name" not in element_data: @@ -3579,13 +3575,13 @@ def read_test_data(test_dict): def stage_inputs( galaxy_interactor: ApiTestInteractor, history_id: str, - job: Dict[str, Any], + job: dict[str, Any], use_path_paste: bool = True, use_fetch_api: bool = True, to_posix_lines: bool = True, tool_or_workflow: Literal["tool", "workflow"] = "workflow", job_dir: Optional[str] = None, -) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: +) -> tuple[dict[str, Any], list[dict[str, Any]]]: """Alternative to load_data_dict that uses production-style workflow inputs.""" kwds = {} if job_dir is not None: @@ -3602,8 +3598,8 @@ def stage_inputs( def stage_rules_example( - galaxy_interactor: ApiTestInteractor, history_id: str, example: Dict[str, Any] -) -> Dict[str, Any]: + galaxy_interactor: ApiTestInteractor, history_id: str, example: dict[str, Any] +) -> dict[str, Any]: """Wrapper around stage_inputs for staging collections defined by rules spec DSL.""" input_dict = example["test_data"].copy() input_dict["collection_type"] = input_dict.pop("type") @@ -3658,11 +3654,11 @@ def get_state(): def _store_payload( - store_dict: Optional[Dict[str, Any]] = None, store_path: Optional[str] = None, model_store_format: Optional[str] = None, -) -> Dict[str, Any]: - payload: Dict[str, Any] = {} +) -> dict[str, Any]: + payload: dict[str, Any] = {} # Ensure only one store method set. assert store_dict is not None or store_path is not None assert store_dict is None or store_path is None @@ -3685,7 +3681,7 @@ def __init__(self, dataset_populator: BaseDatasetPopulator, history_id: str, hda self._hda_id = hda_id @property - def details(self) -> Dict[str, Any]: + def details(self) -> dict[str, Any]: dataset_details = self._dataset_populator.get_history_dataset_details(self._history_id, dataset_id=self._hda_id) return dataset_details @@ -3694,8 +3690,7 @@ def contents(self) -> str: return self._dataset_populator.get_history_dataset_content(history_id=self._history_id, dataset_id=self._hda_id) def with_contents(self, expected_contents: str) -> Self: - contents = self.contents - if contents != expected_contents: + if (contents := self.contents) != expected_contents: raise AssertionError(f"Output dataset had contents {contents} but expected {expected_contents}") return self @@ -3706,16 +3701,14 @@ def with_contents_stripped(self, expected_contents: str) -> Self: return self def containing(self, expected_contents: str) -> Self: - contents = self.contents - if expected_contents not in contents: + if expected_contents not in (contents := self.contents): raise AssertionError( f"Output dataset had contents {contents} which does not contain the expected text {expected_contents}" ) return self def with_file_ext(self, expected_ext: str) -> Self: - ext = self.details["file_ext"] - if ext != expected_ext: + if (ext := self.details["file_ext"]) != expected_ext: raise AssertionError(f"Output dataset had file extension {ext}, not the expected extension {expected_ext}") return self @@ -3725,8 +3718,7 @@ def json(self) -> Any: return json.loads(contents) def with_json(self, expected_json: Any) -> Self: - json = self.json - if json != expected_json: + if (json := self.json) != expected_json: raise AssertionError(f"Output dataset contained JSON {json}, not {expected_json} as
expected") return self @@ -3747,14 +3739,14 @@ def __init__(self, dataset_populator: BaseDatasetPopulator, history_id: str, hdc self._hdca_id = hdca_id @property - def details(self) -> Dict[str, Any]: + def details(self) -> dict[str, Any]: collection_details = self._dataset_populator.get_history_collection_details( self._history_id, content_id=self._hdca_id ) return collection_details @property - def elements(self) -> List[Dict[str, Any]]: + def elements(self) -> list[dict[str, Any]]: return self.details["elements"] def with_n_elements(self, n: int) -> Self: @@ -3763,7 +3755,7 @@ def with_n_elements(self, n: int) -> Self: raise AssertionError("Collection contained {count} elements and not the expected {n} elements") return self - def with_element_dict(self, index: Union[str, int]) -> Dict[str, Any]: + def with_element_dict(self, index: Union[str, int]) -> dict[str, Any]: elements = self.elements if isinstance(index, int): element_dict = elements[index] @@ -3777,8 +3769,7 @@ def with_dataset_element(self, index: Union[str, int]) -> "DescribeToolExecution return DescribeToolExecutionOutput(self._dataset_populator, self._history_id, element_object["id"]) def named(self, expected_name: str) -> Self: - name = self.details["name"] - if name != expected_name: + if (name := self.details["name"]) != expected_name: raise AssertionError(f"Dataset collection named {name} did not have expected name {expected_name}.") return self @@ -3796,7 +3787,7 @@ def __init__(self, dataset_populator: BaseDatasetPopulator, history_id: str, job self._dataset_populator = dataset_populator self._history_id = history_id self._job_id = job_id - self._final_details: Optional[Dict[str, Any]] = None + self._final_details: Optional[dict[str, Any]] = None def _wait_for(self): if self._final_details is None: @@ -3804,7 +3795,7 @@ def _wait_for(self): self._final_details = self._dataset_populator.get_job_details(self._job_id).json() @property - def final_details(self) -> Dict[str, Any]: + def final_details(self) -> dict[str, Any]: self._wait_for() final_details = self._final_details assert final_details @@ -3817,8 +3808,7 @@ def final_state(self) -> str: return final_state def with_final_state(self, expected_state: str) -> Self: - final_state = self.final_state - if final_state != expected_state: + if (final_state := self.final_state) != expected_state: raise AssertionError( f"Expected job {self._job_id} to end with state {expected_state} but it ended with state {final_state}" ) @@ -3886,22 +3876,22 @@ def execute(self) -> "DescribeToolExecution": class DescribeToolInputs: _input_format: str = "legacy" - _inputs: Optional[Dict[str, Any]] + _inputs: Optional[dict[str, Any]] def __init__(self, input_format: str): self._input_format = input_format self._inputs = None - def any(self, inputs: Dict[str, Any]) -> Self: + def any(self, inputs: dict[str, Any]) -> Self: self._inputs = inputs return self - def flat(self, inputs: Dict[str, Any]) -> Self: + def flat(self, inputs: dict[str, Any]) -> Self: if self._input_format == "legacy": self._inputs = inputs return self - def nested(self, inputs: Dict[str, Any]) -> Self: + def nested(self, inputs: dict[str, Any]) -> Self: if self._input_format == "21.01": self._inputs = inputs return self @@ -3916,7 +3906,7 @@ class DescribeToolExecution: _history_id: Optional[str] = None _execute_response: Optional[Response] = None _input_format: Optional[str] = None - _inputs: Dict[str, Any] + _inputs: dict[str, Any] def __init__(self, dataset_populator: BaseDatasetPopulator, tool_id: str): 
self._dataset_populator = dataset_populator @@ -3930,7 +3920,7 @@ def in_history(self, has_history_id: Union[str, "TargetHistory"]) -> Self: self._history_id = has_history_id._history_id return self - def with_inputs(self, inputs: Union[DescribeToolInputs, Dict[str, Any]]) -> Self: + def with_inputs(self, inputs: Union[DescribeToolInputs, dict[str, Any]]) -> Self: if isinstance(inputs, DescribeToolInputs): self._inputs = inputs._inputs or {} self._input_format = inputs._input_format @@ -3939,7 +3929,7 @@ def with_inputs(self, inputs: Union[DescribeToolInputs, Dict[str, Any]]) -> Self self._input_format = "legacy" return self - def with_nested_inputs(self, inputs: Dict[str, Any]) -> Self: + def with_nested_inputs(self, inputs: dict[str, Any]) -> Self: self._inputs = inputs self._input_format = "21.01" return self @@ -3964,7 +3954,7 @@ def _ensure_executed(self) -> None: if self._execute_response is None: self._execute() - def _assert_executed_ok(self) -> Dict[str, Any]: + def _assert_executed_ok(self) -> dict[str, Any]: self._ensure_executed() execute_response = self._execute_response assert execute_response is not None @@ -4103,7 +4093,7 @@ def __init__(self, gi): self.dataset_populator = GiDatasetPopulator(gi) -ListContentsDescription = Union[List[str], List[Tuple[str, str]]] +ListContentsDescription = Union[list[str], list[tuple[str, str]]] class TargetHistory: @@ -4156,7 +4146,7 @@ def with_list_of_paired_and_unpaired(self) -> "HasSrcDict": ) ) - def with_pair(self, contents: Optional[List[str]] = None) -> "HasSrcDict": + def with_pair(self, contents: Optional[list[str]] = None) -> "HasSrcDict": return self._fetch_response( self._dataset_collection_populator.create_pair_in_history( self._history_id, contents=contents, direct_upload=True, wait=True @@ -4195,9 +4185,9 @@ class SrcDict(TypedDict): class HasSrcDict: - api_object: Union[str, Dict[str, Any]] + api_object: Union[str, dict[str, Any]] - def __init__(self, src_type: str, api_object: Union[str, Dict[str, Any]]): + def __init__(self, src_type: str, api_object: Union[str, dict[str, Any]]): self.src_type = src_type self.api_object = api_object diff --git a/lib/galaxy_test/base/uses_shed_api.py b/lib/galaxy_test/base/uses_shed_api.py index f617c5745f42..597920cb5fd2 100644 --- a/lib/galaxy_test/base/uses_shed_api.py +++ b/lib/galaxy_test/base/uses_shed_api.py @@ -2,8 +2,6 @@ from typing import ( Any, Callable, - Dict, - List, Optional, ) from unittest import SkipTest @@ -16,7 +14,7 @@ DEFAULT_TOOL_SHED_URL = "https://toolshed.g2.bx.psu.edu" -OperationT = Callable[[Dict[str, Any]], Response] +OperationT = Callable[[dict[str, Any]], Response] class UsesShedApi: @@ -24,10 +22,10 @@ class UsesShedApi: @abc.abstractmethod def galaxy_interactor(self) -> GalaxyInteractorApi: ... 
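# ---------------------------------------------------------------------------
# [Editor's aside -- an illustrative sketch, not part of the upstream patch.]
# Every hunk in this file is the same mechanical rewrite: typing.Dict /
# typing.List / typing.Tuple annotations become the builtin generics
# dict / list / tuple (PEP 585), which the interpreter accepts natively in
# annotations from Python 3.9 onward (Optional is kept, since the X | None
# spelling needs 3.10). A minimal, runnable example of the resulting style;
# the function and payload names here are hypothetical:

from typing import Any, Optional


def describe_payload(payload: dict[str, Any], keys: Optional[list[str]] = None) -> list[str]:
    """Render selected payload entries as key=value strings."""
    # Builtin generics parameterize directly; no typing.Dict/List import needed.
    selected = keys if keys is not None else list(payload)
    return [f"{key}={payload[key]!r}" for key in selected if key in payload]


# Usage: describe_payload({"owner": "devteam", "name": "bwa"})
# returns ["owner='devteam'", "name='bwa'"].
# [End aside.]
# ---------------------------------------------------------------------------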
- def delete_repo_request(self, payload: Dict[str, Any]) -> Response: + def delete_repo_request(self, payload: dict[str, Any]) -> Response: return self.galaxy_interactor._delete("tool_shed_repositories", data=payload, admin=True) - def install_repo_request(self, payload: Dict[str, Any]) -> Response: + def install_repo_request(self, payload: dict[str, Any]) -> Response: return self.galaxy_interactor._post( "tool_shed_repositories/new/install_repository_revision", data=payload, admin=True ) @@ -40,7 +38,7 @@ def repository_operation( changeset: str, tool_shed_url: str = DEFAULT_TOOL_SHED_URL, tool_panel_section_id: Optional[str] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: payload = {"tool_shed_url": tool_shed_url, "name": name, "owner": owner, "changeset_revision": changeset} if tool_panel_section_id: payload["tool_panel_section_id"] = tool_panel_section_id @@ -55,7 +53,7 @@ def install_repository( changeset: str, tool_shed_url: str = DEFAULT_TOOL_SHED_URL, tool_panel_section_id: Optional[str] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: try: return self.repository_operation( operation=self.install_repo_request, @@ -72,15 +70,15 @@ def install_repository( def uninstall_repository( self, owner: str, name: str, changeset: str, tool_shed_url: str = DEFAULT_TOOL_SHED_URL - ) -> Dict[str, Any]: + ) -> dict[str, Any]: return self.repository_operation( operation=self.delete_repo_request, owner=owner, name=name, changeset=changeset, tool_shed_url=tool_shed_url ) def index_repositories( self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None - ) -> List[Dict[str, Any]]: - params: Dict[str, str] = {} + ) -> list[dict[str, Any]]: + params: dict[str, str] = {} if owner is not None: params["owner"] = owner if name is not None: @@ -95,7 +93,7 @@ def index_repositories( def get_installed_repository_for( self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: index = self.index_repositories(owner, name, changeset) if len(index) == 0: return None @@ -104,7 +102,7 @@ def get_installed_repository_for( else: return index[0] - def get_installed_repository(self, id: str) -> Dict[str, Any]: + def get_installed_repository(self, id: str) -> dict[str, Any]: response = self.galaxy_interactor._get(f"tool_shed_repositories/{id}", admin=True) response.raise_for_status() return response.json() diff --git a/lib/galaxy_test/driver/driver_util.py b/lib/galaxy_test/driver/driver_util.py index 206a69f9e2e4..5a30f69f03c9 100644 --- a/lib/galaxy_test/driver/driver_util.py +++ b/lib/galaxy_test/driver/driver_util.py @@ -16,8 +16,6 @@ from pathlib import Path from typing import ( Any, - Dict, - List, Optional, ) from urllib.parse import urlparse @@ -798,8 +796,8 @@ class TestDriver: def __init__(self): """Setup tracked resources.""" - self.server_wrappers: List[ServerWrapper] = [] - self.temp_directories: List[str] = [] + self.server_wrappers: list[ServerWrapper] = [] + self.temp_directories: list[str] = [] def setup(self) -> None: """Called before tests are built.""" @@ -951,7 +949,7 @@ def _ensure_config_object(self, config_object): return config_object def run_tool_test( - self, tool_id: str, index: int = 0, resource_parameters: Optional[Dict[str, Any]] = None, **kwd + self, tool_id: str, index: int = 0, resource_parameters: Optional[dict[str, Any]] = None, **kwd ) -> None: if resource_parameters is None: resource_parameters = {} diff --git 
a/lib/galaxy_test/driver/integration_util.py b/lib/galaxy_test/driver/integration_util.py index 90c971a1e295..63849761e288 100644 --- a/lib/galaxy_test/driver/integration_util.py +++ b/lib/galaxy_test/driver/integration_util.py @@ -8,11 +8,10 @@ import os import re import sys +from collections.abc import Iterator from typing import ( ClassVar, - Iterator, Optional, - Type, TYPE_CHECKING, ) from unittest import ( @@ -207,7 +206,7 @@ class IntegrationTestCase(IntegrationInstance, TestCase): """Unit TestCase with utilities for spinning up Galaxy.""" -def integration_module_instance(clazz: Type[IntegrationInstance]): +def integration_module_instance(clazz: type[IntegrationInstance]): def _instance() -> Iterator[IntegrationInstance]: instance = clazz() instance.setUpClass() diff --git a/lib/galaxy_test/selenium/framework.py b/lib/galaxy_test/selenium/framework.py index 44660bcc0054..f7fd42097535 100644 --- a/lib/galaxy_test/selenium/framework.py +++ b/lib/galaxy_test/selenium/framework.py @@ -12,9 +12,7 @@ ) from typing import ( Any, - Dict, Optional, - Tuple, TYPE_CHECKING, ) @@ -622,7 +620,7 @@ def workflow_upload_yaml_with_random_name(self, content: str, **kwds) -> str: workflow_populator.upload_yaml_workflow(content, name=name, **kwds) return name - def workflow_run_setup_inputs(self, content: Optional[str]) -> Tuple[str, Dict[str, Any]]: + def workflow_run_setup_inputs(self, content: Optional[str]) -> tuple[str, dict[str, Any]]: history_id = self.current_history_id() if content: yaml_content = yaml.safe_load(content) diff --git a/lib/galaxy_test/selenium/test_published_workflows.py b/lib/galaxy_test/selenium/test_published_workflows.py index 087101a12c2b..12af9cd254ed 100644 --- a/lib/galaxy_test/selenium/test_published_workflows.py +++ b/lib/galaxy_test/selenium/test_published_workflows.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, ) from .framework import ( @@ -30,7 +29,7 @@ def setup_shared_state(self): self.id_2 = workflow_2["id"] self.logout_if_needed() - def _new_public_workflow(self) -> Dict[str, Any]: + def _new_public_workflow(self) -> dict[str, Any]: name = self._get_random_name() contents = self.workflow_populator.load_workflow(name) workflow_response = self.workflow_populator.create_workflow_response(contents, publish=True) diff --git a/lib/galaxy_test/selenium/test_tool_form.py b/lib/galaxy_test/selenium/test_tool_form.py index 4d49d09523f8..4e4c24045dd2 100644 --- a/lib/galaxy_test/selenium/test_tool_form.py +++ b/lib/galaxy_test/selenium/test_tool_form.py @@ -1,8 +1,6 @@ import json from typing import ( Any, - Dict, - List, ) import pytest @@ -119,7 +117,7 @@ def test_repeat_reordering(self): self.home() self.tool_open("text_repeat") - def assert_input_order(inputs: List[str]): + def assert_input_order(inputs: list[str]): for index, input in enumerate(inputs): parameter_input = self.components.tool_form.parameter_input(parameter=f"the_repeat_{index}|texttest") parameter_value = parameter_input.wait_for_value() @@ -552,7 +550,7 @@ def test_run_apply_rules_tutorial(self): self.history_multi_view_display_collection_contents(32, "list:list") self.screenshot("rules_apply_rules_example_4_15_filtered_and_nested") - def _apply_rules_and_check(self, example: Dict[str, Any]) -> None: + def _apply_rules_and_check(self, example: dict[str, Any]) -> None: rule_builder = self.components.rule_builder self.home() diff --git a/lib/tool_shed/dependencies/attribute_handlers.py b/lib/tool_shed/dependencies/attribute_handlers.py index c9f91794a147..2182d7ec17d8 100644 --- 
a/lib/tool_shed/dependencies/attribute_handlers.py +++ b/lib/tool_shed/dependencies/attribute_handlers.py @@ -1,10 +1,7 @@ import copy import logging from typing import ( - Dict, - List, Optional, - Tuple, TYPE_CHECKING, ) @@ -227,8 +224,8 @@ def handle_tag_attributes(self, tool_dependencies_config): def _create_element( tag: str, - attributes: Optional[Dict[str, str]] = None, - sub_elements: Optional[Dict[str, List[Tuple[str, str]]]] = None, + attributes: Optional[dict[str, str]] = None, + sub_elements: Optional[dict[str, list[tuple[str, str]]]] = None, ) -> Optional[Element]: """ Create a new element whose tag is the value of the received tag, and whose attributes are all diff --git a/lib/tool_shed/dependencies/repository/relation_builder.py b/lib/tool_shed/dependencies/repository/relation_builder.py index 61b876568457..2aa24303f425 100644 --- a/lib/tool_shed/dependencies/repository/relation_builder.py +++ b/lib/tool_shed/dependencies/repository/relation_builder.py @@ -1,10 +1,7 @@ import logging from typing import ( Any, - Dict, - List, Optional, - Tuple, TYPE_CHECKING, ) @@ -29,17 +26,17 @@ class RelationBuilder: def __init__(self, app: "ToolShedApp", repository, repository_metadata, tool_shed_url, trans=None): - self.all_repository_dependencies: Dict[str, Any] = {} + self.all_repository_dependencies: dict[str, Any] = {} self.app = app - self.circular_repository_dependencies: List[Tuple] = [] + self.circular_repository_dependencies: list[tuple] = [] self.repository = repository self.repository_metadata = repository_metadata - self.handled_key_rd_dicts: List[Dict[str, List[str]]] = [] - self.key_rd_dicts_to_be_processed: List[Dict[str, List[str]]] = [] + self.handled_key_rd_dicts: list[dict[str, list[str]]] = [] + self.key_rd_dicts_to_be_processed: list[dict[str, list[str]]] = [] self.tool_shed_url = tool_shed_url self.trans = trans - def can_add_to_key_rd_dicts(self, key_rd_dict, key_rd_dicts: List[Dict[str, List[str]]]): + def can_add_to_key_rd_dicts(self, key_rd_dict, key_rd_dicts: list[dict[str, list[str]]]): """Handle the case where an update to the changeset revision was done.""" k = next(iter(key_rd_dict)) rd = key_rd_dict[k] @@ -52,13 +49,13 @@ def can_add_to_key_rd_dicts(self, key_rd_dict, key_rd_dicts: List[Dict[str, List return False return True - def filter_only_if_compiling_contained_td(self, key_rd_dict: Dict[str, Any]): + def filter_only_if_compiling_contained_td(self, key_rd_dict: dict[str, Any]): """ Return a copy of the received key_rd_dict with repository dependencies that are needed only_if_compiling_contained_td filtered out of the list of repository dependencies for each rd_key. 
""" - filtered_key_rd_dict: Dict[str, Any] = {} + filtered_key_rd_dict: dict[str, Any] = {} for rd_key, required_rd_tup in key_rd_dict.items(): ( tool_shed, @@ -211,8 +208,8 @@ def get_repository_dependency_as_key(self, repository_dependency): tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td ) - def get_updated_changeset_revisions_for_repository_dependencies(self, key_rd_dicts: List[Dict[str, Any]]): - updated_key_rd_dicts: List[Dict[str, Any]] = [] + def get_updated_changeset_revisions_for_repository_dependencies(self, key_rd_dicts: list[dict[str, Any]]): + updated_key_rd_dicts: list[dict[str, Any]] = [] for key_rd_dict in key_rd_dicts: key = next(iter(key_rd_dict)) repository_dependency = key_rd_dict[key] @@ -318,7 +315,7 @@ def handle_current_repository_dependency(self, current_repository_key: str): return self.get_repository_dependencies_for_changeset_revision() def handle_key_rd_dicts_for_repository( - self, current_repository_key, repository_key_rd_dicts: List[Dict[str, List[str]]] + self, current_repository_key, repository_key_rd_dicts: list[dict[str, list[str]]] ): key_rd_dict = repository_key_rd_dicts.pop(0) repository_dependency = key_rd_dict[current_repository_key] @@ -395,7 +392,7 @@ def in_circular_repository_dependencies(self, repository_key_rd_dict): return True return False - def in_key_rd_dicts(self, key_rd_dict: Dict[str, List[str]], key_rd_dicts: List[Dict[str, List[str]]]): + def in_key_rd_dicts(self, key_rd_dict: dict[str, list[str]], key_rd_dicts: list[dict[str, list[str]]]): """Return True if key_rd_dict is contained in the list of key_rd_dicts.""" k = next(iter(key_rd_dict)) v = key_rd_dict[k] @@ -439,7 +436,7 @@ def populate_repository_dependency_objects_for_processing( more repository dependencies, so this method is repeatedly called until all repository dependencies have been discovered. 
""" - current_repository_key_rd_dicts: List[Dict[str, Any]] = [] + current_repository_key_rd_dicts: list[dict[str, Any]] = [] filtered_current_repository_key_rd_dicts = [] for rd_tup in repository_dependencies_dict["repository_dependencies"]: new_key_rd_dict = {current_repository_key: rd_tup} @@ -506,11 +503,11 @@ def prune_invalid_repository_dependencies(self, repository_dependencies): valid_repository_dependencies["root_key"] = root_key return valid_repository_dependencies - def remove_from_key_rd_dicts(self, key_rd_dict: Dict[str, List[str]], key_rd_dicts: List[Dict[str, List[str]]]): + def remove_from_key_rd_dicts(self, key_rd_dict: dict[str, list[str]], key_rd_dicts: list[dict[str, list[str]]]): """Eliminate the key_rd_dict from the list of key_rd_dicts if it is contained in the list.""" k = next(iter(key_rd_dict)) v = key_rd_dict[k] - clean_key_rd_dicts: List[Dict[str, List[str]]] = [] + clean_key_rd_dicts: list[dict[str, list[str]]] = [] for krd_dict in key_rd_dicts: key = next(iter(krd_dict)) val = krd_dict[key] @@ -519,9 +516,9 @@ def remove_from_key_rd_dicts(self, key_rd_dict: Dict[str, List[str]], key_rd_dic clean_key_rd_dicts.append(krd_dict) return clean_key_rd_dicts - def remove_repository_dependency_reference_to_self(self, key_rd_dicts: List[Dict[str, Any]]): + def remove_repository_dependency_reference_to_self(self, key_rd_dicts: list[dict[str, Any]]): """Remove all repository dependencies that point to a revision within its own repository.""" - clean_key_rd_dicts: List[Dict[str, Any]] = [] + clean_key_rd_dicts: list[dict[str, Any]] = [] key = next(iter(key_rd_dicts[0])) repository_tup = key.split(container_util.STRSEP) ( diff --git a/lib/tool_shed/managers/categories.py b/lib/tool_shed/managers/categories.py index 869e8a40fac3..6b62c91fea9a 100644 --- a/lib/tool_shed/managers/categories.py +++ b/lib/tool_shed/managers/categories.py @@ -1,8 +1,6 @@ from typing import ( Any, Callable, - Dict, - List, ) from sqlalchemy import select @@ -43,19 +41,19 @@ def create(self, trans: ProvidesUserContext, category_request: CreateCategoryReq else: raise exceptions.RequestParameterMissingException('Missing required parameter "name".') - def index_db(self, trans: ProvidesUserContext, deleted: bool) -> List[Category]: + def index_db(self, trans: ProvidesUserContext, deleted: bool) -> list[Category]: if deleted and not trans.user_is_admin: raise exceptions.AdminRequiredException("Only administrators can query deleted categories.") return list(get_categories_by_deleted(trans.sa_session, deleted)) - def index(self, trans: ProvidesUserContext, deleted: bool) -> List[Dict[str, Any]]: - category_dicts: List[Dict[str, Any]] = [] + def index(self, trans: ProvidesUserContext, deleted: bool) -> list[dict[str, Any]]: + category_dicts: list[dict[str, Any]] = [] for category in self.index_db(trans, deleted): category_dict = self.to_dict(category) category_dicts.append(category_dict) return category_dicts - def to_dict(self, category: Category) -> Dict[str, Any]: + def to_dict(self, category: Category) -> dict[str, Any]: category_dict = category.to_dict(view="collection", value_mapper=get_value_mapper(self.app)) category_dict["repositories"] = category.active_repository_count() category_dict["url"] = web.url_for( @@ -74,7 +72,7 @@ def to_model(self, category: Category) -> CategoryResponse: ) -def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: +def get_value_mapper(app: ToolShedApp) -> dict[str, Callable]: value_mapper = {"id": app.security.encode_id} return value_mapper diff --git 
a/lib/tool_shed/managers/model_cache.py b/lib/tool_shed/managers/model_cache.py index 9ce8c205e901..039a63285ce5 100644 --- a/lib/tool_shed/managers/model_cache.py +++ b/lib/tool_shed/managers/model_cache.py @@ -2,9 +2,7 @@ import os from typing import ( Any, - Dict, Optional, - Type, TypeVar, ) @@ -12,20 +10,20 @@ from galaxy.util.hash_util import md5_hash_str -RAW_CACHED_JSON = Dict[str, Any] +RAW_CACHED_JSON = dict[str, Any] -def hash_model(model_class: Type[BaseModel]) -> str: +def hash_model(model_class: type[BaseModel]) -> str: return md5_hash_str(json.dumps(model_class.model_json_schema())) -MODEL_HASHES: Dict[Type[BaseModel], str] = {} +MODEL_HASHES: dict[type[BaseModel], str] = {} M = TypeVar("M", bound=BaseModel) -def ensure_model_has_hash(model_class: Type[BaseModel]) -> None: +def ensure_model_has_hash(model_class: type[BaseModel]) -> None: if model_class not in MODEL_HASHES: MODEL_HASHES[model_class] = hash_model(model_class) @@ -38,20 +36,20 @@ def __init__(self, cache_directory: str): os.makedirs(cache_directory) self._cache_directory = cache_directory - def _cache_target(self, model_class: Type[M], tool_id: str, tool_version: str) -> str: + def _cache_target(self, model_class: type[M], tool_id: str, tool_version: str) -> str: ensure_model_has_hash(model_class) # consider breaking this into multiple directories... cache_target = os.path.join(self._cache_directory, MODEL_HASHES[model_class], tool_id, tool_version) return cache_target - def get_cache_entry_for(self, model_class: Type[M], tool_id: str, tool_version: str) -> Optional[M]: + def get_cache_entry_for(self, model_class: type[M], tool_id: str, tool_version: str) -> Optional[M]: cache_target = self._cache_target(model_class, tool_id, tool_version) if not os.path.exists(cache_target): return None with open(cache_target) as f: return model_class.model_validate(json.load(f)) - def has_cached_entry_for(self, model_class: Type[M], tool_id: str, tool_version: str) -> bool: + def has_cached_entry_for(self, model_class: type[M], tool_id: str, tool_version: str) -> bool: cache_target = self._cache_target(model_class, tool_id, tool_version) return os.path.exists(cache_target) diff --git a/lib/tool_shed/managers/repositories.py b/lib/tool_shed/managers/repositories.py index 65c58e6eef59..0cdc2d50ec1c 100644 --- a/lib/tool_shed/managers/repositories.py +++ b/lib/tool_shed/managers/repositories.py @@ -10,8 +10,6 @@ Any, Callable, cast, - Dict, - List, Optional, Union, ) @@ -155,7 +153,7 @@ class UpdatesRequest(BaseModel): hexlify: bool = True -def check_updates(app: ToolShedApp, request: UpdatesRequest) -> Union[str, Dict[str, Any]]: +def check_updates(app: ToolShedApp, request: UpdatesRequest) -> Union[str, dict[str, Any]]: name = request.name owner = request.owner changeset_revision = request.changeset_revision @@ -217,7 +215,7 @@ def guid_to_repository(app: ToolShedApp, tool_id: str) -> Repository: return _get_repository_by_name_and_owner(app.model.context, name, owner) -def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]: +def index_tool_ids(app: ToolShedApp, tool_ids: list[str]) -> dict[str, Any]: repository_found = [] all_metadata = {} for tool_id in tool_ids: @@ -231,7 +229,7 @@ def index_tool_ids(app: ToolShedApp, tool_ids: List[str]) -> Dict[str, Any]: continue for changeset, changehash in repository.installable_revisions(app): metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changehash) - tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools") + 
tools: Optional[list[dict[str, Any]]] = metadata.metadata.get("tools") if not tools: log.warning(f"Repository {owner}/{name}/{changehash} does not contain valid tools, skipping") continue @@ -281,7 +279,7 @@ class PaginatedIndexRequest(IndexRequest): page_size: int -def index_repositories(app: ToolShedApp, index_request: IndexRequest) -> List[Repository]: +def index_repositories(app: ToolShedApp, index_request: IndexRequest) -> list[Repository]: session = app.model.context return list(session.scalars(_get_repositories_by_name_and_owner_and_deleted(app.security, index_request))) @@ -390,7 +388,7 @@ def get_install_info(trans: ProvidesRepositoriesContext, name, owner, changeset_ return {}, {}, {} -def get_value_mapper(app: ToolShedApp) -> Dict[str, Callable]: +def get_value_mapper(app: ToolShedApp) -> dict[str, Callable]: value_mapper = { "id": app.security.encode_id, "repository_id": app.security.encode_id, @@ -401,7 +399,7 @@ def get_ordered_installable_revisions( app: ToolShedApp, name: Optional[str], owner: Optional[str], tsr_id: Optional[str] -) -> List[str]: +) -> list[str]: eagerload_columns = [Repository.downloadable_revisions] if None not in [name, owner]: # Get the repository information. @@ -420,7 +418,7 @@ def get_ordered_installable_revisions( return [revision[1] for revision in repository.installable_revisions(app, sort_revisions=True)] -def get_repository_metadata_dict(app: ToolShedApp, id: str, recursive: bool, downloadable_only: bool) -> Dict[str, Any]: +def get_repository_metadata_dict(app: ToolShedApp, id: str, recursive: bool, downloadable_only: bool) -> dict[str, Any]: all_metadata = {} repository = get_repository_in_tool_shed(app, id, eagerload_columns=[Repository.downloadable_revisions]) for changeset, changehash in get_metadata_revisions( @@ -551,7 +549,7 @@ def handle_repository(trans, repository, results): start_time = strftime("%Y-%m-%d %H:%M:%S") results = dict(start_time=start_time, repository_status=[], successful_count=0, unsuccessful_count=0) - handled_repository_ids: List[str] = [] + handled_repository_ids: list[str] = [] encoded_ids_to_skip = request.encoded_ids_to_skip or [] if trans.user_is_admin: my_writable = request.my_writable @@ -613,7 +611,7 @@ def create_repository(trans: ProvidesUserContext, request: CreateRepositoryReque return repo -def to_element_dict(app, repository: Repository, include_categories: bool = False) -> Dict[str, Any]: +def to_element_dict(app, repository: Repository, include_categories: bool = False) -> dict[str, Any]: value_mapper = get_value_mapper(app) repository_dict = repository.to_dict(view="element", value_mapper=value_mapper) if include_categories: @@ -630,7 +628,7 @@ def repositories_by_category( installable: bool = True, ): category = get_category(app, category_id) - category_dict: Dict[str, Any] + category_dict: dict[str, Any] if category is None: category_dict = dict(message=f"Unable to locate category record for id {category_id}.", status="error") return category_dict diff --git a/lib/tool_shed/managers/tools.py b/lib/tool_shed/managers/tools.py index c0c54379624c..d52e020742a3 100644 --- a/lib/tool_shed/managers/tools.py +++ b/lib/tool_shed/managers/tools.py @@ -2,10 +2,7 @@ import tempfile from collections import namedtuple from typing import ( - Dict, - List, Optional, - Tuple, ) from galaxy import exceptions @@ -39,7 +36,7 @@ from .repositories import get_repository_revision_metadata_model from .trs import trs_tool_id_to_repository_metadata
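# ---------------------------------------------------------------------------
# [Editor's aside -- an illustrative sketch, not part of the upstream patch.]
# Besides the dict/list annotation rewrites, this branch also applies
# walrus-operator conversions (see the populators.py hunks earlier in this
# diff): a temporary that is assigned and then immediately tested collapses
# into a single assignment expression, available since Python 3.8 (PEP 572).
# A minimal before/after sketch with hypothetical names:

def first_error_before(messages: list[str]) -> str:
    errors = [m for m in messages if m.startswith("ERROR")]
    if errors:
        return errors[0]
    return "no errors"


def first_error_after(messages: list[str]) -> str:
    # Same behavior, one statement fewer: bind and test in one expression.
    if errors := [m for m in messages if m.startswith("ERROR")]:
        return errors[0]
    return "no errors"


# Usage: both variants return "ERROR: boom" for ["ok", "ERROR: boom"].
# [End aside.]
# ---------------------------------------------------------------------------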
-STOCK_TOOL_SOURCES: Optional[Dict[str, Dict[str, ToolSource]]] = None +STOCK_TOOL_SOURCES: Optional[dict[str, dict[str, ToolSource]]] = None def search(trans: SessionRequestContext, q: str, page: int = 1, page_size: int = 10) -> dict: @@ -83,7 +80,7 @@ def search(trans: SessionRequestContext, q: str, page: int = 1, page_size: int = def get_repository_metadata_tool_dict( trans: ProvidesRepositoriesContext, trs_tool_id: str, tool_version: str -) -> Tuple[RepositoryMetadata, RepositoryMetadataToolDict]: +) -> tuple[RepositoryMetadata, RepositoryMetadataToolDict]: if trs_tool_id.count("~") < 2: raise RequestParameterInvalidException(f"Invalid TRS tool id ({trs_tool_id})") @@ -93,7 +90,7 @@ def get_repository_metadata_tool_dict( raise ObjectNotFound() tool_version_repository_metadata: RepositoryMetadata = metadata_by_version[tool_version] raw_metadata = tool_version_repository_metadata.metadata - tool_dicts: List[RepositoryMetadataToolDict] = raw_metadata.get("tools", []) + tool_dicts: list[RepositoryMetadataToolDict] = raw_metadata.get("tools", []) for tool_dict in tool_dicts: if tool_dict["id"] != tool_id or tool_dict["version"] != tool_version: continue @@ -130,7 +127,7 @@ def parsed_tool_model_for( def tool_source_for( trans: ProvidesRepositoriesContext, trs_tool_id: str, tool_version: str, repository_clone_url: Optional[str] = None -) -> Tuple[ToolSource, Optional[RepositoryMetadata]]: +) -> tuple[ToolSource, Optional[RepositoryMetadata]]: if "~" in trs_tool_id: return _shed_tool_source_for(trans, trs_tool_id, tool_version, repository_clone_url) else: @@ -142,7 +139,7 @@ def tool_source_for( def _shed_tool_source_for( trans: ProvidesRepositoriesContext, trs_tool_id: str, tool_version: str, repository_clone_url: Optional[str] = None -) -> Tuple[ToolSource, RepositoryMetadata]: +) -> tuple[ToolSource, RepositoryMetadata]: rval = get_repository_metadata_tool_dict(trans, trs_tool_id, tool_version) repository_metadata, tool_version_metadata = rval tool_config = tool_version_metadata["tool_config"] diff --git a/lib/tool_shed/managers/trs.py b/lib/tool_shed/managers/trs.py index ebb74220ccd3..d2ba03ccd25d 100644 --- a/lib/tool_shed/managers/trs.py +++ b/lib/tool_shed/managers/trs.py @@ -1,10 +1,7 @@ from typing import ( Any, cast, - Dict, - List, Optional, - Tuple, ) from starlette.datastructures import URL @@ -70,7 +67,7 @@ def service_info(app: ToolShedApp, request_url: URL): ) -def tool_classes() -> List[ToolClass]: +def tool_classes() -> list[ToolClass]: return [ToolClass(id="galaxy_tool", name="Galaxy Tool", description="Galaxy XML Tools")] @@ -86,11 +83,11 @@ def trs_tool_id_to_repository(trans: ProvidesRepositoriesContext, trs_tool_id: s def get_repository_metadata_by_tool_version( app: ToolShedApp, repository: Repository, tool_id: str -) -> Dict[str, RepositoryMetadata]: +) -> dict[str, RepositoryMetadata]: versions = {} for _, changeset in repository.installable_revisions(app): metadata = get_current_repository_metadata_for_changeset_revision(app, repository, changeset) - tools: Optional[List[Dict[str, Any]]] = metadata.metadata.get("tools") + tools: Optional[list[dict[str, Any]]] = metadata.metadata.get("tools") if not tools: continue for tool_metadata in tools: @@ -100,21 +97,21 @@ def get_repository_metadata_by_tool_version( return versions -def get_tools_for(repository_metadata: RepositoryMetadata) -> List[Dict[str, Any]]: - tools: Optional[List[Dict[str, Any]]] = repository_metadata.metadata.get("tools") +def get_tools_for(repository_metadata: RepositoryMetadata) -> list[dict[str, Any]]:
+ tools: Optional[list[dict[str, Any]]] = repository_metadata.metadata.get("tools") assert tools return tools def trs_tool_id_to_repository_metadata( trans: ProvidesRepositoriesContext, trs_tool_id: str -) -> Tuple[Repository, Dict[str, RepositoryMetadata]]: +) -> tuple[Repository, dict[str, RepositoryMetadata]]: tool_guid = decode_identifier(trans.repositories_hostname, trs_tool_id) tool_guid = remove_protocol_and_user_from_clone_url(tool_guid) _, tool_id = tool_guid.rsplit("/", 1) repository = guid_to_repository(trans.app, tool_guid) app = trans.app - versions: Dict[str, RepositoryMetadata] = get_repository_metadata_by_tool_version(app, repository, tool_id) + versions: dict[str, RepositoryMetadata] = get_repository_metadata_by_tool_version(app, repository, tool_id) if not versions: raise ObjectNotFound() @@ -128,11 +125,11 @@ def get_tool(trans: ProvidesRepositoriesContext, trs_tool_id: str) -> Tool: repository, metadata_by_version = repo_metadata repo_owner = repository.user.username - aliases: List[str] = [guid] + aliases: list[str] = [guid] hostname = remove_protocol_and_user_from_clone_url(trans.repositories_hostname) url = f"https://{hostname}/repos/{repo_owner}/{repository.name}" - versions: List[ToolVersion] = [] + versions: list[ToolVersion] = [] for tool_version_str, _ in metadata_by_version.items(): version_url = url # TODO: tool_version = ToolVersion( diff --git a/lib/tool_shed/managers/users.py b/lib/tool_shed/managers/users.py index 6d10e8b20f12..070c3dcb50fe 100644 --- a/lib/tool_shed/managers/users.py +++ b/lib/tool_shed/managers/users.py @@ -1,5 +1,3 @@ -from typing import List - from sqlalchemy import select from galaxy.exceptions import RequestParameterInvalidException @@ -17,8 +15,8 @@ ) -def index(app: ToolShedApp, deleted: bool) -> List[ApiUser]: - users: List[ApiUser] = [] +def index(app: ToolShedApp, deleted: bool) -> list[ApiUser]: + users: list[ApiUser] = [] for user in get_users_by_deleted(app.model.context, User, deleted): users.append(get_api_user(app, user)) return users diff --git a/lib/tool_shed/metadata/repository_metadata_manager.py b/lib/tool_shed/metadata/repository_metadata_manager.py index e9e03f101229..13031959ff73 100644 --- a/lib/tool_shed/metadata/repository_metadata_manager.py +++ b/lib/tool_shed/metadata/repository_metadata_manager.py @@ -2,8 +2,6 @@ import tempfile from typing import ( Any, - Dict, - List, Optional, ) @@ -52,7 +50,7 @@ class ToolShedMetadataGenerator(BaseMetadataGenerator): # why is mypy making me re-annotate these things from the base class, it didn't # when they were in the same file - invalid_file_tups: List[InvalidFileT] + invalid_file_tups: list[InvalidFileT] repository_clone_url: Optional[str] def __init__( @@ -61,7 +59,7 @@ def __init__( repository: Optional[Repository] = None, changeset_revision: Optional[str] = None, repository_clone_url: Optional[str] = None, - shed_config_dict: Optional[Dict[str, Any]] = None, + shed_config_dict: Optional[dict[str, Any]] = None, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False, @@ -102,7 +100,7 @@ def __init__( self.invalid_file_tups = [] self.sa_session = trans.app.model.session - def initial_metadata_dict(self) -> Dict[str, Any]: + def initial_metadata_dict(self) -> dict[str, Any]: return {} def set_repository( @@ -794,7 +792,7 @@ def reset_all_metadata_on_repository_in_tool_shed(self, repository_clone_url=Non # The list of changeset_revisions refers to repository_metadata records that have been created # or updated. 
When the following loop completes, we'll delete all repository_metadata records
         # for this repository that do not have a changeset_revision value in this list.
-        changeset_revisions: List[Optional[str]] = []
+        changeset_revisions: list[Optional[str]] = []
         # When a new repository_metadata record is created, it always uses the values of
         # metadata_changeset_revision and metadata_dict.
         metadata_changeset_revision = None
@@ -1051,7 +1049,7 @@ def set_repository_metadata_due_to_new_tip(self, host, content_alert_str=None, *
     return status, error_message


-def _get_changeset_revisions_that_contain_tools(app: "ToolShedApp", repo, repository) -> List[str]:
+def _get_changeset_revisions_that_contain_tools(app: "ToolShedApp", repo, repository) -> list[str]:
     changeset_revisions_that_contain_tools = []
     for changeset in repo.changelog:
         changeset_revision = str(repo[changeset])
diff --git a/lib/tool_shed/test/base/api.py b/lib/tool_shed/test/base/api.py
index fc2d93f9a0cc..73349651e447 100644
--- a/lib/tool_shed/test/base/api.py
+++ b/lib/tool_shed/test/base/api.py
@@ -2,7 +2,6 @@
 from functools import wraps
 from typing import (
     Any,
-    Dict,
     Optional,
 )

@@ -96,7 +95,7 @@ def _get_driver(self, embedded_driver):

 class ShedGalaxyInteractorApi(GalaxyInteractorApi):
     def __init__(self, galaxy_url: str):
-        interactor_kwds: Dict[str, Any] = {}
+        interactor_kwds: dict[str, Any] = {}
         interactor_kwds["galaxy_url"] = galaxy_url
         interactor_kwds["master_api_key"] = get_galaxy_admin_api_key()
         interactor_kwds["api_key"] = get_galaxy_user_key()
diff --git a/lib/tool_shed/test/base/api_util.py b/lib/tool_shed/test/base/api_util.py
index e90a495aa69b..30df5c37033c 100644
--- a/lib/tool_shed/test/base/api_util.py
+++ b/lib/tool_shed/test/base/api_util.py
@@ -4,7 +4,6 @@
 from typing import (
     Any,
     Callable,
-    Dict,
     Optional,
 )
 from urllib.parse import urljoin
@@ -94,7 +93,7 @@ def api_version(self) -> Literal["v1", "v2"]:
         api_version = config.get("api_version", "v1")
         return api_version

-    def version(self) -> Dict[str, Any]:
+    def version(self) -> dict[str, Any]:
         response = self.get("version")
         response.raise_for_status()
         return response.json()
@@ -104,7 +103,7 @@ def hg_url_base(self):
         return self.url


-def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any], assert_ok=True) -> Dict[str, Any]:
+def create_user(admin_interactor: ShedApiInteractor, user_dict: dict[str, Any], assert_ok=True) -> dict[str, Any]:
     email = user_dict["email"]
     if "password" not in user_dict:
         user_dict["password"] = "testpass"
@@ -118,7 +117,7 @@ def create_user(admin_interactor: ShedApiInteractor, user_dict: Dict[str, Any],

 def ensure_user_with_email(
     admin_api_interactor: ShedApiInteractor, email: str, password: Optional[str]
-) -> Dict[str, Any]:
+) -> dict[str, Any]:
     all_users_response = admin_api_interactor.get("users")
     try:
         all_users_response.raise_for_status()
diff --git a/lib/tool_shed/test/base/browser.py b/lib/tool_shed/test/base/browser.py
index f20804eecc09..ee8e9875d6f8 100644
--- a/lib/tool_shed/test/base/browser.py
+++ b/lib/tool_shed/test/base/browser.py
@@ -1,6 +1,5 @@
 import abc
 from typing import (
-    List,
     Union,
 )

@@ -9,7 +8,7 @@
 class ShedBrowser(metaclass=abc.ABCMeta):
     @abc.abstractmethod
-    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+    def visit_url(self, url: str, allowed_codes: list[int]) -> str:
         """Navigate to the supplied URL."""

     @abc.abstractmethod
@@ -42,9 +41,9 @@ def is_twill(self) -> bool:
         """Return whether this is a twill browser."""

     @abc.abstractmethod
-    def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None:
+    def edit_repository_categories(self, categories_to_add: list[str], categories_to_remove: list[str]) -> None:
         """Select some new categories and then restore the component."""

     @abc.abstractmethod
-    def grant_users_access(self, usernames: List[str]) -> None:
+    def grant_users_access(self, usernames: list[str]) -> None:
         """Select users to grant access to."""
diff --git a/lib/tool_shed/test/base/playwrightbrowser.py b/lib/tool_shed/test/base/playwrightbrowser.py
index cc32cb49f2a5..62c55917eee4 100644
--- a/lib/tool_shed/test/base/playwrightbrowser.py
+++ b/lib/tool_shed/test/base/playwrightbrowser.py
@@ -1,5 +1,4 @@
 import time
-from typing import List

 from playwright.sync_api import (
     expect,
@@ -26,7 +25,7 @@ class PlaywrightShedBrowser(ShedBrowser):
     def __init__(self, page: Page):
         self._page = page

-    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+    def visit_url(self, url: str, allowed_codes: list[int]) -> str:
         try:
             response = self._page.goto(url)
         except Exception as e:
@@ -146,7 +145,7 @@ def _fill_form_value(self, form: Locator, control_name: str, value: FormValueTyp
         if input_s.count():
             input_s.select_option(value)  # type:ignore[arg-type, unused-ignore]

-    def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None:
+    def edit_repository_categories(self, categories_to_add: list[str], categories_to_remove: list[str]) -> None:
         multi_select = "form[name='categories'] select[name='category_id']"
         select_locator = self._page.locator(multi_select)
         select_locator.evaluate("node => node.selectedOptions = []")
@@ -157,7 +156,7 @@ def edit_repository_categories(self, categories_to_add: List[str], categories_to
             select_locator.select_option(label=categories_to_remove)
         self.submit_form_with_name("categories", "manage_categories_button")

-    def grant_users_access(self, usernames: List[str]):
+    def grant_users_access(self, usernames: list[str]):
         multi_select = "form[name='user_access'] select[name='allow_push']"
         select_locator = self._page.locator(multi_select)
         select_locator.evaluate("node => node.selectedOptions = []")
diff --git a/lib/tool_shed/test/base/populators.py b/lib/tool_shed/test/base/populators.py
index 2c0c7ce1e510..74ae96d926bf 100644
--- a/lib/tool_shed/test/base/populators.py
+++ b/lib/tool_shed/test/base/populators.py
@@ -1,9 +1,8 @@
 import tarfile
+from collections.abc import Iterator
 from pathlib import Path
 from tempfile import NamedTemporaryFile
 from typing import (
-    Iterator,
-    List,
     Optional,
     Union,
 )
@@ -243,7 +242,7 @@ def upload_revision(
         api_asserts.assert_status_code_is_ok(response)
         return RepositoryUpdate(root=response.json())

-    def new_repository(self, category_ids: Union[List[str], str], prefix: str = DEFAULT_PREFIX) -> Repository:
+    def new_repository(self, category_ids: Union[list[str], str], prefix: str = DEFAULT_PREFIX) -> Repository:
         name = random_name(prefix=prefix)
         synopsis = random_name(prefix=prefix)
         request = CreateRepositoryRequest(
@@ -273,7 +272,7 @@ def new_category(
         response.raise_for_status()
         return Category(**response.json())

-    def get_categories(self) -> List[Category]:
+    def get_categories(self) -> list[Category]:
         response = self._api_interactor.get("categories")
         response.raise_for_status()
         return [Category(**c) for c in response.json()]
@@ -335,7 +334,7 @@ def repository_index_paginated(
         api_asserts.assert_status_code_is_ok(repository_response)
         return PaginatedRepositoryIndexResults(**repository_response.json())

-    def get_usernames_allowed_to_push(self, repository: HasRepositoryId) -> List[str]:
+    def get_usernames_allowed_to_push(self, repository: HasRepositoryId) -> list[str]:
         repository_id = self._repository_id(repository)
         show_response = self._api_interactor.get(f"repositories/{repository_id}/allow_push")
         show_response.raise_for_status()
diff --git a/lib/tool_shed/test/base/test_db_util.py b/lib/tool_shed/test/base/test_db_util.py
index 21f08b1451ce..0607ae59652e 100644
--- a/lib/tool_shed/test/base/test_db_util.py
+++ b/lib/tool_shed/test/base/test_db_util.py
@@ -1,5 +1,4 @@
 import logging
-from typing import List

 from sqlalchemy import (
     false,
@@ -38,7 +37,7 @@ def get_all_repositories():
     return sa_session().scalars(select(model.Repository)).all()


-def get_all_installed_repositories(session=None) -> List[galaxy.model.tool_shed_install.ToolShedRepository]:
+def get_all_installed_repositories(session=None) -> list[galaxy.model.tool_shed_install.ToolShedRepository]:
     if session is None:
         session = install_session()
     ToolShedRepository = galaxy.model.tool_shed_install.ToolShedRepository
diff --git a/lib/tool_shed/test/base/twillbrowser.py b/lib/tool_shed/test/base/twillbrowser.py
index b023bec47dc4..2630c7b6145f 100644
--- a/lib/tool_shed/test/base/twillbrowser.py
+++ b/lib/tool_shed/test/base/twillbrowser.py
@@ -1,8 +1,4 @@
 import tempfile
-from typing import (
-    Dict,
-    List,
-)

 import twill.commands as tc
 from twill.browser import FormElement  # type:ignore[attr-defined, unused-ignore]
@@ -18,7 +14,7 @@
 tc.timeout(240)


-def visit_url(url: str, allowed_codes: List[int]) -> str:
+def visit_url(url: str, allowed_codes: list[int]) -> str:
     tc.go(url)
     return_code = tc.browser.code
     assert return_code in allowed_codes, "Invalid HTTP return code {}, allowed codes: {}".format(
@@ -33,7 +29,7 @@ def page_content() -> str:


 class TwillShedBrowser(ShedBrowser):
-    def visit_url(self, url: str, allowed_codes: List[int]) -> str:
+    def visit_url(self, url: str, allowed_codes: list[int]) -> str:
         return visit_url(url, allowed_codes=allowed_codes)

     def page_content(self) -> str:
@@ -63,11 +59,11 @@ def write_temp_file(self, content, suffix=".html"):
         return fh.name

     def submit_form_with_name(self, form_name: str, button="runtool_btn", **kwd):
-        forms_by_name: Dict[str, FormElement] = {f.get("name"): f for f in self._show_forms()}
+        forms_by_name: dict[str, FormElement] = {f.get("name"): f for f in self._show_forms()}
         form = forms_by_name[form_name]
         self._submit_form(form, button, **kwd)

-    def _show_forms(self) -> List[FormElement]:
+    def _show_forms(self) -> list[FormElement]:
         """Shows form, helpful for debugging new tests"""
         return tc.browser.forms

@@ -110,7 +106,7 @@ def fill_form_value(self, form_name: str, control_name: str, value: FormValueTyp
             value = str(value)
         tc.fv(form_name, control_name, value)

-    def edit_repository_categories(self, categories_to_add: List[str], categories_to_remove: List[str]) -> None:
+    def edit_repository_categories(self, categories_to_add: list[str], categories_to_remove: list[str]) -> None:
         """Select some new categories and then restore the component."""
         strings_displayed = []
         strings_not_displayed = []
@@ -134,7 +130,7 @@ def edit_repository_categories(self, categories_to_add: List[str], categories_to
         self.submit_form_with_name("categories", "manage_categories_button")
         self._check_for_strings(strings_displayed, strings_not_displayed)

-    def grant_users_access(self, usernames: List[str]):
+    def grant_users_access(self, usernames: list[str]):
         for username in usernames:
             self.fill_form_value("user_access", "allow_push", f"+{username}")
         self.submit_form_with_name("user_access", "user_access_button")
@@ -143,7 +139,7 @@ def grant_users_access(self, usernames: List[str]):
     def is_twill(self) -> bool:
         return True

-    def _check_for_strings(self, strings_displayed: List[str], strings_not_displayed: List[str]):
+    def _check_for_strings(self, strings_displayed: list[str], strings_not_displayed: list[str]):
         if strings_displayed:
             for check_str in strings_displayed:
                 self.check_page_for_string(check_str)
diff --git a/lib/tool_shed/test/base/twilltestcase.py b/lib/tool_shed/test/base/twilltestcase.py
index 5d47f45aa068..9a4a547210dc 100644
--- a/lib/tool_shed/test/base/twilltestcase.py
+++ b/lib/tool_shed/test/base/twilltestcase.py
@@ -7,15 +7,12 @@
 import tarfile
 import tempfile
 import time
+from collections.abc import Iterator
 from json import loads
 from pathlib import Path
 from typing import (
     Any,
-    Dict,
-    Iterator,
-    List,
     Optional,
-    Tuple,
     Union,
 )
 from urllib.parse import (
@@ -113,7 +110,7 @@ def display_installed_jobs_list_page(
     @abc.abstractmethod
     def installed_repository_extended_info(
         self, installed_repository: galaxy_model.ToolShedRepository
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """"""

     @abc.abstractmethod
@@ -133,7 +130,7 @@ def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepos
         """"""

     @abc.abstractmethod
-    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+    def reset_metadata_on_installed_repositories(self, repositories: list[galaxy_model.ToolShedRepository]) -> None:
         """"""

     @abc.abstractmethod
@@ -147,7 +144,7 @@ def uninstall_repository(self, installed_repository: galaxy_model.ToolShedReposi
     @abc.abstractmethod
     def update_installed_repository(
         self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """"""

     @property
@@ -161,7 +158,7 @@ def shed_tool_data_table_conf(self) -> str:
         """"""

     @abc.abstractmethod
-    def get_tool_names(self) -> List[str]:
+    def get_tool_names(self) -> list[str]:
         """"""

     @abc.abstractmethod
@@ -173,17 +170,17 @@ def get_installed_repository_by_name_owner(
     @abc.abstractmethod
     def get_installed_repositories_by_name_owner(
         self, repository_name: str, repository_owner: str
-    ) -> List[galaxy_model.ToolShedRepository]:
+    ) -> list[galaxy_model.ToolShedRepository]:
         """"""

     @abc.abstractmethod
     def get_installed_repository_for(
         self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None
-    ) -> Optional[Dict[str, Any]]:
+    ) -> Optional[dict[str, Any]]:
         """"""

     @abc.abstractmethod
-    def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]:
+    def get_all_installed_repositories(self) -> list[galaxy_model.ToolShedRepository]:
         """"""

     @abc.abstractmethod
@@ -252,7 +249,7 @@ def display_installed_jobs_list_page(

     def installed_repository_extended_info(
         self, installed_repository: galaxy_model.ToolShedRepository
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         params = {"id": self.testcase.security.encode_id(installed_repository.id)}
         self._visit_galaxy_url("/admin_toolshed/manage_repository_json", params=params)
         json = page_content()
@@ -297,7 +294,7 @@ def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepos
         url = "/admin_toolshed/restore_repository"
         self._visit_galaxy_url(url, params=params)

-    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+    def reset_metadata_on_installed_repositories(self, repositories: list[galaxy_model.ToolShedRepository]) -> None:
         repository_ids = []
         for repository in repositories:
             repository_ids.append(self.testcase.security.encode_id(repository.id))
@@ -321,7 +318,7 @@ def uninstall_repository(self, installed_repository: galaxy_model.ToolShedReposi

     def update_installed_repository(
         self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         repository_id = self.testcase.security.encode_id(installed_repository.id)
         params = {
             "id": repository_id,
@@ -358,7 +355,7 @@ def tool_data_path(self):
     def shed_tool_data_table_conf(self):
         return os.environ.get("TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF")

-    def get_tool_names(self) -> List[str]:
+    def get_tool_names(self) -> list[str]:
         response = self.testcase.galaxy_interactor._get("tools?in_panel=false")
         response.raise_for_status()
         tool_list = response.json()
@@ -371,17 +368,17 @@ def get_installed_repository_by_name_owner(

     def get_installed_repositories_by_name_owner(
         self, repository_name: str, repository_owner: str
-    ) -> List[galaxy_model.ToolShedRepository]:
+    ) -> list[galaxy_model.ToolShedRepository]:
         return test_db_util.get_installed_repository_by_name_owner(
             repository_name, repository_owner, return_multiple=True
         )

     def get_installed_repository_for(
         self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None
-    ) -> Optional[Dict[str, Any]]:
+    ) -> Optional[dict[str, Any]]:
         return self.testcase.get_installed_repository_for(owner=owner, name=name, changeset=changeset)

-    def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]:
+    def get_all_installed_repositories(self) -> list[galaxy_model.ToolShedRepository]:
         repositories = test_db_util.get_all_installed_repositories()
         for repository in repositories:
             test_db_util.ga_refresh(repository)
@@ -485,7 +482,7 @@ def display_installed_jobs_list_page(

     def installed_repository_extended_info(
         self, installed_repository: galaxy_model.ToolShedRepository
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         self._installation_target.install_model.context.refresh(installed_repository)
         return build_manage_repository_dict(self._installation_target, "ok", installed_repository)

@@ -521,7 +518,7 @@ def reactivate_repository(self, installed_repository: galaxy_model.ToolShedRepos
         irm = self._installation_target.installed_repository_manager
         irm.activate_repository(installed_repository)

-    def reset_metadata_on_installed_repositories(self, repositories: List[galaxy_model.ToolShedRepository]) -> None:
+    def reset_metadata_on_installed_repositories(self, repositories: list[galaxy_model.ToolShedRepository]) -> None:
         for repository in repositories:
             irmm = InstalledRepositoryMetadataManager(self._installation_target)
             irmm.set_repository(repository)
@@ -542,7 +539,7 @@ def uninstall_repository(self, installed_repository: galaxy_model.ToolShedReposi

     def update_installed_repository(
         self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         message, status = check_for_updates(
             self._installation_target.tool_shed_registry,
             self._installation_target.install_model.context,
@@ -565,7 +562,7 @@ def get_installed_repository_by_name_owner(

     def get_installed_repositories_by_name_owner(
         self, repository_name: str, repository_owner: str
-    ) -> List[galaxy_model.ToolShedRepository]:
+    ) -> list[galaxy_model.ToolShedRepository]:
         return test_db_util.get_installed_repository_by_name_owner(
             repository_name,
             repository_owner,
@@ -575,14 +572,14 @@ def get_installed_repositories_by_name_owner(

     def get_installed_repository_for(
         self, owner: Optional[str] = None, name: Optional[str] = None, changeset: Optional[str] = None
-    ) -> Optional[Dict[str, Any]]:
+    ) -> Optional[dict[str, Any]]:
         repository = get_installed_repository(self._installation_target.install_model.context, name, owner, changeset)
         if repository:
             return repository.to_dict()
         else:
             return None

-    def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]:
+    def get_all_installed_repositories(self) -> list[galaxy_model.ToolShedRepository]:
         repositories = test_db_util.get_all_installed_repositories(
             session=self._installation_target.install_model.context
         )
@@ -601,7 +598,7 @@ def shed_tool_data_table_conf(self):
     def tool_data_path(self):
         return self._installation_target.config.tool_data_path

-    def get_tool_names(self) -> List[str]:
+    def get_tool_names(self) -> list[str]:
         tool_names = []
         for _, tool in self._installation_target.toolbox.tools():
             tool_names.append(tool.name)
@@ -698,7 +695,7 @@ def create(
         password: str = "testuser",
         username: str = "admin-user",
         redirect: Optional[str] = None,
-    ) -> Tuple[bool, bool, bool]:
+    ) -> tuple[bool, bool, bool]:
         # HACK: don't use panels because late_javascripts() messes up the twill browser and it
         # can't find form fields (and hence user can't be logged in).
         params = dict(cntrller=cntrller, use_panels=False)
@@ -828,7 +825,7 @@ def join_url_and_params(self, url: str, params, query=None) -> str:
             url += f"?{urlencode(params)}"
         return url

-    def visit_url(self, url: str, params=None, allowed_codes: Optional[List[int]] = None) -> str:
+    def visit_url(self, url: str, params=None, allowed_codes: Optional[list[int]] = None) -> str:
         parsed_url = urlparse(url)
         if len(parsed_url.netloc) == 0:
             url = f"http://{self.host}:{self.port}{parsed_url.path}"
@@ -1183,7 +1180,7 @@ def commit_tar_to_repository(
             if string_displayed not in text:
                 raise AssertionError(f"Failed to find {string_displayed} in JSON response {text}")

-    def delete_files_from_repository(self, repository: Repository, filenames: List[str]):
+    def delete_files_from_repository(self, repository: Repository, filenames: list[str]):
         with self.cloned_repo(repository) as temp_directory:
             for filename in filenames:
                 to_delete = os.path.join(temp_directory, filename)
@@ -1206,7 +1203,7 @@ def delete_repository(self, repository: Repository) -> None:
         params = {"operation": "Delete", "id": repository_id}
         self.visit_url("/admin/browse_repositories", params=params)
         strings_displayed = ["Deleted 1 repository", repository.name]
-        strings_not_displayed: List[str] = []
+        strings_not_displayed: list[str] = []
         self.check_for_strings(strings_displayed, strings_not_displayed)

     def display_installed_jobs_list_page(self, installed_repository, data_manager_names=None, strings_displayed=None):
@@ -1260,8 +1257,8 @@ def display_repository_file_contents(
     def edit_repository_categories(
         self,
         repository: Repository,
-        categories_to_add: List[str],
-        categories_to_remove: List[str],
+        categories_to_add: list[str],
+        categories_to_remove: list[str],
         restore_original=True,
     ) -> None:
         params = {"id": repository.id}
@@ -1398,7 +1395,7 @@ def generate_temp_path(self, test_script_path, additional_paths=None):
             os.makedirs(temp_path)
         return temp_path

-    def get_all_installed_repositories(self) -> List[galaxy_model.ToolShedRepository]:
+    def get_all_installed_repositories(self) -> list[galaxy_model.ToolShedRepository]:
         assert self._installation_client
         return self._installation_client.get_all_installed_repositories()

@@ -1412,7 +1409,7 @@ def get_hg_repo(self, path):
         return hg.repository(ui.ui(), path.encode("utf-8"))

     def get_repositories_category_api(
-        self, categories: List[Category], strings_displayed=None, strings_not_displayed=None
+        self, categories: list[Category], strings_displayed=None, strings_not_displayed=None
     ):
         for category in categories:
             url = f"/api/categories/{category.id}/repositories"
@@ -1457,7 +1454,7 @@ def get_repository_changelog_tuples(self, repository):
             changelog_tuples.append((ctx.rev(), ctx))
         return changelog_tuples

-    def get_repository_file_list(self, repository: Repository, base_path: str, current_path=None) -> List[str]:
+    def get_repository_file_list(self, repository: Repository, base_path: str, current_path=None) -> list[str]:
         """Recursively load repository folder contents and append them to a list. Similar to os.walk but via /repository/open_folder."""
         if current_path is None:
             request_param_path = base_path
@@ -1509,7 +1506,7 @@ def get_repository_metadata_by_changeset_revision(self, repository_id: int, chan
             repository_id, changeset_revision
         ) or test_db_util.get_repository_metadata_by_repository_id_changeset_revision(repository_id, None)

-    def get_repository_metadata_revisions(self, repository: Repository) -> List[str]:
+    def get_repository_metadata_revisions(self, repository: Repository) -> list[str]:
         return [
             str(repository_metadata.changeset_revision)
             for repository_metadata in self._db_repository(repository).metadata_revisions
@@ -1601,7 +1598,7 @@ def _install_repository(
         install_tool_dependencies: bool = False,
         install_repository_dependencies: bool = True,
         changeset_revision: Optional[str] = None,
-        preview_strings_displayed: Optional[List[str]] = None,
+        preview_strings_displayed: Optional[list[str]] = None,
         new_tool_panel_section_label: Optional[str] = None,
     ) -> None:
         self.browse_tool_shed(url=self.url)
@@ -1819,7 +1816,7 @@ def undelete_repository(self, repository: Repository) -> None:
         params = {"operation": "Undelete", "id": repository.id}
         self.visit_url("/admin/browse_repositories", params=params)
         strings_displayed = ["Undeleted 1 repository", repository.name]
-        strings_not_displayed: List[str] = []
+        strings_not_displayed: list[str] = []
         self.check_for_strings(strings_displayed, strings_not_displayed)

     def _uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepository) -> None:
@@ -1828,7 +1825,7 @@ def _uninstall_repository(self, installed_repository: galaxy_model.ToolShedRepos

     def update_installed_repository(
         self, installed_repository: galaxy_model.ToolShedRepository, verify_no_updates: bool = False
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         assert self._installation_client
         return self._installation_client.update_installed_repository(installed_repository, verify_no_updates=False)

@@ -1950,7 +1947,7 @@ def _get_installed_repository_by_name_owner(

     def _get_installed_repositories_by_name_owner(
         self, repository_name: str, repository_owner: str
-    ) -> List[galaxy_model.ToolShedRepository]:
+    ) -> list[galaxy_model.ToolShedRepository]:
         assert self._installation_client
         return self._installation_client.get_installed_repositories_by_name_owner(repository_name, repository_owner)
diff --git a/lib/tool_shed/test/functional/conftest.py b/lib/tool_shed/test/functional/conftest.py
index 21172f8ff72e..d0bcaf78508c 100644
--- a/lib/tool_shed/test/functional/conftest.py
+++ b/lib/tool_shed/test/functional/conftest.py
@@ -1,7 +1,7 @@
 import os
+from collections.abc import Generator
 from typing import (
     Callable,
-    Generator,
 )

 import pytest
diff --git a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py
index 002f3e7a3597..777763cb0cb4 100644
--- a/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py
+++ b/lib/tool_shed/test/functional/test_0300_reset_all_metadata.py
@@ -1,5 +1,3 @@
-from typing import Dict
-
 import pytest

 from ..base import common
@@ -547,8 +545,8 @@ def test_0100_create_and_upload_dependency_definitions(self):
     def test_0110_reset_metadata_on_all_repositories(self):
         """Reset metadata on all repositories, then verify that it has not changed."""
         self.login(email=common.admin_email, username=common.admin_username)
-        old_metadata: Dict[str, Dict] = {}
-        new_metadata: Dict[str, Dict] = {}
+        old_metadata: dict[str, dict] = {}
+        new_metadata: dict[str, dict] = {}
         repositories = self.test_db_util.get_all_repositories()
         for repository in repositories:
             old_metadata[self.security.encode_id(repository.id)] = {}
diff --git a/lib/tool_shed/test/functional/test_shed_categories.py b/lib/tool_shed/test/functional/test_shed_categories.py
index 1485617a3bdb..9ef824e49f7e 100644
--- a/lib/tool_shed/test/functional/test_shed_categories.py
+++ b/lib/tool_shed/test/functional/test_shed_categories.py
@@ -1,12 +1,10 @@
-from typing import Dict
-
 from galaxy_test.base.api_util import random_name

 from ..base.api import ShedApiTestCase


 class TestShedCategoriesApi(ShedApiTestCase):
     def test_create_requires_name(self):
-        body: Dict = {}
+        body: dict = {}
         response = self.admin_api_interactor.post("categories", json=body)
         assert response.status_code == 400
diff --git a/lib/tool_shed/util/commit_util.py b/lib/tool_shed/util/commit_util.py
index f6bb545bb502..0e19b31cc86e 100644
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -7,9 +7,7 @@
 import tempfile
 from collections import namedtuple
 from typing import (
-    List,
     Optional,
-    Tuple,
     TYPE_CHECKING,
     Union,
 )
@@ -159,7 +157,7 @@ def handle_bz2(repository: "Repository", uploaded_file_name):
     shutil.move(uncompressed.name, uploaded_file_name)


-ChangeResponseT = Tuple[Union[bool, str], str, List[str], str, int, int]
+ChangeResponseT = tuple[Union[bool, str], str, list[str], str, int, int]


 def handle_directory_changes(
diff --git a/lib/tool_shed/util/repository_util.py b/lib/tool_shed/util/repository_util.py
index 39766f71e54f..1e5dc0576f85 100644
--- a/lib/tool_shed/util/repository_util.py
+++ b/lib/tool_shed/util/repository_util.py
@@ -4,9 +4,7 @@
 import re
 import tempfile
 from typing import (
-    List,
     Optional,
-    Tuple,
     TYPE_CHECKING,
 )

@@ -198,10 +196,10 @@ def create_repository(
     description,
     long_description,
     user,
-    category_ids: Optional[List[str]] = None,
+    category_ids: Optional[list[str]] = None,
     remote_repository_url=None,
     homepage_url=None,
-) -> Tuple[model.Repository, str]:
+) -> tuple[model.Repository, str]:
     """Create a new ToolShed repository"""
     category_ids = category_ids or []
     sa_session = app.model.session
@@ -438,7 +436,7 @@ def change_repository_name_in_hgrc_file(hgrc_file: str, new_name: str) -> None:

 def update_repository(
     trans: "ProvidesUserContext", id: str, **kwds
-) -> Tuple[Optional[model.Repository], Optional[str]]:
+) -> tuple[Optional[model.Repository], Optional[str]]:
     """Update an existing ToolShed repository"""
     app = trans.app
     sa_session = app.model.session
@@ -455,7 +453,7 @@ def update_repository(

 def update_validated_repository(
     trans: "ProvidesUserContext", repository: model.Repository, **kwds
-) -> Tuple[Optional[model.Repository], Optional[str]]:
+) -> tuple[Optional[model.Repository], Optional[str]]:
     """Update an existing ToolShed repository metadata once permissions have been checked."""
     app = trans.app
     sa_session = app.model.session
diff --git a/lib/tool_shed/util/shed_util_common.py b/lib/tool_shed/util/shed_util_common.py
index 2eb02b5ef5ab..d1629622c2be 100644
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -4,7 +4,6 @@
 import socket
 import string
 from typing import (
-    List,
     TYPE_CHECKING,
 )

@@ -333,7 +332,7 @@ def is_path_within_repo(app: "ToolShedApp", path: str, repository_id: str) -> bo

 def open_repository_files_folder(
     app: "ToolShedApp", folder_path: str, repository_id: str, is_admin: bool = False
-) -> List:
+) -> list:
     """
     Return a list of dictionaries, each of which contains information for a file or directory
     contained within a directory in a repository file hierarchy.
diff --git a/lib/tool_shed/webapp/api/categories.py b/lib/tool_shed/webapp/api/categories.py
index b0eb31cf4cca..d5af1903f1ca 100644
--- a/lib/tool_shed/webapp/api/categories.py
+++ b/lib/tool_shed/webapp/api/categories.py
@@ -1,8 +1,6 @@
 import logging
 from typing import (
     Any,
-    Dict,
-    List,
 )

 import tool_shed.util.shed_util_common as suc
@@ -84,7 +82,7 @@ def get_repositories(self, trans, category_id, **kwd):
         return category_dict

     @expose_api_anonymous_and_sessionless
-    def index(self, trans, deleted=False, **kwd) -> List[Dict[str, Any]]:
+    def index(self, trans, deleted=False, **kwd) -> list[dict[str, Any]]:
         """
         GET /api/categories
         Return a list of dictionaries that contain information about each Category.
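Editor's note: every hunk above and below applies the same PEP 585 cleanup that pyupgrade performs once a codebase's floor reaches Python 3.9: `typing.List`/`Dict`/`Tuple`/`Type` become the builtin generics `list`/`dict`/`tuple`/`type`, and ABCs such as `Iterator`, `Generator`, `Mapping`, `Iterable`, and `AsyncGenerator` move from `typing` to `collections.abc`. `Optional` and `Union` are deliberately left alone in this changeset. A minimal before/after sketch with illustrative names only (nothing here is taken from the patch):

```python
# Before: typing aliases, required for annotations on Python <= 3.8.
from typing import Dict, List, Optional


def tally(names: List[str], seed: Optional[Dict[str, int]] = None) -> Dict[str, int]:
    counts: Dict[str, int] = dict(seed or {})
    for name in names:
        counts[name] = counts.get(name, 0) + 1
    return counts


# After: builtin generics plus collections.abc, valid at runtime on Python 3.9+.
from collections.abc import Iterable
from typing import Optional


def tally_585(names: Iterable[str], seed: Optional[dict[str, int]] = None) -> dict[str, int]:
    counts: dict[str, int] = dict(seed or {})
    for name in names:
        counts[name] = counts.get(name, 0) + 1
    return counts
```

Subscripting the builtins (`dict[str, int]`) raises a `TypeError` at runtime on 3.8 and earlier, which is why this rewrite only lands together with the version-floor bump.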
diff --git a/lib/tool_shed/webapp/api/groups.py b/lib/tool_shed/webapp/api/groups.py
index 3fd412dc7211..ea5b88e657d4 100644
--- a/lib/tool_shed/webapp/api/groups.py
+++ b/lib/tool_shed/webapp/api/groups.py
@@ -1,7 +1,6 @@
 import logging
 from typing import (
     Callable,
-    Dict,
 )

 from sqlalchemy import select
@@ -45,7 +44,7 @@ def __init__(self, app: ToolShedApp):
         super().__init__(app)
         self.group_manager = groups.GroupManager()

-    def __get_value_mapper(self, trans) -> Dict[str, Callable]:
+    def __get_value_mapper(self, trans) -> dict[str, Callable]:
         value_mapper = {"id": trans.security.encode_id}
         return value_mapper
diff --git a/lib/tool_shed/webapp/api/repositories.py b/lib/tool_shed/webapp/api/repositories.py
index 95a079649399..1534d34b6a54 100644
--- a/lib/tool_shed/webapp/api/repositories.py
+++ b/lib/tool_shed/webapp/api/repositories.py
@@ -5,7 +5,6 @@
 from time import strftime
 from typing import (
     Callable,
-    Dict,
 )

 from webob.compat import cgi_FieldStorage
@@ -227,7 +226,7 @@ def get_installable_revisions(self, trans, **kwd):
             return []
         return repository.installable_revisions(self.app)

-    def __get_value_mapper(self, trans) -> Dict[str, Callable]:
+    def __get_value_mapper(self, trans) -> dict[str, Callable]:
         return get_value_mapper(self.app)

     @expose_api_raw_anonymous_and_sessionless
diff --git a/lib/tool_shed/webapp/api/repository_revisions.py b/lib/tool_shed/webapp/api/repository_revisions.py
index 3b7404c0e472..aee7c1076d0a 100644
--- a/lib/tool_shed/webapp/api/repository_revisions.py
+++ b/lib/tool_shed/webapp/api/repository_revisions.py
@@ -1,7 +1,6 @@
 import logging
 from typing import (
     Callable,
-    Dict,
 )

 from sqlalchemy import select
@@ -22,7 +21,7 @@ class RepositoryRevisionsController(BaseShedAPIController):
     """RESTful controller for interactions with tool shed repository revisions."""

-    def __get_value_mapper(self, trans) -> Dict[str, Callable]:
+    def __get_value_mapper(self, trans) -> dict[str, Callable]:
         value_mapper = {
             "id": trans.security.encode_id,
             "repository_id": trans.security.encode_id,
diff --git a/lib/tool_shed/webapp/api2/__init__.py b/lib/tool_shed/webapp/api2/__init__.py
index cb7bbc1a8073..1dcf031c07cd 100644
--- a/lib/tool_shed/webapp/api2/__init__.py
+++ b/lib/tool_shed/webapp/api2/__init__.py
@@ -1,11 +1,9 @@
 import logging
+from collections.abc import AsyncGenerator
 from json import JSONDecodeError
 from typing import (
-    AsyncGenerator,
     cast,
-    List,
     Optional,
-    Type,
     TypeVar,
 )
@@ -80,7 +78,7 @@ async def get_app_with_request_session() -> AsyncGenerator[ToolShedApp, None]:
 api_key_cookie = APIKeyCookie(name=AUTH_COOKIE_NAME, auto_error=False)


-def depends(dep_type: Type[T]) -> T:
+def depends(dep_type: type[T]) -> T:
     return framework_depends(dep_type, app=get_app_with_request_session)

@@ -166,7 +164,7 @@ class Router(FrameworkRouter):
 #     return Depends(get_body)


-def depend_on_either_json_or_form_data(model: Type[B]) -> B:
+def depend_on_either_json_or_form_data(model: type[B]) -> B:
     async def get_body(request: Request):
         content_type = request.headers.get("Content-Type")
         if content_type is None:
@@ -289,7 +287,7 @@ async def get_body(request: Request):
 RepositoryIndexCategoryQueryParam: Optional[str] = Query(None, title="Category ID")

-RepositoryIndexToolIdsQueryParam: Optional[List[str]] = Query(
+RepositoryIndexToolIdsQueryParam: Optional[list[str]] = Query(
     None, title="Tool IDs", description="List of tool GUIDs to find the repository for"
 )
diff --git a/lib/tool_shed/webapp/api2/categories.py b/lib/tool_shed/webapp/api2/categories.py
index cd8696f65cfb..de6f03204cfb 100644
--- a/lib/tool_shed/webapp/api2/categories.py
+++ b/lib/tool_shed/webapp/api2/categories.py
@@ -1,5 +1,4 @@
 from typing import (
-    List,
     Optional,
 )

@@ -48,7 +47,7 @@ def create(
         description="index category",
         operation_id="categories__index",
     )
-    def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[CategoryResponse]:
+    def index(self, trans: SessionRequestContext = DependsOnTrans) -> list[CategoryResponse]:
         """
         Return a list of dictionaries that contain information about each Category.
         """
diff --git a/lib/tool_shed/webapp/api2/repositories.py b/lib/tool_shed/webapp/api2/repositories.py
index a2c386989e4d..82699f467c73 100644
--- a/lib/tool_shed/webapp/api2/repositories.py
+++ b/lib/tool_shed/webapp/api2/repositories.py
@@ -5,7 +5,6 @@
 from typing import (
     cast,
     IO,
-    List,
     Optional,
     Union,
 )
@@ -111,7 +110,7 @@
 router = Router(tags=["repositories"])


-IndexResponse = Union[RepositorySearchResults, List[Repository], PaginatedRepositoryIndexResults]
+IndexResponse = Union[RepositorySearchResults, list[Repository], PaginatedRepositoryIndexResults]


 @as_form
@@ -273,7 +272,7 @@ def get_ordered_installable_revisions(
         owner: Optional[str] = OptionalRepositoryOwnerParam,
         name: Optional[str] = OptionalRepositoryNameParam,
         tsr_id: Optional[str] = OptionalRepositoryIdParam,
-    ) -> List[str]:
+    ) -> list[str]:
         return get_ordered_installable_revisions(self.app, name, owner, tsr_id)

     @router.post(
@@ -415,7 +414,7 @@ def show_allow_push(
         self,
         trans: SessionRequestContext = DependsOnTrans,
         encoded_repository_id: str = RepositoryIdPathParam,
-    ) -> List[str]:
+    ) -> list[str]:
         repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
         ensure_can_manage(trans, repository)
         return trans.app.security_agent.usernames_that_can_push(repository)
@@ -429,7 +428,7 @@ def add_allow_push(
         trans: SessionRequestContext = DependsOnTrans,
         encoded_repository_id: str = RepositoryIdPathParam,
         username: str = UsernameIdPathParam,
-    ) -> List[str]:
+    ) -> list[str]:
         repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
         if not can_manage_repo(trans, repository):
             raise InsufficientPermissionsException("You do not have permission to update this repository.")
@@ -513,7 +512,7 @@ def remove_allow_push(
         trans: SessionRequestContext = DependsOnTrans,
         encoded_repository_id: str = RepositoryIdPathParam,
         username: str = UsernameIdPathParam,
-    ) -> List[str]:
+    ) -> list[str]:
         repository = get_repository_in_tool_shed(self.app, encoded_repository_id)
         if not can_manage_repo(trans, repository):
             raise InsufficientPermissionsException("You do not have permission to update this repository.")
@@ -531,13 +530,13 @@ async def create_changeset_revision(
         encoded_repository_id: str = RepositoryIdPathParam,
         commit_message: Optional[str] = CommitMessageQueryParam,
         trans: SessionRequestContext = DependsOnTrans,
-        files: Optional[List[UploadFile]] = None,
+        files: Optional[list[UploadFile]] = None,
         revision_request: RepositoryUpdateRequest = Depends(RepositoryUpdateRequestFormData.as_form),  # type: ignore[attr-defined]
     ) -> RepositoryUpdate:
         try:
             # Code stolen from Marius' work in Galaxy's Tools API.
-            files2: List[StarletteUploadFile] = cast(List[StarletteUploadFile], files or [])
+            files2: list[StarletteUploadFile] = cast(list[StarletteUploadFile], files or [])
             # FastAPI's UploadFile is a very light wrapper around starlette's UploadFile
             if not files2:
                 data = await request.form()
diff --git a/lib/tool_shed/webapp/api2/tools.py b/lib/tool_shed/webapp/api2/tools.py
index c82af1422ba6..48b6b6949b46 100644
--- a/lib/tool_shed/webapp/api2/tools.py
+++ b/lib/tool_shed/webapp/api2/tools.py
@@ -1,5 +1,4 @@
 import logging
-from typing import List

 from fastapi import (
     Path,
@@ -109,7 +108,7 @@ def service_info(self, request: Request) -> Service:
         return service_info(self.app, request.url)

     @router.get("/api/ga4gh/trs/v2/toolClasses", operation_id="tools__trs_tool_classes")
-    def tool_classes(self) -> List[ToolClass]:
+    def tool_classes(self) -> list[ToolClass]:
         return tool_classes()

     @router.get(
@@ -142,7 +141,7 @@ def trs_get_versions(
         self,
         trans: SessionRequestContext = DependsOnTrans,
         tool_id: str = TOOL_ID_PATH_PARAM,
-    ) -> List[ToolVersion]:
+    ) -> list[ToolVersion]:
         return get_tool(trans, tool_id).versions

     @router.get(
diff --git a/lib/tool_shed/webapp/api2/users.py b/lib/tool_shed/webapp/api2/users.py
index 0407c7e1104a..e7886d92f7f5 100644
--- a/lib/tool_shed/webapp/api2/users.py
+++ b/lib/tool_shed/webapp/api2/users.py
@@ -1,7 +1,6 @@
 import logging
 import os
 from typing import (
-    List,
     Optional,
 )

@@ -114,7 +113,7 @@ class FastAPIUsers:
         description="index users",
         operation_id="users__index",
     )
-    def index(self, trans: SessionRequestContext = DependsOnTrans) -> List[User]:
+    def index(self, trans: SessionRequestContext = DependsOnTrans) -> list[User]:
         deleted = False
         return index(trans.app, deleted)
diff --git a/lib/tool_shed/webapp/fast_app.py b/lib/tool_shed/webapp/fast_app.py
index a350597330aa..0450d26809f7 100644
--- a/lib/tool_shed/webapp/fast_app.py
+++ b/lib/tool_shed/webapp/fast_app.py
@@ -4,7 +4,6 @@
 from typing import (
     Any,
     cast,
-    Dict,
     Optional,
 )

@@ -204,7 +203,7 @@ def get_fastapi_instance() -> FastAPI:
     )


-def get_openapi_schema() -> Dict[str, Any]:
+def get_openapi_schema() -> dict[str, Any]:
     """
     Dumps openAPI schema without starting a full app and webserver.
     """
diff --git a/lib/tool_shed/webapp/model/__init__.py b/lib/tool_shed/webapp/model/__init__.py
index 0ce2a2e0b37a..e02cfe1e93c3 100644
--- a/lib/tool_shed/webapp/model/__init__.py
+++ b/lib/tool_shed/webapp/model/__init__.py
@@ -3,13 +3,13 @@
 import random
 import string
 import weakref
+from collections.abc import Mapping
 from datetime import (
     datetime,
     timedelta,
 )
 from typing import (
     Any,
-    Mapping,
     Optional,
     TYPE_CHECKING,
 )
diff --git a/lib/tool_shed/webapp/model/mapping.py b/lib/tool_shed/webapp/model/mapping.py
index 8f60f908b65b..75bc6b65f727 100644
--- a/lib/tool_shed/webapp/model/mapping.py
+++ b/lib/tool_shed/webapp/model/mapping.py
@@ -6,9 +6,7 @@
 import logging
 from typing import (
     Any,
-    Dict,
     Optional,
-    Type,
     TYPE_CHECKING,
 )

@@ -28,14 +26,14 @@


 class ToolShedModelMapping(SharedModelMapping):
-    User: Type["ToolShedUser"]
+    User: type["ToolShedUser"]
     security_agent: CommunityRBACAgent
     shed_counter: shed_statistics.ShedCounter
     create_tables: bool


 def init(
-    url: str, engine_options: Optional[Dict[str, Any]] = None, create_tables: bool = False
+    url: str, engine_options: Optional[dict[str, Any]] = None, create_tables: bool = False
 ) -> ToolShedModelMapping:
     """Connect mappings to the database"""
     engine_options = engine_options or {}
diff --git a/lib/tool_shed/webapp/model/migrations/__init__.py b/lib/tool_shed/webapp/model/migrations/__init__.py
index eae748a507d5..ea50ec2350a3 100644
--- a/lib/tool_shed/webapp/model/migrations/__init__.py
+++ b/lib/tool_shed/webapp/model/migrations/__init__.py
@@ -1,8 +1,8 @@
 import logging
 import os
+from collections.abc import Iterable
 from typing import (
     cast,
-    Iterable,
     Optional,
 )
diff --git a/lib/tool_shed/webapp/model/migrations/alembic/env.py b/lib/tool_shed/webapp/model/migrations/alembic/env.py
index 4c05fabaf5d7..04f4769df4cd 100644
--- a/lib/tool_shed/webapp/model/migrations/alembic/env.py
+++ b/lib/tool_shed/webapp/model/migrations/alembic/env.py
@@ -2,7 +2,6 @@
 from typing import (
     Callable,
     cast,
-    Dict,
 )

 from alembic import context
@@ -76,7 +75,7 @@ def _get_url_from_config() -> str:

 def _load_url() -> str:
-    context_dict = cast(Dict, context.get_x_argument(as_dictionary=True))
+    context_dict = cast(dict, context.get_x_argument(as_dictionary=True))
     url = context_dict.get("url")
     assert url
     return url
diff --git a/lib/tool_shed/webapp/model/migrations/scripts.py b/lib/tool_shed/webapp/model/migrations/scripts.py
index fc92de362e94..c9d5a0d00fff 100644
--- a/lib/tool_shed/webapp/model/migrations/scripts.py
+++ b/lib/tool_shed/webapp/model/migrations/scripts.py
@@ -1,6 +1,5 @@
 import os
 from typing import (
-    List,
     Optional,
 )

@@ -17,7 +16,7 @@
 TOOLSHED_CONFIG_PREFIX = "TOOL_SHED_CONFIG_"


-def get_dburl(argv: List[str], cwd: str) -> str:
+def get_dburl(argv: list[str], cwd: str) -> str:
     """Return db url."""
     config_file = pop_arg_from_args(argv, CONFIG_FILE_ARG)
     return get_dburl_from_file(cwd, config_file)
diff --git a/lib/tool_shed/webapp/security/__init__.py b/lib/tool_shed/webapp/security/__init__.py
index 310435ccb061..92bbedd7f653 100644
--- a/lib/tool_shed/webapp/security/__init__.py
+++ b/lib/tool_shed/webapp/security/__init__.py
@@ -1,7 +1,6 @@
 """Tool Shed Security"""

 import logging
-from typing import List

 from sqlalchemy import (
     false,
@@ -229,7 +228,7 @@ def set_entity_user_associations(self, users=None, roles=None, groups=None, dele
             for group in groups:
                 self.associate_components(user=user, group=group)

-    def usernames_that_can_push(self, repository) -> List[str]:
+    def usernames_that_can_push(self, repository) -> list[str]:
         return listify(repository.allow_push())

     def can_push(self, app, user, repository):
diff --git a/lib/tool_shed_client/schema/__init__.py b/lib/tool_shed_client/schema/__init__.py
index eec5ac07254c..a5609c65e3fe 100644
--- a/lib/tool_shed_client/schema/__init__.py
+++ b/lib/tool_shed_client/schema/__init__.py
@@ -1,10 +1,7 @@
 from typing import (
     Any,
     cast,
-    Dict,
-    List,
     Optional,
-    Tuple,
     Union,
 )

@@ -45,13 +42,13 @@ class DetailedRepository(Repository):


 class RepositoryPermissions(BaseModel):
-    allow_push: List[str]
+    allow_push: list[str]
     can_manage: bool  # can the requesting user manage the repository
     can_push: bool


 class RepositoryRevisionReadmes(RootModel):
-    root: Dict[str, str]
+    root: dict[str, str]


 class CreateUserRequest(BaseModel):
@@ -96,7 +93,7 @@ class GetOrderedInstallableRevisionsRequest(BaseModel):


 class OrderedInstallableRevisions(RootModel):
-    root: List[str]
+    root: list[str]


 RepositoryType = Literal[
@@ -117,7 +114,7 @@ class CreateRepositoryRequest(BaseModel):
         alias="type",
         title="Type",
     )
-    category_ids: Optional[Union[List[str], str]] = Field(
+    category_ids: Optional[Union[list[str], str]] = Field(
         ...,
         alias="category_ids[]",
         title="Category IDs",
@@ -136,7 +133,7 @@ class UpdateRepositoryRequest(BaseModel):
     description: Optional[str] = None
     remote_repository_url: Optional[str] = None
     homepage_url: Optional[str] = None
-    category_ids: Optional[List[str]] = Field(
+    category_ids: Optional[list[str]] = Field(
         None,
         alias="category_ids",
         title="Category IDs",
@@ -175,9 +172,9 @@ class RepositoryTool(BaseModel):
 class RepositoryRevisionMetadata(BaseModel):
     id: str
     repository: Repository
-    repository_dependencies: List["RepositoryDependency"]
-    tools: Optional[List["RepositoryTool"]] = None
-    invalid_tools: List[str]  # added for rendering list of invalid tools in 2.0 frontend
+    repository_dependencies: list["RepositoryDependency"]
+    tools: Optional[list["RepositoryTool"]] = None
+    invalid_tools: list[str]  # added for rendering list of invalid tools in 2.0 frontend
     repository_id: str
     numeric_revision: int
     changeset_revision: str
@@ -200,7 +197,7 @@ class RepositoryDependency(RepositoryRevisionMetadata):


 class RepositoryMetadata(RootModel):
-    root: Dict[str, RepositoryRevisionMetadata]
+    root: dict[str, RepositoryRevisionMetadata]

     @property
     def latest_revision(self) -> RepositoryRevisionMetadata:
@@ -224,7 +221,7 @@ class ResetMetadataOnRepositoryRequest(BaseModel):

 class ResetMetadataOnRepositoryResponse(BaseModel):
     status: str  # TODO: enum...
-    repository_status: List[str]
+    repository_status: list[str]
     start_time: str
     stop_time: str

@@ -238,13 +235,13 @@ class ResetMetadataOnRepositoriesRequest(BaseModel):
         in addition to those repositories of type tool_dependency_definition.
         This param is ignored if the current user is not an admin user, in which case this
         same restriction is automatic.""",
     )
-    encoded_ids_to_skip: Optional[List[str]] = Field(
+    encoded_ids_to_skip: Optional[list[str]] = Field(
         None, description="a list of encoded repository ids for repositories that should not be processed"
     )


 class ResetMetadataOnRepositoriesResponse(BaseModel):
-    repository_status: List[str]
+    repository_status: list[str]
     start_time: str
     stop_time: str
@@ -265,7 +262,7 @@ class ToolSearchHitTool(BaseModel):

 class ToolSearchHit(BaseModel):
     tool: ToolSearchHitTool
-    matched_terms: Dict[str, Any]
+    matched_terms: dict[str, Any]
     score: float

@@ -275,7 +272,7 @@ class ToolSearchResults(BaseModel):
     page: str
     page_size: str
     hostname: str
-    hits: List[ToolSearchHit]
+    hits: list[ToolSearchHit]

     def find_search_hit(self, repository: Repository) -> Optional[ToolSearchHit]:
         matching_hit: Optional[ToolSearchHit] = None
@@ -313,11 +310,11 @@ class RepositoriesByCategory(BaseModel):
     name: str
     description: str
     repository_count: int
-    repositories: List[Repository]
+    repositories: list[Repository]


 class RepositoryIndexResponse(RootModel):
-    root: List[Repository]
+    root: list[Repository]


 class RepositorySearchRequest(BaseModel):
@@ -352,7 +349,7 @@ class RepositorySearchResults(BaseModel):
     page: str
     page_size: str
     hostname: str
-    hits: List[RepositorySearchHit]
+    hits: list[RepositorySearchHit]


 # align with the search version of this to some degree but fix some things also
@@ -361,7 +358,7 @@ class PaginatedRepositoryIndexResults(BaseModel):
     page: int
     page_size: int
     hostname: str
-    hits: List[Repository]
+    hits: list[Repository]


 class GetInstallInfoRequest(BaseModel):
@@ -398,12 +395,12 @@ class RepositoryMetadataInstallInfoDict(TypedDict):
     malicious: bool
     repository_id: str
     url: str
-    valid_tools: List[ValidToolDict]
+    valid_tools: list[ValidToolDict]


 # So hard to type this... the keys are repo names and the elements
 # are tuples that have been list-ified.
-ExtraRepoInfo = Dict[str, List]
+ExtraRepoInfo = dict[str, list]
 # {
 #     "add_column": [
 #         "add_column hello",
@@ -421,8 +418,8 @@ class EmptyDict(TypedDict):
     pass


-LegacyInstallInfoTuple = Tuple[
-    Optional[Dict], Union[RepositoryMetadataInstallInfoDict, EmptyDict], Union[ExtraRepoInfo, EmptyDict]
+LegacyInstallInfoTuple = tuple[
+    Optional[dict], Union[RepositoryMetadataInstallInfoDict, EmptyDict], Union[ExtraRepoInfo, EmptyDict]
 ]


@@ -433,7 +430,7 @@ class RepositoryExtraInstallInfo(BaseModel):
     changeset_revision: str
     ctx_rev: str
     repository_owner: str
-    repository_dependencies: Optional[Dict] = None
+    repository_dependencies: Optional[dict] = None
     # tool dependencies not longer work so don't transmit them in v2?
     # tool_dependencies: Optional[Dict]

@@ -471,7 +468,7 @@ def from_legacy_dict(as_dict: ValidToolDict) -> "ValidTool":
         return ValidTool(**as_dict)

     @staticmethod
-    def from_legacy_list(as_dicts: List[ValidToolDict]) -> List["ValidTool"]:
+    def from_legacy_list(as_dicts: list[ValidToolDict]) -> list["ValidTool"]:
         return [ValidTool.from_legacy_dict(d) for d in as_dicts]

@@ -485,7 +482,7 @@ class RepositoryMetadataInstallInfo(BaseModel):
     malicious: bool
     repository_id: str
     url: str
-    valid_tools: List[ValidTool]
+    valid_tools: list[ValidTool]
     # no longer used, don't transmit.
     # has_repository_dependencies_only_if_compiling_contained_td: bool
     # includes_datatypes: bool
diff --git a/scripts/apply_tags.py b/scripts/apply_tags.py
index fa21a1c4cf9a..d9c1d96ed0e1 100644
--- a/scripts/apply_tags.py
+++ b/scripts/apply_tags.py
@@ -2,7 +2,6 @@

 import sys
 import time
-from typing import List

 from bioblend.galaxy import GalaxyInstance
@@ -109,7 +108,7 @@ def find_parent_recursive(dataset_id, recursive_parents):

         recursive_parent_ids = {}
         for item in datasets_inheritance_chain:
-            recursive_parents: List = []
+            recursive_parents: list = []
             find_parent_recursive(item, recursive_parents)
             # take unique parents
diff --git a/scripts/bootstrap_test_shed.py b/scripts/bootstrap_test_shed.py
index 071e5c71bb05..3e7f387b44a7 100644
--- a/scripts/bootstrap_test_shed.py
+++ b/scripts/bootstrap_test_shed.py
@@ -12,8 +12,6 @@
 import tempfile
 from typing import (
     Any,
-    Dict,
-    List,
     Optional,
 )

@@ -45,7 +43,7 @@
 CATEGORIES_TO_COPY = ["Data Export", "Climate Analysis", "Materials science"]


-def main(argv: List[str]) -> None:
+def main(argv: list[str]) -> None:
     arg_parser = _arg_parser()
     namespace = arg_parser.parse_args(argv)
     populator = init_populator(namespace)
@@ -79,19 +77,19 @@ def main(argv: List[str]) -> None:
         mirror_main_repository(populator, repo, local_category.id)


-def get_main_categories() -> List[Dict[str, Any]]:
+def get_main_categories() -> list[dict[str, Any]]:
     main_categories_endpoint = f"{MAIN_SHED_API}/categories"
     main_categories = requests.get(main_categories_endpoint).json()
     return main_categories


-def get_main_users() -> List[Dict[str, Any]]:
+def get_main_users() -> list[dict[str, Any]]:
     main_users_endpoint = f"{MAIN_SHED_API}/users"
     main_users = requests.get(main_users_endpoint).json()
     return main_users


-def get_main_repositories_for_category(category_id) -> List[Dict[str, Any]]:
+def get_main_repositories_for_category(category_id) -> list[dict[str, Any]]:
     main_category_repos_endpoint = f"{MAIN_SHED_API}/categories/{category_id}/repositories"
     main_repos_for_category_response = requests.get(main_category_repos_endpoint)
     main_repos_for_category = main_repos_for_category_response.json()
@@ -106,9 +104,9 @@ class RemoteToolShedPopulator(ToolShedPopulator):
     for tests.
     """

-    _categories_by_name: Optional[Dict[str, Category]] = None
-    _users_by_username: Optional[Dict[str, Dict[str, Any]]] = None
-    _populators_by_username: Dict[str, "RemoteToolShedPopulator"] = {}
+    _categories_by_name: Optional[dict[str, Category]] = None
+    _users_by_username: Optional[dict[str, dict[str, Any]]] = None
+    _populators_by_username: dict[str, "RemoteToolShedPopulator"] = {}

     def __init__(self, admin_interactor: ShedApiInteractor, user_interactor: ShedApiInteractor):
         super().__init__(admin_interactor, user_interactor)
@@ -130,14 +128,14 @@ def populator_for_user(self, username):
         return self._populators_by_username[username]

     @property
-    def categories_by_name(self) -> Dict[str, Category]:
+    def categories_by_name(self) -> dict[str, Category]:
         if self._categories_by_name is None:
             categories = self.get_categories()
             self._categories_by_name = {c.name: c for c in categories}
         return self._categories_by_name

     @property
-    def users_by_username(self) -> Dict[str, Dict[str, Any]]:
+    def users_by_username(self) -> dict[str, dict[str, Any]]:
         if self._users_by_username is None:
             users_response = self._api_interactor.get("users")
             if users_response.status_code == 400:
@@ -148,14 +146,14 @@ def users_by_username(self) -> Dict[str, Dict[str, Any]]:
             self._users_by_username = {u["username"]: u for u in users}
         return self._users_by_username

-    def new_category_if_needed(self, as_json: Dict[str, Any]) -> Category:
+    def new_category_if_needed(self, as_json: dict[str, Any]) -> Category:
         name = as_json["name"]
         description = as_json["description"]
         if name in self.categories_by_name:
             return self.categories_by_name[name]
         return self.new_category(name, description)

-    def new_user_if_needed(self, as_json: Dict[str, Any]) -> Dict[str, Any]:
+    def new_user_if_needed(self, as_json: dict[str, Any]) -> dict[str, Any]:
         if "username" not in as_json:
             email = as_json["email"]
             as_json["username"] = email.split("@", 1)[0]
@@ -187,7 +185,7 @@ def mirror_main_users(populator: RemoteToolShedPopulator):
         populator.new_user_if_needed(user)


-def mirror_main_repository(populator: RemoteToolShedPopulator, repository: Dict[str, Any], category_id: str):
+def mirror_main_repository(populator: RemoteToolShedPopulator, repository: dict[str, Any], category_id: str):
     # TODO: mirror the user
     as_dict = repository.copy()
     as_dict["category_ids"] = category_id
diff --git a/test/functional/test_toolbox_pytest.py b/test/functional/test_toolbox_pytest.py
index 896e3609913e..19c8c6b8c6ea 100644
--- a/test/functional/test_toolbox_pytest.py
+++ b/test/functional/test_toolbox_pytest.py
@@ -1,6 +1,5 @@
 import os
 from typing import (
-    List,
     NamedTuple,
 )

@@ -24,7 +23,7 @@ def get_skiplist():
     return skiplist


-def get_cases() -> List[ToolTest]:
+def get_cases() -> list[ToolTest]:
     atc = ApiTestCase()
     atc._test_driver = GalaxyTestDriver()
     atc._test_driver.setup()
diff --git a/test/functional/tools/parameters/gx_drill_down_code.py b/test/functional/tools/parameters/gx_drill_down_code.py
index 518cb67c6a7e..d587fbd58af0 100644
--- a/test/functional/tools/parameters/gx_drill_down_code.py
+++ b/test/functional/tools/parameters/gx_drill_down_code.py
@@ -1,5 +1,5 @@
 def collate_table(path: str) -> list:
-    with open(path, "r") as f:
+    with open(path) as f:
         contents = f.read()
     first_options = []
     second_options = []
diff --git a/test/functional/tools/parameters/gx_select_dynamic_empty.py b/test/functional/tools/parameters/gx_select_dynamic_empty.py
index 6a79a047f3cb..ad173e51a441 100644
--- a/test/functional/tools/parameters/gx_select_dynamic_empty.py
+++ b/test/functional/tools/parameters/gx_select_dynamic_empty.py
@@ -1,8 +1,2 @@
-from typing import (
-    List,
-    Tuple,
-)
-
-
-def empty_list() -> List[Tuple[str, str, bool]]:
+def empty_list() -> list[tuple[str, str, bool]]:
     return []
diff --git a/test/functional/tools/parameters/gx_select_dynamic_options.py b/test/functional/tools/parameters/gx_select_dynamic_options.py
index 734174b6da1d..c4857d95a266 100644
--- a/test/functional/tools/parameters/gx_select_dynamic_options.py
+++ b/test/functional/tools/parameters/gx_select_dynamic_options.py
@@ -1,10 +1,4 @@
-from typing import (
-    List,
-    Tuple,
-)
-
-
-def every_other_word(path: str) -> List[Tuple[str, str, bool]]:
-    with open(path, "r") as f:
+def every_other_word(path: str) -> list[tuple[str, str, bool]]:
+    with open(path) as f:
         contents = f.read()
     return [(r.strip(), r.strip(), False) for (i, r) in enumerate(contents.split()) if i % 2 == 0]
diff --git a/test/integration/objectstore/test_per_user.py b/test/integration/objectstore/test_per_user.py
index efc20ff5d2c4..db7783a11db4 100644
--- a/test/integration/objectstore/test_per_user.py
+++ b/test/integration/objectstore/test_per_user.py
@@ -1,7 +1,5 @@
 from typing import (
     Any,
-    Dict,
-    Tuple,
 )

 from galaxy.objectstore.templates.examples import get_example
@@ -83,7 +81,7 @@


 class BaseUserObjectStoreIntegration(BaseObjectStoreIntegrationTestCase):
-    def _create_simple_payload(self) -> Dict[str, Any]:
+    def _create_simple_payload(self) -> dict[str, Any]:
         body = {
             "name": "My Cool Disk",
             "template_id": "general_disk",
@@ -147,7 +145,7 @@ def _run_tool(tool_id, inputs, preferred_object_store_id=None):
             self.dataset_populator.set_user_preferred_object_store_id(None)
         return storage_info, output

-    def _storage_info_for_job_output(self, job_dict) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+    def _storage_info_for_job_output(self, job_dict) -> tuple[dict[str, Any], dict[str, Any]]:
         outputs = job_dict["outputs"]  # could be a list or dictionary depending on source
         try:
             output = outputs[0]
@@ -161,7 +159,7 @@ def _write_template_and_object_store_config(cls, config, catalog: str):
         cls._configure_object_store(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE, config)
         cls._configure_object_store_template_catalog(catalog, config)

-    def _get_dataset_filename(self, history_id: str, output: Dict[str, Any]) -> str:
+    def _get_dataset_filename(self, history_id: str, output: dict[str, Any]) -> str:
         details = self.dataset_populator.get_history_dataset_details(history_id, dataset_id=output["id"])
         assert "file_name" in details
         file_name = details["file_name"]
diff --git a/test/integration/objectstore/test_selection_with_user_preferred_object_store.py b/test/integration/objectstore/test_selection_with_user_preferred_object_store.py
index fee15742b8a2..9d64ada0eff0 100644
--- a/test/integration/objectstore/test_selection_with_user_preferred_object_store.py
+++ b/test/integration/objectstore/test_selection_with_user_preferred_object_store.py
@@ -4,7 +4,6 @@
 import string
 from typing import (
     Any,
-    Dict,
     Optional,
 )

@@ -145,7 +144,7 @@
 """


-def assert_storage_name_is(storage_dict: Dict[str, Any], name: str):
+def assert_storage_name_is(storage_dict: dict[str, Any], name: str):
     storage_name = storage_dict["name"]
     assert name == storage_name, f"Found incorrect storage name {storage_name}, expected {name} in {storage_dict}"

@@ -369,7 +368,7 @@ def test_workflow_collection_dynamic_output(self):
         assert_storage_name_is(output_info, "Static Storage")
         assert_storage_name_is(intermediate_dict, "Dynamic EBS")

-    def _run_workflow_with_collections_1(self, history_id: str, extra_invocation_kwds: Optional[Dict[str, Any]] = None):
+    def _run_workflow_with_collections_1(self, history_id: str, extra_invocation_kwds: Optional[dict[str, Any]] = None):
         wf_run = self.workflow_populator.run_workflow(
             WORKFLOW_WITH_COLLECTIONS_1,
             test_data=WORKFLOW_WITH_COLLECTIONS_1_TEST_DATA,
@@ -388,7 +387,7 @@ def _run_workflow_with_collections_1(self, history_id: str, extra_invocation_kwd
         output_info = self._storage_info(objects[0])
         return intermediate_info, output_info

-    def _run_workflow_with_collections_2(self, history_id: str, extra_invocation_kwds: Optional[Dict[str, Any]] = None):
+    def _run_workflow_with_collections_2(self, history_id: str, extra_invocation_kwds: Optional[dict[str, Any]] = None):
         wf_run = self.workflow_populator.run_workflow(
             WORKFLOW_WITH_COLLECTIONS_2,
             test_data=WORKFLOW_WITH_COLLECTIONS_1_TEST_DATA,
@@ -408,7 +407,7 @@ def _run_workflow_with_collections_2(self, history_id: str, extra_invocation_kwd
         return intermediate_info, output_info

     def _run_simple_nested_workflow_get_output_storage_info_dicts(
-        self, history_id: str, extra_invocation_kwds: Optional[Dict[str, Any]] = None
+        self, history_id: str, extra_invocation_kwds: Optional[dict[str, Any]] = None
     ):
         wf_run = self.workflow_populator.run_workflow(
             WORKFLOW_NESTED_SIMPLE,
@@ -428,7 +427,7 @@ def _run_simple_nested_workflow_get_output_storage_info_dicts(
         return output_info, intermediate_info

     def _run_nested_workflow_with_effective_output_get_output_storage_info_dicts(
-        self, history_id: str, extra_invocation_kwds: Optional[Dict[str, Any]] = None, twice_nested=False
+        self, history_id: str, extra_invocation_kwds: Optional[dict[str, Any]] = None, twice_nested=False
     ):
         workflow_data = WORKFLOW_NESTED_OUTPUT if not twice_nested else WORKFLOW_NESTED_TWICE_OUTPUT
         wf_run = self.workflow_populator.run_workflow(
@@ -448,7 +447,7 @@ def _run_nested_workflow_with_effective_output_get_output_storage_info_dicts(
         return output_info, intermediate_info

     def _run_workflow_get_output_storage_info_dicts(
-        self, history_id: str, extra_invocation_kwds: Optional[Dict[str, Any]] = None
+        self, history_id: str, extra_invocation_kwds: Optional[dict[str, Any]] = None
     ):
         wf_run = self.workflow_populator.run_workflow(
             TEST_WORKFLOW,
@@ -462,11 +461,11 @@ def _run_workflow_get_output_storage_info_dicts(
         intermediate_info = self._storage_info_for_job_id(jobs[1]["id"])
         return output_info, intermediate_info, wf_run

-    def _storage_info_for_job_id(self, job_id: str) -> Dict[str, Any]:
+    def _storage_info_for_job_id(self, job_id: str) -> dict[str, Any]:
         job_dict = self.dataset_populator.get_job_details(job_id, full=True).json()
         return self._storage_info_for_job_output(job_dict)

-    def _storage_info_for_job_output(self, job_dict) -> Dict[str, Any]:
+    def _storage_info_for_job_output(self, job_dict) -> dict[str, Any]:
         outputs = job_dict["outputs"]  # could be a list or dictionary depending on source
         try:
             output = outputs[0]
diff --git a/test/integration/resubmission_runners.py b/test/integration/resubmission_runners.py
index e516ca243b27..0a9d42739429 100644
--- a/test/integration/resubmission_runners.py
+++ b/test/integration/resubmission_runners.py
@@ -1,6 +1,5 @@
 import subprocess
 import time
-from typing import List

 from galaxy import model
 from galaxy.jobs.runners import JobState
@@ -64,7 +63,7 @@ def queue_job(self, job_wrapper):
 class FailOnlyFirstJobRunner(LocalJobRunner):
     """Job runner that knows about test cases and checks final state assumptions."""

-    tests_seen: List[str] = []
+    tests_seen: list[str] = []

     def queue_job(self, job_wrapper):
         resource_parameters = job_wrapper.get_resource_parameters()
diff --git a/test/integration/test_celery_user_rate_limit.py b/test/integration/test_celery_user_rate_limit.py
index c36dacc880fd..ad4d166b235c 100644
--- a/test/integration/test_celery_user_rate_limit.py
+++ b/test/integration/test_celery_user_rate_limit.py
@@ -1,10 +1,6 @@
 import datetime
 import tempfile
 from functools import lru_cache
-from typing import (
-    Dict,
-    List,
-)

 from celery.result import AsyncResult
 from sqlalchemy import text
@@ -86,9 +82,9 @@ def _test_mock_pass_user_id_task(self, num_users: int, num_calls: int, tasks_per
         start_time = datetime.datetime.now(datetime.timezone.utc)
         timer = ExecutionTimer()
         # Invoke test task num_calls times for each user
-        results: Dict[int, List[AsyncResult]] = {}
+        results: dict[int, list[AsyncResult]] = {}
         for user in users:
-            user_results: List[AsyncResult] = []
+            user_results: list[AsyncResult] = []
             for _ in range(num_calls):
                 user_results.append(mock_user_id_task.delay(task_user_id=user))
             results[user] = user_results
diff --git a/test/integration/test_coexecution.py b/test/integration/test_coexecution.py
index 7e7543443dc7..6891818c77d3 100644
--- a/test/integration/test_coexecution.py
+++ b/test/integration/test_coexecution.py
@@ -20,7 +20,6 @@
 import time
 from typing import (
     Any,
-    Dict,
 )

 import yaml
@@ -378,7 +377,7 @@ def test_mulled_simple(self, history_id: str) -> None:
         assert "0.7.15-r1140" in output


-def set_infrastucture_url(config: Dict[str, Any]) -> None:
+def set_infrastucture_url(config: dict[str, Any]) -> None:
     hostname = to_infrastructure_uri("0.0.0.0")
     infrastructure_url = f"http://{hostname}:$GALAXY_WEB_PORT"
     config["galaxy_infrastructure_url"] = infrastructure_url
diff --git a/test/integration/test_container_resolvers.py b/test/integration/test_container_resolvers.py
index 45b257c58ae5..4a0789e891e4 100644
--- a/test/integration/test_container_resolvers.py
+++ b/test/integration/test_container_resolvers.py
@@ -5,8 +5,6 @@
 from typing import (
     Any,
     ClassVar,
-    Dict,
-    List,
     Optional,
     TYPE_CHECKING,
 )
@@ -115,7 +113,7 @@ class for containerized (docker) tests
     cache is cleared before each test
     """

-    assumptions: Dict[str, Any]
+    assumptions: dict[str, Any]
     container_type: str = "docker"
     dataset_populator: DatasetPopulator
     framework_tool_and_types = True
@@ -159,8 +157,7 @@ def handle_galaxy_config_kwds(cls, config) -> None:

     def _remove_tested_docker_image_from_cache(self):
         cmd1 = ["docker", "image", "ls", "--quiet", "--filter", f'reference={self.assumptions["run"]["cache_name"]}']
-        image_ids = execute(cmd1)
-        if image_ids:
+        if image_ids := execute(cmd1):
             image_id_list = image_ids.splitlines()
             assert len(image_id_list) == 1
             cmd2 = ["docker", "image", "rm", "--force", image_id_list[0]]
@@ -251,7 +248,7 @@ def tool_id(self) -> str:
         ...

     @property
-    def assumptions(self) -> Dict[str, Any]:
+    def assumptions(self) -> dict[str, Any]:
        """a dictionary storing the assumptions of the three tests

        needs to contain 3 keys ("run", "list", "build")
@@ -305,7 +302,7 @@ def _assert_container_in_cache_api_calls(
        """
        ...

-    def _check_status(self, status: Dict[str, Any], assumptions: Dict[str, Any]) -> None:
+    def _check_status(self, status: dict[str, Any], assumptions: dict[str, Any]) -> None:
        """
        function to check the status of a API call against assumptions dict
        """
@@ -392,7 +389,7 @@ def test_tool_run(self: ContainerResolverTestProtocol, history_id: str) -> None:
         for o in self.assumptions["run"]["output"]:
             assert o in output

-    def _check_status(self: ContainerResolverTestProtocol, status: Dict[str, Any], assumptions: Dict[str, Any]) -> None:
+    def _check_status(self: ContainerResolverTestProtocol, status: dict[str, Any], assumptions: dict[str, Any]) -> None:
        """see ContainerResolverTestProtocol._check_status"""
        if "unresolved" in assumptions:
            assert status["model_class"] == "NullDependency"
@@ -503,7 +500,7 @@ class TestDefaultContainerResolvers(DockerContainerResolverTestCase, ContainerRe
    - listing containers does not cache them
    """

-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "bedtools v2.26.0",
@@ -635,13 +632,13 @@ class TestMulledContainerResolvers(DockerContainerResolverTestCase, ContainerRes
    - building the container creates a cache entry (cached=True, 1st call resolves
      with mulled and 2nd with cached_mulled)
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {
            "type": "cached_mulled",
        },
        {"type": "mulled"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "bedtools v2.26.0",
@@ -710,7 +707,7 @@ class TestMulledSingularityContainerResolvers(
    - 2nd round resolves cached image, uses the cached container
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {
            "type": "cached_mulled_singularity",
        },
@@ -778,7 +775,7 @@ class TestMulledContainerResolversNoAutoInstall(TestMulledContainerResolvers):
      No difference (since the cached name is identical to the URI)
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {
            "type": "cached_mulled",
        },
@@ -798,7 +795,7 @@ class TestMulledSingularityContainersResolversNoAutoInstall(TestMulledSingularit
      the path is used instead of the URI)
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {
            "type": "cached_mulled_singularity",
        },
@@ -868,11 +865,11 @@ class TestCondaFallBack(DockerContainerResolverTestCase, ContainerResolverTestCa
    """

    allow_conda_fallback: bool = True
-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "null"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "bedtools v2.26.0",
@@ -910,11 +907,11 @@ class TestCondaFallBackAndRequireContainer(DockerContainerResolverTestCase, Cont
    """

    allow_conda_fallback: bool = True
-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "null"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "expect_failure": True,
            "cached": False,
@@ -969,10 +966,10 @@ class TestExplicitContainerResolver(DockerContainerResolverTestCase, ContainerRe
    - list and build resolve the URI and do not cache the container
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "explicit"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "Program: bwa (alignment via Burrows-Wheeler transformation)",
@@ -1035,10 +1032,10 @@ class TestExplicitSingularityContainerResolver(
    - list and build resolve the URI and do not cache the container
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "explicit_singularity"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "Program: bwa (alignment via Burrows-Wheeler transformation)",
@@ -1098,10 +1095,10 @@ class TestCachedExplicitSingularityContainerResolver(
    - list resolves to the path irrespective if the path is existent (TODO bug?)
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "cached_explicit_singularity"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "Program: bwa (alignment via Burrows-Wheeler transformation)",
@@ -1163,10 +1160,10 @@ class TestCachedExplicitSingularityContainerResolverWithSingularityRequirement(
      here
    """

-    container_resolvers_config: List[Dict[str, Any]] = [
+    container_resolvers_config: list[dict[str, Any]] = [
        {"type": "cached_explicit_singularity"},
    ]
-    assumptions: Dict[str, Any] = {
+    assumptions: dict[str, Any] = {
        "run": {
            "output": [
                "cowsay works LOL",
diff --git a/test/integration/test_containerized_jobs.py b/test/integration/test_containerized_jobs.py
index 2254b79a8a5a..15fbe486eac6 100644
--- a/test/integration/test_containerized_jobs.py
+++ b/test/integration/test_containerized_jobs.py
@@ -5,7 +5,6 @@
 import unittest
 from typing import (
     Any,
-    Dict,
 )

 from galaxy.util.commands import which
@@ -87,7 +86,7 @@ def handle_galaxy_config_kwds(cls, config) -> None:
     disable_dependency_resolution(config)


-def disable_dependency_resolution(config: Dict[str, Any]) -> None:
+def disable_dependency_resolution(config: dict[str, Any]) -> None:
     # Disable tool dependency resolution.
config["tool_dependency_dir"] = "none" config["conda_auto_init"] = False diff --git a/test/integration/test_interactivetools_api.py b/test/integration/test_interactivetools_api.py index 7058070be0bc..2d3e88008cff 100644 --- a/test/integration/test_interactivetools_api.py +++ b/test/integration/test_interactivetools_api.py @@ -4,8 +4,6 @@ import tempfile from typing import ( Any, - Dict, - List, Optional, ) @@ -71,8 +69,8 @@ def entry_point_target(self, entry_point_id: str) -> str: api_asserts.assert_has_key(access_json, "target") return access_json["target"] - def wait_on_entry_points_active(self, job_id: str, expected_num: int = 1) -> List[Dict[str, Any]]: - def active_entry_points() -> Optional[List[Dict[str, Any]]]: + def wait_on_entry_points_active(self, job_id: str, expected_num: int = 1) -> list[dict[str, Any]]: + def active_entry_points() -> Optional[list[dict[str, Any]]]: entry_points = self.entry_points_for_job(job_id) if len(entry_points) != expected_num: return None @@ -88,7 +86,7 @@ def active_entry_points() -> Optional[List[Dict[str, Any]]]: # Can be decreased when galaxy_ext/container_monitor/monitor.py changes return wait_on(active_entry_points, "entry points to become active", timeout=120) - def entry_points_for_job(self, job_id: str) -> List[Dict[str, Any]]: + def entry_points_for_job(self, job_id: str) -> list[dict[str, Any]]: entry_points_response = self._get(f"entry_points?job_id={job_id}") api_asserts.assert_status_code_is(entry_points_response, 200) return entry_points_response.json() diff --git a/test/integration/test_job_files.py b/test/integration/test_job_files.py index 4f244419156b..0b5fe23df384 100644 --- a/test/integration/test_job_files.py +++ b/test/integration/test_job_files.py @@ -18,7 +18,6 @@ import io import os import tempfile -from typing import Dict import requests from sqlalchemy import select @@ -136,11 +135,11 @@ def test_write_with_tus(self): assert path upload_url = self._api_url(f"job_files/resumable_upload?job_key={job_key}", use_key=False) - headers: Dict[str, str] = {} + headers: dict[str, str] = {} my_client = client.TusClient(upload_url, headers=headers) storage = None - metadata: Dict[str, str] = {} + metadata: dict[str, str] = {} t_file = tempfile.NamedTemporaryFile("w") t_file.write("some initial text data") t_file.flush() diff --git a/test/integration/test_kubernetes_runner.py b/test/integration/test_kubernetes_runner.py index 180e40c52525..2b250162fbbc 100644 --- a/test/integration/test_kubernetes_runner.py +++ b/test/integration/test_kubernetes_runner.py @@ -11,7 +11,6 @@ import tempfile import time from typing import ( - List, Optional, overload, ) @@ -175,8 +174,8 @@ class TestKubernetesIntegration(BaseJobEnvironmentIntegrationTestCase, MulledJob dataset_populator: KubernetesDatasetPopulator job_config: Config jobs_directory: str - persistent_volume_claims: List[KubeSetupConfigTuple] - persistent_volumes: List[KubeSetupConfigTuple] + persistent_volume_claims: list[KubeSetupConfigTuple] + persistent_volumes: list[KubeSetupConfigTuple] container_type = "docker" def setUp(self) -> None: diff --git a/test/integration/test_notifications.py b/test/integration/test_notifications.py index 89077357f824..7e01abecef2b 100644 --- a/test/integration/test_notifications.py +++ b/test/integration/test_notifications.py @@ -4,10 +4,7 @@ ) from typing import ( Any, - Dict, - List, Optional, - Tuple, ) from uuid import uuid4 @@ -381,7 +378,7 @@ def _get_notifications_status_since(self, since: datetime): return status def _send_test_notification_to( - 
self, user_ids: List[str], subject: Optional[str] = None, message: Optional[str] = None + self, user_ids: list[str], subject: Optional[str] = None, message: Optional[str] = None ): request = { "recipients": {"user_ids": user_ids}, @@ -398,7 +395,7 @@ def _send_broadcast_notification( message: Optional[str] = None, publication_time: Optional[datetime] = None, expiration_time: Optional[datetime] = None, - action_links: Optional[List[Tuple[str, str]]] = None, + action_links: Optional[list[tuple[str, str]]] = None, ): payload = notification_broadcast_test_data() if subject is not None: @@ -418,7 +415,7 @@ def _send_broadcast_notification( notifications_status = response.json() return notifications_status - def _update_notification(self, notification_id: str, update_state: Dict[str, Any]): + def _update_notification(self, notification_id: str, update_state: dict[str, Any]): update_response = self._put(f"notifications/{notification_id}", data=update_state, json=True) self._assert_status_code_is(update_response, 204) diff --git a/test/integration/test_scripts_pgcleanup.py b/test/integration/test_scripts_pgcleanup.py index 4ecdc9b18abf..5fc4d1de8ec9 100644 --- a/test/integration/test_scripts_pgcleanup.py +++ b/test/integration/test_scripts_pgcleanup.py @@ -1,5 +1,3 @@ -from typing import List - from galaxy_test.base.populators import skip_without_tool from .test_scripts import BaseScriptsIntegrationTestCase @@ -227,7 +225,7 @@ def is_purged(self, history_id: str, dataset) -> bool: ) return details_response["purged"] - def _pgcleanup_check_output(self, extra_args: List[str]) -> str: + def _pgcleanup_check_output(self, extra_args: list[str]) -> str: config_file = self.write_config_file() output = self._scripts_check_output(SCRIPT, ["-c", config_file] + extra_args) print(output) diff --git a/test/integration/test_storage_cleaner.py b/test/integration/test_storage_cleaner.py index e0766a854a94..19ebd681793b 100644 --- a/test/integration/test_storage_cleaner.py +++ b/test/integration/test_storage_cleaner.py @@ -1,5 +1,4 @@ from typing import ( - List, NamedTuple, Optional, ) @@ -124,8 +123,8 @@ def _build_test_items(self, resource_name: str): def _assert_monitoring_and_cleanup_for_discarded_resource( self, resource: str, - test_items: List[StoredItemDataForTests], - item_ids: List[str], + test_items: list[StoredItemDataForTests], + item_ids: list[str], delete_resource_uri: Optional[str] = None, ): """Tests the storage cleaner API for a particular resource (histories or datasets)""" @@ -193,8 +192,8 @@ def _assert_monitoring_and_cleanup_for_discarded_resource( assert not cleanup_result["errors"] def _create_histories_with( - self, test_histories: List[StoredItemDataForTests], wait_for_histories=True - ) -> List[str]: + self, test_histories: list[StoredItemDataForTests], wait_for_histories=True + ) -> list[str]: history_ids = [] for history_data in test_histories: post_data = dict(name=history_data.name) @@ -211,8 +210,8 @@ def _create_histories_with( return history_ids def _create_datasets_in_history_with( - self, history_id: str, test_datasets: List[StoredItemDataForTests], wait_for_history=True - ) -> List[str]: + self, history_id: str, test_datasets: list[StoredItemDataForTests], wait_for_history=True + ) -> list[str]: dataset_ids = [] for dataset_data in test_datasets: dataset = self.dataset_populator.new_dataset( @@ -223,7 +222,7 @@ def _create_datasets_in_history_with( self.dataset_populator.wait_for_history(history_id) return dataset_ids - def _assert_order_is_expected(self, storage_items_url: 
str, order_by: str, expected_ordered_names: List[str]): + def _assert_order_is_expected(self, storage_items_url: str, order_by: str, expected_ordered_names: list[str]): items_response = self._get(f"{storage_items_url}?order={order_by}") self._assert_status_code_is_ok(items_response) items = items_response.json() diff --git a/test/integration/test_upload_configuration_options.py b/test/integration/test_upload_configuration_options.py index cbecfb975768..9417ca08a21c 100644 --- a/test/integration/test_upload_configuration_options.py +++ b/test/integration/test_upload_configuration_options.py @@ -25,7 +25,6 @@ import tempfile from typing import ( Any, - Dict, ) from requests import Response @@ -60,13 +59,13 @@ def setUp(self) -> None: def fetch_target( self, - target: Dict[str, Any], + target: dict[str, Any], history_id: str, assert_ok: bool = False, attach_test_file: bool = False, wait: bool = False, ) -> Response: - payload: Dict[str, Any] = { + payload: dict[str, Any] = { "history_id": history_id, "targets": [target], } @@ -413,14 +412,14 @@ def handle_galaxy_config_kwds(cls, config) -> None: cls.handle_extra_ftp_config(config) @classmethod - def handle_extra_ftp_config(cls, config: Dict[str, Any]) -> None: + def handle_extra_ftp_config(cls, config: dict[str, Any]) -> None: """Overrride to specify additional FTP configuration options.""" @classmethod def ftp_dir(cls) -> str: return cls.temp_config_dir("ftp") - def _check_content(self, dataset: Dict[str, Any], content: str, history_id: str, ext: str = "txt") -> None: + def _check_content(self, dataset: dict[str, Any], content: str, history_id: str, ext: str = "txt") -> None: dataset = self.dataset_populator.get_history_dataset_details(history_id, dataset=dataset) assert dataset["file_ext"] == ext, dataset content = self.dataset_populator.get_history_dataset_content(history_id, dataset=dataset) diff --git a/test/integration/test_users.py b/test/integration/test_users.py index bc1eaadfcb76..5c40d980e447 100644 --- a/test/integration/test_users.py +++ b/test/integration/test_users.py @@ -1,18 +1,17 @@ from typing import ( ClassVar, - Set, ) from galaxy_test.driver import integration_util -USER_SUMMARY_KEYS: Set[str] = {"model_class", "id", "email", "username", "deleted", "active", "last_password_change"} +USER_SUMMARY_KEYS: set[str] = {"model_class", "id", "email", "username", "deleted", "active", "last_password_change"} class UsersIntegrationCase(integration_util.IntegrationTestCase): expose_user_name: ClassVar[bool] expose_user_email: ClassVar[bool] expected_regular_user_list_count: ClassVar[int] - expected_limited_user_keys: ClassVar[Set[str]] + expected_limited_user_keys: ClassVar[set[str]] @classmethod def handle_galaxy_config_kwds(cls, config): diff --git a/test/integration/test_workflow_refactoring.py b/test/integration/test_workflow_refactoring.py index a964dbb0fb2a..7d5a4da15eea 100644 --- a/test/integration/test_workflow_refactoring.py +++ b/test/integration/test_workflow_refactoring.py @@ -2,8 +2,6 @@ import json from typing import ( Any, - Dict, - List, ) from sqlalchemy import select @@ -45,8 +43,8 @@ input1: test_input """ -ActionJson = Dict[str, Any] -ActionsJson = List[ActionJson] +ActionJson = dict[str, Any] +ActionsJson = list[ActionJson] class TestWorkflowRefactoringIntegration(integration_util.IntegrationTestCase, UsesShedApi): @@ -796,7 +794,7 @@ def _export_for_update(self, workflow): with self.workflow_populator.export_for_update(workflow_id) as workflow_object: yield workflow_object - def _refactor(self, actions: 
List[Dict[str, Any]], stored_workflow=None, dry_run=False, style="ga"): + def _refactor(self, actions: list[dict[str, Any]], stored_workflow=None, dry_run=False, style="ga"): stmt = select(User).order_by(User.id.desc()).limit(1) user = self._app.model.session.execute(stmt).scalar_one() mock_trans = MockTrans(self._app, user) diff --git a/test/integration/test_workflow_tasks.py b/test/integration/test_workflow_tasks.py index d5f8d4f9bdb1..20228dbe35f6 100644 --- a/test/integration/test_workflow_tasks.py +++ b/test/integration/test_workflow_tasks.py @@ -9,7 +9,6 @@ from typing import ( Any, cast, - Dict, ) from galaxy.util.compression_utils import CompressedFile @@ -178,7 +177,7 @@ def test_export_import_invocation_with_copied_hdca_and_database_operation_tool(s def _export_and_import_workflow_invocation( self, summary: RunJobsSummary, use_uris: bool = True, model_store_format="tgz" - ) -> Dict[str, Any]: + ) -> dict[str, Any]: invocation_id = summary.invocation_id extension = model_store_format or "tgz" if use_uris: @@ -200,7 +199,7 @@ def _export_and_import_workflow_invocation( imported_invocation_details = self._assert_one_invocation_created_and_get_details(response) return imported_invocation_details - def _rerun_imported_workflow(self, summary: RunJobsSummary, create_response: Dict[str, Any]): + def _rerun_imported_workflow(self, summary: RunJobsSummary, create_response: dict[str, Any]): workflow_id = create_response["workflow_id"] history_id = self.dataset_populator.new_history() new_workflow_request = summary.workflow_request.copy() @@ -210,7 +209,7 @@ def _rerun_imported_workflow(self, summary: RunJobsSummary, create_response: Dic invocation_id = invocation_response.json()["id"] self.workflow_populator.wait_for_workflow(workflow_id, invocation_id, history_id, assert_ok=True) - def _assert_one_invocation_created_and_get_details(self, response: Any) -> Dict[str, Any]: + def _assert_one_invocation_created_and_get_details(self, response: Any) -> dict[str, Any]: assert isinstance(response, list) assert len(response) == 1 invocation = response[0] diff --git a/test/unit/app/jobs/test_command_factory.py b/test/unit/app/jobs/test_command_factory.py index bd589d43534a..ed17c4c9d314 100644 --- a/test/unit/app/jobs/test_command_factory.py +++ b/test/unit/app/jobs/test_command_factory.py @@ -2,10 +2,6 @@ import shutil from os import getcwd from tempfile import mkdtemp -from typing import ( - List, - Tuple, -) from galaxy.jobs.command_factory import ( build_command, @@ -32,7 +28,7 @@ class TestCommandFactory(TestCase): def setUp(self): self.job_dir = mkdtemp() self.job_wrapper = MockJobWrapper(self.job_dir) - self.workdir_outputs: List[Tuple[str, str]] = [] + self.workdir_outputs: list[tuple[str, str]] = [] def workdir_outputs(job_wrapper, **kwds): assert job_wrapper == self.job_wrapper diff --git a/test/unit/app/jobs/test_job_configuration.py b/test/unit/app/jobs/test_job_configuration.py index 29a22480830c..19e738648bf1 100644 --- a/test/unit/app/jobs/test_job_configuration.py +++ b/test/unit/app/jobs/test_job_configuration.py @@ -3,7 +3,6 @@ import shutil import tempfile from typing import ( - Dict, Optional, ) from unittest import mock @@ -99,7 +98,7 @@ def _with_handlers_config(self, assign_with=None, default=None, handlers=None, b self._job_configuration_base_pools = base_pools self._write_config_from(HANDLER_TEMPLATE_JOB_CONF, template=template) - def _write_config_from(self, path: StrPath, template: Optional[Dict[str, str]] = None) -> None: + def _write_config_from(self, path: StrPath, 
template: Optional[dict[str, str]] = None) -> None: template = template or {} try: contents = open(path).read() diff --git a/test/unit/app/jobs/test_job_wrapper.py b/test/unit/app/jobs/test_job_wrapper.py index db16417df3c2..1675841c21be 100644 --- a/test/unit/app/jobs/test_job_wrapper.py +++ b/test/unit/app/jobs/test_job_wrapper.py @@ -3,8 +3,6 @@ from contextlib import contextmanager from typing import ( cast, - Dict, - Type, TYPE_CHECKING, ) @@ -54,7 +52,7 @@ def setUp(self): job.tool_id = TEST_TOOL_ID job.user = User() job.object_store_id = "foo" - self.model_objects: Dict[Type[Base], Dict[int, Base]] = {Job: {345: job}} + self.model_objects: dict[type[Base], dict[int, Base]] = {Job: {345: job}} self.app.model.session = cast("scoped_session", MockContext(self.model_objects)) self.app._toolbox = cast(ToolBox, MockToolbox(MockTool(self))) diff --git a/test/unit/app/managers/test_NotificationManager.py b/test/unit/app/managers/test_NotificationManager.py index 76e934cc9e6f..ef87fa605a13 100644 --- a/test/unit/app/managers/test_NotificationManager.py +++ b/test/unit/app/managers/test_NotificationManager.py @@ -4,10 +4,7 @@ ) from typing import ( Any, - Dict, - List, Optional, - Set, ) from unittest.mock import patch @@ -48,7 +45,7 @@ def _create_test_user(self, username="user1") -> User: user = self.user_manager.create(**user_data) return user - def _create_test_users(self, num_users: int = 1) -> List[User]: + def _create_test_users(self, num_users: int = 1) -> list[User]: users = [self._create_test_user(f"username{num:02}") for num in range(num_users)] return users @@ -71,7 +68,7 @@ def _default_test_notification_data(self): }, } - def _send_message_notification_to_users(self, users: List[User], notification: Optional[Dict[str, Any]] = None): + def _send_message_notification_to_users(self, users: list[User], notification: Optional[dict[str, Any]] = None): data = self._default_test_notification_data() if notification: data.update(notification) @@ -90,7 +87,7 @@ def _send_message_notification_to_users(self, users: List[User], notification: O def _has_expired(self, expiration_time: Optional[datetime]) -> bool: return expiration_time < datetime.utcnow() if expiration_time else False - def _assert_notification_expected(self, actual_notification: Any, expected_notification: Dict[str, Any]): + def _assert_notification_expected(self, actual_notification: Any, expected_notification: dict[str, Any]): assert actual_notification assert actual_notification.id assert actual_notification.source == expected_notification["source"] @@ -236,7 +233,7 @@ def _default_broadcast_notification_data(self): }, } - def _send_broadcast_notification(self, broadcast_notification_data: Dict[str, Any]): + def _send_broadcast_notification(self, broadcast_notification_data: dict[str, Any]): request = BroadcastNotificationCreateRequest(**broadcast_notification_data) created_notification = self.notification_manager.create_broadcast_notification(request) return created_notification @@ -500,7 +497,7 @@ def _create_test_recipients_scenario(self): role_ids=[role3.id], ) - expected_user_ids: Set[int] = { + expected_user_ids: set[int] = { users[9].id, # From direct recipients.user_ids users[5].id, # From group3.user_ids users[4].id, # -From role2.user_ids @@ -515,12 +512,12 @@ def _create_test_recipients_scenario(self): return recipients, expected_user_ids - def _assert_resolved_match_expected_users(self, resolved_users: List[User], expected_user_ids: Set[int]): + def _assert_resolved_match_expected_users(self, resolved_users: 
list[User], expected_user_ids: set[int]): assert len(resolved_users) == len(expected_user_ids) for user in resolved_users: assert user.id in expected_user_ids - def _create_test_group(self, name: str, users: List[User], roles: List[Role]): + def _create_test_group(self, name: str, users: list[User], roles: list[Role]): sa_session = self.trans.sa_session group = Group(name=name) sa_session.add(group) @@ -529,7 +526,7 @@ def _create_test_group(self, name: str, users: List[User], roles: List[Role]): self.trans.app.security_agent.set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) return group - def _create_test_role(self, name: str, users: List[User], groups: List[Group]): + def _create_test_role(self, name: str, users: list[User], groups: list[Group]): sa_session = self.trans.sa_session role = Role(name=name) sa_session.add(role) diff --git a/test/unit/app/managers/test_user_file_sources.py b/test/unit/app/managers/test_user_file_sources.py index 841f79ce0838..cc849865360b 100644 --- a/test/unit/app/managers/test_user_file_sources.py +++ b/test/unit/app/managers/test_user_file_sources.py @@ -1,9 +1,7 @@ import os from typing import ( cast, - List, Optional, - Type, ) from unittest import SkipTest from uuid import uuid4 @@ -53,10 +51,10 @@ class Config: - file_source_templates: Optional[List[RawTemplateConfig]] = None + file_source_templates: Optional[list[RawTemplateConfig]] = None file_source_templates_config_file: Optional[str] = None - def __init__(self, templates: List[RawTemplateConfig]): + def __init__(self, templates: list[RawTemplateConfig]): self.file_source_templates = templates @@ -398,7 +396,7 @@ def test_find_best_match_filters_deactivated(self, tmp_path): match = self.file_sources.find_best_match(user_file_source.uri_root) assert not match - def _uri_roots(self) -> List[str]: + def _uri_roots(self) -> list[str]: sources_as_dicts = self.file_sources.user_file_sources_to_dicts( False, cast(FileSourcesUserContext, self.trans), @@ -824,7 +822,7 @@ def _assert_secret_absent(self, user_file_source: UserFileSourceModel, secret_na assert sec_val in ["", None] def _assert_modify_throws_exception( - self, user_file_source: UserFileSourceModel, modify: ModifyInstancePayload, exception_type: Type[Exception] + self, user_file_source: UserFileSourceModel, modify: ModifyInstancePayload, exception_type: type[Exception] ): exception_thrown = False try: diff --git a/test/unit/app/managers/test_user_object_stores.py b/test/unit/app/managers/test_user_object_stores.py index 5485df25a0f6..c4b8bfa70b8f 100644 --- a/test/unit/app/managers/test_user_object_stores.py +++ b/test/unit/app/managers/test_user_object_stores.py @@ -1,7 +1,5 @@ from typing import ( - List, Optional, - Type, ) from yaml import safe_load @@ -33,10 +31,10 @@ class Config: - object_store_templates: Optional[List[RawTemplateConfig]] = None + object_store_templates: Optional[list[RawTemplateConfig]] = None object_store_templates_config_file: Optional[str] = None - def __init__(self, templates: List[RawTemplateConfig]): + def __init__(self, templates: list[RawTemplateConfig]): self.object_store_templates = templates @@ -507,7 +505,7 @@ def _assert_modify_throws_exception( self, user_object_store: UserConcreteObjectStoreModel, modify: ModifyInstancePayload, - exception_type: Type[Exception], + exception_type: type[Exception], ): exception_thrown = False try: diff --git a/test/unit/app/test_galaxy_install.py b/test/unit/app/test_galaxy_install.py index c60ad947d074..ccaf8010cc6d 100644 --- 
a/test/unit/app/test_galaxy_install.py +++ b/test/unit/app/test_galaxy_install.py @@ -7,7 +7,6 @@ from pathlib import Path from typing import ( Any, - Dict, ) from galaxy.model.tool_shed_install import ToolShedRepository @@ -26,7 +25,7 @@ def test_against_production_shed(tmp_path: Path): install_target = StandaloneInstallationTarget(tmp_path) install_manager = InstallRepositoryManager(install_target) - install_options: Dict[str, Any] = {} + install_options: dict[str, Any] = {} install_manager.install( DEFAULT_TOOL_SHED_URL, repo_name, diff --git a/test/unit/app/test_remote_shell.py b/test/unit/app/test_remote_shell.py index 3bb83c1b678e..2b9d46408d1a 100644 --- a/test/unit/app/test_remote_shell.py +++ b/test/unit/app/test_remote_shell.py @@ -2,7 +2,6 @@ import unittest from typing import ( Any, - Dict, ) try: @@ -21,7 +20,7 @@ class TestCliInterface(TestCase): ssh_keys: SSHKeys username: str - shell_params: Dict[str, Any] + shell_params: dict[str, Any] cli_interface: CliInterface @classmethod diff --git a/test/unit/app/test_tasks.py b/test/unit/app/test_tasks.py index 5e0f2fddff7b..e689fe79ac0b 100644 --- a/test/unit/app/test_tasks.py +++ b/test/unit/app/test_tasks.py @@ -1,5 +1,3 @@ -from typing import List - from galaxy.app_unittest_utils.galaxy_mock import MockApp from galaxy.celery.tasks import clean_object_store_caches from galaxy.objectstore import BaseObjectStore @@ -7,16 +5,16 @@ class MockObjectStore: - def __init__(self, cache_targets: List[CacheTarget]): + def __init__(self, cache_targets: list[CacheTarget]): self._cache_targets = cache_targets - def cache_targets(self) -> List[CacheTarget]: + def cache_targets(self) -> list[CacheTarget]: return self._cache_targets def test_clean_object_store_caches(tmp_path): container = MockApp() - cache_targets: List[CacheTarget] = [] + cache_targets: list[CacheTarget] = [] container[BaseObjectStore] = MockObjectStore(cache_targets) # type: ignore[assignment] # similar code used in object store unit tests diff --git a/test/unit/app/tools/test_data_parameters.py b/test/unit/app/tools/test_data_parameters.py index ea7a7352d92e..c7dc8fb82828 100644 --- a/test/unit/app/tools/test_data_parameters.py +++ b/test/unit/app/tools/test_data_parameters.py @@ -1,7 +1,6 @@ from typing import ( Any, Optional, - Tuple, ) from galaxy import model @@ -197,7 +196,7 @@ def __init__(self, test_dataset=None, name="Test Dataset", id=1): self.deleted = False self.dataset = test_dataset self.visible = True - self.conversion_destination: Tuple[bool, Optional[str], Optional[Any]] = (True, None, None) + self.conversion_destination: tuple[bool, Optional[str], Optional[Any]] = (True, None, None) self.extension = "txt" self.dbkey = "hg19" self.implicitly_converted_parent_datasets = False diff --git a/test/unit/app/tools/test_parameter_parsing.py b/test/unit/app/tools/test_parameter_parsing.py index e33e183fc962..e826ff5068bd 100644 --- a/test/unit/app/tools/test_parameter_parsing.py +++ b/test/unit/app/tools/test_parameter_parsing.py @@ -1,6 +1,5 @@ from typing import ( Any, - Dict, ) from galaxy.tools.parameters.wrapped import ( @@ -27,7 +26,7 @@ def test_nested_key_to_path(): class TestProcessKey: def test_process_key(self): - nested_dict: Dict[str, Any] = {} + nested_dict: dict[str, Any] = {} d = { "repeat_1|inner_repeat_1|data_table_column_value": "bla4", "repeat_0|inner_repeat_1|data_table_column_value": "bla2", @@ -45,7 +44,7 @@ def test_process_key(self): assert nested_dict == expected_dict def test_process_key_2(self): - nested_dict: Dict[str, Any] = {} + 
nested_dict: dict[str, Any] = {} d = { "data_tables_0|columns_0|data_table_column_value": "Amel_HAv3.1", "data_tables": [], diff --git a/test/unit/app/tools/test_populate_state.py b/test/unit/app/tools/test_populate_state.py index d641ff079397..711115eabf90 100644 --- a/test/unit/app/tools/test_populate_state.py +++ b/test/unit/app/tools/test_populate_state.py @@ -1,7 +1,6 @@ from typing import ( Any, cast, - Dict, ) from galaxy.tools.parameters import ( @@ -47,7 +46,7 @@ def test_populate_state(): f.cases = [mock_when(value="true", inputs={"h": h}), mock_when(value="false", inputs={"i": i})] inputs = {"a": a, "b": b} flat = {"a": 1, "b_0|c": 2, "b_0|d_0|e": 3, "b_0|d_0|f|h": 4, "b_0|d_0|f|g": True} - state: Dict[str, Any] = {} + state: dict[str, Any] = {} populate_state(trans, cast(ToolInputsT, inputs), flat, state, check=False) assert state["a"] == 1 assert state["b"][0]["c"] == 2 @@ -55,7 +54,7 @@ def test_populate_state(): assert state["b"][0]["d"][0]["f"]["h"] == 4 # now test with input_format='21.01' nested = {"a": 1, "b": [{"c": 2, "d": [{"e": 3, "f": {"h": 4, "g": True}}]}]} - state_new: Dict[str, Any] = {} + state_new: dict[str, Any] = {} populate_state(trans, cast(ToolInputsT, inputs), nested, state_new, check=False, input_format="21.01") assert state_new["a"] == 1 assert state_new["b"][0]["c"] == 2 diff --git a/test/unit/authnz/test_authnz.py b/test/unit/authnz/test_authnz.py index 880675596d94..f7b93dcf1b79 100644 --- a/test/unit/authnz/test_authnz.py +++ b/test/unit/authnz/test_authnz.py @@ -1,5 +1,4 @@ import tempfile -from typing import Tuple from unittest.mock import MagicMock import pytest @@ -38,7 +37,7 @@ def mock_app(): """ -def create_oidc_config(extra_properties: str = "") -> Tuple[str, str]: +def create_oidc_config(extra_properties: str = "") -> tuple[str, str]: contents = OIDC_CONFIG_TEMPLATE.format(extra_properties=extra_properties) file = tempfile.NamedTemporaryFile(mode="w", delete=False) file.write(contents) @@ -54,7 +53,7 @@ def create_backend_config( require_create_confirmation="false", accepted_audiences="https://audience.example.com", username_key="custom_username", -) -> Tuple[str, str]: +) -> tuple[str, str]: contents = OIDC_BACKEND_CONFIG_TEMPLATE.format( provider_name=provider_name, url=url, diff --git a/test/unit/data/dataset_collections/test_auto_pairing.py b/test/unit/data/dataset_collections/test_auto_pairing.py index 21298ecd7d51..eec8309a3049 100644 --- a/test/unit/data/dataset_collections/test_auto_pairing.py +++ b/test/unit/data/dataset_collections/test_auto_pairing.py @@ -1,8 +1,4 @@ from dataclasses import dataclass -from typing import ( - Dict, - List, -) import yaml from pydantic import ( @@ -26,11 +22,11 @@ class PairedItem(BaseModel): class AutoPairingSpecItem(BaseModel): doc: str - inputs: List[str] - paired: Dict[str, PairedItem] + inputs: list[str] + paired: dict[str, PairedItem] -AutoPairingSpecification = RootModel[List[AutoPairingSpecItem]] +AutoPairingSpecification = RootModel[list[AutoPairingSpecItem]] def test_auto_pairing_specification(): diff --git a/test/unit/data/datatypes/dataproviders/test_base_dataproviders.py b/test/unit/data/datatypes/dataproviders/test_base_dataproviders.py index 95c1d68e679e..0bfb6c41d77c 100644 --- a/test/unit/data/datatypes/dataproviders/test_base_dataproviders.py +++ b/test/unit/data/datatypes/dataproviders/test_base_dataproviders.py @@ -6,7 +6,6 @@ import os import tempfile from io import StringIO -from typing import Type from galaxy.datatypes.dataproviders import ( base, @@ -70,7 +69,7 @@ def 
parses_default_content_as(self): class Test_BaseDataProvider(BaseTestCase): - provider_class: Type[base.DataProvider] = base.DataProvider + provider_class: type[base.DataProvider] = base.DataProvider def contents_provider_and_data(self, filename=None, contents=None, source=None, *provider_args, **provider_kwargs): # to remove boiler plate @@ -163,7 +162,7 @@ def test_file(self): class Test_FilteredDataProvider(Test_BaseDataProvider): - provider_class: Type[base.DataProvider] = base.FilteredDataProvider + provider_class: type[base.DataProvider] = base.FilteredDataProvider def assertCounters(self, provider, read, valid, returned): assert provider.num_data_read == read @@ -190,7 +189,7 @@ def filter_ts(string): class Test_LimitedOffsetDataProvider(Test_FilteredDataProvider): - provider_class: Type[base.DataProvider] = base.LimitedOffsetDataProvider + provider_class: type[base.DataProvider] = base.LimitedOffsetDataProvider def test_offset_1(self): """when offset is 1, should skip first""" @@ -289,7 +288,7 @@ def only_ts(string): class Test_MultiSourceDataProvider(BaseTestCase): - provider_class: Type[base.DataProvider] = base.MultiSourceDataProvider + provider_class: type[base.DataProvider] = base.MultiSourceDataProvider def contents_and_tmpfile(self, contents=None): # TODO: hmmmm... diff --git a/test/unit/data/datatypes/dataproviders/test_line_dataproviders.py b/test/unit/data/datatypes/dataproviders/test_line_dataproviders.py index 67d1eeb2aaff..6d04fd534a2f 100644 --- a/test/unit/data/datatypes/dataproviders/test_line_dataproviders.py +++ b/test/unit/data/datatypes/dataproviders/test_line_dataproviders.py @@ -4,7 +4,6 @@ """ import logging -from typing import Type from galaxy.datatypes.dataproviders import ( base, @@ -17,7 +16,7 @@ # TODO: TestCase hierarchy is a bit of mess here. 
class Test_FilteredLineDataProvider(test_base_dataproviders.Test_FilteredDataProvider): - provider_class: Type[base.DataProvider] = line.FilteredLineDataProvider + provider_class: type[base.DataProvider] = line.FilteredLineDataProvider default_file_contents = """ # this should be stripped out One diff --git a/test/unit/data/datatypes/test_images.py b/test/unit/data/datatypes/test_images.py index b05be9f40bdd..96cfbf6d4a16 100644 --- a/test/unit/data/datatypes/test_images.py +++ b/test/unit/data/datatypes/test_images.py @@ -1,6 +1,5 @@ from typing import ( Any, - Type, ) from galaxy.datatypes.images import ( @@ -17,7 +16,7 @@ # Define test decorator -def __test(image_cls: Type[Image], input_filename: str): +def __test(image_cls: type[Image], input_filename: str): def decorator(test_impl): @@ -36,7 +35,7 @@ def test(): # Define test factory -def __create_test(image_cls: Type[Image], input_filename: str, metadata_key: str, expected_value: Any): +def __create_test(image_cls: type[Image], input_filename: str, metadata_key: str, expected_value: Any): @__test(image_cls, input_filename) def test(metadata): diff --git a/test/unit/data/model/db/conftest.py b/test/unit/data/model/db/conftest.py index 8cd81ed50904..e7d5c6ad0b3d 100644 --- a/test/unit/data/model/db/conftest.py +++ b/test/unit/data/model/db/conftest.py @@ -1,5 +1,5 @@ +from collections.abc import Generator from typing import ( - Generator, TYPE_CHECKING, ) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py index 0c6c8979d8cc..5c2c01c07851 100644 --- a/test/unit/data/model/migration_fixes/test_migrations.py +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -1,5 +1,5 @@ +from collections.abc import Generator from typing import ( - Generator, TYPE_CHECKING, ) diff --git a/test/unit/data/model/test_model_discovery.py b/test/unit/data/model/test_model_discovery.py index af3685669c6a..0e4af7b5a73e 100644 --- a/test/unit/data/model/test_model_discovery.py +++ b/test/unit/data/model/test_model_discovery.py @@ -2,7 +2,6 @@ from tempfile import mkdtemp from typing import ( Any, - Dict, ) from sqlalchemy import select @@ -281,7 +280,7 @@ def _assert_one_library_created(sa_session): return new_library -def _import_library_target(target: Dict[str, Any], work_directory: str): +def _import_library_target(target: dict[str, Any], work_directory: str): app = _mock_app() temp_directory = mkdtemp() with store.DirectoryModelExportStore(temp_directory, app=app, serialize_dataset_objects=True) as export_store: diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py index e60c77f2a59a..0b075c4bf2e4 100644 --- a/test/unit/data/model/test_model_store.py +++ b/test/unit/data/model/test_model_store.py @@ -11,7 +11,6 @@ ) from typing import ( Any, - Dict, NamedTuple, Optional, ) @@ -1377,7 +1376,7 @@ def setup_fixture_context_with_history( def perform_import_from_store_dict( fixture_context: StoreFixtureContextWithHistory, - import_dict: Dict[str, Any], + import_dict: dict[str, Any], import_options: Optional[store.ImportOptions] = None, ) -> None: import_options = import_options or store.ImportOptions() diff --git a/test/unit/data/test_galaxy_mapping.py b/test/unit/data/test_galaxy_mapping.py index 96eebacc0516..24addd8f26d2 100644 --- a/test/unit/data/test_galaxy_mapping.py +++ b/test/unit/data/test_galaxy_mapping.py @@ -2,7 +2,6 @@ import random import uuid from tempfile import NamedTemporaryFile -from typing import List import 
pytest from sqlalchemy import ( @@ -113,8 +112,8 @@ def test_dataset_instance_order(self) -> None: random.shuffle(elements) for item in elements: self.persist(item) - forward_hdas: List[model.HistoryDatasetAssociation] = [] - reverse_hdas: List[model.HistoryDatasetAssociation] = [] + forward_hdas: list[model.HistoryDatasetAssociation] = [] + reverse_hdas: list[model.HistoryDatasetAssociation] = [] for i, dataset_instance in enumerate(list_pair.dataset_instances): if i % 2: reverse_hdas.append(dataset_instance) diff --git a/test/unit/files/test_posix.py b/test/unit/files/test_posix.py index f61671a65e39..09e3b7a70dde 100644 --- a/test/unit/files/test_posix.py +++ b/test/unit/files/test_posix.py @@ -2,7 +2,6 @@ import tempfile from typing import ( Any, - Dict, ) import pytest @@ -487,7 +486,7 @@ def _configured_file_sources_with_root( if include_allowlist: config_kwd["symlink_allowlist"] = [tmp] file_sources_config = FileSourcePluginsConfig(**config_kwd) - plugin: Dict[str, Any] = { + plugin: dict[str, Any] = { "type": "posix", } if writable is not None: diff --git a/test/unit/files/test_temp.py b/test/unit/files/test_temp.py index 2d51a359916b..2d6d56d12a0a 100644 --- a/test/unit/files/test_temp.py +++ b/test/unit/files/test_temp.py @@ -1,5 +1,4 @@ import tempfile -from typing import List import pytest @@ -202,7 +201,7 @@ def _upload_to(file_source: TempFilesSource, target_uri: str, content: str, user file_source.write_from(target_uri, f.name, user_context=user_context) -def assert_list_names(file_source: TempFilesSource, uri: str, recursive: bool, expected_names: List[str]): +def assert_list_names(file_source: TempFilesSource, uri: str, recursive: bool, expected_names: list[str]): result, count = file_source.list(uri, recursive=recursive) assert count == len(expected_names) assert sorted([entry["name"] for entry in result]) == sorted(expected_names) diff --git a/test/unit/objectstore/test_template_models.py b/test/unit/objectstore/test_template_models.py index 524bc9184837..32772dfea7e8 100644 --- a/test/unit/objectstore/test_template_models.py +++ b/test/unit/objectstore/test_template_models.py @@ -1,7 +1,5 @@ import os -import sys -import pytest from yaml import safe_load from galaxy.objectstore.templates.examples import get_example @@ -16,10 +14,6 @@ ) from galaxy.util.config_templates import VariablesDict -if sys.version_info < (3, 8): # noqa: UP036 - pytest.skip(reason="Pydantic tool parameter models require python3.8 or higher", allow_module_level=True) - - LIBRARY_1 = """ - id: amazon_bucket name: Amazon Bucket diff --git a/test/unit/tool_shed/_util.py b/test/unit/tool_shed/_util.py index bb830d7c70f3..7ca702cfad06 100644 --- a/test/unit/tool_shed/_util.py +++ b/test/unit/tool_shed/_util.py @@ -9,7 +9,6 @@ ) from typing import ( Any, - Dict, Optional, ) @@ -188,7 +187,7 @@ def random_name(len: int = 10) -> str: return "".join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)) -def create_category(provides_repositories: ProvidesRepositoriesContext, create: Dict[str, Any]) -> Category: +def create_category(provides_repositories: ProvidesRepositoriesContext, create: dict[str, Any]) -> Category: from tool_shed.managers.categories import CategoryManager request = CreateCategoryRequest(**create) diff --git a/test/unit/tool_util/test_assertion_models.py b/test/unit/tool_util/test_assertion_models.py index 6195bbe34852..26af471f8ec3 100644 --- a/test/unit/tool_util/test_assertion_models.py +++ b/test/unit/tool_util/test_assertion_models.py @@ -1,4 +1,3 @@ -import sys 
from string import Template import lxml.etree as ET @@ -207,10 +206,6 @@ ) -if sys.version_info < (3, 8): # noqa: UP036 - pytest.skip(reason="Pydantic assertion models require python3.8 or higher", allow_module_level=True) - - def test_valid_json_models_validate(): assertion_list.model_validate(valid_assertions) diff --git a/test/unit/tool_util/test_parameter_specification.py b/test/unit/tool_util/test_parameter_specification.py index 0096f2c102e7..fb5e304db2cc 100644 --- a/test/unit/tool_util/test_parameter_specification.py +++ b/test/unit/tool_util/test_parameter_specification.py @@ -1,4 +1,3 @@ -import sys from functools import partial from typing import ( Callable, @@ -6,7 +5,6 @@ Optional, ) -import pytest import yaml from galaxy.exceptions import RequestParameterInvalidException @@ -38,9 +36,6 @@ ) from galaxy.util.resources import resource_string -if sys.version_info < (3, 8): # noqa: UP036 - pytest.skip(reason="Pydantic tool parameter models require python3.8 or higher", allow_module_level=True) - def specification_object(): try: diff --git a/test/unit/tool_util/test_parameter_test_cases.py b/test/unit/tool_util/test_parameter_test_cases.py index 9311c2805393..0a3a70349475 100644 --- a/test/unit/tool_util/test_parameter_test_cases.py +++ b/test/unit/tool_util/test_parameter_test_cases.py @@ -1,6 +1,5 @@ import os import re -import sys from typing import ( Any, List, @@ -8,8 +7,6 @@ Tuple, ) -import pytest - from galaxy.tool_util.model_factory import parse_tool from galaxy.tool_util.parameters import ( DataCollectionRequest, @@ -83,10 +80,6 @@ MOCK_ID = "thisisafakeid" -if sys.version_info < (3, 8): # noqa: UP036 - pytest.skip(reason="Pydantic tool parameter models require python3.8 or higher", allow_module_level=True) - - def test_parameter_test_cases_validate(): validation_result = validate_test_cases_for("column_param") assert len(validation_result[0].warnings) == 0 diff --git a/test/unit/webapps/test_query_params_lists.py b/test/unit/webapps/test_query_params_lists.py index 6d50e07b6ab5..f85ce736afa5 100644 --- a/test/unit/webapps/test_query_params_lists.py +++ b/test/unit/webapps/test_query_params_lists.py @@ -1,5 +1,3 @@ -from typing import List - from fastapi.applications import FastAPI from fastapi.param_functions import ( Depends, @@ -16,7 +14,7 @@ @app.get("/test/get_value_as_list") async def get_value_as_list( - values: List[str] = Depends(query_parameter_as_list(Query(alias="value"))), + values: list[str] = Depends(query_parameter_as_list(Query(alias="value"))), ): return values diff --git a/test/unit/webapps/test_service_base.py b/test/unit/webapps/test_service_base.py index 4a8f351af080..e9cc55f1bc79 100644 --- a/test/unit/webapps/test_service_base.py +++ b/test/unit/webapps/test_service_base.py @@ -1,5 +1,3 @@ -from typing import Tuple - import pytest from galaxy.schema.schema import ModelStoreFormat @@ -34,7 +32,7 @@ def new_target(self, filename, mime_type, duration=None, security=None): ("test", ModelStoreFormat.BCO_JSON.value, ("test.bco.json", "application/json")), ], ) -def test_model_store_storage_target(file_name: str, model_store_format: str, expected: Tuple[str, str]): +def test_model_store_storage_target(file_name: str, model_store_format: str, expected: tuple[str, str]): mock_sts_allocator = MockShortTermStorageAllocator(*expected) model_store_storage_target( short_term_storage_allocator=mock_sts_allocator, file_name=file_name, model_store_format=model_store_format diff --git a/test/unit/workflows/test_modules.py b/test/unit/workflows/test_modules.py index 
d9fc8ef499bd..2070c32e1a67 100644 --- a/test/unit/workflows/test_modules.py +++ b/test/unit/workflows/test_modules.py @@ -1,11 +1,8 @@ import json from typing import ( Any, - Dict, - List, NamedTuple, Optional, - Tuple, Union, ) from unittest import mock @@ -279,13 +276,13 @@ def test_to_cwl_dataset_collection_element(): class MapOverTestCase(NamedTuple): data_input: str - step_input_def: Union[str, List[str]] + step_input_def: Union[str, list[str]] step_output_def: str expected_collection_type: Optional[str] - steps: Dict[int, Any] + steps: dict[int, Any] -def _construct_steps_for_map_over() -> List[MapOverTestCase]: +def _construct_steps_for_map_over() -> list[MapOverTestCase]: test_case = MapOverTestCase # these are the cartesian product of # data_input = ['dataset', 'list', 'list:pair', 'list:list'] @@ -293,7 +290,7 @@ def _construct_steps_for_map_over() -> List[MapOverTestCase]: # step_output_definition = ['dataset', 'list', 'list:list'] # list(itertools.product(data_input, step_input_definition, step_output_definition, [None])), # with the last item filled in manually - test_case_args: List[Tuple[str, Union[str, List[str]], str, Optional[str]]] = [ + test_case_args: list[tuple[str, Union[str, list[str]], str, Optional[str]]] = [ ("dataset", "dataset", "dataset", None), ("dataset", "dataset", "list", "list"), ("dataset", "dataset", "list:list", "list:list"), @@ -348,7 +345,7 @@ def _construct_steps_for_map_over() -> List[MapOverTestCase]: ] test_cases = [] for data_input, step_input_def, step_output_def, expected_collection_type in test_case_args: - steps: Dict[int, Dict[str, Any]] = { + steps: dict[int, dict[str, Any]] = { 0: _input_step(collection_type=data_input), 1: _output_step(step_input_def=step_input_def, step_output_def=step_output_def), } @@ -364,8 +361,8 @@ def _construct_steps_for_map_over() -> List[MapOverTestCase]: return test_cases -def _input_step(collection_type) -> Dict[str, Any]: - output: Dict[str, Any] = {"name": "output", "extensions": ["input_collection"]} +def _input_step(collection_type) -> dict[str, Any]: + output: dict[str, Any] = {"name": "output", "extensions": ["input_collection"]} if collection_type != "dataset": output["collection"] = True output["collection_type"] = collection_type @@ -380,7 +377,7 @@ def _input_step(collection_type) -> Dict[str, Any]: } -def _output_step(step_input_def, step_output_def) -> Dict[str, Any]: +def _output_step(step_input_def, step_output_def) -> dict[str, Any]: multiple = False if step_input_def in ["dataset", "dataset_multiple"]: input_type = "dataset" @@ -390,7 +387,7 @@ def _output_step(step_input_def, step_output_def) -> Dict[str, Any]: else: input_type = "dataset_collection" collection_types = step_input_def if isinstance(step_input_def, list) else [step_input_def] - output: Dict[str, Any] = {"name": "output", "extensions": ["data"]} + output: dict[str, Any] = {"name": "output", "extensions": ["data"]} if step_output_def != "dataset": output["collection"] = True output["collection_type"] = step_output_def
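
The hunks above apply a uniform set of mechanical rewrites: PEP 585 builtin generics (dict, list, tuple, set, type) replace their typing equivalents, Generator moves from typing to collections.abc, assign-then-test pairs collapse into PEP 572 assignment expressions via auto-walrus, and the sys.version_info < (3, 8) pytest skip guards are deleted outright because the raised version floor makes them dead code. A minimal sketch of the post-rewrite style — hypothetical names throughout, not part of the patch — runnable on Python 3.9+:

# Illustrative sketch only, not part of the patch. Shows the rewrite
# patterns that pyupgrade and auto-walrus apply throughout this diff.
# All names below (storage_info, fetch_image_ids, ...) are hypothetical.
from collections.abc import Generator  # was: from typing import Generator
from typing import (
    Any,
    Optional,  # Dict/List/Tuple/Set/Type imports are no longer needed
)


def storage_info(job_ids: list[str]) -> dict[str, Any]:
    # PEP 585 (py39+): builtin generics replace typing.List / typing.Dict.
    # was: def storage_info(job_ids: List[str]) -> Dict[str, Any]:
    return {job_id: {} for job_id in job_ids}


def read_sizes(paths: Optional[list[str]] = None) -> Generator[int, None, None]:
    # collections.abc.Generator replaces typing.Generator (PEP 585).
    for path in paths or []:
        yield len(path)


def fetch_image_ids() -> str:
    # Stand-in for a subprocess call such as `docker image ls --quiet`.
    return "abc123\n"


def remove_cached_images() -> None:
    # PEP 572 (auto-walrus): an assign-then-test pair collapses into a
    # single assignment expression.
    # was: image_ids = fetch_image_ids()
    #      if image_ids:
    if image_ids := fetch_image_ids():
        print(image_ids.splitlines())


if __name__ == "__main__":
    print(storage_info(["1", "2"]))
    print(list(read_sizes(["a", "bb"])))
    remove_cached_images()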