diff --git a/Gemfile.lock b/Gemfile.lock
index fd1344e86..ec1a997e8 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -372,7 +372,7 @@ GEM
     puma (5.6.9)
       nio4r (~> 2.0)
     racc (1.8.1)
-    rack (2.2.14)
+    rack (2.2.19)
     rack-brotli (1.1.0)
       brotli (>= 0.1.7)
       rack (>= 1.4)
@@ -546,6 +546,7 @@ GEM
 PLATFORMS
   arm64-darwin-21
   arm64-darwin-23
+  arm64-darwin-24
   x86_64-darwin-19
   x86_64-darwin-20
   x86_64-darwin-21
diff --git a/app/controllers/api/v1/search_controller.rb b/app/controllers/api/v1/search_controller.rb
index a2040a0b0..751294b3b 100644
--- a/app/controllers/api/v1/search_controller.rb
+++ b/app/controllers/api/v1/search_controller.rb
@@ -282,9 +282,14 @@ def index
           sort_type = params[:order].to_sym
         end

+        result_types = params[:data_types]&.split(',')&.map(&:to_sym) || []
+        @studies = StudySearchService.filter_results_by_data_type(@studies, result_types)
+
         # convert to array to allow appending external search results (Azul, TDR, etc.)
         @studies = @studies.to_a

+        # filter results by data type, if requested
+
         # perform Azul search if there are facets/terms provided by user, and they requested HCA results
         # run this before inferred search so that they are weighted and sorted correctly
         # skip if user is searching inside a collection or they are performing global gene search
@@ -391,6 +396,7 @@ def index
           # only show results where we found a hit in gene search
           @inferred_studies = Study.where(:id.in => new_genes[:study_ids])
         end
+        @inferred_studies = StudySearchService.filter_results_by_data_type(@inferred_studies, result_types)
         @inferred_accessions = @inferred_studies.pluck(:accession)
         logger.info "Found #{@inferred_accessions.count} inferred matches: #{@inferred_accessions}"
         @matching_accessions += @inferred_accessions
@@ -399,7 +405,6 @@ def index
       end

       @matching_accessions = @studies.map { |study| self.class.get_study_attribute(study, :accession) }
-      logger.info "studies_by_facet: #{@studies_by_facet}"
       logger.info "Final list of matching studies: #{@matching_accessions}"
       @results = @studies.paginate(page: params[:page], per_page: Study.per_page)
       if params[:export].present?
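Note on the new `data_types` search parameter: the controller splits the comma-delimited value into symbols and passes it to `StudySearchService.filter_results_by_data_type`, which keeps only studies matching every requested type (intersection, not union). Below is a minimal standalone sketch of that behavior; the hash-based studies and `MATCHERS` lambdas are illustrative stand-ins for the real `has_raw_counts_matrices?`, `has_differential_expression_results?`, and `has_spatial_clustering?` checks, not the app's actual classes.

```ruby
# Illustrative sketch only -- plain Ruby, no Rails/Mongoid dependencies.
MATCHERS = {
  raw_counts: ->(study) { study[:raw_counts] },
  diff_exp:   ->(study) { study[:diff_exp] },
  spatial:    ->(study) { study[:spatial] }
}.freeze

# mirrors params[:data_types]&.split(',')&.map(&:to_sym) || []
def parse_data_types(param)
  param&.split(',')&.map(&:to_sym) || []
end

# keep only studies that satisfy ALL requested data types
def filter_by_data_type(studies, data_types)
  return studies if data_types.empty?

  studies.select { |study| data_types.all? { |dt| MATCHERS.fetch(dt).call(study) } }
end

studies = [
  { accession: 'SCP1', raw_counts: true, diff_exp: false, spatial: true },
  { accession: 'SCP2', raw_counts: true, diff_exp: true,  spatial: false }
]
p filter_by_data_type(studies, parse_data_types('raw_counts,spatial')).map { |s| s[:accession] }
# => ["SCP1"]
```

The real service runs the per-study checks in parallel threads (`Parallel.map(studies, in_threads: 10)`) and converts the surviving accessions back into a Mongoid criteria via `studies.where(:accession.in => accessions)`.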
diff --git a/app/controllers/api/v1/study_files_controller.rb b/app/controllers/api/v1/study_files_controller.rb index 558a33f0b..8f275f1a9 100644 --- a/app/controllers/api/v1/study_files_controller.rb +++ b/app/controllers/api/v1/study_files_controller.rb @@ -331,8 +331,19 @@ def perform_update!(study_file) end if safe_file_params[:custom_color_updates] parsed_update = JSON.parse(safe_file_params[:custom_color_updates]) - safe_file_params['cluster_file_info'] = {custom_colors: ClusterFileInfo.merge_color_updates(study_file, parsed_update)} + safe_file_params['cluster_file_info'] = { + custom_colors: ClusterFileInfo.merge_color_updates(study_file, parsed_update) + } safe_file_params.delete(:custom_color_updates) + if safe_file_params[:global_color_update] + @study.clustering_files.reject { |f| f.id.to_s == params[:id] }.each do |file| + update_params = { + 'cluster_file_info' => { custom_colors: ClusterFileInfo.merge_color_updates(file, parsed_update) } + } + file.delay.update(update_params) + end + end + safe_file_params.delete(:global_color_update) end # manually check first if species/assembly was supplied by name @@ -741,7 +752,7 @@ def study_file_params :human_fastq_url, :human_data, :use_metadata_convention, :cluster_type, :generation, :x_axis_label, :y_axis_label, :z_axis_label, :x_axis_min, :x_axis_max, :y_axis_min, :y_axis_max, :z_axis_min, :z_axis_max, :species, :assembly, :external_link_url, :external_link_title, :external_link_description, :parse_on_upload, - :custom_color_updates, :reference_anndata_file, + :custom_color_updates, :global_color_update, :reference_anndata_file, spatial_cluster_associations: [], options: [ :cluster_group_id, :font_family, :font_size, :font_color, :matrix_id, :submission_id, :bam_id, :bed_id, diff --git a/app/javascript/components/search/controls/OptionsButton.jsx b/app/javascript/components/search/controls/OptionsButton.jsx index d5d6a3159..442065ea6 100644 --- a/app/javascript/components/search/controls/OptionsButton.jsx +++ b/app/javascript/components/search/controls/OptionsButton.jsx @@ -10,19 +10,24 @@ export default function OptionsButton() { const searchContext = useContext(StudySearchContext) const [showOptions, setShowOptions] = useState(false) const configuredOptions = [ - { searchProp: 'external', value: 'hca', label: 'Include HCA results' } + { searchProp: 'external', value: 'hca', label: 'Include HCA results' }, + { searchProp: 'data_types', value: 'raw_counts', label: 'Has raw counts', multiple: true }, + { searchProp: 'data_types', value: 'diff_exp', label: 'Has differential expression', multiple: true }, + { searchProp: 'data_types', value: 'spatial', label: 'Has spatial data', multiple: true } ] const optionsPopover = diff --git a/app/javascript/components/search/controls/OptionsControl.jsx b/app/javascript/components/search/controls/OptionsControl.jsx index c50a56262..b508f837b 100644 --- a/app/javascript/components/search/controls/OptionsControl.jsx +++ b/app/javascript/components/search/controls/OptionsControl.jsx @@ -1,20 +1,44 @@ import React, { useState } from 'react' -export default function OptionsControl({searchContext, searchProp, value, label}) { - const defaultChecked = searchContext.params[searchProp] === value +/** checkbox control for adding optional parameters to search query */ +export default function OptionsControl({ searchContext, searchProp, value, label, multiple = false }) { + const defaultChecked = isDefaultChecked() const [isChecked, setIsChecked] = useState(defaultChecked) + /** return existing url query 
params for this option */ + function getExistingOpts() { + return searchContext.params[searchProp]?.split(',').filter(o => o !== '') || [] + } + + /** set the default state for this option checkbox */ + function isDefaultChecked() { + if (multiple) { + return getExistingOpts().includes(value) + } else { + return searchContext.params[searchProp] === value + } + } + /** toggle state of checkbox */ function toggleCheckbox(checked) { setIsChecked(checked) - searchContext.updateSearch({ [searchProp] : checked ? value : null }) + if (multiple) { + const existingOpts = getExistingOpts() + const newOpts = checked ? existingOpts.concat(value) : existingOpts.filter(v => v !== value) + searchContext.updateSearch({ [searchProp] : newOpts.join(',') }) + } else { + searchContext.updateSearch({ [searchProp] : checked ? value : null }) + } } return (
  • ) diff --git a/app/javascript/components/search/results/SearchQueryDisplay.jsx b/app/javascript/components/search/results/SearchQueryDisplay.jsx index 821c74966..7591cff22 100644 --- a/app/javascript/components/search/results/SearchQueryDisplay.jsx +++ b/app/javascript/components/search/results/SearchQueryDisplay.jsx @@ -78,6 +78,8 @@ export const ClearAllButton = () => { const emptySearchParams = { terms: '', genes: '', + external: '', + data_types: '', facets: emptyFilters } selectionContext.updateSelection(emptySearchParams, true) diff --git a/app/javascript/components/visualization/ScatterPlot.jsx b/app/javascript/components/visualization/ScatterPlot.jsx index b09440cc1..a233ecf62 100644 --- a/app/javascript/components/visualization/ScatterPlot.jsx +++ b/app/javascript/components/visualization/ScatterPlot.jsx @@ -143,13 +143,14 @@ function RawScatterPlot({ }, [editedCustomColors]) /** Save any changes to the legend colors */ - async function saveCustomColors(newColors) { + async function saveCustomColors(newColors, globalColorUpdate = false) { const colorObj = {} // read the annotation name off of scatterData to ensure it's the real name, and not '' or '_default' colorObj[scatterData?.annotParams?.name] = newColors const newFileObj = { _id: scatterData?.clusterFileId, - custom_color_updates: colorObj + custom_color_updates: colorObj, + global_color_update: globalColorUpdate } setIsLoading(true) try { diff --git a/app/javascript/components/visualization/controls/ScatterPlotLegend.jsx b/app/javascript/components/visualization/controls/ScatterPlotLegend.jsx index 344d40140..2ae4cfc6a 100644 --- a/app/javascript/components/visualization/controls/ScatterPlotLegend.jsx +++ b/app/javascript/components/visualization/controls/ScatterPlotLegend.jsx @@ -1,6 +1,13 @@ import React, { useEffect, useState } from 'react' import { FontAwesomeIcon } from '@fortawesome/react-fontawesome' -import { faPalette, faExternalLinkAlt, faTimes, faSearch } from '@fortawesome/free-solid-svg-icons' +import { + faPalette, + faExternalLinkAlt, + faTimes, + faSearch, + faFileUpload, + faGlobe, faFileDownload +} from '@fortawesome/free-solid-svg-icons' import Modal from 'react-bootstrap/lib/Modal' import { HexColorPicker, HexColorInput } from 'react-colorful' import Button from 'react-bootstrap/lib/Button' @@ -215,6 +222,9 @@ export default function ScatterPlotLegend({ }) { // is the user currently in color-editing mode const [showColorControls, setShowColorControls] = useState(false) + const [globalColorUpdate, setGlobalColorUpdate] = useState(false) + const [toggleClassName, setToggleClassName] = useState('fa-toggle-off') + // whether a request to the server to save colors is pending const labels = getLegendSortedLabels(countsByLabel) const numLabels = labels.length @@ -246,7 +256,7 @@ export default function ScatterPlotLegend({ /** resets any unsaved changes to user colors and clears custom colors */ async function resetColors() { setEditedCustomColors({}) - await saveCustomColors({}) + await saveCustomColors({}, globalColorUpdate) setShowColorControls(false) } @@ -254,10 +264,32 @@ export default function ScatterPlotLegend({ async function saveColors() { // merge the user picked colors with existing custom colors so previously saved values are preserved const colorsToSave = Object.assign(customColors, editedCustomColors) - await saveCustomColors(colorsToSave) + await saveCustomColors(colorsToSave, globalColorUpdate) setShowColorControls(false) } + function exportColors() { + const colorMap = 
Object.keys(customColors).length > 0 ? customColors : refColorMap + const lines = Object.entries(colorMap).map(([label, color]) => { + return `${label}\t${color}\n` + }) + + // Create an element with an anchor link and connect this to the blob + const element = document.createElement('a') + const colorExport = new Blob(lines, { type: 'text/plain' }) + element.href = URL.createObjectURL(colorExport) + + // name the file and indicate it should download + element.download = `${name}_color_map.tsv` + + // Simulate clicking the link resulting in downloading the file + document.body.appendChild(element) + element.click() + + // Cleanup + document.body.removeChild(element) + } + /** collect general information when a user's mouse enters the legend */ function logMouseEnter() { log('hover:scatterlegend', { numLabels }) @@ -323,6 +355,40 @@ export default function ScatterPlotLegend({ setLabelsToShow(labels) } + /** handle clicking global color update toggle */ + function handleToggleGlobalColor() { + const toggleClass = toggleClassName === 'fa-toggle-on' ? 'fa-toggle-off' : 'fa-toggle-on' + setGlobalColorUpdate(!globalColorUpdate) + setToggleClassName(toggleClass) + } + + /** read uploaded manifest and apply colors to current scatter plot */ + function readColorManifest(file) { + const colorUpdate = {} + const fileReader = new FileReader() + fileReader.onloadend = () => { + const lines = fileReader.result.trim().split(/\n/) + lines.map((line, _) => { + const entry = line.split(/[\t,]/).map((l, _) => {return l.trim()}) + const label = entry[0] + const color = entry[1] + colorUpdate[label] = color + }) + saveCustomColors(colorUpdate, globalColorUpdate) + } + fileReader.readAsText(file) + } + + const globalSwitch = + + return (
    Cancel
    -   + {globalSwitch} Reset to defaults @@ -387,10 +453,33 @@ export default function ScatterPlotLegend({ } { !showColorControls && - setShowColorControls(true)}> - Customize colors - - } + <> + setShowColorControls(true)} + > + Customize + + {globalSwitch} + + + + }
    }
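The legend changes above add a round trip for custom label colors: `exportColors` writes one `label<TAB>#hex` pair per line to a `*_color_map.tsv` download, and `readColorManifest` accepts either tab- or comma-delimited lines on re-import, then saves them through the existing `saveCustomColors` call (optionally flagged as a global update). A small Ruby sketch of that manifest format follows, purely for illustration; the parsing approximates the JavaScript in `readColorManifest` and is not code from this PR.

```ruby
# Illustrative parser for the color-map manifest produced by the legend export:
# one "label<TAB>#hexcolor" (or comma-delimited) entry per line.
def parse_color_manifest(contents)
  contents.strip.split(/\n/).each_with_object({}) do |line, colors|
    label, color = line.split(/[\t,]/).map(&:strip)
    colors[label] = color
  end
end

manifest = "B cells\t#1f77b4\nT cells,#ff7f0e\n"
p parse_color_manifest(manifest)
# => {"B cells"=>"#1f77b4", "T cells"=>"#ff7f0e"}
```

On the server side, setting `global_color_update: true` makes `perform_update!` merge the same parsed update into every other clustering file's `cluster_file_info` via `file.delay.update(...)`, so the fan-out runs in background Delayed::Job workers rather than inside the request.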
    diff --git a/app/javascript/lib/scp-api.jsx b/app/javascript/lib/scp-api.jsx index fdbecfa3c..ecb6bfe88 100644 --- a/app/javascript/lib/scp-api.jsx +++ b/app/javascript/lib/scp-api.jsx @@ -926,7 +926,7 @@ export async function fetchSearch(type, searchParams, mock=false) { export function buildSearchQueryString(type, searchParams) { const facetsParam = buildFacetQueryString(searchParams.facets) - const params = ['page', 'order', 'terms', 'external', 'export', 'preset', 'genes', 'genePage'] + const params = ['page', 'order', 'terms', 'external', 'export', 'data_types', 'preset', 'genes', 'genePage'] let otherParamString = params.map(param => { return searchParams[param] ? `&${param}=${searchParams[param]}` : '' }).join('') diff --git a/app/javascript/providers/GeneSearchProvider.jsx b/app/javascript/providers/GeneSearchProvider.jsx index ca32e322a..beebb1efb 100644 --- a/app/javascript/providers/GeneSearchProvider.jsx +++ b/app/javascript/providers/GeneSearchProvider.jsx @@ -128,6 +128,7 @@ export function buildParamsFromQuery(query, preset) { genes: cleanGeneParams, terms: queryParams.terms ? queryParams.terms : '', external: queryParams.external ? queryParams.external : '', + data_types: queryParams.data_types ? queryParams.data_types : '', facets: buildFacetsFromQueryString(queryParams.facets), preset: preset ? preset : queryString.preset_search } diff --git a/app/javascript/providers/SearchSelectionProvider.jsx b/app/javascript/providers/SearchSelectionProvider.jsx index 63f7375df..31359fe6c 100644 --- a/app/javascript/providers/SearchSelectionProvider.jsx +++ b/app/javascript/providers/SearchSelectionProvider.jsx @@ -43,7 +43,7 @@ export default function SearchSelectionProvider(props) { const [selection, setSelection] = useState( appliedSelection ? appliedSelection : - { terms: '', facets: {}, external: '' }) + { terms: '', facets: {}, external: '', data_types: '' }) selection.updateSelection = updateSelection selection.updateFacet = updateFacet selection.performSearch = performSearch diff --git a/app/javascript/providers/StudySearchProvider.jsx b/app/javascript/providers/StudySearchProvider.jsx index 6f02bafbf..0cfce72c6 100644 --- a/app/javascript/providers/StudySearchProvider.jsx +++ b/app/javascript/providers/StudySearchProvider.jsx @@ -18,6 +18,7 @@ const emptySearch = { terms: '', facets: {}, external: '', + data_types: '', page: 1, preset_search: undefined, order: undefined @@ -75,12 +76,12 @@ export function useContextStudySearch() { return useContext(StudySearchContext) } -/** Merges the external parameter into the searchParams object */ -export function mergeExternalParam(searchParams, newParams) { - if (Object.keys(newParams).length === 1 && Object.keys(newParams)[0] === 'external') { - return newParams.external +/** Merges any optional parameter into the searchParams object */ +export function mergeOptionalParam(searchParams, newParams, paramName) { + if (Object.keys(newParams).length === 1 && Object.keys(newParams)[0] === paramName) { + return newParams[paramName] } else { - return searchParams.external + return searchParams[paramName] } } @@ -107,7 +108,8 @@ export function PropsStudySearchProvider(props) { // reset the page to 1 for new searches, unless otherwise specified search.page = newParams.page ? 
newParams.page : 1 search.preset = undefined // for now, exclude preset from the page URL--it's in the component props instead - search.external = mergeExternalParam(searchParams, newParams) + search.external = mergeOptionalParam(searchParams, newParams, 'external') + search.data_types = mergeOptionalParam(searchParams, newParams, 'data_types') const mergedParams = Object.assign(buildGeneParamsFromQuery(window.location.search), search) const queryString = buildSearchQueryString('study', mergedParams) navigate(`?${queryString}`) @@ -165,6 +167,7 @@ export function buildParamsFromQuery(query, preset) { terms: queryParams.terms ? queryParams.terms : '', facets: buildFacetsFromQueryString(queryParams.facets), external: queryParams.external ? queryParams.external : '', + data_types: queryParams.data_types ? queryParams.data_types : '', preset: preset ? preset : queryString.preset_search, order: queryParams.order } diff --git a/app/javascript/styles/_explore.scss b/app/javascript/styles/_explore.scss index dca255df0..4fe9e0c07 100644 --- a/app/javascript/styles/_explore.scss +++ b/app/javascript/styles/_explore.scss @@ -73,6 +73,14 @@ .violin-title { margin-left: 110px; } + + .color-update-toggle { + font-weight: normal; + } + + .customize-color-palette { + margin-right: 1em; + } } .popover-badge { @@ -204,7 +212,7 @@ margin-left: 4px; } -#gene-list-upload { +#gene-list-upload, #color-manifest-upload { display: none; } diff --git a/app/javascript/styles/_search.scss b/app/javascript/styles/_search.scss index 244b773d6..b05819b0d 100644 --- a/app/javascript/styles/_search.scss +++ b/app/javascript/styles/_search.scss @@ -68,6 +68,7 @@ nav.search-links, #search-panel { width: 100%; box-sizing: border-box; display: inline-block; + user-select: none; } &.active > a, &.selected > a { background-color: $action-color; diff --git a/app/lib/differential_expression_service.rb b/app/lib/differential_expression_service.rb index 638ba4676..08dcf2120 100644 --- a/app/lib/differential_expression_service.rb +++ b/app/lib/differential_expression_service.rb @@ -64,7 +64,7 @@ def self.run_differential_expression_on_all(study_accession, user: nil, machine_ skip_existing: false) study = Study.find_by(accession: study_accession) validate_study(study) - eligible_annotations = find_eligible_annotations(study, skip_existing:) + eligible_annotations = find_eligible_annotations(study) raise ArgumentError, "#{study_accession} does not have any eligible annotations" if eligible_annotations.empty? 
log_message "#{study_accession} has annotations eligible for DE; validating inputs" @@ -77,6 +77,8 @@ def self.run_differential_expression_on_all(study_accession, user: nil, machine_ # skip if this is a cluster-based annotation and is not available on this cluster object next if annotation[:annotation_scope] == 'cluster' && annotation[:cluster_group_id] != cluster_group.id + next if skip_existing && results_exist?(study, cluster_group, annotation) + annotation_params = annotation.deep_dup # make a copy so we don't lose the association next time we check annotation_params.delete(:cluster_group_id) annotation_params.merge!(dry_run:, machine_type:) @@ -240,11 +242,10 @@ def self.backfill_new_results(study_accessions: nil) # # * *params* # - +study+ (Study) => Associated study object - # - +skip_existing+ (Boolean) => Skip annotations that already have DE results # # * *returns* # - (Array) => Array of annotation objects available for DE - def self.find_eligible_annotations(study, skip_existing: false) + def self.find_eligible_annotations(study) annotations = [] metadata = study.cell_metadata.where(annotation_type: 'group').select do |meta| annotation_eligible?(meta.name) && meta.can_visualize? @@ -275,11 +276,7 @@ def self.find_eligible_annotations(study, skip_existing: false) cluster_group_id: annot[:cluster_group_id] } end - if skip_existing - annotations.reject { |annotation| results_exist?(study, annotation) } - else - annotations - end + annotations end # match an annotation name against any potentially valid annotations for DE analysis @@ -308,25 +305,21 @@ def self.find_existing_result(study, cluster_group, annotation_name, annotation_ DifferentialExpressionResult.find_by(study:, cluster_group:, annotation_name:, annotation_scope:) end - # determine if a study already has DE results for an annotation, taking scope into account - # cluster-based annotations must match to the specified cluster in the annotation object - # for study-wide annotations, return true if any results exist, regardless of cluster as this indicates that DE - # was already invoked on this annotation, and all valid results should already exist (barring errors) - # missing entries can still be backfilled with :run_differential_expression_job manually + # determine if a study already has DE results for a given annotation/cluster combination # # * *params* # - +study+ (Study) => study to run DE jobs in + # - +cluster_group+ (ClusterGroup) => clustering object # - +annotation+ (Hash) => annotation object # # * *returns* # - (Boolean) - def self.results_exist?(study, annotation) - ids = annotation[:scope] == 'cluster' ? [annotation[:cluster_group_id]] : study.cluster_groups.pluck(:id) + def self.results_exist?(study, cluster_group, annotation) DifferentialExpressionResult.where( - :study => study, - :cluster_group_id.in => ids, - :annotation_name => annotation[:annotation_name], - :annotation_scope => annotation[:annotation_scope] + study:, + cluster_group:, + annotation_name: annotation[:annotation_name], + annotation_scope: annotation[:annotation_scope] ).exists? 
end @@ -355,14 +348,13 @@ def self.set_cluster_name(study, cluster_group, annotation_name, annotation_scop # # * *params* # - +study+ (Study) => study to check eligibility for differential expression jobs - # - +skip_existing+ (Boolean) => Skip annotations that already have DE results # # * *returns* # - (Boolean) - def self.study_eligible?(study, skip_existing: false) + def self.study_eligible?(study) begin validate_study(study) - find_eligible_annotations(study, skip_existing:).any? && + find_eligible_annotations(study).any? && study.has_raw_counts_matrices? && !study_has_author_de?(study) rescue ArgumentError diff --git a/app/lib/study_search_service.rb b/app/lib/study_search_service.rb index 252576447..92abc11dd 100644 --- a/app/lib/study_search_service.rb +++ b/app/lib/study_search_service.rb @@ -199,6 +199,30 @@ def self.perform_mongo_facet_search(facets) end end + def self.filter_results_by_data_type(studies, data_types) + return studies if data_types.empty? + + matches = data_types.index_with { [] } + # note: this has to be updated if a new data_type is added + matchers = { + raw_counts: :has_raw_counts_matrices?, + diff_exp: :has_differential_expression_results?, + spatial: :has_spatial_clustering? + } + + # run matching in parallel to reduce UI blocking + Parallel.map(studies, in_threads: 10) do |study| + data_types.each do |data_type| + matches[data_type] << study.accession if study.send(matchers[data_type]) + end + end + + # find the intersection of all matches by data types + study_matches = matches.values + accessions = study_matches[0].intersection(*study_matches[1..]) + studies.where(:accession.in => accessions) + end + # deal with ontology id formatting inconsistencies def self.convert_id_format(id) parts = id.split(/[_:]/) diff --git a/app/models/hca_azul_client.rb b/app/models/hca_azul_client.rb index b7cd7f936..4c4d9a46a 100644 --- a/app/models/hca_azul_client.rb +++ b/app/models/hca_azul_client.rb @@ -15,7 +15,7 @@ class HcaAzulClient MANIFEST_FORMATS = %w[compact full terra.bdbag terra.pfb curl].freeze # maximum number of results to return - MAX_RESULTS = 100 + MAX_RESULTS = 80 # maximum length of query string (in characters) for requests MAX_QUERY_LENGTH = 8192 @@ -23,7 +23,6 @@ class HcaAzulClient # Default headers for API requests DEFAULT_HEADERS = { 'Accept' => 'application/json', - 'Content-Type' => 'application/json', 'x-app-id' => 'single-cell-portal', 'x-domain-id' => "#{ENV['HOSTNAME']}" }.freeze @@ -104,11 +103,23 @@ def process_api_request(http_method, path, payload: nil, retry_count: 0) # * *raises* # - (RestClient::Exception) => if HTTP request fails for any reason def execute_http_request(http_method, path, payload = nil) - response = RestClient::Request.execute(method: http_method, url: path, payload:, headers: DEFAULT_HEADERS) + response = RestClient::Request.execute(method: http_method, url: path, payload:, headers: set_headers(http_method)) # handle response using helper handle_response(response) end + # set HTTP headers based on method + # GET requests do not support the Content-Type header, but all PUT/POST/PATCH requests do + # + # * *params* + # - +http_method+ (String, Symbol) => HTTP method, e.g. :get, :post + # + # * *returns* + # - (Hash) => HTTP headers object + def set_headers(http_method) + http_method.to_sym == :get ? 
DEFAULT_HEADERS : DEFAULT_HEADERS.merge('Content-Type' => 'application/json') + end + # FROM SCP-4592: Temporarily disable automatic retries while we investigate the rise in 503 errors from Azul def should_retry?(code) false diff --git a/app/models/ingest_job.rb b/app/models/ingest_job.rb index bc80a793b..0b7ab3057 100644 --- a/app/models/ingest_job.rb +++ b/app/models/ingest_job.rb @@ -750,7 +750,7 @@ def set_subsampling_flags # determine if differential expression should be run for study, and submit available jobs (skipping existing results) def launch_differential_expression_jobs - if DifferentialExpressionService.study_eligible?(study, skip_existing: true) + if DifferentialExpressionService.study_eligible?(study) Rails.logger.info "#{study.accession} is eligible for differential expression, launching available jobs" DifferentialExpressionService.run_differential_expression_on_all(study.accession, skip_existing: true) end diff --git a/app/models/nemo_client.rb b/app/models/nemo_client.rb index 82dbd0cfb..26867885e 100644 --- a/app/models/nemo_client.rb +++ b/app/models/nemo_client.rb @@ -4,7 +4,7 @@ class NemoClient attr_accessor :api_root, :username, :password - BASE_URL = 'https://beta-assets.nemoarchive.org/api'.freeze + BASE_URL = 'https://assets.nemoarchive.org/api'.freeze DEFAULT_HEADERS = { 'Accept' => 'application/json', @@ -12,7 +12,7 @@ class NemoClient }.freeze # types of available entities - ENTITY_TYPES = %w[collection file grant project publication sample subject].freeze + ENTITY_TYPES = %w[collection file grant project sample subject].freeze # identifier format validator IDENTIFIER_FORMAT = /nemo:[a-z]{3}-[a-z0-9]{7}$/ @@ -21,10 +21,8 @@ class NemoClient # # * *return* # - +NemoClient+ object - def initialize(api_root: BASE_URL, username: ENV['NEMO_API_USERNAME'], password: ENV['NEMO_API_PASSWORD']) + def initialize(api_root: BASE_URL) self.api_root = api_root.chomp('/') - self.username = username - self.password = password end # submit a request to NeMO API @@ -68,12 +66,6 @@ def process_api_request(http_method, path, payload: nil, retry_count: 0) end end - # add basic HTTP auth header - # TODO: remove after public release of API - def authorization_header - { Authorization: "Basic #{Base64.encode64("#{username}:#{password}")}" } - end - # sub-handler for making external HTTP request # does not have error handling, this is done by process_api_request # allows for some methods to implement their own error handling (like health checks) @@ -89,8 +81,7 @@ def authorization_header # * *raises* # - (RestClient::Exception) => if HTTP request fails for any reason def execute_http_request(http_method, path, payload = nil) - headers = authorization_header.merge(DEFAULT_HEADERS) - response = RestClient::Request.execute(method: http_method, url: path, payload:, headers:) + response = RestClient::Request.execute(method: http_method, url: path, payload:, headers: DEFAULT_HEADERS) # handle response using helper handle_response(response) end @@ -194,17 +185,6 @@ def project(identifier) fetch_entity(:project, identifier) end - # get information about a publication - # - # * *params* - # - +identifier+ (String) => sample identifier - # - # * *returns* - # - (Hash) => publication metadata - def publication(identifier) - fetch_entity(:publication, identifier) - end - # get information about a sample # # * *params* diff --git a/app/models/study.rb b/app/models/study.rb index 223b8ac00..dd37c09b3 100644 --- a/app/models/study.rb +++ b/app/models/study.rb @@ -1062,6 +1062,14 @@ def 
has_visualization_matrices? end.any? end + def has_differential_expression_results? + differential_expression_results.any? + end + + def has_spatial_clustering? + spatial_cluster_groups.any? + end + # check if study has any files that can be streamed from the bucket for visualization # this includes BAM, BED, inferCNV Ideogram annotations, Image files, and DE files # diff --git a/test/api/search_controller_test.rb b/test/api/search_controller_test.rb index ab1fac713..0af72746e 100644 --- a/test/api/search_controller_test.rb +++ b/test/api/search_controller_test.rb @@ -226,7 +226,7 @@ class SearchControllerTest < ActionDispatch::IntegrationTest "Did not return correct array of matching accessions, expected #{expected_accessions} but found #{matching_accessions}" assert_equal @random_seed, json['studies'].first['term_matches'].first - assert_equal 2, json['match_by_data']['numResults:scp:text'] + assert_equal expected_accessions.count, json['match_by_data']['numResults:scp:text'] # test exact phrase search_phrase = '"API Test Study"' @@ -559,4 +559,57 @@ class SearchControllerTest < ActionDispatch::IntegrationTest found_accessions = lines.map { |l| l.split(/\t/)[1] }.flatten.uniq.sort assert_equal all_accessions, found_accessions end + + test 'should find studies based on available data types' do + search_study = FactoryBot.create(:detached_study, + name_prefix: "Raw Counts Search Study #{@random_seed}", + public: true, + user: @user, + test_array: @@studies_to_clean) + detail = search_study.build_study_detail + detail.full_description = '

    This is the description.

    ' + detail.save! + cells = %w[cellA cellB cellC cellD cellE cellF cellG] + FactoryBot.create( + :metadata_file, name: 'metadata.txt', study: search_study, cell_input: cells, + annotation_input: [{ name: 'species', type: 'group', values: %w[dog cat dog dog cat cat cat] }] + ) + coordinates = 1.upto(7).to_a + cluster_file = FactoryBot.create( + :cluster_file, name: 'cluster_diffexp.txt', study: search_study, + cell_input: { x: coordinates, y: coordinates, cells: } + ) + + %w[raw_counts diff_exp spatial].each do |data_type| + execute_http_request(:get, api_v1_search_path(type: 'study', data_types: data_type)) + assert_response :success + assert_empty json['studies'] + end + + # create raw counts matrix + exp_matrix = FactoryBot.create(:expression_file, + name: 'matrix.tsv', + study: search_study) + exp_matrix.build_expression_file_info( + is_raw_counts: true, units: 'raw counts', library_preparation_protocol: "10x 5' v3", + modality: 'Transcriptomic: unbiased', biosample_input_type: 'Whole cell' + ) + exp_matrix.save + # create DE results + DifferentialExpressionResult.create( + study: search_study, cluster_group: cluster_file.cluster_groups.first, annotation_name: 'species', + annotation_scope: 'study', matrix_file_id: exp_matrix.id + ) + # create spatial cluster + FactoryBot.create( + :cluster_file, name: 'spatial.txt', study: search_study, is_spatial: true, + cell_input: { x: coordinates, y: coordinates, cells: } + ) + + %w[raw_counts diff_exp spatial].each do |data_type| + execute_http_request(:get, api_v1_search_path(type: 'study', data_types: data_type)) + assert_response :success + assert_equal search_study.accession, json['studies'].first['accession'] + end + end end diff --git a/test/api/study_files_controller_test.rb b/test/api/study_files_controller_test.rb index 90fde290a..cd440f1df 100644 --- a/test/api/study_files_controller_test.rb +++ b/test/api/study_files_controller_test.rb @@ -7,6 +7,7 @@ class StudyFilesControllerTest < ActionDispatch::IntegrationTest before(:all) do + Delayed::Worker.delay_jobs = false # run all .delay calls synchronously @user = FactoryBot.create(:api_user, test_array: @@users_to_clean) @other_user = FactoryBot.create(:api_user, test_array: @@users_to_clean) @study = FactoryBot.create(:detached_study, @@ -14,9 +15,12 @@ class StudyFilesControllerTest < ActionDispatch::IntegrationTest public: true, user: @user, test_array: @@studies_to_clean) - @study_file = FactoryBot.create(:cluster_file, - name: 'clusterA.txt', - study: @study) + @study_file = FactoryBot.create(:cluster_file, name: 'clusterA.txt', study: @study) + @other_study_file = FactoryBot.create(:cluster_file, name: 'clusterB.txt', study: @study) + end + + after(:all) do + Delayed::Worker.delay_jobs = true # restore background job processing end setup do @@ -175,7 +179,8 @@ class StudyFilesControllerTest < ActionDispatch::IntegrationTest @study_file.reload # check that the new annotation colors were added, and the previous ones remain - assert_equal annot2_color_hash.merge(annot1_color_hash), @study_file.cluster_file_info.custom_colors_as_hash.with_indifferent_access + assert_equal annot2_color_hash.merge(annot1_color_hash), + @study_file.cluster_file_info.custom_colors_as_hash.with_indifferent_access updated_annot2_color_hash = { 'annotation1': { @@ -190,7 +195,21 @@ class StudyFilesControllerTest < ActionDispatch::IntegrationTest @study_file.reload # confirm the annotation1 colors were completely replaced, and annotation2 colors were preserved - assert_equal 
updated_annot2_color_hash.merge(annot2_color_hash), @study_file.cluster_file_info.custom_colors_as_hash.with_indifferent_access + assert_equal updated_annot2_color_hash.merge(annot2_color_hash), + @study_file.cluster_file_info.custom_colors_as_hash.with_indifferent_access + + # test global update + study_file_attributes[:study_file][:global_color_update] = true + + assert_nil @other_study_file.cluster_file_info + execute_http_request(:patch, api_v1_study_study_file_path(study_id: @study.id, id: @study_file.id), + request_payload: study_file_attributes) + assert_response :success + # sleep 3 # let Delayed::Job process update in background + @other_study_file.reload + assert_equal updated_annot2_color_hash, + @other_study_file.cluster_file_info.custom_colors_as_hash.with_indifferent_access + end test 'should create and update AnnData file' do diff --git a/test/integration/external/import_service_config/nemo_test.rb b/test/integration/external/import_service_config/nemo_test.rb index e2e13eaef..8c658b75b 100644 --- a/test/integration/external/import_service_config/nemo_test.rb +++ b/test/integration/external/import_service_config/nemo_test.rb @@ -48,13 +48,12 @@ class NemoTest < ActiveSupport::TestCase assert_equal @branding_group, @configuration.branding_group end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should traverse associations to set ids' do - # config = ImportServiceConfig::Nemo.new(file_id: @attributes[:file_id]) - # config.traverse_associations! - # assert_equal @attributes[:study_id], config.study_id - # assert_equal 'nemo:grn-gyy3k8j', config.project_id - # end + test 'should traverse associations to set ids' do + config = ImportServiceConfig::Nemo.new(file_id: @attributes[:file_id]) + config.traverse_associations! 
+ assert_equal @attributes[:study_id], config.study_id + assert_equal 'nemo:grn-gyy3k8j', config.project_id + end test 'should load defaults' do study_defaults = { @@ -93,91 +92,87 @@ class NemoTest < ActiveSupport::TestCase assert_equal 'application/octet-stream', @configuration.get_file_content_type('csv') end - # test 'should load study analog' do - # study = @configuration.load_study - # assert_equal '"Human variation study (10x), GRU"', study['name'] - # assert_equal ["10x chromium 3' v3 sequencing"], study['techniques'] - # assert_equal [{"name"=>"human", "cv_term_id"=>"NCBI:txid9606"}], study['taxa'] - # end - - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should load file analog' do - # file = @configuration.load_file - # assert_equal 'human_var_scVI_VLMC.h5ad', file['file_name'] - # assert_equal 'h5ad', file['file_format'] - # assert_equal 'counts', file['data_type'] - # end - - # test 'should load collection analog' do - # collection = @configuration.load_collection - # assert_equal 'ecker_sn_mCseq_proj', collection['short_name'] - # end - - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should extract association ids' do - # file = @configuration.load_file - # study = @configuration.load_study - # assert_equal @attributes[:study_id], @configuration.id_from(file, :collections) - # assert_equal 'nemo:grn-gyy3k8j', @configuration.id_from(study, :projects) - # end - - # test 'should load taxon common names' do - # assert_equal %w[human], @configuration.taxon_names - # end - # - # test 'should find library preparation protocol' do - # assert_equal "10x 3' v3", @configuration.find_library_prep("10x chromium 3' v3 sequencing") - # assert_equal 'Drop-seq', @configuration.find_library_prep('drop-seq') - # end - - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should populate study and study_file' do - # scp_study = @configuration.populate_study - # assert_equal 'Human variation study (10x), GRU', scp_study.name - # assert_not scp_study.public - # assert scp_study.full_description.present? - # assert_equal @user_id, scp_study.user_id - # assert_equal @branding_group_id, scp_study.branding_group_ids.first - # assert_equal @configuration.service_name, scp_study.imported_from - # # populate StudyFile, using above study - # scp_study_file = @configuration.populate_study_file(scp_study.id) - # assert scp_study_file.use_metadata_convention - # assert_equal 'human_var_scVI_VLMC.h5ad', scp_study_file.upload_file_name - # assert_equal "10x 3' v3", scp_study_file.expression_file_info.library_preparation_protocol - # assert_equal @configuration.service_name, scp_study_file.imported_from - # assert_not scp_study_file.ann_data_file_info.reference_file - # @configuration.obsm_keys.each do |obsm_key_name| - # assert scp_study_file.ann_data_file_info.find_fragment(data_type: :cluster, obsm_key_name:).present? - # end - # assert scp_study_file.ann_data_file_info.find_fragment(data_type: :expression).present? 
- # end - - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should import all from service' do - # access_url = 'gs://nemo-public/other/grant/u01_lein/lein/transcriptome/sncell/10x_v3/' \ - # 'human/processed/counts/human_var_scVI_VLMC.h5ad' - # file_mock = ::Minitest::Mock.new - # file_mock.expect :generation, '123456789' - # # for study to save, we need to mock all Terra orchestration API calls for creating workspace & setting acls - # fc_client_mock = ::Minitest::Mock.new - # owner_group = { groupEmail: 'sa-owner-group@firecloud.org' }.with_indifferent_access - # assign_workspace_mock!(fc_client_mock, owner_group, 'human-variation-study-10x-gru') - # AdminConfiguration.stub :find_or_create_ws_user_group!, owner_group do - # ImportService.stub :copy_file_to_bucket, file_mock do - # ApplicationController.stub :firecloud_client, fc_client_mock do - # @configuration.stub :taxon_from, Taxon.new(common_name: 'human') do - # study, study_file = @configuration.import_from_service - # file_mock.verify - # fc_client_mock.verify - # assert study.persisted? - # assert study_file.persisted? - # assert_equal study.external_identifier, @attributes[:study_id] - # assert_equal study_file.external_identifier, @attributes[:file_id] - # assert_equal study_file.external_link_url, access_url - # end - # end - # end - # end - # end + test 'should load study analog' do + study = @configuration.load_study + assert_equal '"Human variation study (10x), GRU"', study['name'] + assert_equal ["10x chromium 3' v3 sequencing"], study['techniques'] + assert_equal [{"name"=>"human", "cv_term_id"=>"NCBI:txid9606"}], study['taxa'] + end + + test 'should load file analog' do + file = @configuration.load_file + assert_equal 'human_var_scVI_VLMC.h5ad', file['file_name'] + assert_equal 'h5ad', file['file_format'] + assert_equal 'counts', file['data_type'] + end + + test 'should load collection analog' do + collection = @configuration.load_collection + assert_equal 'ecker_sn_mCseq_proj', collection['short_name'] + end + + test 'should extract association ids' do + file = @configuration.load_file + study = @configuration.load_study + assert_equal @attributes[:study_id], @configuration.id_from(file, :collections) + assert_equal 'nemo:grn-gyy3k8j', @configuration.id_from(study, :projects) + end + + test 'should load taxon common names' do + assert_equal %w[human], @configuration.taxon_names + end + + test 'should find library preparation protocol' do + assert_equal "10x 3' v3", @configuration.find_library_prep("10x chromium 3' v3 sequencing") + assert_equal 'Drop-seq', @configuration.find_library_prep('drop-seq') + end + + test 'should populate study and study_file' do + scp_study = @configuration.populate_study + assert_equal 'Human variation study (10x), GRU', scp_study.name + assert_not scp_study.public + assert scp_study.full_description.present? 
+ assert_equal @user_id, scp_study.user_id + assert_equal @branding_group_id, scp_study.branding_group_ids.first + assert_equal @configuration.service_name, scp_study.imported_from + # populate StudyFile, using above study + scp_study_file = @configuration.populate_study_file(scp_study.id) + assert scp_study_file.use_metadata_convention + assert_equal 'human_var_scVI_VLMC.h5ad', scp_study_file.upload_file_name + assert_equal "10x 3' v3", scp_study_file.expression_file_info.library_preparation_protocol + assert_equal @configuration.service_name, scp_study_file.imported_from + assert_not scp_study_file.ann_data_file_info.reference_file + @configuration.obsm_keys.each do |obsm_key_name| + assert scp_study_file.ann_data_file_info.find_fragment(data_type: :cluster, obsm_key_name:).present? + end + assert scp_study_file.ann_data_file_info.find_fragment(data_type: :expression).present? + end + + test 'should import all from service' do + access_url = 'gs://nemo-public/other/grant/u01_lein/lein/transcriptome/sncell/10x_v3/' \ + 'human/processed/counts/human_var_scVI_VLMC.h5ad' + file_mock = ::Minitest::Mock.new + file_mock.expect :generation, '123456789' + # for study to save, we need to mock all Terra orchestration API calls for creating workspace & setting acls + fc_client_mock = ::Minitest::Mock.new + owner_group = { groupEmail: 'sa-owner-group@firecloud.org' }.with_indifferent_access + assign_workspace_mock!(fc_client_mock, owner_group, 'human-variation-study-10x-gru') + AdminConfiguration.stub :find_or_create_ws_user_group!, owner_group do + ImportService.stub :copy_file_to_bucket, file_mock do + ApplicationController.stub :firecloud_client, fc_client_mock do + @configuration.stub :taxon_from, Taxon.new(common_name: 'human') do + study, study_file = @configuration.import_from_service + file_mock.verify + fc_client_mock.verify + assert study.persisted? + assert study_file.persisted? + assert_equal study.external_identifier, @attributes[:study_id] + assert_equal study_file.external_identifier, @attributes[:file_id] + assert_equal study_file.external_link_url, access_url + end + end + end + end + end end end diff --git a/test/integration/external/import_service_test.rb b/test/integration/external/import_service_test.rb index 513143ec2..7af552cca 100644 --- a/test/integration/external/import_service_test.rb +++ b/test/integration/external/import_service_test.rb @@ -9,16 +9,15 @@ class ImportServiceTest < ActiveSupport::TestCase } end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should call API client method' do - # client = NemoClient.new - # nemo_file = ImportService.call_api_client(client, :file, @nemo_attributes[:file_id]) - # assert_equal 'BI006_marm028_Munchkin_M1_rxn1.4.bam.bai', nemo_file['file_name'] - # assert_equal 'bam', nemo_file['file_format'] - # assert_raises ArgumentError do - # ImportService.call_api_client(FireCloudClient.new, :api_available?) - # end - # end + test 'should call API client method' do + client = NemoClient.new + nemo_file = ImportService.call_api_client(client, :file, @nemo_attributes[:file_id]) + assert_equal 'BI006_marm028_Munchkin_M1_rxn1.4.bam.bai', nemo_file['file_name'] + assert_equal 'bam', nemo_file['file_format'] + assert_raises ArgumentError do + ImportService.call_api_client(FireCloudClient.new, :api_available?) 
+ end + end test 'should call import from external service' do mock = Minitest::Mock.new @@ -40,7 +39,6 @@ class ImportServiceTest < ActiveSupport::TestCase bucket = ImportService.load_public_bucket bucket_id assert bucket.present? bucket.is_a?(Google::Cloud::Storage::Bucket) - assert bucket.lazy? # skip_lookup: true end test 'should get public file from bucket' do @@ -51,7 +49,6 @@ class ImportServiceTest < ActiveSupport::TestCase assert file.is_a?(Google::Cloud::Storage::File) assert_equal filepath, file.name assert_equal bucket_id, file.bucket - assert file.lazy? # skip_lookup: true end test 'should parse gs URL' do diff --git a/test/integration/external/nemo_client_test.rb b/test/integration/external/nemo_client_test.rb index a26ddcc43..5bdc17e4b 100644 --- a/test/integration/external/nemo_client_test.rb +++ b/test/integration/external/nemo_client_test.rb @@ -2,8 +2,6 @@ class NemoClientTest < ActiveSupport::TestCase before(:all) do - @username = ENV['NEMO_API_USERNAME'] - @password = ENV['NEMO_API_PASSWORD'] @nemo_client = NemoClient.new @nemo_is_ok = @nemo_client.api_available? @skip_message = '-- skipping due to NeMO API being unavailable --' @@ -17,7 +15,7 @@ class NemoClientTest < ActiveSupport::TestCase } end - # skip a test if Azul is not up ; prevents unnecessary build failures due to releases/maintenance + # skip a test if Nemo API is not up ; prevents unnecessary build failures due to releases/maintenance def skip_if_api_down unless @nemo_is_ok puts @skip_message; skip @@ -27,8 +25,6 @@ def skip_if_api_down test 'should instantiate client' do client = NemoClient.new assert_equal NemoClient::BASE_URL, client.api_root - assert_equal @username, client.username - assert_equal @password, client.password end test 'should check if NeMO is up' do @@ -36,12 +32,6 @@ def skip_if_api_down assert @nemo_client.api_available? end - test 'should format authentication header' do - auth_header = @nemo_client.authorization_header - username_password = auth_header[:Authorization].split.last # trim off 'Basic ' - assert_equal "#{@nemo_client.username}:#{@nemo_client.password}", Base64.decode64(username_password) - end - test 'should validate entity type' do assert_raises ArgumentError do @nemo_client.fetch_entity(:foo, 'bar') @@ -54,86 +44,70 @@ def skip_if_api_down end end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get an entity' do - # skip_if_api_down - # entity_type = @identifiers.keys.sample - # identifier = @identifiers[entity_type] - # entity = @nemo_client.fetch_entity(entity_type, identifier) - # assert entity.present? - # end - # - # test 'should get collection' do - # skip_if_api_down - # identifier = @identifiers[:collection] - # collection = @nemo_client.collection(identifier) - # assert collection.present? - # assert_equal 'human_variation_10X', collection['short_name'] - # end + test 'should get an entity' do + skip_if_api_down + entity_type = @identifiers.keys.sample + identifier = @identifiers[entity_type] + entity = @nemo_client.fetch_entity(entity_type, identifier) + assert entity.present? + end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get file' do - # skip_if_api_down - # identifier = @identifiers[:file] - # file = @nemo_client.file(identifier) - # assert file.present? 
- # filename = 'human_var_scVI_VLMC.h5ad' - # assert_equal filename, file['file_name'] - # assert_equal 'h5ad', file['file_format'] - # access_url = file['manifest_file_urls'].first['url'] - # assert_equal filename, access_url.split('/').last - # end + test 'should get collection' do + skip_if_api_down + identifier = @identifiers[:collection] + collection = @nemo_client.collection(identifier) + assert collection.present? + assert_equal 'human_variation_10X', collection['short_name'] + end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get grant' do - # skip_if_api_down - # identifier = @identifiers[:grant] - # grant = @nemo_client.grant(identifier) - # assert grant.present? - # assert_equal 'Allen Institute Funder', grant.dig('grant_info','grant_number') - # assert_equal 'Allen Institute Funder', grant['funding_agency'] - # end - # - # test 'should get project' do - # skip_if_api_down - # identifier = @identifiers[:project] - # project = @nemo_client.project(identifier) - # assert project.present? - # assert_equal 'DNA methylation profiling of genomic DNA in individual mouse brain cell nuclei (RS1.1)', - # project['title'] - # assert_equal 'biccn', project['program'] - # assert_equal 'ecker_sn_mCseq_proj', project['short_name'] - # end + test 'should get file' do + skip_if_api_down + identifier = @identifiers[:file] + file = @nemo_client.file(identifier) + assert file.present? + filename = 'human_var_scVI_VLMC.h5ad' + assert_equal filename, file['file_name'] + assert_equal 'h5ad', file['file_format'] + access_url = file['manifest_file_urls'].first['url'] + assert_equal filename, access_url.split('/').last + end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get publication' do - # skip_if_api_down - # identifier = @identifiers[:publication] - # publication = @nemo_client.publication(identifier) - # assert publication.present? - # assert_equal 'eLife', publication['journal'] - # assert_equal 'https://doi.org/10.7554/eLife.64875', publication['doi'] - # assert_equal ["human", "macaques", "house mouse"].sort, publication['taxonomies'].sort - # end + test 'should get grant' do + skip_if_api_down + identifier = @identifiers[:grant] + grant = @nemo_client.grant(identifier) + assert grant.present? + assert_equal 'Allen Institute Funder', grant.dig('grant_info','grant_number') + assert_equal 'Allen Institute Funder', grant['funding_agency'] + end + + test 'should get project' do + skip_if_api_down + identifier = @identifiers[:project] + project = @nemo_client.project(identifier) + assert project.present? + assert_equal 'DNA methylation profiling of genomic DNA in individual mouse brain cell nuclei (RS1.1)', + project['title'] + assert_equal 'biccn', project['program'] + assert_equal 'ecker_sn_mCseq_proj', project['short_name'] + end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get sample' do - # skip_if_api_down - # identifier = @identifiers[:sample] - # sample = @nemo_client.sample(identifier) - # assert sample.present? - # assert_equal 'marm028_M1', sample['sample_name'] - # assert sample['subjects'].any? - # end + test 'should get sample' do + skip_if_api_down + identifier = @identifiers[:sample] + sample = @nemo_client.sample(identifier) + assert sample.present? + assert_equal 'marm028_M1', sample['sample_name'] + assert sample['subjects'].any? 
+ end - # TODO: SCP-5565 Check with NeMO re API, update and re-enable this test - # test 'should get subject' do - # skip_if_api_down - # identifier = @identifiers[:subject] - # subject = @nemo_client.subject(identifier) - # assert subject.present? - # assert_equal 'nonhuman-1U01MH114819', subject.dig('cohort_info', 'cohort_name') - # assert_equal 'A Molecular and cellular atlas of the marmoset brain', subject['grant_title'] - # assert subject['samples'].any? - # end + test 'should get subject' do + skip_if_api_down + identifier = @identifiers[:subject] + subject = @nemo_client.subject(identifier) + assert subject.present? + assert_equal 'nonhuman-1U01MH114819', subject.dig('cohort_info', 'cohort_name') + assert_equal 'A Molecular and cellular atlas of the marmoset brain', subject['grant_title'] + assert subject['samples'].any? + end end diff --git a/test/js/search/options-control.test.js b/test/js/search/options-control.test.js index 57c7e44e0..4dce9cc6b 100644 --- a/test/js/search/options-control.test.js +++ b/test/js/search/options-control.test.js @@ -1,5 +1,5 @@ import React from 'react' -import { render, fireEvent } from '@testing-library/react' +import { render, fireEvent, screen } from '@testing-library/react' import '@testing-library/jest-dom/extend-expect' import OptionsControl from '~/components/search/controls/OptionsControl' @@ -7,15 +7,28 @@ import OptionsControl from '~/components/search/controls/OptionsControl' describe('OptionsControl component', () => { it('renders with default checked state', () => { const searchContext = { - params: { 'external': 'hca' }, + params: { 'external': 'hca', 'data_types': 'raw_counts,spatial' }, updateSearch: jest.fn() } const { getByText, getByRole } = render( - + <> + + + + ) expect(getByText('Include HCA results')).toBeInTheDocument() - expect(getByRole('checkbox')).toBeChecked() + expect(getByText('Has raw counts')).toBeInTheDocument() + expect(getByText('Has spatial')).toBeInTheDocument() + expect(screen.getByTestId('options-checkbox-external-hca')).toBeChecked() + expect(screen.getByTestId('options-checkbox-data_types-raw_counts')).toBeChecked() + expect(screen.getByTestId('options-checkbox-data_types-spatial')).toBeChecked() }) it('toggles checkbox state on click', () => { @@ -38,4 +51,24 @@ describe('OptionsControl component', () => { expect(checkbox).toBeChecked() expect(searchContext.updateSearch).toHaveBeenCalledWith({ 'external': 'hca' }) }) + + it('merges multiple option controls into same parameter', () => { + const searchContext = { + params: { 'data_types': 'raw_counts' }, + updateSearch: jest.fn() + } + const { getByText } = render( + <> + + + + ) + fireEvent.click(getByText('Has spatial')) + expect(searchContext.updateSearch).toHaveBeenCalledWith({ 'data_types': 'raw_counts,spatial' }) + }) }) diff --git a/test/services/differential_expression_service_test.rb b/test/services/differential_expression_service_test.rb index 19fa991c1..5467a13d3 100644 --- a/test/services/differential_expression_service_test.rb +++ b/test/services/differential_expression_service_test.rb @@ -283,9 +283,9 @@ class DifferentialExpressionServiceTest < ActiveSupport::TestCase ) assert result.present? 
@basic_study.reload - assert DifferentialExpressionService.results_exist?(@basic_study, annotation) - no_results = { annotation_name: 'foo', annotation_scope: 'cluster', cluster_group_id: cluster.id } - assert_not DifferentialExpressionService.results_exist?(@basic_study, no_results) + assert DifferentialExpressionService.results_exist?(@basic_study, cluster, annotation) + no_results = { annotation_name: 'foo', annotation_scope: 'cluster' } + assert_not DifferentialExpressionService.results_exist?(@basic_study, cluster, no_results) end test 'should find eligible annotations' do