antspymm.rsfmri

  1def resting_state_fmri_networks( fmri, fmri_template, t1, t1segmentation,
  2    f=[0.03, 0.08],
  3    FD_threshold=5.0,
  4    spa = None,
  5    spt = None,
  6    nc = 5,
  7    outlier_threshold=0.250,
  8    ica_components = 0,
  9    impute = True,
 10    censor = True,
 11    despike = 2.5,
 12    motion_as_nuisance = True,
 13    powers = False,
 14    upsample = 3.0,
 15    clean_tmp = None,
 16    paramset='unset', # Placeholder for parameter set name
 17    verbose=False ):
 18  """
 19  Compute resting-state network correlation maps based on the Power et al. node
 20  set or the Yeo 17-network homotopic parcellation, including the fMRI
 21  preprocessing steps and the subsequent network analysis.
 22  Arguments mirror the keyword defaults shown in the function signature.
 23
 24  Returns
 25  ---------
 26  a dictionary containing the derived network maps
 27  """
 28  warnings.filterwarnings("ignore", category=UserWarning) # Suppress UserWarnings
 29  global debug # module-level debug flag used by internal checks
 30  debug = False # set to True to enable extra internal diagnostics
 31
 32  # --- Local Helper Functions (Defined within the main function scope) ---
 33  # These are adapted from the original monolithic function.
 34
 35  # Note: `temporal_derivative_same_shape`, `compute_tSTD` and `get_compcor_matrix` were nested
 36  # functions in the original monolithic implementation.  They are kept as nested helpers here,
 37  # although, as general utilities, they could live at module level to avoid per-call
 38  # redefinition and to make them reusable elsewhere.
 39
 40  # Original: `temporal_derivative_same_shape`
 41  def _calculate_temporal_derivative(array): 
 42    # This is a local version of `make_time_series_derivative` used in original
 43    derivative = np.diff(array, axis=0)
 44    zeros_row = np.zeros((1, array.shape[1]))
 45    return np.vstack((zeros_row, derivative ))
 46
 47  # Original: `compute_tSTD`
 48  def _compute_tsnr_std_mask_threshold(M_matrix, quantile, x_val=0, axis_val=0):
 49    stdM = np.std(M_matrix, axis=axis_val)
 50    stdM[stdM == 0] = x_val
 51    stdM[np.isnan(stdM)] = x_val
 52    threshold_val = np.percentile(stdM, round(quantile * 100))
 53    return {'tSTD': stdM, 'threshold_std': threshold_val}
 54
 55  # Original: `get_compcor_matrix` - kept for parity with the original code; it overlaps with
 56  # the `compcor` call used later and is not invoked elsewhere in this listing.
 57  def _get_compcor_matrix_helper(boldImage_antspy, mask_antspy, quantile_val):
 58    if mask_antspy is None:
 59        temp = ants.slice_image(boldImage_antspy, axis=boldImage_antspy.dimension - 1, idx=0)
 60        mask_antspy = ants.get_mask(temp)
 61    imagematrix = ants.timeseries_to_matrix(boldImage_antspy, mask_antspy)
 62    temp = _compute_tsnr_std_mask_threshold(imagematrix, quantile_val, 0)
 63    tsnrmask_img = ants.make_image(mask_antspy, temp['tSTD']) # Image of per-voxel temporal SD over the mask
 64    tsnrmask_img = ants.threshold_image(tsnrmask_img, temp['threshold_std'], temp['tSTD'].max())
 65    M = ants.timeseries_to_matrix(boldImage_antspy, tsnrmask_img)
 66    return M
 67
 68  # Original: `find_indices`
 69  def _find_indices_gt_value(lst, value):
 70    return [index for index, element in enumerate(lst) if element > value]
 71
 72  # Original: `mean_of_list`
 73  def _calculate_mean_of_list(lst):
 74    if not lst: return 0 
 75    return sum(lst) / len(lst)
 76
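  # `_get_network_mask_image_internal` is referenced later in this listing (network-by-network
  # summary matrices) but its definition is not shown.  The nested function below is a minimal
  # sketch of the assumed behaviour: build a binary BOLD-space mask covering every ROI whose
  # SystemName matches `net_name`.  It assumes the label values in `label_img` correspond to the
  # 'ROI' column of `roi_df` (true for both branches of the atlas mapping further down);
  # `is_powers` is accepted only to match the call signature used later.
  def _get_network_mask_image_internal(net_name, mask_img, label_img, roi_df, is_powers):
    roi_ids = roi_df.loc[roi_df['SystemName'] == net_name, 'ROI'].unique()
    net_mask = mask_img * 0.0
    for roi_id in roi_ids:
      net_mask = net_mask + ants.threshold_image(label_img, float(roi_id), float(roi_id))
    return ants.threshold_image(net_mask, 0.5, 1e9) * mask_img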
 77
 78  # --- Pipeline Execution Starts Here ---
 79
 80  # 0. Initial Setup and Temporary Directories
 81  if clean_tmp is not None:
 82    clean_tmp_directory( age_hours = clean_tmp )
 83
 84  # Interpret nc: a value >= 1 requests a fixed number of CompCor components
 85  nc_raw_value = nc # Use a new variable to keep original nc intact
 86  if isinstance(nc_raw_value, (int, float)) and nc_raw_value >= 1:
 87    nc_compcor_val = int(nc_raw_value)
 88  else:
 89    nc_variance_thresh = float(nc_raw_value) # a fraction in (0, 1): variance-explained target
 90
 91  output_directory = tempfile.mkdtemp()
 92  output_directory_w = os.path.join(output_directory, "ts_t1_reg")
 93  os.makedirs(output_directory_w,exist_ok=True)
 94  ofnt1tx = tempfile.NamedTemporaryFile(delete=False,suffix='t1_deformation',dir=output_directory_w).name
 95  
 96  if verbose: print("--- Starting Resting State fMRI Network Analysis ---")
 97
 98  # 1. Image Resolution Harmonization (Upsampling fmri_template)
 99  if upsample > 0.0:
100      fmri_spacing = ants.get_spacing( fmri )
101      min_fmri_spacing_xyz = min(fmri_spacing[0:3])
102      target_isotropic_spacing = min(upsample, min_fmri_spacing_xyz)
103      
104      # Only resample if target is different from current image's min spacing
105      if not np.allclose( (target_isotropic_spacing,)*3, fmri_template.spacing[:3] ): # Check if already at target isotropic
106          if verbose:
107              print(f"1.1. Resampling fmri_template to {target_isotropic_spacing} mm isotropic (Original: {fmri_template.spacing[:3]} mm).")
108              # The warning fires only when the template is genuinely being upsampled beyond its native resolution
109              if target_isotropic_spacing < min_fmri_spacing_xyz: 
110                  warnings.warn(f"Upsampling fmri_template from {min_fmri_spacing_xyz}mm. Target {target_isotropic_spacing}mm. ")
111
112          fmri_template = ants.resample_image( fmri_template, (target_isotropic_spacing, target_isotropic_spacing, target_isotropic_spacing), interp_type=0 )
113      else:
114          if verbose: print("1.1. fmri_template already at target isotropic resolution. Skipping resampling.")
115  else:
116      if verbose: print("1.1. Upsampling disabled. Skipping fmri_template resampling.")
117
118  # 2. Initial Parameter Setup (Smoothing and Network Resources)
119  # Dynamic smoothing parameter calculation if not provided.
120  fmri_full_spacing = list( ants.get_spacing( fmri ) )
121  if spa is None:
122    spa = _calculate_mean_of_list( fmri_full_spacing[0:3] ) * 1.0 # Spatial smoothing sigma
123  if spt is None:
124    spt = fmri_full_spacing[3] * 0.5 # Temporal smoothing sigma
125      
126  # Load network parcellation data.
127  dfnname='DefaultMode' # Default network for output dictionary (not processed yet)
128  network='powers' if powers else 'yeo_17_500_2023' # Name of network scheme
129  
130  if verbose: print(f"2.1. Loading network parcellation data: {network}.")
131  powers_areal_mni_itk = pd.read_csv( get_data(network, target_extension=".csv"))
132  
133  if powers:
134      ch2_img = mm_read( ants.get_ants_data( "ch2" ) ) # Example of core ANTs data
135  else:
136      ch2_img = mm_read( get_data( "PPMI_template0_brain", target_extension='.nii.gz' ) ) # A template image for registration
137
138
139  # 3. Brain Extraction and Masking (Initial)
140  if verbose: print("3.1. Performing brain extraction and initial masking.")
141  fmri = ants.iMath( fmri, 'Normalize' ) # Normalize intensity
142  # Use antspynet for robust brain extraction
143  bmask = antspynet.brain_extraction( fmri_template, 'bold' ).threshold_image(0.5,1).iMath("FillHoles")
144
145
146  # 4. Motion Correction
147  if verbose: print("4.1. Beginning fMRI motion correction.")
148  corrmo = ants.motion_correction(
149    fmri, fmri_template,
150    type_of_transform="antsRegistrationSyNQuickRepro[r]", # rigid, reproducible transform for motion correction
151    total_sigma=0.5,
152    fdOffset=2.0,
153    trim = 8,
154    output_directory=None, # Process in memory
155    verbose=verbose,
156    syn_metric='CC', # Cross-correlation metric for registration
157    syn_sampling=2,
158    reg_iterations=[40,20,5], # Iterations for registration optimization
159    return_numpy_motion_parameters=True )
160  
161  if verbose:
162      print("4.2. End fMRI motion correction.")
163      print(f"--- Maximum observed motion (FD): {corrmo['FD'].max():.2f} mm.")
164
165
166  # 5. Despiking (AFNI-style)
167  if despike > 0.0:
168      if verbose: print(f"5.1. Applying voxel-wise despiking with threshold: {despike}.")
169      corrmo['motion_corrected'], despiking_count_per_volume = despike_time_series_afni( corrmo['motion_corrected'], c1=despike )
170      despiking_count_summary = despiking_count_per_volume.sum() / np.prod( corrmo['motion_corrected'].shape[:3] )
171  else:
172      despiking_count_summary = 0.0 # no despiking applied
173  high_motion_count = len( _find_indices_gt_value(corrmo['FD'], FD_threshold) ) # Number of high-motion volumes
174  high_motion_pct = high_motion_count / fmri.shape[3]
175  
176  # 6. TSNR-based Mask Refinement
177  if verbose: print("6.1. Refining brain mask based on TSNR.")
178  mytsnr = tsnr( corrmo['motion_corrected'], bmask )
179  mytsnrThresh = np.quantile( mytsnr.numpy(), 0.995 ) # 0.995 quantile of TSNR values
180  tsnrmask = ants.threshold_image( mytsnr, 0, mytsnrThresh ).morphology("close",2)
181  bmask = bmask * tsnrmask # Final brain mask after TSNR filtering
182
183  # 7. Anatomical Segmentation Mapping to BOLD Space
184  if verbose: print("7.1. Registering T1 to BOLD space and mapping segmentations.")
185  und = fmri_template * bmask # Brain-masked fMRI template (the mean-BOLD reference image)
186  t1reg = ants.registration( und, t1, # Register T1 (moving) to the BOLD reference `und` (fixed)
187     "antsRegistrationSyNQuickRepro[s]", outprefix=ofnt1tx ) # Output prefix for transform files
188  
189  # Process and map Gray Matter segmentation to BOLD space
190  gmseg = ants.threshold_image( t1segmentation, 2, 2 ) 
191  gmseg = gmseg + ants.threshold_image( t1segmentation, 4, 4 ) # Combine specific GM labels
192  gmseg = ants.threshold_image( gmseg, 1, 4 ) # Binarize GM
193  gmseg = ants.iMath( gmseg, 'MD', 1 ) # Morphological dilation of the GM mask
194  gmseg_bold_space = ants.apply_transforms( und, gmseg,
195    t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
196
197  # Process and map CSF + WM segmentations to BOLD space for nuisance regression
198  csf_t1_mask = ants.threshold_image( t1segmentation, 1, 1 ) 
199  wm_t1_mask = ants.threshold_image( t1segmentation, 3, 3 ).morphology("erode",1) # Eroded WM for robustness
200  
201  csf_bold_space = ants.apply_transforms( und, csf_t1_mask, t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
202  wm_bold_space = ants.apply_transforms( und, wm_t1_mask, t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
203  csfAndWM_bold_space = (csf_bold_space + wm_bold_space).threshold_image(0.5,1) # Combined CSF+WM mask
204
205  # 8. Network Atlas Mapping to BOLD Space
206  if verbose: print("8.1. Mapping network atlas/parcellation to BOLD space.")
207  t1_resampled_for_treg = ants.resample_image(t1, [1.0,1.0,1.0], interp_type=0) # T1 resampled for consistent registration
208  treg = ants.registration( t1_resampled_for_treg, ch2_img, "antsRegistrationSyNQuickRepro[s]" ) # Register the group template (ch2_img, moving) to T1 (fixed)
209
210  if powers: # Power's nodes (point-based definition)
211    concat_point_transforms = treg['invtransforms'] + t1reg['invtransforms'] # Map from MNI (ch2) to BOLD
212    pts2bold_df_coords_original = ants.apply_transforms_to_points( 3, powers_areal_mni_itk, concat_point_transforms,
213        whichtoinvert = ( True, False, True, False ) )
214    locations = pts2bold_df_coords_original.iloc[:,:3].values
215    ptImg_labels = ants.make_points_image( locations, bmask, radius = 2 ) # Creates label image from points
216    pts2bold_df = powers_areal_mni_itk # Dataframe of ROI names and MNI coords
217    
218  else: # Yeo 2023 nodes (volumetric parcellation)
219    concat_vol_transforms = t1reg['fwdtransforms'] + treg['fwdtransforms'] # Map ANTs template parcellation to BOLD
220    yeo_seg_fn = get_data('ppmi_template_500Parcels_Yeo2011_17Networks_2023_homotopic', target_extension=".nii.gz")
221    yeo_seg_img = ants.image_read( yeo_seg_fn )
222    ptImg_labels = ants.apply_transforms( und, yeo_seg_img, concat_vol_transforms, interpolator='nearestNeighbor' ) * bmask
223    pts2bold_df = powers_areal_mni_itk # Dataframe of ROI names and cluster info
224
225  # 9. Spatial-Temporal Smoothing
226  # simg is the smoothed BOLD timeseries used throughout the rest of pipeline
227  tr = ants.get_spacing( corrmo['motion_corrected'] )[3] # Repetition Time (TR)
228  smth_tuple = ( spa, spa, spa, spt ) # Smoothing sigmas (spatial + temporal)
229  simg = ants.smooth_image( corrmo['motion_corrected'], smth_tuple, sigma_in_physical_coordinates = True )
230
231
232  # 10. Volume Censoring (Flagging outlier time points)
233  censoring_indices = _find_indices_gt_value(corrmo['FD'], FD_threshold) # Indices of high motion volumes
234  if outlier_threshold < 1.0 and outlier_threshold > 0.0:
235    if verbose: print(f"10.1. Identifying additional outlier time points using outlier_threshold: {outlier_threshold}.")
236    # loop_timeseries_censoring identifies outliers (e.g., from DVARS)
237    # The output fmrimotcorr represents input, hlinds2 are indices of new outliers
238    _dummy_fmri_data_after_loop_censor, hlinds2_outliers = loop_timeseries_censoring( corrmo['motion_corrected'], 
239      threshold=outlier_threshold, verbose=verbose )
240    censoring_indices.extend( hlinds2_outliers )
241    # _dummy_fmri_data_after_loop_censor is discarded as `simg` is used later after smoothing
242    
243  # Final set of unique censoring indices, sorted for deterministic removal
244  censoring_indices = list(set(censoring_indices)) 
245  censoring_indices.sort() 
246
247  # 11. Nuisance Regressor Generation
248  if verbose: print("11.1. Generating nuisance regressors.")
249  
250  # Global Signal
251  global_bold_matrix = ants.timeseries_to_matrix( simg, bmask )
252  globalsignal = np.nanmean( global_bold_matrix, axis = 1 ) 
253  del global_bold_matrix # Free memory
254
255  # CompCor Components (from CSF and WM masks)
256  compcor_quantile_val=0.50 # Quantile for tissue definition within CompCor
257  n_compcor_csf = n_compcor_wm = None # Initialize component counts
258
259  # Determine number of CompCor components (fixed or by variance explained)
260  if nc < 1: # User provided a variance-explained threshold (e.g., 0.99) rather than a count
261    if verbose: print(f"11.2. Estimating optimal CompCor components based on variance threshold: {nc_variance_thresh}.")
262    csf_matrix_for_pca = ants.timeseries_to_matrix(simg, csf_bold_space)
263    n_compcor_csf = int(estimate_optimal_pca_components(csf_matrix_for_pca, variance_threshold=nc_variance_thresh))
264    wm_matrix_for_pca = ants.timeseries_to_matrix(simg, wm_bold_space)
265    n_compcor_wm = int(estimate_optimal_pca_components(wm_matrix_for_pca, variance_threshold=nc_variance_thresh))
266  else: # User provided a fixed number of components (e.g., 5)
267    n_compcor_csf = int(nc_raw_value)
268    n_compcor_wm = int(nc_raw_value)
269
270  if verbose: print(f"11.3. Including CompCor components: CSF={n_compcor_csf}, WM={n_compcor_wm}.")
271  mycompcor_csf_results = compcor( simg, ncompcor=n_compcor_csf, quantile=compcor_quantile_val, 
272                                   mask = csf_bold_space, filter_type='polynomial', degree=2 )
273  mycompcor_wm_results = compcor( simg, ncompcor=n_compcor_wm, quantile=compcor_quantile_val, 
274                                 mask = wm_bold_space, filter_type='polynomial', degree=2 )
275  nuisance_regressors = np.c_[ mycompcor_csf_results[ 'components' ], mycompcor_wm_results[ 'components' ] ]
276
277  # Motion Parameters and their Derivatives
278  if motion_as_nuisance:
279      if verbose: print("11.4. Adding motion parameters and derivatives as nuisance regressors.")
280      # make_time_series_derivative is assumed to be deterministic.
281      motion_derivs = make_time_series_derivative( corrmo['motion_parameters'] ) 
282      nuisance_regressors = np.c_[ nuisance_regressors, corrmo['motion_parameters'], motion_derivs ]
283
284  # ICA Components (if specified)
285  if ica_components > 0:
286    if verbose: print(f"11.5. Including {ica_components} ICA components as nuisance regressors.")
287    # FastICA is stochastic by default; random_state=42 ensures reproducibility.
288    ica_model = FastICA(n_components=ica_components, max_iter=10000, tol=0.001, random_state=42 )
289    csfAndWM_matrix_for_ica = ants.timeseries_to_matrix( simg, csfAndWM_bold_space ) 
290    ica_nuisance_components = ica_model.fit_transform(csfAndWM_matrix_for_ica)
291    nuisance_regressors = np.c_[ nuisance_regressors, ica_nuisance_components ] 
292    del csfAndWM_matrix_for_ica # Free memory
293
294  # Combine with global signal
295  nuisance_regressors = np.c_[ nuisance_regressors, globalsignal ]
296
297  # 12. Bandpass Filtering (optional, applied to nuisance and BOLD)
298  if f[0] > 0 and f[1] < 1.0: 
299    if verbose: print(f"12.1. Applying bandpass filter [{f[0]} - {f[1]}] Hz.")
300    # Ensure bandpass_filter_matrix is deterministic.
301    nuisance_regressors = bandpass_filter_matrix( nuisance_regressors, tr = tr, lowf=f[0], highf=f[1] ) 
302    
303    # Apply bandpass filter to the BOLD signal directly
304    simg_matrix_original = ants.timeseries_to_matrix( simg, bmask )
305    simg_matrix_bandpassed = bandpass_filter_matrix( simg_matrix_original, tr = tr, lowf=f[0], highf=f[1] )
306    simg = ants.matrix_to_timeseries(simg, simg_matrix_bandpassed, bmask)
307    gc.collect() 
308
309  # 13. Nuisance Regression and Censoring Application
310  if verbose: print("13.1. Regressing nuisance parameters and applying censoring.")
311
312  # Select data and nuisance based on censoring indices
313  if len( censoring_indices ) > 0 : 
314    if censor: 
315        nuisance_regressors_final = remove_elements_from_numpy_array( nuisance_regressors, censoring_indices )
316        simg_final = remove_volumes_from_timeseries( simg, censoring_indices )
317    else: 
318        nuisance_regressors_final = nuisance_regressors
319        simg_final = simg
320  else: 
321      nuisance_regressors_final = nuisance_regressors
322      simg_final = simg
323
324  # Ensure simg_final is not None (if all volumes were censored)
325  if simg_final is None:
326      warnings.warn("All volumes censored. Cannot proceed with regression or network analysis.")
327      return None # Or raise an error
328  
329  # Perform nuisance regression (OLS for each voxel time series)
330  fmri_cleaned_matrix = regress_components( ants.timeseries_to_matrix( simg_final, bmask ), nuisance_regressors_final )
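  # For reference, `regress_components` is assumed to behave like ordinary least-squares
  # residualization of each voxel time series against the nuisance matrix, roughly:
  #   beta  = np.linalg.lstsq(nuisance_regressors_final, bold_matrix, rcond=None)[0]
  #   resid = bold_matrix - nuisance_regressors_final @ beta
  # (a sketch only; the packaged implementation may differ in details such as demeaning).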
331  simg_cleaned = ants.matrix_to_timeseries(simg_final, fmri_cleaned_matrix, bmask) # Final cleaned BOLD time series
332  gc.collect() 
333
334
335  # 14. ALFF / fALFF and PerAF Calculation
336  if verbose: print( "14.1. Calculating ALFF / fALFF and PerAF images." )
337  myfalff_results = alff_image( simg_cleaned, bmask, flo=f[0], fhi=f[1], nuisance=nuisance_regressors_final ) 
338  peraf_image = PerAF( simg_cleaned, bmask )
339
340  # 15. ROI Time Series and Full Correlation Matrix Calculation
341  if verbose: print("15.1. Calculating ROI time series and full correlation matrix.")
342
343  # Extract unique ROI labels from the mapped parcellation image
344  unique_roi_labels_found_in_mask = np.sort(np.unique(ptImg_labels.numpy()[bmask.numpy() > 0]))
345  unique_roi_labels_found_in_mask = unique_roi_labels_found_in_mask[unique_roi_labels_found_in_mask > 0] # Exclude 0 (background)
346
347  # nPoints is taken as the largest ROI ID in the parcellation definition.
348  nPoints = int(powers_areal_mni_itk['ROI'].max()) # Total unique ROIs from definition
349  # If the mapped label image carries a higher maximum label, widen nPoints accordingly
350  if ptImg_labels.max() > nPoints: 
351      nPoints = int(ptImg_labels.max())
352  
353  mean_roi_matrix = np.zeros([simg_cleaned.shape[3], len(unique_roi_labels_found_in_mask)], dtype=np.float32) # Time x observed ROIs, matching roi_names below
354  roi_names = [] 
355
356  # --- Start Parallel Section 1: Extract Mean ROI Time Series ---
357  # Prepare the read-only arrays shared by the worker threads.  Plain locals are
358  # sufficient here; there is no need to stash references on the gc module.
359  simg_cleaned_matrix = ants.timeseries_to_matrix(simg_cleaned, bmask) # Time x in-mask voxels
360  ptImg_labels_numpy = ptImg_labels.numpy()
361  bmask_numpy = bmask.numpy()
362
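  # `_extract_mean_roi_chunk_worker` and `num_threads` are used below but are not defined in
  # this listing.  The nested function here is a minimal sketch of the worker's assumed
  # behaviour: for each ROI label in `label_chunk`, average the cleaned BOLD signal over that
  # ROI's voxels and return (first label of the chunk, time-by-labels matrix of means).  It
  # assumes the column order of ants.timeseries_to_matrix(simg_cleaned, bmask) matches boolean
  # indexing of the .numpy() arrays, i.e. ptImg_labels_numpy[bmask_numpy > 0.5].
  def _extract_mean_roi_chunk_worker(label_chunk, bold_matrix, labels_np, mask_np):
    in_mask_labels = labels_np[mask_np > 0.5]
    chunk_means = np.zeros((bold_matrix.shape[0], len(label_chunk)), dtype=np.float32)
    for col, roi_label in enumerate(label_chunk):
      roi_columns = in_mask_labels == roi_label
      if np.any(roi_columns):
        chunk_means[:, col] = bold_matrix[:, roi_columns].mean(axis=1)
    return label_chunk[0], chunk_means

  # `num_threads` is assumed to be a modest thread count; adjust to the host as needed.
  num_threads = max(1, (os.cpu_count() or 2) - 1)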
363  labels_per_subprocess_chunk = max(1, len(unique_roi_labels_found_in_mask) // num_threads // 4) # Adjust chunk size
364  chunks_of_roi_labels = []
365  for i in range(0, len(unique_roi_labels_found_in_mask), labels_per_subprocess_chunk):
366      chunks_of_roi_labels.append(unique_roi_labels_found_in_mask[i : min(i + labels_per_subprocess_chunk, len(unique_roi_labels_found_in_mask))])
367
368  # Execute tasks for mean_roi_matrix in parallel
369  with ThreadPoolExecutor(max_workers=num_threads) as executor:
370      futures_mean_roi = {executor.submit(_extract_mean_roi_chunk_worker, label_chunk, simg_cleaned_matrix, ptImg_labels_numpy, bmask_numpy): label_chunk for label_chunk in chunks_of_roi_labels}
371      
372      # Collect results in submission order for deterministic write (though order doesn't strictly matter for full matrix)
373      for future_chunk_of_labels in tqdm(futures_mean_roi.keys(), total=len(chunks_of_roi_labels), desc="Extracting ROI Mean Timeseries"):
374          start_label_value_in_chunk, mean_roi_chunk_result = futures_mean_roi[future_chunk_of_labels].result()
375          
376          # Place this chunk's block of columns into mean_roi_matrix.  Chunks are
377          # contiguous slices of unique_roi_labels_found_in_mask, and the columns of
378          # mean_roi_matrix are indexed by position in that sorted array, so the block
379          # starts at the index of the chunk's first label value (looked up with
380          # np.where below) and spans the chunk's width.  This avoids assuming that
381          # ROI codes are 1-indexed or contiguous.
382          # (Results are collected in submission order, which keeps the fill
383          # deterministic even though thread completion order may vary.)
384          idx_in_unique_labels = np.where(unique_roi_labels_found_in_mask == start_label_value_in_chunk)[0][0]
385          mean_roi_matrix[:, idx_in_unique_labels : idx_in_unique_labels + mean_roi_chunk_result.shape[1]] = mean_roi_chunk_result
386  
387  gc.collect() # Aggressive garbage collection after parallel part
388
389
390  # Reconstruct roi_names for correlation matrix columns (based on sorted unique labels)
391  for roi_code in unique_roi_labels_found_in_mask:
392      # Find original row in powers_areal_mni_itk for this ROI code
393      matching_row = powers_areal_mni_itk[powers_areal_mni_itk['ROI'] == roi_code]
394      if not matching_row.empty:
395          netLabel = re.sub( " ", "", matching_row.iloc[0]['SystemName'])
396          netLabel = re.sub( "-", "", netLabel )
397          netLabel = re.sub( "/", "", netLabel )
398          roi_names.append( f"ROI{roi_code}_{netLabel}" )
399      else:
400          roi_names.append(f"ROI{roi_code}_Unknown") # Fallback for unmatched ROIs
401
402  # Calculate full correlation matrix
403  corMat = np.corrcoef(mean_roi_matrix, rowvar=False)
404  outputMat = pd.DataFrame(corMat)
405  outputMat.columns = roi_names
406  outputMat['ROIs'] = roi_names
407  
408  outdict = {} # Final output dictionary structure
409  outdict['paramset'] = paramset
410  outdict['upsampling'] = upsample
411  outdict['coords'] = network # From 'network' variable
412  outdict['dfnname']=dfnname # From 'dfnname' variable
413  outdict['meanBold'] = und # Mean BOLD image (fmri_template * bmask)
414  outdict['fullCorrMat'] = outputMat
415
416  # 16. Network-level Correlation Maps Calculation
417  # This section generates a correlation image for each network, against every brain voxel.
418  networks_unique = powers_areal_mni_itk['SystemName'].unique()
419  
420  # Prepare arguments for network correlation worker (pass network_name, not index or label)
421  # The worker will retrieve all ROIs belonging to that network
422  tasks_for_network_corr = list(networks_unique) # List of unique network names
423
424  if verbose: print(f"16.1. Calculating {len(tasks_for_network_corr)} network correlation maps in parallel.")
425
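  # `_calculate_network_correlation_map_worker` is referenced just below but not shown in this
  # listing.  The closure here is a minimal sketch of its assumed behaviour: average the cleaned
  # BOLD signal over a network's ROIs, correlate that reference series with every in-brain voxel,
  # and return (sanitized network name, correlation image).  It reuses the voxel-ordering
  # convention already assumed elsewhere in this function, namely that the columns of
  # ants.timeseries_to_matrix(simg_cleaned, bmask) line up with ants.make_image(bmask, values).
  def _calculate_network_correlation_map_worker(net_name_raw):
    clean_name = re.sub("[ /-]", "", net_name_raw) # same sanitization used for the outdict keys below
    net_mask = _get_network_mask_image_internal(net_name_raw, bmask, ptImg_labels, pts2bold_df, powers)
    if net_mask is None or net_mask.max() < 1:
        return clean_name, bmask * 0.0
    reference_ts = ants.timeseries_to_matrix(simg_cleaned, net_mask).mean(axis=1)
    ref = (reference_ts - reference_ts.mean()) / (reference_ts.std() + 1e-12)
    vox = (simg_cleaned_matrix - simg_cleaned_matrix.mean(axis=0)) / (simg_cleaned_matrix.std(axis=0) + 1e-12)
    corr_values = (vox * ref[:, None]).mean(axis=0) # Pearson r of the reference series vs. each voxel
    return clean_name, ants.make_image(bmask, corr_values)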
426  with ThreadPoolExecutor(max_workers=num_threads) as executor:
427      futures_network_corr = {executor.submit(_calculate_network_correlation_map_worker, net_name): net_name for net_name in tasks_for_network_corr}
428      
429      # Collect results as they complete (as_completed) and store in outdict.
430      # Order does not affect the final dictionary structure.
431      for future_net in tqdm(as_completed(futures_network_corr), total=len(tasks_for_network_corr), desc="Calculating network maps"):
432          net_name_for_key, corr_img_result = future_net.result()
433          # The worker returns the processed network name as key, and the image.
434          outdict[net_name_for_key] = corr_img_result
435
436  gc.collect() 
437
438  # 17. Final A and A_wide matrices (Network x Network correlation of correlation maps)
439  # This section computes correlations between network maps.
440  num_nets = len(networks_unique)
441  A = np.zeros( ( num_nets , num_nets ), dtype=np.float32 ) # Explicit dtype
442  A_wide = np.zeros( ( 1, num_nets * num_nets ), dtype=np.float32 )
443  newnames=[]
444  newnames_wide=[]
445  
446  for i, net_name_i_raw in enumerate(networks_unique): # Iterate through rows (network i)
447      netnamei = re.sub( " ", "", net_name_i_raw )
448      netnamei = re.sub( "-", "", netnamei )
449      netnamei = re.sub( "/", "", netnamei )
450      newnames.append( netnamei )
451
452      dfn_img_i = _get_network_mask_image_internal(net_name_i_raw, bmask, ptImg_labels, pts2bold_df, powers)
453
454      for j, net_name_j_raw in enumerate(networks_unique): # Iterate through columns (network j)
455          netnamej = re.sub( " ", "", net_name_j_raw )
456          netnamej = re.sub( "-", "", netnamej )
457          netnamej = re.sub( "/", "", netnamej )
458          newnames_wide.append( netnamei + "_2_" + netnamej )
459          
460          current_net_corr_image = outdict.get(netnamej, None) # Get the correlation image for netnamej
461          
462          if dfn_img_i is not None and dfn_img_i.max() >= 1 and current_net_corr_image is not None:
463             # Mean of network j's correlation image within network i's mask.
464             # The correlation image is 3D, so extract values with plain numpy masking.
465             correlation_values_in_region = current_net_corr_image.numpy()[dfn_img_i.numpy() > 0.5]
466             A[i,j] = np.mean(correlation_values_in_region)
467          else:
468             A[i,j] = 0
469
470          A_wide[0, (i * num_nets) + j] = A[i,j] 
471
472  # Store final correlation matrices and other data in outdict
473  outdict['corr'] = pd.DataFrame(A, index=newnames, columns=newnames) # Use newnames for both index and columns
474  outdict['corr']['networks'] = newnames # Add networks column explicitly
475  outdict['corr_wide'] = pd.DataFrame(A_wide, columns=newnames_wide) 
476  outdict['fmri_template'] = fmri_template
477  outdict['brainmask'] = bmask
478  outdict['gmmask'] = gmseg_bold_space # Use the mapped GM mask
479
480  # ALFF/fALFF results
481  outdict['alff'] = myfalff_results['alff']
482  outdict['falff'] = myfalff_results['falff']
483  outdict['alff_mean'] = (myfalff_results['alff'].numpy()[myfalff_results['alff'].numpy()!=0]).mean()
484  outdict['alff_sd'] = (myfalff_results['alff'].numpy()[myfalff_results['alff'].numpy()!=0]).std()
485  outdict['falff_mean'] = (myfalff_results['falff'].numpy()[myfalff_results['falff'].numpy()!=0]).mean()
486  outdict['falff_sd'] = (myfalff_results['falff'].numpy()[myfalff_results['falff'].numpy()!=0]).std()
487
488  # 18. Point-wise ALFF / fALFF / PerAF Extraction
489  # Loop over each ROI definition and record the mean ALFF, fALFF and PerAF value per ROI.
490  # The loop is sequential; it could be parallelized with ThreadPoolExecutor if the ROI count grows large.
491  roi_info_df = pts2bold_df # ROI definition table chosen above (Power or Yeo)
492  for k_idx in tqdm(range(len(roi_info_df)), desc="Calculating point-wise ALFF/fALFF/PerAF"):
493    anatname_raw = roi_info_df['AAL'][k_idx]
494    anatname = re.sub("_","", str(anatname_raw)) if isinstance(anatname_raw, (str, np.str_)) else 'Unk'
495    
496    kk_prefix = ""
497    # Original logic for kk from user's Rmd.
498    if powers: # Power's ROI numbering can be simple k_idx
499        kk_prefix = f"{k_idx:0>3}"+"_"
500    else: # Yeo nodes are often paired or have complex numbering from original CSV
501        yeo_roi_id_from_df = roi_info_df['ROI'][k_idx] # Get the actual ROI ID for this row
502        nPoints_divisor = int(nPoints/2 + 1e-9) if nPoints > 0 else 1 # Avoid div by zero
503        kk_prefix = f"{yeo_roi_id_from_df % nPoints_divisor:0>3}"+"_"
504
505    fname='falffPoint'+kk_prefix+anatname
506    aname='alffPoint'+kk_prefix+anatname
507    pname='perafPoint'+kk_prefix+anatname
508
509    current_roi_label_value = roi_info_df['ROI'][k_idx] # Get ROI ID
510    
511    # Create the specific ROI mask image (same logic from network mapping)
512    if powers:
513        loc_for_roi = roi_info_df.iloc[[k_idx],:3].values 
514        localsel_img = ants.make_points_image(loc_for_roi, bmask, radius=1).threshold_image( 1, 1e9 )
515    else: 
516        localsel_img = ants.threshold_image( ptImg_labels , current_roi_label_value , current_roi_label_value ) * bmask
517
518    # Extract mean values within the ROI mask
519    if localsel_img.max() >= 1 : # Check if ROI mask is non-empty
520        roi_voxels = localsel_img.numpy() > 0.5 # these maps are 3D, so mask them with numpy indexing
521        outdict[fname] = np.mean(myfalff_results['falff'].numpy()[roi_voxels])
522        outdict[aname] = np.mean(myfalff_results['alff'].numpy()[roi_voxels])
523        outdict[pname] = np.mean(peraf_image.numpy()[roi_voxels])
524    else: # If ROI mask is empty
525        outdict[fname]=math.nan
526        outdict[aname]=math.nan
527        outdict[pname]=math.nan
528
529  # 19. Final Output Structuring and Cleanup
530  rsfNuisance_df = pd.DataFrame( nuisance_regressors_final ) # Nuisance regressors actually used in the regression
531  
532  # Clean up the temporary registration directory created above
533  if os.path.exists(output_directory): 
534    shutil.rmtree(output_directory, ignore_errors=True )
535
536  # Combine network-specific maps for composite network maps (if applicable)
537  # This part relies on networks being correctly identified and stored in outdict.
538  if not powers: # Yeo-specific network combinations (Default Mode, Visual)
539      # Use .get() to avoid KeyError if a network was not populated by parallel task
540      dfnsum_d = outdict.get('DefaultA', None)
541      if dfnsum_d is not None:
542          dfnsum_d = dfnsum_d + outdict.get('DefaultB', dfnsum_d * 0) # Add empty image if missing
543          dfnsum_d = dfnsum_d + outdict.get('DefaultC', dfnsum_d * 0)
544          outdict['DefaultMode'] = dfnsum_d
545
546      dfnsum_v = outdict.get('VisCent', None)
547      if dfnsum_v is not None:
548          dfnsum_v = dfnsum_v + outdict.get('VisPeri', dfnsum_v * 0)
549          outdict['Visual'] = dfnsum_v
550
551  # Additional output metrics
552  nonbrainmask = ants.iMath( bmask, "MD",2) - bmask 
553  trimmask = ants.iMath( bmask, "ME",2) 
554  edgemask = ants.iMath( bmask, "ME",1) - trimmask 
555
556  outdict['motion_corrected'] = corrmo['motion_corrected'] 
557  outdict['nuisance'] = rsfNuisance_df 
558  outdict['PerAF'] = peraf_image 
559  outdict['tsnr'] = mytsnr 
560  outdict['ssnr'] = slice_snr( corrmo['motion_corrected'], csfAndWM_bold_space, gmseg_bold_space ) 
561  outdict['dvars'] = dvars( corrmo['motion_corrected'], gmseg_bold_space ) 
562
563  # Store pipeline parameters and summary statistics
564  outdict['bandpass_freq_0']=f[0]
565  outdict['bandpass_freq_1']=f[1]
566  outdict['censor']=int(censor)
567  outdict['spatial_smoothing']=spa
568  outdict['outlier_threshold']=outlier_threshold
569  outdict['FD_threshold']=FD_threshold
570  outdict['high_motion_count'] = high_motion_count
571  outdict['high_motion_pct'] = high_motion_pct
572  outdict['despiking_count_summary'] = despiking_count_summary
573  outdict['FD_max'] = corrmo['FD'].max()
574  outdict['FD_mean'] = corrmo['FD'].mean()
575  outdict['FD_sd'] = corrmo['FD'].std()
576  outdict['bold_evr'] = patch_eigenvalue_ratio( und, 512, [16,16,16], evdepth = 0.9, mask = bmask ) 
577  outdict['n_outliers'] = len(censoring_indices) # Total number of flagged (censored) volumes
578  outdict['nc_wm'] = int(n_compcor_wm)
579  outdict['nc_csf'] = int(n_compcor_csf)
580  outdict['minutes_original_data'] = ( tr * fmri.shape[3] ) / 60.0 
581  outdict['minutes_censored_data'] = ( tr * simg_cleaned.shape[3] ) / 60.0 
582
583  return convert_np_in_dict( outdict )
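
Example usage (a minimal sketch): the file names below are hypothetical, and the inputs are assumed to follow the conventions this function expects (a 4D BOLD series, a brain-extracted T1, and a tissue segmentation in which 1=CSF, 2/4=GM, 3=WM).

import ants
import antspymm

fmri    = ants.image_read("sub-01_task-rest_bold.nii.gz")  # hypothetical 4D BOLD series
t1      = ants.image_read("sub-01_T1w_brain.nii.gz")       # hypothetical brain-extracted T1
t1seg   = ants.image_read("sub-01_T1w_tissue_seg.nii.gz")  # hypothetical tissue segmentation
boldref = ants.get_average_of_timeseries(fmri)             # simple fMRI reference/template

rsf = antspymm.resting_state_fmri_networks(
    fmri, boldref, t1, t1seg,
    f=[0.03, 0.08], nc=5, outlier_threshold=0.25,
    powers=False, upsample=3.0, verbose=True)

rsf['fullCorrMat']   # ROI x ROI correlation matrix (pandas DataFrame)
rsf['DefaultMode']   # voxel-wise default-mode network correlation image
rsf['alff_mean']     # scalar summary measures are also included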
def resting_state_fmri_networks( fmri, fmri_template, t1, t1segmentation, f=[0.03, 0.08], FD_threshold=5.0, spa=None, spt=None, nc=5, outlier_threshold=0.25, ica_components=0, impute=True, censor=True, despike=2.5, motion_as_nuisance=True, powers=False, upsample=3.0, clean_tmp=None, paramset='unset', verbose=False):
  3def resting_state_fmri_networks( fmri, fmri_template, t1, t1segmentation,
  4    f=[0.03, 0.08],
  5    FD_threshold=5.0,
  6    spa = None,
  7    spt = None,
  8    nc = 5,
  9    outlier_threshold=0.250,
 10    ica_components = 0,
 11    impute = True,
 12    censor = True,
 13    despike = 2.5,
 14    motion_as_nuisance = True,
 15    powers = False,
 16    upsample = 3.0,
 17    clean_tmp = None,
 18    paramset='unset', # Placeholder for parameter set name
 19    verbose=False ):
 20  """
 21  Compute resting state network correlation maps based on the J Power labels.
 22  This includes fMRI preprocessing steps and subsequent network analysis.
 23
 24  Arguments are detailed in the function signature.
 25
 26  Returns
 27  ---------
 28  a dictionary containing the derived network maps
 29  """
 30  warnings.filterwarnings("ignore", category=UserWarning) # Suppress UserWarnings
 31  global debug # Original code uses 'debug' without defining it.
 32  debug = False # Initialize debug to False, can be set to True if needed for internal checks.
 33
 34  # --- Local Helper Functions (Defined within the main function scope) ---
 35  # These are adapted from the original monolithic function.
 36
 37  # Note: `temporal_derivative_same_shape`, `compute_tSTD`, `get_compcor_matrix` in original code
 38  # are defined as nested functions. For clarity and reusability (and to avoid re-definition overhead),
 39  # if they are general utilities, they should be top-level. Given the constraint,
 40  # I'll embed them here as they were, but comment on good practice.
 41
 42  # Original: `temporal_derivative_same_shape`
 43  def _calculate_temporal_derivative(array): 
 44    # This is a local version of `make_time_series_derivative` used in original
 45    derivative = np.diff(array, axis=0)
 46    zeros_row = np.zeros((1, array.shape[1]))
 47    return np.vstack((zeros_row, derivative ))
 48
 49  # Original: `compute_tSTD`
 50  def _compute_tsnr_std_mask_threshold(M_matrix, quantile, x_val=0, axis_val=0):
 51    stdM = np.std(M_matrix, axis=axis_val)
 52    stdM[stdM == 0] = x_val
 53    stdM[np.isnan(stdM)] = x_val
 54    threshold_val = np.percentile(stdM, round(quantile * 100))
 55    return {'tSTD': stdM, 'threshold_std': threshold_val}
 56
 57  # Original: `get_compcor_matrix` - this is similar to ants.compcor itself, may be redundant
 58  # Given the presence of `compcor` later, this helper might be for a very specific internal purpose.
 59  def _get_compcor_matrix_helper(boldImage_antspy, mask_antspy, quantile_val):
 60    if mask_antspy is None:
 61        temp = ants.slice_image(boldImage_antspy, axis=boldImage_antspy.dimension - 1, idx=0)
 62        mask_antspy = ants.get_mask(temp)
 63    imagematrix = ants.timeseries_to_matrix(boldImage_antspy, mask_antspy)
 64    temp = _compute_tsnr_std_mask_threshold(imagematrix, quantile_val, 0)
 65    tsnrmask_img = ants.make_image(mask_antspy, temp['tSTD']) # Ensure mask is ants image
 66    tsnrmask_img = ants.threshold_image(tsnrmask_img, temp['threshold_std'], temp['tSTD'].max())
 67    M = ants.timeseries_to_matrix(boldImage_antspy, tsnrmask_img)
 68    return M
 69
 70  # Original: `find_indices`
 71  def _find_indices_gt_value(lst, value):
 72    return [index for index, element in enumerate(lst) if element > value]
 73
 74  # Original: `mean_of_list`
 75  def _calculate_mean_of_list(lst):
 76    if not lst: return 0 
 77    return sum(lst) / len(lst)
 78
 79
 80  # --- Pipeline Execution Starts Here ---
 81
 82  # 0. Initial Setup and Temporary Directories
 83  if clean_tmp is not None:
 84    clean_tmp_directory( age_hours = clean_tmp )
 85
 86  # Ensure nc is an integer for n_compcor values, or float for variance threshold
 87  nc_raw_value = nc # Use a new variable to keep original nc intact
 88  if isinstance(nc_raw_value, (int, float)) and nc_raw_value > 1:
 89    nc_compcor_val = int(nc_raw_value)
 90  else:
 91    nc_variance_thresh = float(nc_raw_value)
 92
 93  output_directory = tempfile.mkdtemp()
 94  output_directory_w = os.path.join(output_directory, "ts_t1_reg")
 95  os.makedirs(output_directory_w,exist_ok=True)
 96  ofnt1tx = tempfile.NamedTemporaryFile(delete=False,suffix='t1_deformation',dir=output_directory_w).name
 97  
 98  if verbose: print("--- Starting Resting State fMRI Network Analysis ---")
 99
100  # 1. Image Resolution Harmonization (Upsampling fmri_template)
101  if upsample > 0.0:
102      fmri_spacing = ants.get_spacing( fmri )
103      min_fmri_spacing_xyz = min(fmri_spacing[0:3])
104      target_isotropic_spacing = min(upsample, min_fmri_spacing_xyz)
105      
106      # Only resample if target is different from current image's min spacing
107      if not np.allclose( (target_isotropic_spacing,)*3, fmri_template.spacing[:3] ): # Check if already at target isotropic
108          if verbose:
109              print(f"1.1. Resampling fmri_template to {target_isotropic_spacing} mm isotropic (Original: {fmri_template.spacing[:3]} mm).")
110              # If current spacing is coarser than desired upsample, we use closest possible
111              if target_isotropic_spacing < min_fmri_spacing_xyz: 
112                  warnings.warn(f"Upsampling fmri_template from {min_fmri_spacing_xyz}mm. Target {target_isotropic_spacing}mm. ")
113
114          fmri_template = ants.resample_image( fmri_template, (target_isotropic_spacing, target_isotropic_spacing, target_isotropic_spacing), interp_type=0 )
115      else:
116          if verbose: print("1.1. fmri_template already at target isotropic resolution. Skipping resampling.")
117  else:
118      if verbose: print("1.1. Upsampling disabled. Skipping fmri_template resampling.")
119
120  # 2. Initial Parameter Setup (Smoothing and Network Resources)
121  # Dynamic smoothing parameter calculation if not provided.
122  fmri_full_spacing = list( ants.get_spacing( fmri ) )
123  if spa is None:
124    spa = _calculate_mean_of_list( fmri_full_spacing[0:3] ) * 1.0 # Spatial smoothing sigma
125  if spt is None:
126    spt = fmri_full_spacing[3] * 0.5 # Temporal smoothing sigma
127      
128  # Load network parcellation data.
129  dfnname='DefaultMode' # Default network for output dictionary (not processed yet)
130  network='powers' if powers else 'yeo_17_500_2023' # Name of network scheme
131  
132  if verbose: print(f"2.1. Loading network parcellation data: {network}.")
133  powers_areal_mni_itk = pd.read_csv( get_data(network, target_extension=".csv"))
134  
135  if powers:
136      ch2_img = mm_read( ants.get_ants_data( "ch2" ) ) # Example of core ANTs data
137  else:
138      ch2_img = mm_read( get_data( "PPMI_template0_brain", target_extension='.nii.gz' ) ) # A template image for registration
139
140
141  # 3. Brain Extraction and Masking (Initial)
142  if verbose: print("3.1. Performing brain extraction and initial masking.")
143  fmri = ants.iMath( fmri, 'Normalize' ) # Normalize intensity
144  # Use antspynet for robust brain extraction
145  bmask = antspynet.brain_extraction( fmri_template, 'bold' ).threshold_image(0.5,1).iMath("FillHoles")
146
147
148  # 4. Motion Correction
149  if verbose: print("4.1. Beginning fMRI motion correction.")
150  corrmo = ants.motion_correction(
151    fmri, fmri_template,
152    type_of_transform=type_of_transform, # e.g., "antsRegistrationSyNQuickRepro[r]"
153    total_sigma=0.5,
154    fdOffset=2.0,
155    trim = 8,
156    output_directory=None, # Process in memory
157    verbose=verbose,
158    syn_metric='CC', # Mutual information for registration
159    syn_sampling=2,
160    reg_iterations=[40,20,5], # Iterations for registration optimization
161    return_numpy_motion_parameters=True )
162  
163  if verbose:
164      print("4.2. End fMRI motion correction.")
165      print(f"--- Maximum observed motion (FD): {corrmo['FD'].max():.2f} mm.")
166
167
168  # 5. Despiking (AFNI-style)
169  if despike > 0.0:
170      if verbose: print(f"5.1. Applying voxel-wise despiking with threshold: {despike}.")
171      # despike_time_series_afni is called
172      corrmo['motion_corrected'], despiking_count_per_volume = despike_time_series_afni( corrmo['motion_corrected'], c1=despike )
173  
174  despiking_count_summary = despiking_count_per_volume.sum() / np.prod( corrmo['motion_corrected'].shape[:3] )
175  high_motion_count = (_find_indices_gt_value(corrmo['FD'], FD_threshold) ) # Indices with high motion
176  high_motion_pct = len(high_motion_count) / fmri.shape[3]
177  
178  # 6. TSNR-based Mask Refinement
179  if verbose: print("6.1. Refining brain mask based on TSNR.")
180  mytsnr = tsnr( corrmo['motion_corrected'], bmask )
181  mytsnrThresh = np.quantile( mytsnr.numpy(), 0.995 ) # 0.995 quantile of TSNR values
182  tsnrmask = ants.threshold_image( mytsnr, 0, mytsnrThresh ).morphology("close",2)
183  bmask = bmask * tsnrmask # Final brain mask after TSNR filtering
184
185  # 7. Anatomical Segmentation Mapping to BOLD Space
186  if verbose: print("7.1. Registering T1 to BOLD space and mapping segmentations.")
187  und = fmri_template * bmask # Underscored image (template with brain mask)
188  t1reg = ants.registration( und, t1, # Register BOLD (und) to T1 image
189     "antsRegistrationSyNQuickRepro[s]", outprefix=ofnt1tx ) # Ouptut prefix for transforms file
190  
191  # Process and map Gray Matter segmentation to BOLD space
192  gmseg = ants.threshold_image( t1segmentation, 2, 2 ) 
193  gmseg = gmseg + ants.threshold_image( t1segmentation, 4, 4 ) # Combine specific GM labels
194  gmseg = ants.threshold_image( gmseg, 1, 4 ) # Binarize GM
195  gmseg = ants.iMath( gmseg, 'MD', 1 ) # Morphological opening on GM mask
196  gmseg_bold_space = ants.apply_transforms( und, gmseg,
197    t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
198
199  # Process and map CSF + WM segmentations to BOLD space for nuisance regression
200  csf_t1_mask = ants.threshold_image( t1segmentation, 1, 1 ) 
201  wm_t1_mask = ants.threshold_image( t1segmentation, 3, 3 ).morphology("erode",1) # Eroded WM for robustness
202  
203  csf_bold_space = ants.apply_transforms( und, csf_t1_mask, t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
204  wm_bold_space = ants.apply_transforms( und, wm_t1_mask, t1reg['fwdtransforms'], interpolator = 'nearestNeighbor' ) * bmask
205  csfAndWM_bold_space = (csf_bold_space + wm_bold_space).threshold_image(0.5,1) # Combined CSF+WM mask
206
207  # 8. Network Atlas Mapping to BOLD Space
208  if verbose: print("8.1. Mapping network atlas/parcellation to BOLD space.")
209  t1_resampled_for_treg = ants.resample_image(t1, [1.0,1.0,1.0], interp_type=0) # T1 resampled for consistent registration
210  treg = ants.registration( t1_resampled_for_treg, ch2_img, "antsRegistrationSyNQuickRepro[s]" ) # Register T1 to ANTs template (ch2_img)
211
212  if powers: # Power's nodes (point-based definition)
213    concat_point_transforms = treg['invtransforms'] + t1reg['invtransforms'] # Map from MNI (ch2) to BOLD
214    pts2bold_df_coords_original = ants.apply_transforms_to_points( 3, powers_areal_mni_itk, concat_point_transforms,
215        whichtoinvert = ( True, False, True, False ) )
216    locations = pts2bold_df_coords_original.iloc[:,:3].values
217    ptImg_labels = ants.make_points_image( locations, bmask, radius = 2 ) # Creates label image from points
218    pts2bold_df = powers_areal_mni_itk # Dataframe of ROI names and MNI coords
219    
220  else: # Yeo 2023 nodes (volumetric parcellation)
221    concat_vol_transforms = t1reg['fwdtransforms'] + treg['fwdtransforms'] # Map ANTs template parcellation to BOLD
222    yeo_seg_fn = get_data('ppmi_template_500Parcels_Yeo2011_17Networks_2023_homotopic', target_extension=".nii.gz")
223    yeo_seg_img = ants.image_read( yeo_seg_fn )
224    ptImg_labels = ants.apply_transforms( und, yeo_seg_img, concat_vol_transforms, interpolator='nearestNeighbor' ) * bmask
225    pts2bold_df = powers_areal_mni_itk # Dataframe of ROI names and cluster info
226
227  # 9. Spatial-Temporal Smoothing
228  # simg is the smoothed BOLD timeseries used throughout the rest of pipeline
229  tr = ants.get_spacing( corrmo['motion_corrected'] )[3] # Repetition Time (TR)
230  smth_tuple = ( spa, spa, spa, spt ) # Smoothing sigmas (spatial + temporal)
231  simg = ants.smooth_image( corrmo['motion_corrected'], smth_tuple, sigma_in_physical_coordinates = True )
232
233
234  # 10. Volume Censoring (Flagging outlier time points)
235  censoring_indices = _find_indices_gt_value(corrmo['FD'], FD_threshold) # Indices of high motion volumes
236  if outlier_threshold < 1.0 and outlier_threshold > 0.0:
237    if verbose: print(f"10.1. Identifying additional outlier time points using outlier_threshold: {outlier_threshold}.")
238    # loop_timeseries_censoring identifies outliers (e.g., from DVARS)
239    # The output fmrimotcorr represents input, hlinds2 are indices of new outliers
240    _dummy_fmri_data_after_loop_censor, hlinds2_outliers = loop_timeseries_censoring( corrmo['motion_corrected'], 
241      threshold=outlier_threshold, verbose=verbose )
242    censoring_indices.extend( hlinds2_outliers )
243    # _dummy_fmri_data_after_loop_censor is discarded as `simg` is used later after smoothing
244    
245  # Final set of unique censoring indices, sorted for deterministic removal
246  censoring_indices = list(set(censoring_indices)) 
247  censoring_indices.sort() 
248
249  # 11. Nuisance Regressor Generation
250  if verbose: print("11.1. Generating nuisance regressors.")
251  
252  # Global Signal
253  global_bold_matrix = ants.timeseries_to_matrix( simg, bmask )
254  globalsignal = np.nanmean( global_bold_matrix, axis = 1 ) 
255  del global_bold_matrix # Free memory
256
257  # CompCor Components (from CSF and WM masks)
258  compcor_quantile_val=0.50 # Quantile for tissue definition within CompCor
259  n_compcor_csf = nc_wm = nc_csf = None # Initialize variables
260
261  # Determine number of CompCor components (fixed or by variance explained)
262  if nc <= 0: # User provided a variance threshold (e.g., 0.99)
263    if verbose: print(f"11.2. Estimating optimal CompCor components based on variance threshold: {nc_variance_thresh}.")
264    csf_matrix_for_pca = ants.timeseries_to_matrix(simg, csf_bold_space)
265    n_compcor_csf = int(estimate_optimal_pca_components(csf_matrix_for_pca, variance_threshold=nc_variance_thresh))
266    wm_matrix_for_pca = ants.timeseries_to_matrix(simg, wm_bold_space)
267    n_compcor_wm = int(estimate_optimal_pca_components(wm_matrix_for_pca, variance_threshold=nc_variance_thresh))
268  else: # User provided a fixed number of components (e.g., 5)
269    n_compcor_csf = int(nc_raw_value)
270    n_compcor_wm = int(nc_raw_value)
271
272  if verbose: print(f"11.3. Including CompCor components: CSF={n_compcor_csf}, WM={n_compcor_wm}.")
273  mycompcor_csf_results = compcor( simg, ncompcor=n_compcor_csf, quantile=compcor_quantile_val, 
274                                   mask = csf_bold_space, filter_type='polynomial', degree=2 )
275  mycompcor_wm_results = compcor( simg, ncompcor=n_compcor_wm, quantile=compcor_quantile_val, 
276                                 mask = wm_bold_space, filter_type='polynomial', degree=2 )
277  nuisance_regressors = np.c_[ mycompcor_csf_results[ 'components' ], mycompcor_wm_results[ 'components' ] ]
278
279  # Motion Parameters and their Derivatives
280  if motion_as_nuisance:
281      if verbose: print("11.4. Adding motion parameters and derivatives as nuisance regressors.")
282      # make_time_series_derivative is assumed to be deterministic.
283      motion_derivs = make_time_series_derivative( corrmo['motion_parameters'] ) 
284      nuisance_regressors = np.c_[ nuisance_regressors, corrmo['motion_parameters'], motion_derivs ]
285
286  # ICA Components (if specified)
287  if ica_components > 0:
288    if verbose: print(f"11.5. Including {ica_components} ICA components as nuisance regressors.")
289    # FastICA is stochastic by default; random_state=42 ensures reproducibility.
290    ica_model = FastICA(n_components=ica_components, max_iter=10000, tol=0.001, random_state=42 )
291    csfAndWM_matrix_for_ica = ants.timeseries_to_matrix( simg, csfAndWM_bold_space ) 
292    ica_nuisance_components = ica_model.fit_transform(csfAndWM_matrix_for_ica)
293    nuisance_regressors = np.c_[ nuisance_regressors, ica_nuisance_components ] 
294    del csfAndWM_matrix_for_ica # Free memory
295
296  # Combine with global signal
297  nuisance_regressors = np.c_[ nuisance_regressors, globalsignal ]
298
299  # 12. Bandpass Filtering (optional, applied to nuisance and BOLD)
300  if f[0] > 0 and f[1] < 1.0: 
301    if verbose: print(f"12.1. Applying bandpass filter [{f[0]} - {f[1]}] Hz.")
302    # Ensure bandpass_filter_matrix is deterministic.
303    nuisance_regressors = bandpass_filter_matrix( nuisance_regressors, tr = tr, lowf=f[0], highf=f[1] ) 
304    
305    # Apply bandpass filter to the BOLD signal directly
306    simg_matrix_original = ants.timeseries_to_matrix( simg, bmask )
307    simg_matrix_bandpassed = bandpass_filter_matrix( simg_matrix_original, tr = tr, lowf=f[0], highf=f[1] )
308    simg = ants.matrix_to_timeseries(simg, simg_matrix_bandpassed, bmask)
309    gc.collect() 
310
311  # 13. Nuisance Regression and Censoring Application
312  if verbose: print("13.1. Regressing nuisance parameters and applying censoring.")
313
314  # Select data and nuisance based on censoring indices
315  if len( censoring_indices ) > 0 : 
316    if censor: 
317        nuisance_regressors_final = remove_elements_from_numpy_array( nuisance_regressors, censoring_indices )
318        simg_final = remove_volumes_from_timeseries( simg, censoring_indices )
319    else: 
320        nuisance_regressors_final = nuisance_regressors
321        simg_final = simg
322  else: 
323      nuisance_regressors_final = nuisance_regressors
324      simg_final = simg
325
326  # Ensure simg_final is not None (if all volumes were censored)
327  if simg_final is None:
328      warnings.warn("All volumes censored. Cannot proceed with regression or network analysis.")
329      return None # Or raise an error
330  
331  # Perform nuisance regression (OLS for each voxel time series)
332  fmri_cleaned_matrix = regress_components( ants.timeseries_to_matrix( simg_final, bmask ), nuisance_regressors_final )
333  simg_cleaned = ants.matrix_to_timeseries(simg_final, fmri_cleaned_matrix, bmask) # Final cleaned BOLD time series
334  gc.collect() 
335
336
337  # 14. ALFF / fALFF and PerAF Calculation
338  if verbose: print( "14.1. Calculating ALFF / fALFF and PerAF images." )
339  myfalff_results = alff_image( simg_cleaned, bmask, flo=f[0], fhi=f[1], nuisance=nuisance_regressors_final ) 
340  peraf_image = PerAF( simg_cleaned, bmask )
341
342  # 15. ROI Time Series and Full Correlation Matrix Calculation
343  if verbose: print("15.1. Calculating ROI time series and full correlation matrix.")
344
345  # Extract unique ROI labels from the mapped parcellation image
346  unique_roi_labels_found_in_mask = np.sort(np.unique(ptImg_labels.numpy()[bmask.numpy() > 0]))
347  unique_roi_labels_found_in_mask = unique_roi_labels_found_in_mask[unique_roi_labels_found_in_mask > 0] # Exclude 0 (background)
348
349  # Check nPoints from original code, assuming it corresponds to largest ROI ID or count of ROIs.
350  nPoints = int(powers_areal_mni_itk['ROI'].max()) # Total unique ROIs from definition
351  # If label image has higher max label (e.g. from background or different scheme), adjust nPoints
352  if ptImg_labels.max() > nPoints and ants.get_total_volume(ptImg_labels) > 0: 
353      nPoints = int(ptImg_labels.max())
354  
355  mean_roi_matrix = np.zeros([simg_cleaned.shape[3], nPoints], dtype=np.float32) # Time x ROIs (potentially sparse)
356  roi_names = [] 
357
358  # --- Start Parallel Section 1: Extract Mean ROI Time Series ---
359  # Prepare for parallel meanROI calculation
360  # Store relevant data in gc for the worker access using global GC object
361  setattr(gc, 'simg_cleaned_numpy_ref', ants.timeseries_to_matrix(simg_cleaned, bmask) ) # Pass as matrix for direct use
362  setattr(gc, 'ptImg_labels_numpy_ref', ptImg_labels.numpy())
363  setattr(gc, 'bmask_numpy_ref', bmask.numpy())
364
365  labels_per_subprocess_chunk = max(1, len(unique_roi_labels_found_in_mask) // num_threads // 4) # Adjust chunk size
366  chunks_of_roi_labels = []
367  for i in range(0, len(unique_roi_labels_found_in_mask), labels_per_subprocess_chunk):
368      chunks_of_roi_labels.append(unique_roi_labels_found_in_mask[i : min(i + labels_per_subprocess_chunk, len(unique_roi_labels_found_in_mask))])
369
370  # Execute tasks for mean_roi_matrix in parallel
371  with ThreadPoolExecutor(max_workers=num_threads) as executor:
372      futures_mean_roi = {executor.submit(_extract_mean_roi_chunk_worker, label_chunk, gc.simg_cleaned_numpy_ref, gc.ptImg_labels_numpy_ref, gc.bmask_numpy_ref): label_chunk for label_chunk in chunks_of_roi_labels}
373      
374      # Collect results in submission order for deterministic write (though order doesn't strictly matter for full matrix)
375      for future_chunk_of_labels in tqdm(futures_mean_roi.keys(), total=len(chunks_of_roi_labels), desc="Extracting ROI Mean Timeseries"):
376          start_label_value_in_chunk, mean_roi_chunk_result = futures_mean_roi[future_chunk_of_labels].result()
377          
378          # Find where to place this chunk in the mean_roi_matrix based on original label values
379          # This requires that `mean_roi_matrix` supports this specific mapping.
380          # The range `start_col_idx : start_col_idx + mean_roi_chunk_result.shape[1]` implicitly assumes
381          # that the column index in `mean_roi_matrix` for `roi_code` is `roi_code - 1` if ROIs are 1-indexed.
382          # Or, if `nPoints` is the actual count, then you need a mapping from `roi_code` to sorted `unique_roi_labels_found_in_mask`'s index (`np.where`).
383          
384          # Simplest approach for dense matrix: iterate unique_roi_labels_found_in_mask
385          # and ensure target index is handled.
386          idx_in_unique_labels = np.where(unique_roi_labels_found_in_mask == start_label_value_in_chunk)[0][0]
387          mean_roi_matrix[:, idx_in_unique_labels : idx_in_unique_labels + mean_roi_chunk_result.shape[1]] = mean_roi_chunk_result
388  
389  gc.collect() # Aggressive garbage collection after parallel part
390
391
392  # Reconstruct roi_names for correlation matrix columns (based on sorted unique labels)
393  for roi_code in unique_roi_labels_found_in_mask:
394      # Find original row in powers_areal_mni_itk for this ROI code
395      matching_row = powers_areal_mni_itk[powers_areal_mni_itk['ROI'] == roi_code]
396      if not matching_row.empty:
397          netLabel = re.sub( " ", "", matching_row.iloc[0]['SystemName'])
398          netLabel = re.sub( "-", "", netLabel )
399          netLabel = re.sub( "/", "", netLabel )
400          roi_names.append( f"ROI{roi_code}_{netLabel}" )
401      else:
402          roi_names.append(f"ROI{roi_code}_Unknown") # Fallback for unmatched ROIs
403
404  # Calculate full correlation matrix
405  corMat = np.corrcoef(mean_roi_matrix, rowvar=False)
406  outputMat = pd.DataFrame(corMat)
407  outputMat.columns = roi_names
408  outputMat['ROIs'] = roi_names
409  
410  outdict = {} # Final output dictionary structure
411  outdict['paramset'] = paramset
412  outdict['upsampling'] = upsample
413  outdict['coords'] = network # From 'network' variable
414  outdict['dfnname']=dfnname # From 'dfnname' variable
415  outdict['meanBold'] = und # Mean BOLD image (fmri_template * bmask)
416  outdict['fullCorrMat'] = outputMat
417
418  # 16. Network-level Correlation Maps Calculation
419  # This section generates a correlation image for each network, against every brain voxel.
420  networks_unique = powers_areal_mni_itk['SystemName'].unique()
421  
422  # Prepare arguments for network correlation worker (pass network_name, not index or label)
423  # The worker will retrieve all ROIs belonging to that network
424  tasks_for_network_corr = list(networks_unique) # List of unique network names
425
426  if verbose: print(f"16.1. Calculating {len(tasks_for_network_corr)} network correlation maps in parallel.")
427
428  with ThreadPoolExecutor(max_workers=num_threads) as executor:
429      futures_network_corr = {executor.submit(_calculate_network_correlation_map_worker, net_name): net_name for net_name in tasks_for_network_corr}
430      
431      # Collect results as they complete (as_completed) and store in outdict.
432      # Order does not affect the final dictionary structure.
433      for future_net in tqdm(as_completed(futures_network_corr), total=len(tasks_for_network_corr), desc="Calculating network maps"):
434          net_name_for_key, corr_img_result = future_net.result()
435          # The worker returns the processed network name as key, and the image.
436          outdict[net_name_for_key] = corr_img_result
437
438  gc.collect() 
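
  # Illustrative sketch of a per-network worker; the actual
  # _calculate_network_correlation_map_worker is defined elsewhere in this module. It
  # assumes the shared refs stored on gc above, plus the same column-ordering assumption
  # as the ROI extraction sketch, and returns (sanitized network name, correlation image).
  def _example_network_corr_worker(net_name_raw):
    net_rois = powers_areal_mni_itk[powers_areal_mni_itk['SystemName'] == net_name_raw]['ROI'].values
    bold_np = gc.simg_cleaned_numpy_ref                        # (T, n_mask_voxels)
    labels_in_mask = gc.ptImg_labels_numpy_ref[gc.bmask_numpy_ref >= 0.5]
    net_ts = bold_np[:, np.isin(labels_in_mask, net_rois)].mean(axis=1)  # network-average signal
    bold_z = (bold_np - bold_np.mean(axis=0)) / (bold_np.std(axis=0) + 1e-8)
    net_z = (net_ts - net_ts.mean()) / (net_ts.std() + 1e-8)
    voxelwise_r = (bold_z * net_z[:, None]).mean(axis=0)       # Pearson r of network signal vs each voxel
    corr_img = ants.make_image(bmask, voxelwise_r)             # write r values back into the brain mask
    return re.sub(r"[ \-/]", "", net_name_raw), corr_img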
439
440  # 17. Final A and A_wide matrices (network-by-network summary)
441  # A[i, j] is the mean of network j's voxelwise correlation map within network i's region.
442  num_nets = len(networks_unique)
443  A = np.zeros( ( num_nets , num_nets ), dtype=np.float32 ) # Explicit dtype
444  A_wide = np.zeros( ( 1, num_nets * num_nets ), dtype=np.float32 )
445  newnames=[]
446  newnames_wide=[]
447  
448  for i, net_name_i_raw in enumerate(networks_unique): # Iterate through rows (network i)
449      netnamei = re.sub( " ", "", net_name_i_raw )
450      netnamei = re.sub( "-", "", netnamei )
451      netnamei = re.sub( "/", "", netnamei )
452      newnames.append( netnamei )
453
454      dfn_img_i = _get_network_mask_image_internal(net_name_i_raw, gc.bmask_ref, gc.ptImg_labels_ref, gc.pts2bold_df_ref, gc.powers_flag_ref)
455
456      for j, net_name_j_raw in enumerate(networks_unique): # Iterate through columns (network j)
457          netnamej = re.sub( " ", "", net_name_j_raw )
458          netnamej = re.sub( "-", "", netnamej )
459          netnamej = re.sub( "/", "", netnamej )
460          newnames_wide.append( netnamei + "_2_" + netnamej )
461          
462          current_net_corr_image = outdict.get(netnamej, None) # Get the correlation image for netnamej
463          
464          if dfn_img_i is not None and dfn_img_i.max() >= 1 and current_net_corr_image is not None:
465             # Mean correlation within network i's region, taken from network j's correlation image.
466             # The map is a single 3D image, so mask it directly (ants.timeseries_to_matrix expects 4D input).
467             correlation_values_in_region = current_net_corr_image.numpy()[dfn_img_i.numpy() >= 1]
468             A[i,j] = np.mean(correlation_values_in_region)
469          else:
470             A[i,j] = 0
471
472          A_wide[0, (i * num_nets) + j] = A[i,j] 
473
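  # Consistency check (illustrative): A_wide is the row-major flattening of A,
  # so the index bookkeeping above is equivalent to A.reshape(1, num_nets * num_nets).
  assert np.allclose(A_wide, A.reshape(1, num_nets * num_nets))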
474  # Store final correlation matrices and other data in outdict
475  outdict['corr'] = pd.DataFrame(A, index=newnames, columns=newnames) # Use newnames for both index and columns
476  outdict['corr']['networks'] = newnames # Add networks column explicitly
477  outdict['corr_wide'] = pd.DataFrame(A_wide, columns=newnames_wide) 
478  outdict['fmri_template'] = fmri_template
479  outdict['brainmask'] = bmask
480  outdict['gmmask'] = gmseg_bold_space # Use the mapped GM mask
481
482  # ALFF/fALFF results
483  outdict['alff'] = myfalff_results['alff']
484  outdict['falff'] = myfalff_results['falff']
485  alff_np = myfalff_results['alff'].numpy()
486  falff_np = myfalff_results['falff'].numpy()
487  outdict['alff_mean'], outdict['alff_sd'] = alff_np[alff_np != 0].mean(), alff_np[alff_np != 0].std()
488  outdict['falff_mean'], outdict['falff_sd'] = falff_np[falff_np != 0].mean(), falff_np[falff_np != 0].std()
489
490  # 18. Point-wise ALFF/fALFF/PerAF Extraction
491  # This section loops over the node table and extracts mean ALFF, fALFF and PerAF per ROI.
492  # It runs sequentially, which is usually efficient enough; if len(roi_info_df) is very large,
493  # the loop could be parallelized with ThreadPoolExecutor (see the sketch after the loop).
494  for k_idx in tqdm(range(len(roi_info_df)), desc="Calculating point-wise ALFF/fALFF/PerAF"):
495    anatname_raw = roi_info_df['AAL'][k_idx]
496    anatname = re.sub("_","", str(anatname_raw)) if isinstance(anatname_raw, (str, np.str_)) else 'Unk'
497    
498    kk_prefix = ""
499    # Build the zero-padded naming prefix for this node's output keys (carried over from the original implementation).
500    if powers: # Power nodes are numbered simply by row index
501        kk_prefix = f"{k_idx:0>3}_"
502    else: # Yeo nodes are often paired or carry more complex numbering in the source CSV
503        yeo_roi_id_from_df = roi_info_df['ROI'][k_idx] # the actual ROI ID for this row
504        nPoints_divisor = max(int(nPoints/2 + 1e-9), 1) # guard against a zero divisor
505        kk_prefix = f"{yeo_roi_id_from_df % nPoints_divisor:0>3}_"
506
507    fname='falffPoint'+kk_prefix+anatname
508    aname='alffPoint'+kk_prefix+anatname
509    pname='perafPoint'+kk_prefix+anatname
510
511    current_roi_label_value = roi_info_df['ROI'][k_idx] # Get ROI ID
512    
513    # Create the specific ROI mask image (same logic from network mapping)
514    if powers:
515        loc_for_roi = roi_info_df.iloc[[k_idx],:3].values 
516        localsel_img = ants.threshold_image( ants.make_points_image(loc_for_roi, bmask, radius=1), 1, 1e9 )
517    else: 
518        localsel_img = ants.threshold_image( ptImg_labels , current_roi_label_value , current_roi_label_value ) * bmask
519
520    # Extract mean values within the ROI mask
521    if localsel_img.max() >= 1 : # Check if ROI mask is non-empty
522        # These are 3D maps, so average the masked voxel values directly.
523        outdict[fname] = float(np.mean(myfalff_results['falff'].numpy()[localsel_img.numpy() >= 1]))
524        outdict[aname] = float(np.mean(myfalff_results['alff'].numpy()[localsel_img.numpy() >= 1]))
525        outdict[pname] = float(np.mean(peraf_image.numpy()[localsel_img.numpy() >= 1]))
526    else: # If ROI mask is empty
527        outdict[fname]=math.nan
528        outdict[aname]=math.nan
529        outdict[pname]=math.nan
530
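  # Illustrative sketch of how the loop above could be parallelized with ThreadPoolExecutor,
  # as noted before the loop. The helper name is hypothetical; it mirrors the loop body and
  # returns a dict of (key, value) pairs to merge into outdict. Because ANTs image operations
  # may not release the GIL uniformly, the speed-up from threading can be modest.
  def _example_pointwise_extraction_task(k_idx):
    anatname_raw = roi_info_df['AAL'][k_idx]
    anatname = re.sub("_", "", str(anatname_raw)) if isinstance(anatname_raw, (str, np.str_)) else 'Unk'
    roi_val = roi_info_df['ROI'][k_idx]
    if powers:
      prefix = f"{k_idx:0>3}_"
      loc = roi_info_df.iloc[[k_idx], :3].values
      sel = ants.threshold_image(ants.make_points_image(loc, bmask, radius=1), 1, 1e9)
    else:
      prefix = f"{roi_val % max(int(nPoints/2 + 1e-9), 1):0>3}_"
      sel = ants.threshold_image(ptImg_labels, roi_val, roi_val) * bmask
    maps = {'falffPoint': myfalff_results['falff'], 'alffPoint': myfalff_results['alff'], 'perafPoint': peraf_image}
    if sel.max() >= 1:
      return {k + prefix + anatname: float(np.mean(img.numpy()[sel.numpy() >= 1])) for k, img in maps.items()}
    return {k + prefix + anatname: math.nan for k in maps}
  # Example use (not executed here):
  # with ThreadPoolExecutor(max_workers=num_threads) as executor:
  #   for res in executor.map(_example_pointwise_extraction_task, range(len(roi_info_df))):
  #     outdict.update(res)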
531  # 19. Final Output Structuring and Cleanup
532  rsfNuisance_df = pd.DataFrame( nuisance_all ) # Complete nuisance regressors as DataFrame
533  
534  # Cleanup temp directory if specified
535  if remove_it: 
536    shutil.rmtree(output_directory, ignore_errors=True )
537
538  # Combine network-specific maps for composite network maps (if applicable)
539  # This part relies on networks being correctly identified and stored in outdict.
540  if not powers: # Yeo-specific network combinations (Default Mode, Visual)
541      # Use .get() to avoid KeyError if a network was not populated by parallel task
542      dfnsum_d = outdict.get('DefaultA', None)
543      if dfnsum_d is not None:
544          dfnsum_d = dfnsum_d + outdict.get('DefaultB', dfnsum_d * 0) # add a zero image if the component is missing
545          dfnsum_d = dfnsum_d + outdict.get('DefaultC', dfnsum_d * 0)
546          outdict['DefaultMode'] = dfnsum_d
547
548      dfnsum_v = outdict.get('VisCent', None)
549      if dfnsum_v is not None:
550          dfnsum_v = dfnsum_v + outdict.get('VisPeri', dfnsum_v * 0)
551          outdict['Visual'] = dfnsum_v
552
553  # Additional output metrics
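  # (ants.iMath "MD" / "ME" are morphological dilation / erosion by N voxels: nonbrainmask is a
  #  shell just outside the brain, trimmask the eroded interior, edgemask a thin band just inside the boundary.)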
554  nonbrainmask = ants.iMath( bmask, "MD",2) - bmask 
555  trimmask = ants.iMath( bmask, "ME",2) 
556  edgemask = ants.iMath( bmask, "ME",1) - trimmask 
557
558  outdict['motion_corrected'] = corrmo['motion_corrected'] 
559  outdict['nuisance'] = rsfNuisance_df 
560  outdict['PerAF'] = peraf_image 
561  outdict['tsnr'] = mytsnr 
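  # (slice_snr summarizes slice-wise signal-to-noise using the CSF/WM and GM masks;
  #  dvars is the frame-to-frame root-mean-square change of the BOLD signal over the supplied GM mask.)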
562  outdict['ssnr'] = slice_snr( corrmo['motion_corrected'], csfAndWM_bold_space, gmseg_bold_space ) 
563  outdict['dvars'] = dvars( corrmo['motion_corrected'], gmseg_bold_space ) 
564
565  # Store pipeline parameters and summary statistics
566  outdict['bandpass_freq_0']=f[0]
567  outdict['bandpass_freq_1']=f[1]
568  outdict['censor']=int(censor)
569  outdict['spatial_smoothing']=spa
570  outdict['outlier_threshold']=outlier_threshold
571  outdict['FD_threshold']=FD_threshold
572  outdict['high_motion_count'] = high_motion_count
573  outdict['high_motion_pct'] = high_motion_pct
574  outdict['despiking_count_summary'] = despiking_count_summary
575  outdict['FD_max'] = corrmo['FD'].max()
576  outdict['FD_mean'] = corrmo['FD'].mean()
577  outdict['FD_sd'] = corrmo['FD'].std()
578  outdict['bold_evr'] = patch_eigenvalue_ratio( und, 512, [16,16,16], evdepth = 0.9, mask = bmask ) 
579  outdict['n_outliers'] = len(censoring_indices) # number of censored (high-motion/outlier) frames
580  outdict['nc_wm'] = int(n_compcor_wm)
581  outdict['nc_csf'] = int(n_compcor_csf)
582  outdict['minutes_original_data'] = ( tr * fmri.shape[3] ) / 60.0 
583  outdict['minutes_censored_data'] = ( tr * simg_cleaned.shape[3] ) / 60.0 
584
585  return convert_np_in_dict( outdict )

Compute resting state network correlation maps based on the J Power labels. This includes fMRI preprocessing steps and subsequent network analysis.

Arguments are detailed in the function signature.

Returns

a dictionary containing the derived network maps
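
A minimal usage sketch (illustrative only: file names are placeholders, the import path follows this module's name, the mean-of-timeseries image is just one reasonable choice of fmri_template, and the output keys shown are those populated above):

    import ants
    import antspymm

    fmri  = ants.image_read('bold.nii.gz')              # 4D resting-state BOLD (placeholder file names)
    t1    = ants.image_read('t1.nii.gz')
    t1seg = ants.image_read('t1_segmentation.nii.gz')   # tissue segmentation aligned to the T1
    boldref = ants.get_average_of_timeseries(fmri)      # simple BOLD-space template

    rsf = antspymm.rsfmri.resting_state_fmri_networks(
        fmri, boldref, t1, t1seg,
        f=[0.03, 0.08], nc=5, upsample=3.0, verbose=True)

    rsf['fullCorrMat'].to_csv('roi_correlations.csv')   # ROI-by-ROI correlation matrix
    ants.image_write(rsf['alff'], 'alff.nii.gz')        # ALFF map in BOLD space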