Coverage for /builds/tms-localization/papers/tmsloc_proto/scripts/07_calc_r2.py: 83%

147 statements  

« prev     ^ index     » next       coverage.py v7.5.1, created at 2024-05-20 07:31 +0000

1#!/usr/bin/env python 

2 

3""" 

4This script calculates the goodness-of-fits for each muscle in the ROI. 

5""" 

6 

7import os 

8import h5py 

9import pynibs 

10import argparse 

11import pandas as pd 

12import numpy as np 

13 

14from pynibs.regression import regress_data 

15from pynibs.neuron.neuron_regression import calc_e_effective 

16 

# command-line interface
# ================================================================================
parser = argparse.ArgumentParser(description='Calculate fits for 3 muscles in ROI using all TMS pulses '
                                             'and save results in .hdf5 file\n')

# required arguments
parser.add_argument('-s', '--fn_subject', help='Path to subject obj',
                    required=True, type=str)
parser.add_argument('-m', '--mesh_idx', help='Mesh idx',
                    required=True, type=str)
parser.add_argument('-e', '--exp_idx', help='Exp idx',
                    required=True, type=str)

# optional arguments
# NOTE: argparse does not pass defaults through `type`, so the default must already
# be a str ('1') to match the type of a value given on the command line.
parser.add_argument('-r', '--roi_idx', help='ROI idx',
                    required=False, type=str, default='1')
parser.add_argument('-n', '--n_cpu', help='How many cpus to use',
                    type=int, default=10)
parser.add_argument('-f', '--function', help='Function to fit data to ("linear", "sigmoid4", "sigmoid4_log")',
                    type=str, default='sigmoid4')
parser.add_argument('-i', '--n_refit', help='Number of refit iterations',
                    required=False, type=int, default=20)
parser.add_argument('-v', '--verbose', help='Show detailed output messages',
                    required=False, action='store_true')
parser.add_argument('--map2surf', help='Map results to whole brain surface',
                    required=False, action='store_true')
parser.add_argument('--convergence', help='Assess convergence metrics (n vs n-1)',
                    required=False, action='store_true', default=False)
parser.add_argument('--layerid', help='Perform mapping based on neuronal meanfield model on the specified layer. ("L23", "L5")',
                    required=False)
parser.add_argument('--neuronmodel', help='Select neuron model for localization. '
                                          '("sensitivity_weighting", "threshold_subtract", "threshold_binary",'
                                          ' "IOcurve")',
                    required=False, default=None)
parser.add_argument('--waveform', help='Waveform of TMS pulse. ("monophasic", "biphasic")',
                    required=False, default='biphasic')

args = parser.parse_args()

51 

# print parameter info
# ================================================================================
ruler = "-" * 64
print(ruler)
print(f"{parser.description}")
args_dict = vars(args)
print('Parameters:')
for name, value in args_dict.items():
    print(f"{name: >15}: {value}")
print(ruler)

61 

# parameters
# ================================================================================
# file containing the subject information
fn_subject = args.fn_subject

# load subject object
subject = pynibs.load_subject(fn_subject)

# exp identifier (key into subject.exp)
exp_idx = args.exp_idx

# mesh identifier (key into subject.mesh)
mesh_idx = args.mesh_idx

# ROI index the fields are extracted for goodness-of-fit calculation
# NOTE(review): str when passed on the CLI (type=str) but int 1 when defaulted —
# confirm downstream indexing handles both
roi_idx = args.roi_idx

# number threads (cpu cores) to use for parallelization
n_cpu = args.n_cpu

# show detailed output messages
verbose = args.verbose

# functions to perform the fit with, resolved by name from the pynibs namespace
# (getattr raises AttributeError for an unknown function name)
functions = [getattr(pynibs, args.function)]

# hand muscles (EMG channel names recorded in this experiment)
muscles = subject.exp[exp_idx]["channels"]

# quantities of electric field the fit is performed for
e_qoi_list = ["E_mag"]

# map data to whole brain surface
map2surf = args.map2surf

# cortical layer for mapping with the neuronal meanfield model (None disables it)
layerid = args.layerid

# neuron regression approach
neuronmodel = args.neuronmodel

# TMS waveform
waveform = args.waveform

# number of refit iterations
n_refit = args.n_refit

# output measure ("SR": Relative standard error of regression, "R2": R2 score)
score_type = "R2"

# Assess the convergence by comparing n vs n-1 stimulations
convergence = args.convergence
# percentile used to normalize scores before the convergence comparison
norm_to_perc = 99

115 

# derive I/O paths
# ================================================================================
# (the subject object was already loaded above — no need to load it a second time)
subject_dir = os.path.split(fn_subject)[0]

# parent folder containing the output data
fn_results = os.path.join(subject_dir, "results", f"exp_{exp_idx}", "r2",
                          f"mesh_{mesh_idx}", f"roi_{roi_idx}")

# file containing the electric field in the ROI [n_zaps x n_ele]
fn_e_roi = os.path.join(subject_dir, "results", f"exp_{exp_idx}", "electric_field",
                        f"mesh_{mesh_idx}", f"roi_{roi_idx}", "e_scaled.hdf5")

if not os.path.exists(fn_e_roi):
    # include the expected location so the user can diagnose the missing file
    raise FileNotFoundError(f"No electric field file found at '{fn_e_roi}'!")

# suffix appended to all result file names when neuron-enhanced mapping is active
fn_result_suffix = f"_neuron_{layerid}_{neuronmodel}" if layerid is not None else ""

# point the experiment entry at the .hdf5 file holding the experimental data
subject.exp[exp_idx]["fn_exp_hdf5"] = [os.path.join(subject.subject_folder, "exp", exp_idx, f"mesh_{mesh_idx}",
                                                    "experiment.hdf5")]

print(f"Processing subject: {subject.id}, experiment: {exp_idx}")
print("=====================================================")

138 

# load ROI geometry for this mesh
mesh_info = subject.mesh[mesh_idx]
roi = pynibs.load_roi_surface_obj_from_hdf5(mesh_info['fn_mesh_hdf5'])[roi_idx]
con = roi.node_number_list
mesh_folder = mesh_info["mesh_folder"]

# load exp data (post-processed EMG table)
df_phys_data_postproc_EMG = pd.read_hdf(subject.exp[exp_idx]["fn_exp_hdf5"][0], "phys_data/postproc/EMG")

# accumulators for the optional n vs n-1 convergence assessment
convergence_results = {key: [] for key in ('muscle', 'qoi', 'fun', 'nrmsd', 'geodesic dist')}

152 

# ================================================================================
# main loop: for every muscle (EMG channel), fit function and e-field quantity,
# compute the element-wise goodness-of-fit map and write the results to .hdf5
# ================================================================================
for i_m, m in enumerate(muscles):
    # normalize numeric channel labels (e.g. 1.0 -> "1"); keep non-numeric names as-is
    try:
        m = str(int(m))
    except ValueError:
        pass
    print(f"\n> Muscle: {m}")

    # load MEPs (peak-to-peak EMG amplitudes) for this muscle
    mep = np.array(df_phys_data_postproc_EMG[f"p2p_{m}"])

    for i_fun, fun in enumerate(functions):
        # neuron-enhanced regression is only implemented for sigmoidal fits
        if neuronmodel and not (fun == pynibs.sigmoid4 or fun == pynibs.sigmoid4_log):
            raise NotImplementedError("Neuron regression can only be "
                                      "performed with sigmoid4 or sigmoid4_log functions.")

        print(f"> fun: {fun.__name__}")

        # goodness-of-fit map per e-field quantity of interest
        gof_dict = dict()

        for i_e_qoi, e_qoi in enumerate(e_qoi_list):
            print(f"> E-QOI: {e_qoi}")

            # signed-data selection only applies to the normal component of the field
            if e_qoi == "E_norm":
                select_signed_data = True
            else:
                select_signed_data = False

            # load electric field
            with h5py.File(fn_e_roi, "r") as f:
                if layerid is not None:
                    print(f"Performing neuron-enhanced mapping on Layer {layerid} using '{neuronmodel}' approach")
                    e_matrix = f[f"/{layerid}/E_mag"][:]
                    e_matrix_orig = f[f"/{layerid}/E_mag"][:]  # NOTE(review): currently unused below
                    # load e-field angle to local surface normal (= theta)
                    # and relative gradient of e-field between GM and WM (= gradient)
                    theta = f[f"/{layerid}/E_theta"][:]
                    gradient = f[f"/{layerid}/E_gradient"][:]

                    # calculate by how much the individual threshold of the
                    # neuronal population was exceeded by the applied e-field
                    # neuronmodel can be either "threshold" or "IOcurve"
                    e_matrix = calc_e_effective(e=e_matrix,
                                                layerid=layerid,
                                                theta=theta,
                                                gradient=gradient,
                                                neuronmodel=neuronmodel,
                                                mep=mep,
                                                waveform=waveform)

                # pick the geometry: the selected layer surface, or the ROI midlayer
                if layerid is not None:
                    for layer in roi.layers:
                        if layer.id == layerid:
                            nodes = layer.surface.nodes.node_coord
                            # re-index the layer connectivity so node numbering starts at 0
                            con = layer.surface.elm.node_number_list[:, :3] - np.min(layer.surface.elm.node_number_list[:, :3])
                else:
                    e_matrix = f[e_qoi][:]
                    nodes = roi.node_coord_mid
                    con = roi.node_number_list

            # check for zero e-fields and filter them (problems in FEM!)
            zero_mask = (e_matrix == 0).all(axis=1)

            if zero_mask.any():
                print(f"Warning! {np.sum(zero_mask)} zero e-fields detected in element! Check FEM! Ignoring them for now!")
                e_matrix = e_matrix[np.logical_not(zero_mask), :]
                # NOTE(review): mep is filtered in place but reloaded only once per muscle;
                # if more than one function/e-qoi triggers filtering, the already-shortened
                # mep is matched against a freshly loaded e_matrix — confirm intended
                mep = mep[np.logical_not(zero_mask)]

            if layerid is not None and neuronmodel == "IOcurve":
                # IOcurve approach: score elements by the inverse variance of the effective field
                gof_dict[e_qoi] = 1/np.var(e_matrix, axis=0)

            else:
                # element-wise regression of MEP amplitude vs. e-field
                gof_dict[e_qoi] = regress_data(e_matrix=e_matrix,
                                               mep=mep,
                                               fun=fun,
                                               n_cpu=n_cpu,
                                               con=con,
                                               n_refit=n_refit,
                                               return_fits=False,
                                               score_type=score_type,
                                               verbose=verbose,
                                               pool=None,
                                               refit_discontinuities=True,
                                               select_signed_data=select_signed_data)

            if convergence:
                print(f"Computing n-1 results to assess convergence.")
                # refit with the last stimulation left out
                # NOTE(review): res_prev always comes from regress_data, while for
                # "IOcurve" gof_dict holds 1/var — the comparison mixes measures; confirm
                res_prev = regress_data(e_matrix=e_matrix[:-1, :],
                                        mep=mep[:-1],
                                        fun=fun,
                                        n_cpu=n_cpu,
                                        con=con,
                                        n_refit=n_refit,
                                        return_fits=False,
                                        score_type=score_type,
                                        verbose=verbose,
                                        pool=None,
                                        refit_discontinuities=True,
                                        select_signed_data=select_signed_data)

                # compute normalised root-mean-square deviation between n and n-1 solution
                norm_to_perc = 99  # shadows the identical module-level value
                res_final = gof_dict[e_qoi] / np.percentile(gof_dict[e_qoi][gof_dict[e_qoi] > 0],
                                                            norm_to_perc)
                res_prev = res_prev / np.percentile(res_prev[res_prev > 0], norm_to_perc)
                nrmsd = pynibs.nrmsd(res_final, res_prev)

                # compute geodesic distance between best element n and n-1 solution
                best_elm = np.argmax(res_final)
                best_elm_prev = np.argmax(res_prev)
                nodes_dist, tris_dist = pynibs.geodesic_dist(nodes,
                                                             con,
                                                             best_elm,
                                                             source_is_node=False)
                geod_dist = tris_dist[best_elm_prev]

                convergence_results['muscle'].append(m)
                convergence_results['qoi'].append(e_qoi)
                convergence_results['fun'].append(fun.__name__)
                convergence_results['nrmsd'].append(nrmsd)
                convergence_results['geodesic dist'].append(geod_dist)

        # save results
        fn_gof_hdf5 = os.path.join(fn_results, m, fun.__name__, f"r2{fn_result_suffix}_roi_data.hdf5")

        # create folder
        if not os.path.exists(os.path.split(fn_gof_hdf5)[0]):
            os.makedirs(os.path.split(fn_gof_hdf5)[0])
        fn_geo_hdf5 = os.path.join(os.path.split(fn_gof_hdf5)[0], f"r2{fn_result_suffix}_roi_geo.hdf5")

        # write hdf5 _geo file (geometry the fit maps live on)
        pynibs.write_geo_hdf5_surf(out_fn=fn_geo_hdf5,
                                   points=nodes,
                                   con=con,
                                   replace=True,
                                   hdf5_path='/mesh')

        # write _data file (one dataset per e-field quantity)
        data = [gof_dict[e_qoi] for e_qoi in e_qoi_list]
        data_names = [f'c_{e_qoi}' for e_qoi in e_qoi_list]

        print(f"Writing results to {fn_gof_hdf5}.")
        pynibs.write_data_hdf5_surf(data=data,
                                    data_names=data_names,
                                    data_hdf_fn_out=fn_gof_hdf5,
                                    geo_hdf_fn=fn_geo_hdf5,
                                    replace=True)

        # optional mapping of the ROI results onto the whole-brain midlayer surface
        if not map2surf or layerid is not None:  # skip this step also when mapping on the layers is requested
            continue
        print("> Mapping data to brain surface")
        c_mapped = pynibs.map_data_to_surface(datasets=data,
                                              points_datasets=[roi.node_coord_mid] * len(data),
                                              con_datasets=[roi.node_number_list] * len(data),
                                              fname_fsl_gm=[None, None],
                                              fname_fsl_wm=[None, None],
                                              fname_midlayer=[
                                                  os.path.join(mesh_folder, subject.mesh[mesh_idx]['fn_lh_midlayer']),
                                                  os.path.join(mesh_folder, subject.mesh[mesh_idx]['fn_rh_midlayer'])
                                              ],
                                              delta=subject.roi[mesh_idx][roi_idx]['delta'],
                                              input_data_in_center=True,
                                              return_data_in_center=True,
                                              data_substitute=-1)

        # recreate complete midlayer surface to write in .hdf5 geo file
        points_midlayer, con_midlayer = pynibs.make_GM_WM_surface(gm_surf_fname=[subject.mesh[mesh_idx]['fn_lh_gm'],
                                                                                 subject.mesh[mesh_idx]['fn_rh_gm']],
                                                                  wm_surf_fname=[subject.mesh[mesh_idx]['fn_lh_wm'],
                                                                                 subject.mesh[mesh_idx]['fn_rh_wm']],
                                                                  mesh_folder=mesh_folder,
                                                                  midlayer_surf_fname=[
                                                                      subject.mesh[mesh_idx]['fn_lh_midlayer'],
                                                                      subject.mesh[mesh_idx]['fn_rh_midlayer']
                                                                  ],
                                                                  delta=subject.roi[mesh_idx][roi_idx]['delta'],
                                                                  X_ROI=None,
                                                                  Y_ROI=None,
                                                                  Z_ROI=None,
                                                                  layer=1,
                                                                  fn_mask=None)

        # save hdf5 _geo file (mapped)
        print(f" > Writing mapped brain and roi to {os.path.join(os.path.split(fn_gof_hdf5)[0], f'r2{fn_result_suffix}_data.hdf5')}")
        pynibs.write_geo_hdf5_surf(out_fn=os.path.join(os.path.split(fn_gof_hdf5)[0], f"r2{fn_result_suffix}_geo.hdf5"),
                                   points=points_midlayer,
                                   con=con_midlayer,
                                   replace=True,
                                   hdf5_path='/mesh')

        # save hdf5 _data file (mapped)

        pynibs.write_data_hdf5_surf(data=c_mapped,
                                    data_names=data_names,
                                    data_hdf_fn_out=os.path.join(os.path.split(fn_gof_hdf5)[0], f"r2{fn_result_suffix}_data.hdf5"),
                                    geo_hdf_fn=os.path.join(os.path.split(fn_gof_hdf5)[0], f"r2{fn_result_suffix}_geo.hdf5"),
                                    replace=True)

349 

print("DONE")

if convergence:
    # summarize and persist the n vs n-1 convergence metrics collected in the main loop
    print("Convergence for n vs n-1 stimulations:")
    convergence_results = pd.DataFrame.from_dict(convergence_results)  # classmethod; no throwaway instance
    print(convergence_results)
    convergence_fn = os.path.join(fn_results, f"convergence{fn_result_suffix}.csv")
    # write first, then report — so the message is only printed if the save succeeded
    convergence_results.to_csv(convergence_fn)
    print(f"Saved to {convergence_fn}")
print("====")