used_load_hcp_data.py
def load_hcp_data(credentials, slice_number=None):
    # Import dependencies
    import neuropythy as ny
    import ipyvolume as ipv
    import nibabel as nib
    import numpy as np
    # Configure neuropythy with the HCP S3 credentials
    ny.config['hcp_credentials'] = credentials
    fs = ny.data['hcp'].s3fs
    # Get full paths to the T1w NIFTIs
    fid = fs.ls('hcp-openaccess/HCP_1200/')
    fid.pop(0)  # drop the first listing entry, which is not a subject directory
    fpath = []
    for f in fid:
        fpath.append(f + '/MNINonLinear/T1w.nii.gz')
    # Get the list of subject IDs
    sid = []
    for f in fid:
        sid.append(f.split('/')[2])
    # Load the T1w volume (or a single slice) for each subject into one array
    im_array = []
    for i, s in enumerate(sid):
        print(i, s)
        sub = ny.hcp_subject(s)
        im = sub.load('MNINonLinear/T1w.nii.gz')
        data = im.get_fdata()
        if slice_number is not None:
            # Keep a single slice; slicing along the last axis is an assumption
            data = data[..., slice_number]
        im_array.append(data)
    arr = np.asarray(im_array)
    # arr_padded = np.pad(arr, [(0,0), (25,26), (0,0)], mode='constant')  # hard-coded to pad this dimension!
    return arr, sid
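

# Minimal usage sketch (an assumption-laden example, not part of the original
# script): the credential value and slice index below are hypothetical
# placeholders; see the neuropythy documentation for the credential formats
# it accepts.
if __name__ == '__main__':
    credentials = 'HCP_ACCESS_KEY:HCP_SECRET_KEY'  # hypothetical placeholder
    arr, sid = load_hcp_data(credentials, slice_number=120)  # 120 is an arbitrary slice index
    print('Loaded %d subjects; array shape: %s' % (len(sid), str(arr.shape)))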