Package mvpa :: Package tests :: Module test_niftidataset
[hide private]
[frames] | [no frames]

Source Code for Module mvpa.tests.test_niftidataset

  1  #emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*- 
  2  #ex: set sts=4 ts=4 sw=4 et: 
  3  ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 
  4  # 
  5  #   See COPYING file distributed along with the PyMVPA package for the 
  6  #   copyright and license terms. 
  7  # 
  8  ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 
  9  """Unit tests for PyMVPA nifti dataset""" 
 10   
 11  import unittest 
 12  import os.path 
 13  import numpy as N 
 14   
 15  from mvpa import pymvpa_dataroot 
 16  from mvpa.datasets.nifti import * 
 17  from mvpa.misc.exceptions import * 
 18  from mvpa.misc.fsl import FslEV3 
 19   
class NiftiDatasetTests(unittest.TestCase):
    """Unit tests for NIfTI-backed datasets (NiftiDataset, ERNiftiDataset).

    NOTE(review): every test loads fixture files ('example4d', 'bold',
    'mask', 'fslev3.txt') from ``pymvpa_dataroot``, so the PyMVPA sample
    data must be installed for this suite to run.
    """

    def testNiftiDataset(self):
        """Load a 4D image, merge datasets, and mask with a plain ndarray."""
        data = NiftiDataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                            labels=[1, 2])
        # example4d: 128x96x24 voxels per volume, two volumes (samples)
        self.failUnless(data.nfeatures == 294912)
        self.failUnless(data.nsamples == 2)

        # mapper metric elementsize must equal the voxel size from the
        # NIfTI header (pixdim[3:0:-1] reverses x,y,z into z,y,x order)
        self.failUnless((data.mapper.metric.elementsize \
                         == data.niftihdr['pixdim'][3:0:-1]).all())

        # check that mapper honours elementsize
        nb22 = N.array([i for i in data.mapper.getNeighborIn((1, 1, 1), 2.2)])
        nb20 = N.array([i for i in data.mapper.getNeighborIn((1, 1, 1), 2.0)])
        self.failUnless(nb22.shape[0] == 7)
        self.failUnless(nb20.shape[0] == 5)

        # Can't rely on released pynifties, so doing really vague testing
        # XXX
        self.failUnless(data.dt in [2.0, 2000.0])
        self.failUnless(data.samplingrate in [5e-4, 5e-1])
        merged = data + data

        self.failUnless(merged.nfeatures == 294912)
        self.failUnless(merged.nsamples == 4)

        # check that the header survives the merge
        #self.failUnless(merged.niftihdr == data.niftihdr)
        for k in merged.niftihdr.keys():
            self.failUnless(N.mean(merged.niftihdr[k] == data.niftihdr[k]) == 1)

        # throw away old dataset and see if new one survives
        del data
        self.failUnless(merged.samples[3, 120000] == merged.samples[1, 120000])

        # check whether we can use a plain ndarray as mask
        mask = N.zeros((24, 96, 128), dtype='bool')
        mask[12, 20, 40] = True
        nddata = NiftiDataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                              labels=[1, 2],
                              mask=mask)
        self.failUnless(nddata.nfeatures == 1)
        rmap = nddata.mapReverse([44])
        self.failUnless(rmap.shape == (24, 96, 128))
        self.failUnless(N.sum(rmap) == 44)
        self.failUnless(rmap[12, 20, 40] == 44)


    def testNiftiMapper(self):
        """Map plain arrays and whole datasets back into NIfTI image space."""
        data = NiftiDataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                            labels=[1, 2])

        # test mapping of ndarray
        vol = data.map2Nifti(N.ones((294912,), dtype='int16'))
        self.failUnless(vol.data.shape == (24, 96, 128))
        self.failUnless((vol.data == 1).all())

        # test mapping of the dataset
        vol = data.map2Nifti(data)
        self.failUnless(vol.data.shape == (2, 24, 96, 128))


    def testNiftiSelfMapper(self):
        """map2Nifti() without argument maps the dataset's own samples."""
        example_path = os.path.join(pymvpa_dataroot, 'example4d')
        example = NiftiImage(example_path)
        data = NiftiDataset(samples=example_path,
                            labels=[1, 2])

        # Map read data to itself
        vol = data.map2Nifti()

        self.failUnless(vol.data.shape == example.data.shape)
        self.failUnless((vol.data == example.data).all())

        # modifications of the samples must show up in a subsequent mapping
        data.samples[:] = 1
        vol = data.map2Nifti()
        self.failUnless((vol.data == 1).all())


    def testMultipleCalls(self):
        """Doing exactly the same operation twice must yield the same result."""
        data = NiftiDataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                            labels=1)
        data2 = NiftiDataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                             labels=1)

        # Currently this test fails and I don't know why!
        # The problem occurs, because in the second call to
        # NiftiDataset.__init__() there is already a dsattr that has a 'mapper'
        # key, although dsattr is set to be an empty dict. Therefore the
        # constructor does not set the proper elementsize, because it thinks
        # there is already a mapper present. Actually this test is just looking
        # for a symptom of a buggy dsattr handling.
        # The tricky part is: I have no clue, what is going on... :(
        self.failUnless((data.mapper.metric.elementsize \
                         == data2.mapper.metric.elementsize).all())


    def testERNiftiDataset(self):
        """Event-related dataset construction from a 4D timeseries."""
        # constructing without any source of samples must fail
        self.failUnlessRaises(DatasetError, ERNiftiDataset)

        # setup data sources
        tssrc = os.path.join(pymvpa_dataroot, 'bold')
        evsrc = os.path.join(pymvpa_dataroot, 'fslev3.txt')
        evs = FslEV3(evsrc).toEvents()

        # more failure ;-)
        # no label!
        self.failUnlessRaises(ValueError, ERNiftiDataset,
                              samples=tssrc, events=evs)

        # set some label for each ev
        for ev in evs:
            ev['label'] = 1

        # for real!
        # using TR from nifti header
        ds = ERNiftiDataset(samples=tssrc, events=evs)

        # 40x20 volume, 9 volumes per sample + 1 intensity score = 7201 features
        self.failUnless(ds.nfeatures == 7201)
        self.failUnless(ds.nsamples == len(evs))

        # check samples
        origsamples = getNiftiFromAnySource(tssrc).data
        for i, ev in enumerate(evs):
            self.failUnless((ds.samples[i][:-1] \
                == origsamples[ev['onset']:ev['onset'] + ev['duration']].ravel()
                            ).all())

        # do again -- with conversion
        ds = ERNiftiDataset(samples=tssrc, events=evs, evconv=True,
                            storeoffset=True)
        self.failUnless(ds.nsamples == len(evs))
        # TR=2.5, 40x20 volume, 9 second per sample (4volumes), 1 intensity
        # score + 1 offset = 3202 features
        self.failUnless(ds.nfeatures == 3202)

        # map back into voxel space, should ignore additional features
        nim = ds.map2Nifti()
        self.failUnless(nim.data.shape == origsamples.shape)
        # check shape of a single sample
        nim = ds.map2Nifti(ds.samples[0])
        self.failUnless(nim.data.shape == (4, 1, 20, 40))


    def testNiftiDatasetFrom3D(self):
        """Loading 3D volumes, alone and as a list mixed with NiftiImages."""
        tssrc = os.path.join(pymvpa_dataroot, 'bold')
        masrc = os.path.join(pymvpa_dataroot, 'mask')

        # Test loading of 3D volumes

        # it should puke if we are not enforcing 4D:
        self.failUnlessRaises(Exception, NiftiDataset,
                              masrc, mask=masrc, labels=1, enforce4D=False)
        # by default we are enforcing it
        ds = NiftiDataset(masrc, mask=masrc, labels=1)

        plain_data = NiftiImage(masrc).data
        # Lets check if mapping back works as well
        self.failUnless(N.all(plain_data == \
                              ds.map2Nifti().data.reshape(plain_data.shape)))

        # test loading from a list of filenames

        # for now we should fail if trying to load a mix of 4D and 3D volumes
        self.failUnlessRaises(ValueError, NiftiDataset, (masrc, tssrc),
                              mask=masrc, labels=1)

        # Lets prepare some custom NiftiImage
        dsfull = NiftiDataset(tssrc, mask=masrc, labels=1)
        ds_selected = dsfull['samples', [3]]
        nifti_selected = ds_selected.map2Nifti()

        # Load dataset from a mix of 3D volumes (given by filenames and NiftiImages)
        labels = [123, 2, 123]
        ds2 = NiftiDataset((masrc, masrc, nifti_selected),
                           mask=masrc, labels=labels)
        self.failUnless(ds2.nsamples == 3)
        self.failUnless((ds2.samples[0] == ds2.samples[1]).all())
        self.failUnless((ds2.samples[2] == dsfull.samples[3]).all())
        self.failUnless((ds2.labels == labels).all())
def suite():
    """Build and return the test suite for this module."""
    # Equivalent to unittest.makeSuite(NiftiDatasetTests), which is a thin
    # wrapper around TestLoader.loadTestsFromTestCase().
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(NiftiDatasetTests)


if __name__ == '__main__':
    import runner