Mirror of https://gitlab.tugraz.at/ibi/projects/julia-pfitzer/esmrmb-educational.git (synced 2024-11-25 02:52:27 +00:00)
Cleaner notebook with python.
parent a27b222671
commit 307fdb132b
5 changed files with 1626 additions and 1175 deletions
4	.gitignore (vendored)
@@ -5,4 +5,6 @@ fig/
 .ipynb_checkpoints/
 data/
 __pycache__/
-*.mat
+*.mat
+
+!T2_CPMG.mat
File diff suppressed because it is too large
BIN	T2_CPMG.mat (normal file)
Binary file not shown.
114	cfl.py
@@ -1,114 +0,0 @@
# Copyright 2013-2015. The Regents of the University of California.
# Copyright 2021. Uecker Lab. University Center Göttingen.
# All rights reserved. Use of this source code is governed by
# a BSD-style license which can be found in the LICENSE file.
#
# Authors:
# 2013 Martin Uecker <uecker@eecs.berkeley.edu>
# 2015 Jonathan Tamir <jtamir@eecs.berkeley.edu>

from __future__ import print_function
from __future__ import with_statement

import numpy as np
import mmap
import os


def readcfl(name):
    # get dims from .hdr
    with open(name + ".hdr", "rt") as h:
        h.readline()  # skip
        l = h.readline()
    dims = [int(i) for i in l.split()]

    # remove singleton dimensions from the end
    n = np.prod(dims)
    dims_prod = np.cumprod(dims)
    dims = dims[:np.searchsorted(dims_prod, n) + 1]

    # load data and reshape into dims
    with open(name + ".cfl", "rb") as d:
        a = np.fromfile(d, dtype=np.complex64, count=n)
    return a.reshape(dims, order='F')  # column-major


def readmulticfl(name):
    # get dims from .hdr
    with open(name + ".hdr", "rt") as h:
        lines = h.read().splitlines()

    index_dim = 1 + lines.index('# Dimensions')
    total_size = int(lines[index_dim])
    index_sizes = 1 + lines.index('# SizesDimensions')
    sizes = [int(i) for i in lines[index_sizes].split()]
    index_dims = 1 + lines.index('# MultiDimensions')

    with open(name + ".cfl", "rb") as d:
        a = np.fromfile(d, dtype=np.complex64, count=total_size)

    offset = 0
    result = []
    for i in range(len(sizes)):
        dims = [int(v) for v in lines[index_dims + i].split()]
        n = np.prod(dims)
        result.append(a[offset:offset + n].reshape(dims, order='F'))
        offset += n

    if total_size != offset:
        print("Error")

    return result


def writecfl(name, array):
    with open(name + ".hdr", "wt") as h:
        h.write('# Dimensions\n')
        for i in array.shape:
            h.write("%d " % i)
        h.write('\n')

    size = np.prod(array.shape) * np.dtype(np.complex64).itemsize

    with open(name + ".cfl", "a+b") as d:
        os.ftruncate(d.fileno(), size)
        mm = mmap.mmap(d.fileno(), size, flags=mmap.MAP_SHARED, prot=mmap.PROT_WRITE)
        if array.dtype != np.complex64:
            array = array.astype(np.complex64)
        mm.write(np.ascontiguousarray(array.T))
        mm.close()
        # with mmap.mmap(d.fileno(), size, flags=mmap.MAP_SHARED, prot=mmap.PROT_WRITE) as mm:
        #     mm.write(array.astype(np.complex64).tobytes(order='F'))


def writemulticfl(name, arrays):
    size = 0
    dims = []

    for array in arrays:
        size += array.size
        dims.append(array.shape)

    with open(name + ".hdr", "wt") as h:
        h.write('# Dimensions\n')
        h.write("%d\n" % size)

        h.write('# SizesDimensions\n')
        for dim in dims:
            h.write("%d " % len(dim))
        h.write('\n')

        h.write('# MultiDimensions\n')
        for dim in dims:
            for i in dim:
                h.write("%d " % i)
            h.write('\n')

    size = size * np.dtype(np.complex64).itemsize

    with open(name + ".cfl", "a+b") as d:
        os.ftruncate(d.fileno(), size)
        mm = mmap.mmap(d.fileno(), size, flags=mmap.MAP_SHARED, prot=mmap.PROT_WRITE)
        for array in arrays:
            if array.dtype != np.complex64:
                array = array.astype(np.complex64)
            mm.write(np.ascontiguousarray(array.T))
        mm.close()
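The file pair written by writecfl follows BART's cfl convention: name.hdr holds the dimensions as text and name.cfl holds the raw complex64 samples in column-major order. A minimal round-trip sketch of the two helpers above (the file name "example" and the array shape are made up for illustration):

import numpy as np
from cfl import readcfl, writecfl

# Write a random complex array and read it back (shape chosen arbitrarily).
x = (np.random.randn(80, 80, 16) + 1j * np.random.randn(80, 80, 16)).astype(np.complex64)
writecfl("example", x)   # creates example.hdr (text dimensions) and example.cfl (raw complex64)
y = readcfl("example")   # reshapes the flat data back using the dimensions in example.hdr
assert y.shape == x.shape and np.allclose(x, y)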
78	reco.py
@@ -1,78 +0,0 @@
import scipy.io as sio
import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import curve_fit
import sys, os
from cfl import writecfl

os.environ['TOOLBOX_PATH'] = '/home/jpfitzer/bart-0.9.00'
os.environ['BART_TOOLBOX_PATH'] = '/home/jpfitzer/bart-0.9.00'
sys.path.append('/home/jpfitzer/bart-0.9.00/bart/python')

rawName = 'T2_CPMG.mat'
mat_data_0 = sio.loadmat(rawName)

# Format of the sampled k-space: [kRd, kPh, kSl, kSpace_echo_1, kSpace_echo_2, ..., kSpace_echo_nETL] -> (102400, 23)
# The first three columns are the k-space coordinates, the remaining columns are the echoes.
print(mat_data_0['kSpaces3D'].shape)

kSpaces3D = mat_data_0['kSpaces3D']
# self.mapVals['sampled'] = np.concatenate((kRD, kPH, kSL, dataAll_sampled), axis=1)

# nReadout, nPhase, nSlice
nPoints = (80, 80, 16)

echo_train_length = mat_data_0['kSpaces3D'].shape[1] - 3  # the first 3 columns are kRD, kPH, kSL -> should give 20
print(f"Echo train length: {echo_train_length}")

echo_spacing = mat_data_0['echoSpacing'][0][0]
print(f"Echo spacing: {echo_spacing}")

k_readout = kSpaces3D[:, 0]
k_phase = kSpaces3D[:, 1]
k_slice = kSpaces3D[:, 2]

# The rest of the data is the echoes
echos = kSpaces3D[:, 3:]

# Reshape the k-space data for BART
kSpace = echos.reshape(nPoints[2], nPoints[1], nPoints[0], echo_train_length)

print(kSpace.shape)

# cfl = writecfl('kSpace', kSpace)

# Create the image with a three-dimensional inverse FFT:
#   bart fft -i 7 kSpace fft
# Put the echoes on the fifth dimension:
#   bart transpose 3 5
# Put the slices on the correct dimension:
#   bart transpose 0 2

# traj = writecfl('traj', np.stack((k_readout, k_phase, k_slice), axis=1))

# Echo times with echo spacing
TE = np.linspace(echo_spacing, echo_spacing * echo_train_length, echo_train_length, endpoint=True)
print("TE: ", TE)

# Create the echo-times file:
#   bart vec ... echo_times
#   bart scale 0.001 echo_times echo_times_scaled
# Move the echo times to the correct dimension:
#   bart transpose 0 5 echo_times_scaled echo_times_final

# Fit the model:
#   bart mobafit -T echo_times_final fft_transposed fit

# Some fitted values will be very large, so apply a threshold to obtain a mask:
#   bart threshold -M 1000 reco/fit reco/mask

# Multiply the fit with the mask:
#   bart fmac reco/fit reco/mask reco/fit_mask

# Select the R2 map (position 1 along the coefficient dimension):
#   bart slice 6 1 reco/fit_mask reco/R2_map

# Invert the data to get T2:
#   bart invert reco/R2_map reco/T2_map
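The commented-out steps at the end of reco.py only name the BART calls. Below is a sketch of how that pipeline could be chained from Python with subprocess. The plain output names (without the reco/ directory), the intermediate name fft_echoes, the placeholder echo spacing, and the assumption that bart is on PATH and that kSpace was written with writecfl('kSpace', kSpace) are mine, so treat it as an illustration of the steps rather than the script that was actually run.

import subprocess
import numpy as np

def bart(*args):
    # Run a single BART command and raise if it fails.
    subprocess.run(["bart", *map(str, args)], check=True)

echo_spacing = 10.0       # ms, placeholder -- reco.py reads the real value from the .mat file
echo_train_length = 20
TE = np.linspace(echo_spacing, echo_spacing * echo_train_length, echo_train_length)

# Inverse FFT over the three spatial dimensions (bitmask 7 = dims 0, 1, 2).
bart("fft", "-i", 7, "kSpace", "fft")
# Put the echoes on dimension 5 and the slices on the expected spatial dimension.
bart("transpose", 3, 5, "fft", "fft_echoes")
bart("transpose", 0, 2, "fft_echoes", "fft_transposed")

# Echo-time vector, scaled from ms to s and moved to dimension 5.
bart("vec", *TE, "echo_times")
bart("scale", 0.001, "echo_times", "echo_times_scaled")
bart("transpose", 0, 5, "echo_times_scaled", "echo_times_final")

# Pixel-wise T2 relaxation fit, background mask from the very large fitted values,
# then slice out the R2 coefficient and invert it to obtain the T2 map.
bart("mobafit", "-T", "echo_times_final", "fft_transposed", "fit")
bart("threshold", "-M", 1000, "fit", "mask")
bart("fmac", "fit", "mask", "fit_mask")
bart("slice", 6, 1, "fit_mask", "R2_map")
bart("invert", "R2_map", "T2_map")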