
Commit

Update B0fieldmap/fieldsource when having multiple fieldmap runs (Github issue #198)
marcelzwiers committed Mar 20, 2024
1 parent 1b3984e commit a749548
Showing 6 changed files with 138 additions and 56 deletions.
1 change: 0 additions & 1 deletion README.rst
@@ -42,7 +42,6 @@ Features
``** = Only Twix, SDAT/SPAR and P-file source data``

.. note::

* All **source code** is hosted at `Github <https://github.com/Donders-Institute/bidscoin>`__ and **freely available** under the GPL-3.0-or-later `license <https://spdx.org/licenses/GPL-3.0-or-later.html>`__.
* The full BIDScoin **documentation** is hosted at `Read the Docs <https://bidscoin.readthedocs.io>`__
* For citation and more information, see our `BIDScoin publication <https://www.frontiersin.org/articles/10.3389/fninf.2021.770608>`__ in **Frontiers in Neuroinformatics** (`DOI: 10.3389/fninf.2021.770608 <https://doi.org/10.3389/fninf.2021.770608>`__). See `here <./plugins.html>`__ for citations of the plugins. A custom-made citation report can be generated with the help of `duecredit <https://github.com/duecredit/duecredit>`__
18 changes: 9 additions & 9 deletions bidscoin/bids.py
@@ -12,11 +12,11 @@
import shutil
import tempfile
import warnings
-import fnmatch
import pandas as pd
import ast
import datetime
import jsonschema
+from fnmatch import fnmatch
from functools import lru_cache
from pathlib import Path
from typing import List, Set, Tuple, Union, Dict, Any, Iterable, NewType
@@ -1314,7 +1314,7 @@ def check_ignore(entry: str, bidsignore: Union[str,list], datatype: str= 'dir')
if datatype == 'file' and item.endswith('/'): continue
if item.endswith('/'):
item = item[0:-1]
-if fnmatch.fnmatch(entry, item):
+if fnmatch(entry, item):
ignore = True
break

@@ -1461,7 +1461,7 @@ def get_run(bidsmap: Bidsmap, datatype: str, suffix_idx: Union[int, str], dataso
metavalue = copy.copy(metavalue)
if metakey == 'IntendedFor':
run_['meta'][metakey] = metavalue
-elif metakey in ('B0FieldSource', 'B0FieldIdentifier') and '<<session>>' in str(metavalue):
+elif metakey in ('B0FieldSource', 'B0FieldIdentifier') and fnmatch(str(metavalue), '*<<session*>>*'):
run_['meta'][metakey] = metavalue
else:
run_['meta'][metakey] = datasource.dynamicvalue(metavalue, cleanup=False)
@@ -1769,7 +1769,7 @@ def get_matching_run(datasource: DataSource, bidsmap: Bidsmap, runtime=False) ->
# Replace the dynamic meta values, except the IntendedFor value (e.g. <<task>>)
if metakey == 'IntendedFor':
run_['meta'][metakey] = metavalue
-elif metakey in ('B0FieldSource', 'B0FieldIdentifier') and '<<session>>' in str(metavalue):
+elif metakey in ('B0FieldSource', 'B0FieldIdentifier') and fnmatch(str(metavalue), '*<<session*>>*'):
run_['meta'][metakey] = metavalue
else:
run_['meta'][metakey] = datasource.dynamicvalue(metavalue, cleanup=False, runtime=runtime)
@@ -2111,7 +2111,7 @@ def updatemetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, ext

# Add all the metadata to the metadict. NB: the dynamic `IntendedFor` value is handled separately later
for metakey, metaval in usermeta.items():
-if metakey != 'IntendedFor' and not (metakey in ('B0FieldSource', 'B0FieldIdentifier') and '<<session>>' in str(metaval)):
+if metakey != 'IntendedFor' and not (metakey in ('B0FieldSource', 'B0FieldIdentifier') and fnmatch(str(metaval), '*<<session*>>*')):
metaval = datasource.dynamicvalue(metaval, cleanup=False, runtime=True)
try:
metaval = ast.literal_eval(str(metaval)) # E.g. convert stringified list or int back to list or int
@@ -2123,16 +2123,16 @@
LOGGER.debug(f"Adding '{metakey}: {metaval}' to: {targetmeta}")
metapool[metakey] = metaval or None

-# Update B0FieldIdentifiers / Sources
+# Update <<session>> in B0FieldIdentifiers/Sources. NB: Leave range specifiers (<<session:[-2:2]>>) untouched (-> bidscoiner)
for key in ('B0FieldSource', 'B0FieldIdentifier'):

# Replace <<session>> with the actual session label
-if '<<session>>' in str(metapool.get(key)):
+if fnmatch(str(metapool.get(key)), '*<<session*>>*'):
ses = get_bidsvalue(targetmeta, 'ses')
if isinstance(metapool[key], str):
-metapool[key] = metapool[key].replace('<<session>>', ses)
+metapool[key] = metapool[key].replace('<<session', f"<<ses{ses}")
elif isinstance(metapool[key], list):
-metapool[key] = [item.replace('<<session>>', ses) for item in metapool[key]]
+metapool[key] = [item.replace('<<session', f"<<ses{ses}") for item in metapool[key]]

# Remove unused (but added from the template) B0FieldIdentifiers / Sources
if not metapool.get(key):
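The net effect of the updatemetadata() changes above is easiest to see on a concrete tag. A minimal sketch of the old versus new substitution, assuming a hypothetical tag 'pepolar<<session:[-1:1]>>' and session label '01' (fnmatch is the same stdlib function imported above):

    from fnmatch import fnmatch

    ses = '01'                                          # Hypothetical session label, as returned by get_bidsvalue(targetmeta, 'ses')
    for tag in ('pepolar<<session>>', 'pepolar<<session:[-1:1]>>'):
        assert fnmatch(tag, '*<<session*>>*')           # The new wildcard test matches plain and range-specified dynamic values alike
        print(tag.replace('<<session>>', ses))          # Old substitution: 'pepolar01' / no-op for the range-specified tag
        print(tag.replace('<<session', f"<<ses{ses}"))  # New substitution: 'pepolar<<ses01>>' / 'pepolar<<ses01:[-1:1]>>'

The range specifier thus deliberately survives bidsmapping, so that bidscoiner can later bind the tag to a specific fieldmap run (see bidscoiner.py below).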
138 changes: 100 additions & 38 deletions bidscoin/bidscoiner.py
@@ -15,6 +15,8 @@
import logging
import shutil
import urllib.request, urllib.error
+from typing import List, Set
+from fnmatch import fnmatch
from tqdm import tqdm
from tqdm.contrib.logging import logging_redirect_tqdm
from pathlib import Path
@@ -311,6 +313,62 @@ def bidscoiner(rawfolder: str, bidsfolder: str, subjects: list=(), force: bool=F
bcoin.reporterrors()


+def limitmatches(fmap: str, matches: List[str], limits: str, niifiles: Set[str], scans_table: pd.DataFrame):
+"""
+Helper function for addmetadata() that checks if there are multiple fieldmap runs and, if so, gets the lower- and upperbound from
+the AcquisitionTime to bound the grand list of matches to the adjacent runs. The bounded results are added to niifiles
+:param fmap: The fieldmap (relative to the session folder)
+:param matches: The images (relative to the session folder) associated with the fieldmap
+:param limits: The bounding limits from the dynamic value: '[lowerlimit:upperlimit]'
+:param niifiles: The set to which the bounded results are added
+:param scans_table: The scans table with the acquisition times
+:return:
+"""
+
+# Check the input
+if limits == '[]':
+limits = '[:]'
+
+# Set fallback upper and lower bounds if parsing the scans-table is not possible
+fmaptime = dateutil.parser.parse('1925-01-01') # Use the BIDS stub acquisition time
+lowerbound = fmaptime.replace(year=1900) # Use an ultra-wide lower limit for the search
+upperbound = fmaptime.replace(year=2100) # Idem for the upper limit
+
+# There may be more fieldmaps, hence try to narrow the matches down to the adjacent acquisitions
+try:
+fmaptime = dateutil.parser.parse(scans_table.loc[fmap, 'acq_time'])
+runindex = bids.get_bidsvalue(fmap, 'run')
+prevfmap = bids.get_bidsvalue(fmap, 'run', str(int(runindex) - 1))
+nextfmap = bids.get_bidsvalue(fmap, 'run', str(int(runindex) + 1))
+if prevfmap in scans_table.index:
+lowerbound = dateutil.parser.parse(scans_table.loc[prevfmap, 'acq_time']) # Narrow the lower search limit down to the preceding fieldmap
+if nextfmap in scans_table.index:
+upperbound = dateutil.parser.parse(scans_table.loc[nextfmap, 'acq_time']) # Narrow the upper search limit down to the succeeding fieldmap
+except (TypeError, ValueError, KeyError, dateutil.parser.ParserError) as acqtimeerror:
+pass # Raise this only if there are limits and matches, i.e. below
+
+# Narrow down the matches if the user added a range specifier/limits
+if limits and matches:
+try:
+limits = limits[1:-1].split(':', 1) # limits: '[lowerlimit:upperlimit]' -> ['lowerlimit', 'upperlimit']
+lowerlimit = int(limits[0]) if limits[0].strip() else float('-inf')
+upperlimit = int(limits[1]) if limits[1].strip() else float('inf')
+acqtimes = []
+for match in set(matches):
+acqtimes.append((dateutil.parser.parse(scans_table.loc[match, 'acq_time']), match)) # Time + filepath relative to the session-folder
+acqtimes.sort(key=lambda acqtime: acqtime[0])
+offset = sum([acqtime[0] < fmaptime for acqtime in acqtimes]) # The nr of preceding runs
+for nr, acqtime in enumerate(acqtimes):
+if (lowerbound < acqtime[0] < upperbound) and (lowerlimit <= nr-offset <= upperlimit):
+niifiles.add(acqtime[1])
+except Exception as matcherror:
+LOGGER.error(f"Could not bound the fieldmaps using <*:{limits}> as it requires a *_scans.tsv file with acq_time values for: {fmap}\n{matcherror}")
+niifiles.update(matches)
+else:
+niifiles.update(matches)
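
A rough usage sketch of the helper above, with made-up filenames and acquisition times (it assumes the bids.get_bidsvalue() run-substitution used elsewhere in this diff, and runs in the namespace of bidscoiner.py):

    import pandas as pd

    # A hypothetical *_scans.tsv with two fieldmap runs bracketing two of the three bold runs
    scans_table = pd.DataFrame({'acq_time': ['2024-03-20T10:00', '2024-03-20T10:05', '2024-03-20T10:10',
                                             '2024-03-20T10:15', '2024-03-20T10:20']},
                               index=['fmap/sub-01_run-1_phasediff.nii.gz',
                                      'func/sub-01_task-x_run-1_bold.nii.gz',
                                      'func/sub-01_task-x_run-2_bold.nii.gz',
                                      'fmap/sub-01_run-2_phasediff.nii.gz',
                                      'func/sub-01_task-x_run-3_bold.nii.gz'])
    matches = [name for name in scans_table.index if '_bold' in name]

    niifiles = set()
    limitmatches('fmap/sub-01_run-1_phasediff.nii.gz', matches, '[]', niifiles, scans_table)
    print(sorted(niifiles))  # Both bold runs acquired before fieldmap run-2; run-3 falls outside the upperbound

    niifiles = set()
    limitmatches('fmap/sub-01_run-1_phasediff.nii.gz', matches, '[0:0]', niifiles, scans_table)
    print(sorted(niifiles))  # Only the first bold run acquired after the fieldmap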


def addmetadata(bidsses: Path, subid: str, sesid: str) -> None:
"""
Adds the special fieldmap metadata (IntendedFor, TE, etc.)
@@ -343,50 +401,17 @@ def addmetadata(bidsses: Path, subid: str, sesid: str) -> None:
intendedfor = jsondata.get('IntendedFor')
if intendedfor and isinstance(intendedfor, str):

-# Check if there are multiple runs and get the lower- and upperbound from the AcquisitionTime to limit down the IntendedFor search
-fmaptime = dateutil.parser.parse('1925-01-01') # If nothing, use the BIDS stub acquisition time
-lowerbound = fmaptime.replace(year=1900) # If nothing, use an ultra-wide lower limit for the IntendedFor search
-upperbound = fmaptime.replace(year=2100) # Idem for the upper limit
-try: # There may be more fieldmaps, hence try to limit down the search to the adjacently acquired data
-fmaptime = dateutil.parser.parse(scans_table.loc[fmap, 'acq_time'])
-runindex = bids.get_bidsvalue(fmap, 'run')
-prevfmap = bids.get_bidsvalue(fmap, 'run', str(int(runindex) - 1))
-nextfmap = bids.get_bidsvalue(fmap, 'run', str(int(runindex) + 1))
-if prevfmap in fmaps:
-lowerbound = dateutil.parser.parse(scans_table.loc[prevfmap, 'acq_time']) # Narrow the lower search limit down to the preceding fieldmap
-if nextfmap in fmaps:
-upperbound = dateutil.parser.parse(scans_table.loc[nextfmap, 'acq_time']) # Narrow the upper search limit down to the succeeding fieldmap
-except (TypeError, ValueError, KeyError, dateutil.parser.ParserError) as acqtimeerror:
-pass # Raise this only if there are limits and matches, i.e. below

-# Search with multiple patterns for matching NIfTI-files in all runs and store the relative path to the session folder
-niifiles = []
+# Search with multiple patterns for matching NIfTI-files in all runs and store the relative paths to the session folder
+niifiles = set()
if intendedfor.startswith('<') and intendedfor.endswith('>'):
intendedfor = intendedfor[2:-2].split('><')
elif not isinstance(intendedfor, list):
intendedfor = [intendedfor]
for part in intendedfor:
-limits = part.split(':',1)[1].strip() if ':' in part else '' # part = 'pattern: [lowerlimit:upperlimit]'
-pattern = part.split(':',1)[0].strip()
+pattern = part.split(':',1)[0].strip() # part = 'pattern: [lowerlimit:upperlimit]'
+limits = part.split(':',1)[1].strip() if ':' in part else ''
matches = [niifile.relative_to(bidsses).as_posix() for niifile in sorted(bidsses.rglob(f"*{pattern}*")) if pattern and '.nii' in niifile.suffixes]
-if limits and matches:
-try:
-limits = limits[1:-1].split(':',1) # limits: '[lowerlimit:upperlimit]' -> ['lowerlimit', 'upperlimit']
-lowerlimit = int(limits[0]) if limits[0].strip() else float('-inf')
-upperlimit = int(limits[1]) if limits[1].strip() else float('inf')
-acqtimes = []
-for match in matches:
-acqtimes.append((dateutil.parser.parse(scans_table.loc[match,'acq_time']), match)) # Time + filepath relative to the session-folder
-acqtimes.sort(key = lambda acqtime: acqtime[0])
-offset = sum([acqtime[0] < fmaptime for acqtime in acqtimes]) # The nr of preceding series
-for n, acqtime in enumerate(acqtimes):
-if lowerbound < acqtime[0] < upperbound and lowerlimit <= n-offset < upperlimit:
-niifiles.append(acqtime[1])
-except Exception as intendedforerror:
-LOGGER.error(f"Could not bound the <{part}> IntendedFor search as it requires a *_scans.tsv file with acq_time values for: {fmap}\n{intendedforerror}")
-niifiles.extend(matches)
-else:
-niifiles.extend(matches)
+limitmatches(fmap, matches, limits, niifiles, scans_table)

# Add the IntendedFor data. NB: The BIDS URI paths need to use forward slashes and be relative to the bids root folder
if niifiles:
Expand All @@ -403,6 +428,43 @@ def addmetadata(bidsses: Path, subid: str, sesid: str) -> None:
if not jsondata.get('IntendedFor'):
jsondata.pop('IntendedFor', None)

+# Bound all matching B0FieldIdentifier/Source files
+b0fieldtag = jsondata.get('B0FieldIdentifier') or ''
+if fnmatch(b0fieldtag, '*<<*:[*]>>*'): # b0fieldtag = 'tag<<session:[lowerlimit:upperlimit]>>tag'
+
+# Search in all runs for the b0fieldtag and store the relative paths to the session folder
+niifiles = set()
+matches = []
+dynamic = b0fieldtag.split('<<')[1].split('>>')[0] # dynamic = 'session:[lowerlimit:upperlimit]'
+limits = dynamic.split(':',1)[1].strip() # limits = '[lowerlimit:upperlimit]'
+for match in sorted(bidsses.rglob(f"sub-*.nii*")):
+if match.with_suffix('').with_suffix('.json').is_file():
+with match.with_suffix('').with_suffix('.json').open('r') as sidecar:
+metadata = json.load(sidecar)
+if metadata.get('B0FieldIdentifier') == b0fieldtag or metadata.get('B0FieldSource') == b0fieldtag:
+matches.append(match.relative_to(bidsses).as_posix())
+limitmatches(fmap, matches, limits, niifiles, scans_table)
+
+# In the b0fieldtags, replace the limits with fieldmap runindex
+newfieldtag = b0fieldtag.replace(':'+limits, '_'+bids.get_bidsvalue(fmap,'run'))
+for niifile in niifiles:
+metafile = (bidsses/niifile).with_suffix('').with_suffix('.json')
+LOGGER.debug(f"Updating the b0fieldtag ({b0fieldtag} -> {newfieldtag}) for: {metafile}")
+if niifile == fmap:
+metadata = jsondata
+elif metafile.is_file():
+with metafile.open('r') as sidecar:
+metadata = json.load(sidecar)
+else:
+continue
+if 'B0FieldIdentifier' in metadata:
+metadata['B0FieldIdentifier'] = newfieldtag
+if 'B0FieldSource' in metadata:
+metadata['B0FieldSource'] = newfieldtag
+if niifile != fmap:
+with metafile.open('w') as sidecar:
+json.dump(metadata, sidecar, indent=4)

# Extract the echo times from magnitude1 and magnitude2 and add them to the phasediff json-file
if jsonfile.name.endswith('phasediff.json'):
json_magnitude = [None, None]
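The tag rewrite in the new B0FieldIdentifier/Source block boils down to a single string substitution per sidecar. A toy example, with a hypothetical tag and fieldmap run-index:

    b0fieldtag  = 'pepolar<<ses01:[-1:1]>>'                 # Hypothetical tag, as produced by updatemetadata() in bids.py
    dynamic     = b0fieldtag.split('<<')[1].split('>>')[0]  # -> 'ses01:[-1:1]'
    limits      = dynamic.split(':',1)[1].strip()           # -> '[-1:1]'
    newfieldtag = b0fieldtag.replace(':'+limits, '_'+'2')   # '2' standing in for bids.get_bidsvalue(fmap, 'run')
    print(newfieldtag)                                      # -> 'pepolar<<ses01_2>>'

Each fieldmap run thus ends up with its own run-specific tag, shared only with the acquisitions that limitmatches() bounded to it.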
8 changes: 8 additions & 0 deletions docs/_static/dictionary-custom.txt
@@ -22,6 +22,7 @@ CMRR
CSA
CSV
Changelog
+CONFIGDIR
DCCN
DICOM
DICOMDIR
@@ -125,6 +126,7 @@ anonymization
anonymized
apptainer
args
+avwutils
backported
bcoin
behaviour
@@ -189,13 +191,15 @@ fmap
fmriprep
fninf
fnmatch
fsl
fslmaths
fullmatch
fullpath
func
github
gz
hardcoded
hoc
importlib
inputimage
iso
@@ -204,6 +208,7 @@ jitter
jneumeth
json
libEGL
+libvis
logfiles
lookup
magnitude1
@@ -256,6 +261,7 @@ pydeface
pydicom
pypi
qsub
+qt5
rawfolder
rawmapper
rebranded
@@ -297,11 +303,13 @@ tempdir
tooltip
tooltips
tsv
+underspecified
unhandled
unevaluated
url
v1
v3
+v4
v7
validator
versa
