[MAINT] Improve coverage (#68)
* remove commented code and add tests to improve coverage

* add doc string

* import fix

* expand test owl reader

* add smoke test cli
Remi-Gau authored Feb 20, 2024
1 parent a63bbdd · commit 6c4a060
Showing 11 changed files with 146 additions and 171 deletions.
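The commit message above mentions a new CLI smoke test, but the test files are among the changed files not rendered on this page. Purely as an illustration (pytest, the import paths, and the test name are assumptions, not the code added by this commit), a version-flag smoke test for the two console scripts touched below could look like this:

```python
import pytest

# Both entry points are defined in the CLI modules changed by this commit.
from nidmresults.cli.nidm_mkda_convert import main as mkda_main
from nidmresults.cli.nidmreader import main as reader_main


@pytest.mark.parametrize("cli_main", [mkda_main, reader_main])
def test_cli_version_smoke(cli_main):
    # argparse's "version" action prints the version and exits with code 0.
    with pytest.raises(SystemExit) as excinfo:
        cli_main(["prog", "--version"])
    assert excinfo.value.code == 0
```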
1 change: 1 addition & 0 deletions .flake8
@@ -4,6 +4,7 @@ max-line-length = 93
# For PEP8 error codes see
# http://pep8.readthedocs.org/en/latest/intro.html#error-codes
ignore =
D100,
D101,
D102,
D103,
6 changes: 3 additions & 3 deletions nidmresults/cli/nidm_mkda_convert.py
@@ -11,7 +11,7 @@
import sys

from nidmresults import __version__
from nidmresults.graph import Graph
from nidmresults.graph import NIDMResults


def main(argv=sys.argv):
@@ -28,7 +28,7 @@ def main(argv=sys.argv):
nargs="+",
)
parser.add_argument(
"--version", action="version", version=f"{__version__}"
"-v", "--version", action="version", version=f"{__version__}"
)

args = parser.parse_args(argv[1:])
@@ -52,7 +52,7 @@ def main(argv=sys.argv):
if not os.path.isfile(nidmpack):
raise Exception(f"Unknown file: {str(nidmpack)}")

nidmgraph = Graph(nidm_zip=nidmpack)
nidmgraph = NIDMResults(nidm_zip=nidmpack)
con_ids = nidmgraph.serialize(
outfile,
"mkda",
6 changes: 3 additions & 3 deletions nidmresults/cli/nidmreader.py
@@ -14,14 +14,14 @@
import sys

from nidmresults import __version__
from nidmresults.graph import Graph
from nidmresults.graph import NIDMResults


def main(argv=sys.argv):
parser = argparse.ArgumentParser(description="NIDM-Results reader.")
parser.add_argument("nidm_pack", help="Path to NIDM-Results pack.")
parser.add_argument(
"--version", action="version", version=f"{__version__}"
"-v", "--version", action="version", version=f"{__version__}"
)

args = parser.parse_args(argv[1:])
@@ -30,7 +30,7 @@ def main(argv=sys.argv):
if not os.path.isfile(nidm_pack):
raise Exception(f"Unknown file: {str(nidm_pack)}")

nidm_graph = Graph(nidm_zip=nidm_pack)
nidm_graph = NIDMResults(nidm_zip=nidm_pack)
nidm_graph.parse()

nidm_graph.get_peaks()
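Both CLI modules now import and instantiate `NIDMResults` from `nidmresults.graph` rather than the old `Graph` name. A minimal sketch of the same calls used programmatically (the pack path is a placeholder; only methods visible in the diffs above are used):

```python
from nidmresults.graph import NIDMResults

# Placeholder path to an existing NIDM-Results pack.
nidm_pack = "example.nidm.zip"

# Same sequence of calls the nidmreader CLI performs.
nidm_graph = NIDMResults(nidm_zip=nidm_pack)
nidm_graph.parse()
nidm_graph.get_peaks()
```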
79 changes: 6 additions & 73 deletions nidmresults/exporter.py
@@ -144,15 +144,13 @@ def cleanup(self):

def add_object(self, nidm_object, export_file=True):
"""Add a NIDMObject to a NIDM-Results export."""
if not export_file:
export_dir = None
else:
export_dir = self.export_dir
export_dir = None if not export_file else self.export_dir

if not isinstance(nidm_object, NIDMFile):
nidm_object.export(self.version, export_dir)
else:
nidm_object.export(self.version, export_dir, self.prepend_path)

# ProvDocument: add object to the bundle
if nidm_object.prov_type == PROV["Activity"]:
self.bundle.activity(
@@ -166,7 +164,6 @@ def add_object(self, nidm_object, export_file=True):
self.bundle.agent(
nidm_object.id, other_attributes=nidm_object.attributes
)
# self.bundle.update(nidm_object.p)

def export(self):
"""Generate a NIDM-Results export."""
@@ -185,7 +182,6 @@ def export(self):

for model_fitting in self.model_fittings:
# Design Matrix
# model_fitting.activity.used(model_fitting.design_matrix)
self.bundle.used(
model_fitting.activity.id, model_fitting.design_matrix.id
)
@@ -204,7 +200,6 @@ def export(self):
self.version["major"] == 1 and self.version["minor"] >= 3
):
# Machine
# model_fitting.data.wasAttributedTo(model_fitting.machine)
self.bundle.wasAttributedTo(
model_fitting.data.id, model_fitting.machine.id
)
@@ -213,28 +208,24 @@ def export(self):
# Imaged subject or group(s)
for sub in model_fitting.subjects:
self.add_object(sub)
# model_fitting.data.wasAttributedTo(sub)
self.bundle.wasAttributedTo(
model_fitting.data.id, sub.id
)

# Data
# model_fitting.activity.used(model_fitting.data)
self.bundle.used(
model_fitting.activity.id, model_fitting.data.id
)
self.add_object(model_fitting.data)

# Error Model
# model_fitting.activity.used(model_fitting.error_model)
self.bundle.used(
model_fitting.activity.id, model_fitting.error_model.id
)
self.add_object(model_fitting.error_model)

# Parameter Estimate Maps
for param_estimate in model_fitting.param_estimates:
# param_estimate.wasGeneratedBy(model_fitting.activity)
self.bundle.wasGeneratedBy(
param_estimate.id, model_fitting.activity.id
)
@@ -252,7 +243,6 @@ def export(self):
)

# Residual Mean Squares Map
# model_fitting.rms_map.wasGeneratedBy(model_fitting.activity)
self.add_object(model_fitting.rms_map)
self.bundle.wasGeneratedBy(
model_fitting.rms_map.id, model_fitting.activity.id
@@ -294,7 +284,6 @@ def export(self):
)

# Mask
# model_fitting.mask_map.wasGeneratedBy(model_fitting.activity)
self.bundle.wasGeneratedBy(
model_fitting.mask_map.id, model_fitting.activity.id
)
@@ -315,7 +304,6 @@ def export(self):
self.add_object(model_fitting.mask_map.file)

# Grand Mean map
# model_fitting.grand_mean_map.wasGeneratedBy(model_fitting.activity)
self.bundle.wasGeneratedBy(
model_fitting.grand_mean_map.id, model_fitting.activity.id
)
@@ -330,8 +318,6 @@ def export(self):
self.bundle.wasAssociatedWith(
model_fitting.activity.id, self.software.id
)
# model_fitting.activity.wasAssociatedWith(self.software)
# self.add_object(model_fitting)

# Add contrast estimation steps
analysis_masks = {}
@@ -340,12 +326,9 @@
):
for contrast in contrasts:
model_fitting = self._get_model_fitting(model_fitting_id)
# for contrast in contrasts:
# contrast.estimation.used(model_fitting.rms_map)
self.bundle.used(
contrast.estimation.id, model_fitting.rms_map.id
)
# contrast.estimation.used(model_fitting.mask_map)
self.bundle.used(
contrast.estimation.id, model_fitting.mask_map.id
)
@@ -358,13 +341,11 @@ def export(self):
self.bundle.used(
contrast.estimation.id, model_fitting.design_matrix.id
)
# contrast.estimation.wasAssociatedWith(self.software)
self.bundle.wasAssociatedWith(
contrast.estimation.id, self.software.id
)

for pe_id in pe_ids:
# contrast.estimation.used(pe_id)
self.bundle.used(contrast.estimation.id, pe_id)

# Create estimation activity
@@ -375,7 +356,6 @@ def export(self):

if contrast.contrast_map is not None:
# Create contrast Map
# contrast.contrast_map.wasGeneratedBy(contrast.estimation)
self.bundle.wasGeneratedBy(
contrast.contrast_map.id, contrast.estimation.id
)
@@ -429,7 +409,6 @@ def export(self):
self.add_object(stderr_explmeansq_map.file)

# Create Statistic Map
# contrast.stat_map.wasGeneratedBy(contrast.estimation)
self.bundle.wasGeneratedBy(
contrast.stat_map.id, contrast.estimation.id
)
@@ -449,7 +428,6 @@ def export(self):

# Create Z Statistic Map
if contrast.z_stat_map:
# contrast.z_stat_map.wasGeneratedBy(contrast.estimation)
self.bundle.wasGeneratedBy(
contrast.z_stat_map.id, contrast.estimation.id
)
@@ -469,16 +447,12 @@ def export(self):
used_id = contrast.z_stat_map.id
else:
used_id = contrast.stat_map.id
# inference.inference_act.used(used_id)
self.bundle.used(inference.inference_act.id, used_id)
# inference.inference_act.wasAssociatedWith(self.software)
self.bundle.wasAssociatedWith(
inference.inference_act.id, self.software.id
)

# self.add_object(inference)
# Excursion set
# inference.excursion_set.wasGeneratedBy(inference.inference_act)
self.bundle.wasGeneratedBy(
inference.excursion_set.id, inference.inference_act.id
)
@@ -516,7 +490,6 @@ def export(self):
# Display Mask (potentially more than 1)
if inference.disp_mask:
for mask in inference.disp_mask:
# inference.inference_act.used(mask)
self.bundle.used(
inference.inference_act.id, mask.id
)
@@ -539,15 +512,13 @@ def export(self):
self.bundle.wasGeneratedBy(
inference.search_space.id, inference.inference_act.id
)
# inference.search_space.wasGeneratedBy(inference.inference_act)
self.add_object(inference.search_space)
self.add_object(inference.search_space.coord_space)
# Copy "Mask map" in export directory
self.add_object(inference.search_space.file)

# Peak Definition
if inference.peak_criteria:
# inference.inference_act.used(inference.peak_criteria)
self.bundle.used(
inference.inference_act.id,
inference.peak_criteria.id,
@@ -556,7 +527,6 @@ def export(self):

# Cluster Definition
if inference.cluster_criteria:
# inference.inference_act.used(inference.cluster_criteria)
self.bundle.used(
inference.inference_act.id,
inference.cluster_criteria.id,
@@ -566,7 +536,6 @@ def export(self):
if inference.clusters:
# Clusters and peaks
for cluster in inference.clusters:
# cluster.wasDerivedFrom(inference.excursion_set)
self.bundle.wasDerivedFrom(
cluster.id, inference.excursion_set.id
)
@@ -584,12 +553,9 @@ def export(self):
self.add_object(cluster.cog.coordinate)

# Inference activity
# inference.inference_act.wasAssociatedWith(inference.software_id)
# inference.inference_act.used(inference.height_thresh)
self.bundle.used(
inference.inference_act.id, inference.height_thresh.id
)
# inference.inference_act.used(inference.extent_thresh)
self.bundle.used(
inference.inference_act.id, inference.extent_thresh.id
)
@@ -651,9 +617,6 @@ def _create_bundle(self, version):

self.bundle_ent.export(self.version, self.export_dir)

# # provn export
# self.bundle = ProvBundle(identifier=bundle_id)

self.doc.entity(
self.bundle_ent.id, other_attributes=self.bundle_ent.attributes
)
@@ -663,7 +626,6 @@ def _create_bundle(self, version):
if not hasattr(self, "export_act"):
self.export_act = NIDMResultsExport()
self.export_act.export(self.version, self.export_dir)
# self.doc.update(self.export_act.p)
self.doc.activity(
self.export_act.id, other_attributes=self.export_act.attributes
)
@@ -689,7 +651,6 @@ def _create_bundle(self, version):
if not hasattr(self, "exporter"):
self.exporter = self._get_exporter()
self.exporter.export(self.version, self.export_dir)
# self.doc.update(self.exporter.p)
self.doc.agent(
self.exporter.id, other_attributes=self.exporter.attributes
)
@@ -702,10 +663,9 @@ def _get_model_parameters_estimations(self, error_model):
Return an object of type ModelParametersEstimation.
"""
if error_model.dependence == NIDM_INDEPEDENT_ERROR:
if error_model.variance_homo:
estimation_method = STATO_OLS
else:
estimation_method = STATO_WLS
estimation_method = (
STATO_OLS if error_model.variance_homo else STATO_WLS
)
else:
estimation_method = STATO_GLS

@@ -735,11 +695,6 @@ def use_prefixes(self, ttl):
def save_prov_to_files(self, showattributes=False):
"""Write-out provn serialisation to nidm.provn."""
self.doc.add_bundle(self.bundle)
# provn_file = os.path.join(self.export_dir, 'nidm.provn')
# provn_fid = open(provn_file, 'w')
# # FIXME None
# # provn_fid.write(self.doc.get_provn(4).replace("None", "-"))
# provn_fid.close()

ttl_file = os.path.join(self.export_dir, "nidm.ttl")
ttl_txt = self.doc.serialize(format="rdf", rdf_format="turtle")
@@ -780,16 +735,6 @@ def save_prov_to_files(self, showattributes=False):
with open(jsonld_11_file, "w") as fid:
fid.write(jsonld_11)

# provjsonld_file = os.path.join(self.export_dir, 'nidm.provjsonld')
# provjsonld_txt = self.doc.serialize(format='jsonld')
# with open(provjsonld_file, 'w') as provjsonld_fid:
# provjsonld_fid.write(provjsonld_txt)

# provn_file = os.path.join(self.export_dir, 'nidm.provn')
# provn_txt = self.doc.serialize(format='provn')
# with open(provn_file, 'w') as provn_fid:
# provn_fid.write(provn_txt)

# Post-processing
if not self.zipped:
# Just rename temp directory to output_path
@@ -799,23 +744,11 @@ def save_prov_to_files(self, showattributes=False):
os.chdir(self.export_dir)
zf = zipfile.ZipFile(os.path.join("..", self.out_dir), mode="w")
try:
for root, dirnames, filenames in os.walk("."):
for _, _, filenames in os.walk("."):
for filename in filenames:
zf.write(os.path.join(filename))
finally:
zf.close()
# Need to move up before deleting the folder
os.chdir("..")
shutil.rmtree(os.path.join("..", self.export_dir))

# ttl_fid = open(ttl_file, 'w');
# serialization is done in xlm rdf
# graph = Graph()
# graph.parse(data=self.doc.serialize(format='rdf'), format="xml")
# ttl_fid.write(graph.serialize(format="turtle"))
# ttl_fid.write(self.doc.serialize(format='rdf').
# replace("inf", '"INF"'))
# ttl_fid.close()
# print("provconvert -infile " + provn_file + " -outfile " + ttl_file)
# check_call("provconvert -infile " + provn_file +
# " -outfile " + ttl_file, shell=True)
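In `save_prov_to_files`, the zipping loop now discards the unused `root` and `dirnames` values from `os.walk`. A standalone, hedged sketch of that pattern outside the exporter (the helper name and the relative-path handling are assumptions, not code from this repository):

```python
import os
import zipfile


def zip_directory_contents(directory, zip_path):
    """Write every file found under `directory` into a new zip archive."""
    with zipfile.ZipFile(zip_path, mode="w") as zf:
        for root, _, filenames in os.walk(directory):
            for filename in filenames:
                full_path = os.path.join(root, filename)
                # Store entries relative to the archived directory.
                zf.write(full_path, arcname=os.path.relpath(full_path, directory))
```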