def create_initial_bids_changes(bids_dir, content_lines=None):
    """Create an initial `CHANGES` file for a BIDS dataset.

    Parameters
    ----------
    bids_dir : str
        Path to the BIDS dataset.
    content_lines : list of str, optional
        Lines to write to the `CHANGES` file. If ``None`` (default), a
        default initial entry is written: version ``0.0.0`` dated today,
        with a "Creation of the dataset." bullet. Pass an empty list to
        create an empty `CHANGES` file.
    """
    if content_lines is None:
        # Default first release block: "0.0.0 YYYY-MM-DD" plus one bullet.
        content_lines = [
            f"0.0.0 {date.today().strftime('%Y-%m-%d')}\n",
            "\t- Creation of the dataset.",
        ]
    with open(os.path.join(bids_dir, "CHANGES"), "w") as f:
        f.writelines(content_lines)
def create_bids_changes_tag_entry(tag, changes_list):
    """Build the release text block for a new tag in a BIDS `CHANGES` file.

    Parameters
    ----------
    tag : str
        Tag of the dataset (e.g. ``"1.0.0"``).
    changes_list : list
        List of changes to add to the `CHANGES` file.

    Returns
    -------
    list
        List of lines to be written to the `CHANGES` file.
    """
    # Header line: "<tag> <YYYY-MM-DD>".
    release_date = date.today().strftime("%Y-%m-%d")
    header = f"{tag} {release_date}\n"
    # One tab-indented bullet per change, separated by newlines.
    bullets = "\n\t- " + "\n\t- ".join(changes_list)
    # Trailing blank line separates this block from older entries.
    return [header, bullets, "\n\n"]
""" # Read the current content of the CHANGES file with open(os.path.join(bids_dir, "CHANGES"), "r") as f: content = f.readlines() # Create a new CHANGES file with the new release text block at the top with open(os.path.join(bids_dir, "CHANGES"), "w") as f: f.writelines(changes_tag_entry + content) datahipy/handlers/dataset.py +10 −1 Original line number Diff line number Diff line Loading @@ -20,7 +20,11 @@ try: except ImportError: print("WARNING: BIDS Manager Python package is not accessible.") from datahipy.bids.dataset import get_bidsdataset_content from datahipy.bids.dataset import ( get_bidsdataset_content, create_initial_bids_changes, create_initial_bids_readme, ) class DatasetHandler: Loading Loading @@ -55,11 +59,16 @@ class DatasetHandler: if not os.path.isfile(datasetdesc_path): # Write the dataset_description.json file datasetdesc_dict.write_file(jsonfilename=datasetdesc_path) # Write an initial README.md file create_initial_bids_readme(ds_path, datasetdesc_dict) # Write an initial empty CHANGES file create_initial_bids_changes(ds_path, content_lines=[]) # Save the state of the dataset with Datalad save_params = { "dataset": ds_path, "message": "Initial BIDS dataset state", "recursive": True, # "version_tag": "0.0.0", # Uncomment if you wish to create here a tag for the initial state } datalad.api.save(**save_params) # Load the created BIDS dataset in BIDS Manager (creates companion files) Loading datahipy/utils/versioning.py +76 −6 Original line number Diff line number Diff line Loading @@ -3,12 +3,15 @@ """Methods supporting versioning of BIDS and Collaborative project datasets.""" from datetime import datetime import os import json from sre_constants import SUCCESS import datalad from datalad.support.gitrepo import GitRepo from datahipy.bids.version import create_bids_changes_tag_entry, update_bids_changes TAG_EXCEPTIONS = ["master", "main", "HEAD"] Loading Loading @@ -52,7 +55,6 @@ def create_tag(input_data): If the dataset is a Collaborative 
def get_latest_tag(path):
    """Get the latest version tag of a dataset managed by Git/Datalad.

    Parameters
    ----------
    path : str
        Absolute path to the dataset.

    Returns
    -------
    str
        The latest tag of the dataset in ``X.Y.Z`` form, or ``"0.0.0"``
        if the dataset has no version tag yet.
    """
    tags = [tag_dict["name"] for tag_dict in GitRepo(path).get_tags()]
    # Keep only tags that parse as X.Y.Z integers; a stray non-version
    # tag (e.g. "master") would otherwise crash the numeric sort below
    # with a ValueError.
    version_tags = []
    for tag in tags:
        parts = tag.split(".")
        if len(parts) == 3 and all(part.isdigit() for part in parts):
            version_tags.append(tag)
    if not version_tags:
        return "0.0.0"
    # Sort numerically, not lexicographically, so e.g. 0.10.0 > 0.9.0.
    version_tags.sort(key=lambda s: tuple(map(int, s.split("."))), reverse=True)
    return version_tags[0]
Parameters ---------- tag : str Version tag to increment in the format X.Y.Z, where X, Y, and Z are integers. level : str Level to increment. Can be "major", "minor", or "patch". Returns ------- str The incremented tag. """ if not validate_tag(tag, discard_exceptions=True): raise ValueError( f"Impossible to increment tag {tag}. The format is not valid. " "Please use the format X.Y.Z, where X, Y, and Z are integers." ) tag = tag.split(".") if level == "major": tag[0] = str(int(tag[0]) + 1) tag[1] = "0" tag[2] = "0" elif level == "minor": tag[1] = str(int(tag[1]) + 1) tag[2] = "0" elif level == "patch": tag[2] = str(int(tag[2]) + 1) return ".".join(tag) Loading
datahipy/bids/dataset.py +12 −10 Original line number Diff line number Diff line Loading @@ -22,7 +22,7 @@ from datahipy.bids.validation import ( get_bids_validator_output_info, ) from datahipy.bids.version import determine_bids_schema_version from datahipy.utils.versioning import get_latest_tag # Set the number of threads to use for parallel processing # Modify this value if you want to use more or less threads or Loading Loading @@ -53,7 +53,7 @@ def create_initial_bids_readme(bids_dir, dataset_desc): ) def create_initial_bids_changes(bids_dir): def create_initial_bids_changes(bids_dir, content_lines=None): """Create an initial `CHANGES` file for a BIDS dataset. Parameters Loading @@ -61,13 +61,13 @@ def create_initial_bids_changes(bids_dir): bids_dir : str Path to the BIDS dataset. """ with open(os.path.join(bids_dir, "CHANGES"), "w") as f: f.writelines( [ if content_lines is None: content_lines = [ f"0.0.0 {date.today().strftime('%Y-%m-%d')}\n", "\t- Creation of the dataset.", ] ) with open(os.path.join(bids_dir, "CHANGES"), "w") as f: f.writelines(content_lines) def create_initial_participants_tsv(bids_dir): Loading Loading @@ -119,8 +119,8 @@ def create_empty_bids_dataset(bids_dir=None, dataset_desc=None, project_dir=None json.dump(dataset_desc, f, indent=4) # Create initial README file create_initial_bids_readme(bids_dir, dataset_desc) # Create initial CHANGES file create_initial_bids_changes(bids_dir) # Create initial empty CHANGES file create_initial_bids_changes(bids_dir, content_lines=[]) # Create the .bidsignore file and add the line to ignore CT files # (not yet supported by the validator) add_bidsignore_validation_rule(bids_dir, "**/*_ct.*") Loading Loading @@ -260,6 +260,8 @@ def get_bidsdataset_content(bids_dir=None): dataset_desc.update(get_bids_validator_output_info(bids_dir, bids_schema_version)) # Add information retrieved with pybids to dataset_desc dataset_desc.update(get_bids_layout_info(bids_dir)) # Add the latest tag of the dataset as the 
dataset version dataset_desc["DatasetVersion"] = get_latest_tag(bids_dir) # Return the created dataset_desc dictionary to be indexed return dataset_desc Loading
def update_bids_changes(bids_dir, changes_tag_entry):
    """Prepend a new release text block to the `CHANGES` file of a BIDS dataset.

    Parameters
    ----------
    bids_dir : str
        Path to the BIDS dataset.
    changes_tag_entry : list
        List of lines of the new release text block to insert at the top
        of the `CHANGES` file.
    """
    changes_path = os.path.join(bids_dir, "CHANGES")
    # Read the current content of the CHANGES file; treat a missing file
    # as empty so a freshly initialized dataset can still be tagged.
    try:
        with open(changes_path, "r") as f:
            content = f.readlines()
    except FileNotFoundError:
        content = []
    # Rewrite the CHANGES file with the new release text block at the top.
    with open(changes_path, "w") as f:
        f.writelines(changes_tag_entry + content)
datahipy/handlers/dataset.py +10 −1 Original line number Diff line number Diff line Loading @@ -20,7 +20,11 @@ try: except ImportError: print("WARNING: BIDS Manager Python package is not accessible.") from datahipy.bids.dataset import get_bidsdataset_content from datahipy.bids.dataset import ( get_bidsdataset_content, create_initial_bids_changes, create_initial_bids_readme, ) class DatasetHandler: Loading Loading @@ -55,11 +59,16 @@ class DatasetHandler: if not os.path.isfile(datasetdesc_path): # Write the dataset_description.json file datasetdesc_dict.write_file(jsonfilename=datasetdesc_path) # Write an initial README.md file create_initial_bids_readme(ds_path, datasetdesc_dict) # Write an initial empty CHANGES file create_initial_bids_changes(ds_path, content_lines=[]) # Save the state of the dataset with Datalad save_params = { "dataset": ds_path, "message": "Initial BIDS dataset state", "recursive": True, # "version_tag": "0.0.0", # Uncomment if you wish to create here a tag for the initial state } datalad.api.save(**save_params) # Load the created BIDS dataset in BIDS Manager (creates companion files) Loading
datahipy/utils/versioning.py +76 −6 Original line number Diff line number Diff line Loading @@ -3,12 +3,15 @@ """Methods supporting versioning of BIDS and Collaborative project datasets.""" from datetime import datetime import os import json from sre_constants import SUCCESS import datalad from datalad.support.gitrepo import GitRepo from datahipy.bids.version import create_bids_changes_tag_entry, update_bids_changes TAG_EXCEPTIONS = ["master", "main", "HEAD"] Loading Loading @@ -52,7 +55,6 @@ def create_tag(input_data): If the dataset is a Collaborative Project, the tag will be created recursively on the project dataset and on the nested BIDS dataset of the project. Parameters ---------- input_data : dict Loading @@ -61,8 +63,9 @@ def create_tag(input_data): { "path": "/path/to/dataset", "type": "bids", # or "project" "tag": "1.0.0", "message": "Description of the changes related to the version tag" "changes_list": ["Change 1", "Change 2"] } """ # Load input data Loading @@ -84,15 +87,28 @@ def create_tag(input_data): f"Impossible to create tag {input_data['tag']}. " f"Tag {input_data['tag']} already exists." 
def increment_tag(tag, level):
    """Increment a version tag by a specific level.

    Parameters
    ----------
    tag : str
        Version tag to increment in the format X.Y.Z, where X, Y, and Z
        are integers.
    level : str
        Level to increment. Can be "major", "minor", or "patch".

    Returns
    -------
    str
        The incremented tag.

    Raises
    ------
    ValueError
        If `tag` is not in the X.Y.Z format, or if `level` is not one of
        "major", "minor", or "patch".
    """
    if not validate_tag(tag, discard_exceptions=True):
        raise ValueError(
            f"Impossible to increment tag {tag}. The format is not valid. "
            "Please use the format X.Y.Z, where X, Y, and Z are integers."
        )
    major, minor, patch = (int(part) for part in tag.split("."))
    if level == "major":
        return f"{major + 1}.0.0"
    if level == "minor":
        return f"{major}.{minor + 1}.0"
    if level == "patch":
        return f"{major}.{minor}.{patch + 1}"
    # An unrecognized level used to silently return the tag unchanged;
    # fail loudly instead so callers notice the mistake.
    raise ValueError(
        f"Impossible to increment tag {tag} by level {level}. "
        'Please use one of "major", "minor", or "patch".'
    )