Loading datahipy/bids/dataset.py +2 −1 Original line number Diff line number Diff line Loading @@ -22,7 +22,6 @@ from datahipy.bids.validation import ( get_bids_validator_output_info, ) from datahipy.bids.version import determine_bids_schema_version from datahipy.utils.versioning import get_latest_tag # Set the number of threads to use for parallel processing # Modify this value if you want to use more or less threads or Loading Loading @@ -241,6 +240,8 @@ def get_bidsdataset_content(bids_dir=None): dataset_desc : dict Dictionary storing dataset information indexed by the HIP platform. """ # Import here to avoid circular import from datahipy.utils.versioning import get_latest_tag # Load the dataset_description.json as initial dictionary-based description with open(os.path.join(bids_dir, "dataset_description.json"), "r") as f: dataset_desc = json.load(f) Loading datahipy/cli/run.py +10 −1 Original line number Diff line number Diff line Loading @@ -9,7 +9,7 @@ from datahipy.bids.dataset import get_all_datasets_content from datahipy.handlers.dataset import DatasetHandler from datahipy.handlers.participants import ParticipantHandler from datahipy.handlers.project import create_project, import_subject, import_document from datahipy.utils.versioning import create_tag, get_tags, checkout_tag from datahipy.utils.versioning import create_tag, get_tags, checkout_tag, release_version VALID_COMMANDS = [ "dataset.create", Loading @@ -18,6 +18,7 @@ VALID_COMMANDS = [ "dataset.get_tags", "dataset.checkout_tag", "datasets.get", "dataset.release_version", "sub.get", "sub.import", "sub.edit.clinical", Loading @@ -29,6 +30,7 @@ VALID_COMMANDS = [ "project.create_tag", "project.get_tags", "project.checkout_tag", "project.release_version", ] Loading Loading @@ -69,6 +71,7 @@ def main(): dhdl = DatasetHandler(dataset_path=dataset_path) phdl = ParticipantHandler(dataset_path=dataset_path, input_path=input_path) # Dataset commands if command == "dataset.create": return 
dhdl.dataset_create(input_data=input_data) if command == "dataset.get": Loading @@ -84,6 +87,9 @@ def main(): input_data=input_data, output_file=output_file, ) if command == "dataset.release_version": return release_version(input_data=input_data, output_file=output_file) # Dataset subject / participant-level commands if command == "sub.import": return phdl.sub_import(input_data=input_data) if command == "sub.edit.clinical": Loading @@ -94,6 +100,7 @@ def main(): return phdl.sub_delete(input_data=input_data) if command == "sub.delete.file": return phdl.sub_delete_file(input_data=input_data) # Project commands if command == "project.create": create_project(input_data=input_data, output_file=output_file) if command == "project.sub.import": Loading @@ -106,6 +113,8 @@ def main(): return get_tags(input_data=input_data, output_file=output_file) if command == "project.checkout_tag": return checkout_tag(input_data=input_data) if command == "project.release_version": return release_version(input_data=input_data, output_file=output_file) if __name__ == "__main__": Loading datahipy/utils/versioning.py +72 −11 Original line number Diff line number Diff line Loading @@ -12,7 +12,6 @@ from datalad.support.gitrepo import GitRepo from datahipy.bids.version import create_bids_changes_tag_entry, update_bids_changes TAG_EXCEPTIONS = ["master", "main", "HEAD"] Loading Loading @@ -58,7 +57,7 @@ def create_tag(input_data): Parameters ---------- input_data : dict Dictionary containing the input data for the command Dictionary or JSON file containing the input data for the command in the format:: { Loading @@ -69,10 +68,12 @@ def create_tag(input_data): } """ # Load input data if isinstance(input_data, str) or isinstance(input_data, os.PathLike): with open(input_data, "r") as f: input_data = json.load(f) elif isinstance(input_data, dict): pass print(f"Create tag {input_data['tag']} for dataset {input_data['path']}...") # Check if the tag is valid if not validate_tag(input_data["tag"], 
def release_version(input_data, output_file):
    """Release a new version of a project / BIDS dataset managed by Git/Datalad.

    The latest existing tag of the dataset is incremented at the requested
    semantic-version level, the new tag is created via :func:`create_tag`
    (which also updates the BIDS ``CHANGES`` file), and a fresh dataset
    summary is written to ``output_file``.

    **Note:** The tag is created on a Datalad dataset, not on a specific file.
    If the dataset is a BIDS dataset, the tag will be created only on this
    dataset. If the dataset is a Collaborative Project, the tag will be
    created recursively on the project dataset and on the nested BIDS
    dataset of the project.

    Parameters
    ----------
    input_data : dict or str
        Dictionary or path to a JSON file containing the input data for the
        command in the format::

            {
                "path": "/path/to/dataset",
                "type": "bids",  # or "project"
                "level": "major",  # or "minor" or "patch"
                "changes_list": ["Change 1", "Change 2"]
            }

    output_file : str
        Absolute path to the output JSON file containing the dataset summary
        generated by :py:func:`datahipy.bids.dataset.get_bidsdataset_content`.
    """
    # Import here to avoid circular imports
    from datahipy.bids.dataset import get_bidsdataset_content

    # Accept either a path to a JSON file or an already-loaded dictionary,
    # consistent with create_tag() in this module. (The original code opened
    # input_data unconditionally, so passing the documented dict would raise
    # a TypeError.)
    if isinstance(input_data, (str, os.PathLike)):
        with open(input_data, "r") as f:
            input_data = json.load(f)

    print(f"Release version for dataset {input_data['path']}...")
    # Get the latest tag of the dataset
    latest_tag = get_latest_tag(input_data["path"])
    # Increment the latest tag at the specified level ("major"/"minor"/"patch")
    new_tag = increment_tag(latest_tag, input_data["level"])
    # Create the new tag (and the CHANGES entry) on the dataset
    create_tag(
        input_data={
            "path": input_data["path"],
            "type": input_data["type"],
            "tag": new_tag,
            "changes_list": input_data["changes_list"],
        }
    )
    # Resolve the BIDS directory: the dataset itself for type "bids",
    # otherwise the nested BIDS dataset of a Collaborative Project
    bids_dir = (
        input_data["path"]
        if input_data["type"] == "bids"
        else os.path.join(input_data["path"], "inputs", "bids-dataset")
    )
    # Generate the dataset summary and save it to the output JSON file
    dataset_summary = get_bidsdataset_content(bids_dir=bids_dir)
    with open(output_file, "w") as f:
        json.dump(dataset_summary, f, indent=4)
    print(SUCCESS)
datahipy/bids/dataset.py +2 −1 Original line number Diff line number Diff line Loading @@ -22,7 +22,6 @@ from datahipy.bids.validation import ( get_bids_validator_output_info, ) from datahipy.bids.version import determine_bids_schema_version from datahipy.utils.versioning import get_latest_tag # Set the number of threads to use for parallel processing # Modify this value if you want to use more or less threads or Loading Loading @@ -241,6 +240,8 @@ def get_bidsdataset_content(bids_dir=None): dataset_desc : dict Dictionary storing dataset information indexed by the HIP platform. """ # Import here to avoid circular import from datahipy.utils.versioning import get_latest_tag # Load the dataset_description.json as initial dictionary-based description with open(os.path.join(bids_dir, "dataset_description.json"), "r") as f: dataset_desc = json.load(f) Loading
datahipy/cli/run.py +10 −1 Original line number Diff line number Diff line Loading @@ -9,7 +9,7 @@ from datahipy.bids.dataset import get_all_datasets_content from datahipy.handlers.dataset import DatasetHandler from datahipy.handlers.participants import ParticipantHandler from datahipy.handlers.project import create_project, import_subject, import_document from datahipy.utils.versioning import create_tag, get_tags, checkout_tag from datahipy.utils.versioning import create_tag, get_tags, checkout_tag, release_version VALID_COMMANDS = [ "dataset.create", Loading @@ -18,6 +18,7 @@ VALID_COMMANDS = [ "dataset.get_tags", "dataset.checkout_tag", "datasets.get", "dataset.release_version", "sub.get", "sub.import", "sub.edit.clinical", Loading @@ -29,6 +30,7 @@ VALID_COMMANDS = [ "project.create_tag", "project.get_tags", "project.checkout_tag", "project.release_version", ] Loading Loading @@ -69,6 +71,7 @@ def main(): dhdl = DatasetHandler(dataset_path=dataset_path) phdl = ParticipantHandler(dataset_path=dataset_path, input_path=input_path) # Dataset commands if command == "dataset.create": return dhdl.dataset_create(input_data=input_data) if command == "dataset.get": Loading @@ -84,6 +87,9 @@ def main(): input_data=input_data, output_file=output_file, ) if command == "dataset.release_version": return release_version(input_data=input_data, output_file=output_file) # Dataset subject / participant-level commands if command == "sub.import": return phdl.sub_import(input_data=input_data) if command == "sub.edit.clinical": Loading @@ -94,6 +100,7 @@ def main(): return phdl.sub_delete(input_data=input_data) if command == "sub.delete.file": return phdl.sub_delete_file(input_data=input_data) # Project commands if command == "project.create": create_project(input_data=input_data, output_file=output_file) if command == "project.sub.import": Loading @@ -106,6 +113,8 @@ def main(): return get_tags(input_data=input_data, output_file=output_file) if command == "project.checkout_tag": 
return checkout_tag(input_data=input_data) if command == "project.release_version": return release_version(input_data=input_data, output_file=output_file) if __name__ == "__main__": Loading
def release_version(input_data, output_file):
    """Release a new version of a project / BIDS dataset managed by Git/Datalad.

    The latest existing tag of the dataset is incremented at the requested
    semantic-version level, the new tag is created via :func:`create_tag`
    (which also updates the BIDS ``CHANGES`` file), and a fresh dataset
    summary is written to ``output_file``.

    **Note:** The tag is created on a Datalad dataset, not on a specific file.
    If the dataset is a BIDS dataset, the tag will be created only on this
    dataset. If the dataset is a Collaborative Project, the tag will be
    created recursively on the project dataset and on the nested BIDS
    dataset of the project.

    Parameters
    ----------
    input_data : dict or str
        Dictionary or path to a JSON file containing the input data for the
        command in the format::

            {
                "path": "/path/to/dataset",
                "type": "bids",  # or "project"
                "level": "major",  # or "minor" or "patch"
                "changes_list": ["Change 1", "Change 2"]
            }

    output_file : str
        Absolute path to the output JSON file containing the dataset summary
        generated by :py:func:`datahipy.bids.dataset.get_bidsdataset_content`.
    """
    # Import here to avoid circular imports
    from datahipy.bids.dataset import get_bidsdataset_content

    # Accept either a path to a JSON file or an already-loaded dictionary,
    # consistent with create_tag() in this module. (The original code opened
    # input_data unconditionally, so passing the documented dict would raise
    # a TypeError.)
    if isinstance(input_data, (str, os.PathLike)):
        with open(input_data, "r") as f:
            input_data = json.load(f)

    print(f"Release version for dataset {input_data['path']}...")
    # Get the latest tag of the dataset
    latest_tag = get_latest_tag(input_data["path"])
    # Increment the latest tag at the specified level ("major"/"minor"/"patch")
    new_tag = increment_tag(latest_tag, input_data["level"])
    # Create the new tag (and the CHANGES entry) on the dataset
    create_tag(
        input_data={
            "path": input_data["path"],
            "type": input_data["type"],
            "tag": new_tag,
            "changes_list": input_data["changes_list"],
        }
    )
    # Resolve the BIDS directory: the dataset itself for type "bids",
    # otherwise the nested BIDS dataset of a Collaborative Project
    bids_dir = (
        input_data["path"]
        if input_data["type"] == "bids"
        else os.path.join(input_data["path"], "inputs", "bids-dataset")
    )
    # Generate the dataset summary and save it to the output JSON file
    dataset_summary = get_bidsdataset_content(bids_dir=bids_dir)
    with open(output_file, "w") as f:
        json.dump(dataset_summary, f, indent=4)
    print(SUCCESS)