Commits on Source (63)
+fixes:
+  - "/builds/hip/datahipy/datahipy::datahipy/"
@@ -3,7 +3,7 @@
branch = False
concurrency = multiprocessing
parallel = True
-source = bids_tools
+source = datahipy
# Omit files that are generated by versioneer
omit = */_version.py
......
-bids_tools/_version.py export-subst
+datahipy/_version.py export-subst
@@ -8,7 +8,7 @@ jobs:
- uses: actions/setup-python@v4
with:
python-version: '3.9'
-      - name: Install bids_tools and dependencies
+      - name: Install datahipy and dependencies
run: |
pip install -r docs/requirements.txt
pip install -e .[all]
......
@@ -13,6 +13,7 @@ variables:
REG_VERSION: 0.16.1
GL_TOKEN: $GL_TOKEN
GH_TOKEN: $GH_TOKEN
+  SSH_PRIVATE_KEY: $SSH_PRIVATE_KEY
services:
- name: docker:20-dind
alias: docker
@@ -40,8 +41,24 @@ before_script:
- curl --fail --show-error --location "https://github.com/genuinetools/reg/releases/download/v$REG_VERSION/reg-linux-amd64" --output ./reg
- echo "$REG_SHA256 ./reg" | sha256sum -c -
- chmod a+x ./reg
+  # install additional dependencies required by the codecov uploader
+  - apk add --no-cache gnupg coreutils
# login the gitlab container registry
- echo $CI_REGISTRY_PASSWORD | docker login $CI_REGISTRY -u $CI_REGISTRY_USER --password-stdin
+  # install ssh-agent if not already installed; it is required by Docker
+  - 'command -v ssh-agent >/dev/null || ( apt-get update -y && apt-get install openssh-client -y )'
+  # run ssh-agent (inside the build environment)
+  - eval $(ssh-agent -s)
+  # create the SSH directory and give it the right permissions
+  - mkdir -p ~/.ssh
+  - chmod 700 ~/.ssh
+  # store the key and restrict its permissions, otherwise ssh-add will refuse to add it
+  - echo "$SSH_PRIVATE_KEY" > ~/.ssh/id_gitlab_rsa
+  - chmod 400 ~/.ssh/id_gitlab_rsa
+  # add github.com to the list of known hosts
+  - ssh-keyscan -H github.com >> ~/.ssh/known_hosts
+  # add the SSH key stored in the SSH_PRIVATE_KEY CI/CD variable to the agent store
+  - ssh-add ~/.ssh/id_gitlab_rsa
stages:
- test-python-install
@@ -55,13 +72,23 @@ test:
stage: test
needs: ["build"]
script:
# Run tests
- make test TAG=$(python get_version.py)-dev.${CI_COMMIT_REF_NAME}
coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/'
-  artifacts:
-    reports:
-      coverage_report:
-        coverage_format: cobertura
-        path: test/report/cov.xml
+    # Download the codecov uploader (integrity-check steps currently commented out)
+    # - curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --keyring trustedkeys.gpg --import
+    - curl -Os https://uploader.codecov.io/latest/alpine/codecov
+    # - curl -Os https://uploader.codecov.io/latest/alpine/codecov.SHA256SUM
+    # - curl -Os https://uploader.codecov.io/latest/alpine/codecov.SHA256SUM.sig
+    # - gpgv codecov.SHA256SUM.sig codecov.SHA256SUM
+    - chmod +x codecov
+    # Upload the coverage report to codecov
+    - ./codecov -t ${CODECOV_TOKEN} --file test/report/cov.xml
+  # artifacts:
+  #   reports:
+  #     coverage_report:
+  #       coverage_format: cobertura
+  #       path: test/report/cov.xml
test-python-install:
stage: test-python-install
@@ -107,7 +134,8 @@ deploy-release:
- make build-docker TAG=$(python get_version.py)
- make push-docker-ci TAG=$(python get_version.py)
# Add our GitHub remote repository and fine-grained token authentication
-    - git remote add github_origin "https://oauth2:$GH_TOKEN@github.com/HIP-infrastructure/bids-tools"
+    - echo "GH_TOKEN=${GH_TOKEN}"
+    - GH_TOKEN=${GH_TOKEN} git remote add github_origin git@github.com:HIP-infrastructure/datahipy.git
# Push commits created by semantic-release for version change and changelog
- git push github_origin $CI_COMMIT_REF_NAME
# Push the new tag created by semantic-release
......
@@ -14,18 +14,10 @@
"replacements": [
{
"files": [
"bids_tools/info.py"
"datahipy/info.py"
],
"from": "__version__ = \".*\"",
"to": "__version__ = \"${nextRelease.version}\"",
"results": [
{
"file": "bids_tools/info.py",
"hasChanged": true,
"numMatches": 1,
"numReplacements": 1
}
],
"countMatches": true
}
]
@@ -50,7 +42,7 @@
"@semantic-release/git",
{
"assets": [
"bids_tools/info.py",
"datahipy/info.py",
"docs/CHANGES.md"
],
"message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}"
......
@@ -104,23 +104,23 @@ RUN pip3 install \
rm -rf /var/lib/apt/lists/*
###############################################################################
-# Install bids-tools
+# Install datahipy
###############################################################################
-# Set the working directory to /app/bids_tools
-WORKDIR /apps/bids_tools
+# Set the working directory to /apps/datahipy
+WORKDIR /apps/datahipy
# Copy necessary contents of this repository.
COPY ./.coveragerc ./.coveragerc
COPY setup.py ./setup.py
COPY setup.cfg ./setup.cfg
COPY README.md ./README.md
-COPY bids_tools ./bids_tools
+COPY datahipy ./datahipy
# COPY LICENSE ./LICENSE
-# Install bids-tools with static version taken from the argument
+# Install datahipy with static version taken from the argument
ARG VERSION=unknown
-RUN echo "${VERSION}" > /apps/bids_tools/bids_tools/VERSION \
+RUN echo "${VERSION}" > /apps/datahipy/datahipy/VERSION \
&& pip install -e ".[test]"
###############################################################################
@@ -174,15 +174,15 @@ LABEL org.label-schema.build-date=${BUILD_DATE} \
org.label-schema.docker.cmd="docker run --rm \
-v /path/to/dataset:/output \
-v /path/to/input:/input \
-  bids-tools \
+  datahipy \
USERNAME USERID \
[--command {dataset.create,dataset.get,datasets.get,sub.get,sub.import,sub.edit.clinical,sub.delete,sub.delete.file}] \
[--input_data /input/input_data.json] [--output_file /input/output_data.json] \
[--dataset_path /output] [--input_path /input]" \
org.label-schema.docker.cmd.test="docker run --rm \
--entrypoint /entrypoint_pytest.sh \
-  -v /path/to/bids-tools/test:/test \
-  -v /path/to/bids-tools/bids_tools:/apps/bids_tools/bids_tools \
-  bids-tools \
+  -v /path/to/datahipy/test:/test \
+  -v /path/to/datahipy/datahipy:/apps/datahipy/datahipy \
+  datahipy \
USERNAME USERID \
/test"
@@ -14,7 +14,7 @@ override TAG := $(subst _,-,$(TAG))
$(info TAG (Normalized) = $(TAG))
# Define the complete docker image tag
-IMAGE_TAG = $(if $(CI_REGISTRY),$(CI_REGISTRY)/hip/bids-tools:$(TAG),bids-tools:$(TAG))
+IMAGE_TAG = $(if $(CI_REGISTRY),$(CI_REGISTRY)/hip/datahipy:$(TAG),datahipy:$(TAG))
# Define the build date and vcs reference
BUILD_DATE = $(shell date -u +"%Y-%m-%dT%H:%M:%SZ")
@@ -34,14 +34,14 @@ test:
docker run -t --rm \
--entrypoint "/entrypoint_pytest.sh" \
-v $(PROJECT_DIR)/test:/test \
-    -v $(PROJECT_DIR)/bids_tools:/apps/bids_tools/bids_tools \
+    -v $(PROJECT_DIR)/datahipy:/apps/datahipy/datahipy \
$(IMAGE_TAG) \
$(USER) \
$(USER_ID) \
/test
@echo "Fix path in coverage xml report..."
sed -i -r \
"s|/apps/bids_tools/bids_tools|$(PROJECT_DIR)/bids_tools|g" \
"s|/apps/datahipy/datahipy|$(PROJECT_DIR)/datahipy|g" \
$(PROJECT_DIR)/test/report/cov.xml
#build-docker: @ Builds the Docker image
@@ -54,7 +54,7 @@ build-docker:
#push-docker-ci: @ Push the Docker image with TAG to the CI registry
push-docker-ci:
-    docker push $(CI_REGISTRY)/hip/bids-tools:$(TAG)
+    docker push $(CI_REGISTRY)/hip/datahipy:$(TAG)
#rm-docker-ci: @ Remove the Docker image with TAG from the CI registry
# from https://docs.gitlab.com/ee/user/packages/container_registry/delete_container_registry_images.html#use-gitlab-cicd
@@ -71,7 +71,7 @@ install-python:
#install-python-wheel: @ Installs the python wheel
install-python-wheel: build-python-wheel
-    pip install bids_tools
+    pip install datahipy
#build-python-wheel: @ Builds the python wheel
build-python-wheel:
@@ -79,7 +79,7 @@ build-python-wheel:
#test-python-install: @ Tests the python package installation
test-python-install: install-python install-python-wheel
-    bids_tools --version
+    datahipy --version
#help: @ List available tasks on this project
help:
......
-# `bids_tools`: Tools to manage BIDS datasets in the Human intracranial EEG platform
+# ![DataHIPy logo](https://raw.githubusercontent.com/HIP-infrastructure/datahipy/chore/update-tool-name-and-logo/docs/logos/datahipy-logo-text.png)
-Copyright © 2012-2023 The HIP team and Contributors, All rights reserved.
+Copyright © 2022-2023 The HIP team, University Hospital of Lausanne (CHUV), Switzerland & Contributors, All rights reserved.
This software is distributed under the open-source Apache 2.0 license. See [LICENSE](LICENSE.txt) file for details.
---
-![Latest GitHub Release](https://img.shields.io/github/v/release/HIP-infrastructure/bids-tools?include_prereleases) ![Latest GitHub Release Date](https://img.shields.io/github/release-date-pre/HIP-infrastructure/bids-tools) [![CI/CD](https://gitlab.hbp.link/hip/bids-tools/badges/master/pipeline.svg)](https://gitlab.hbp.link/hip/bids-tools/-/commits/master) [![Coverage](https://gitlab.hbp.link/hip/bids-tools/badges/master/coverage.svg)](https://gitlab.hbp.link/hip/bids-tools/-/commits/master)
+![Latest GitHub Release](https://img.shields.io/github/v/release/HIP-infrastructure/datahipy?include_prereleases) ![Latest GitHub Release Date](https://img.shields.io/github/release-date-pre/HIP-infrastructure/datahipy) [![Digital Object Identifier (DOI)](https://zenodo.org/badge/428721094.svg)](https://zenodo.org/badge/latestdoi/428721094) [![CI/CD](https://gitlab.hbp.link/hip/datahipy/badges/master/pipeline.svg?private_token=glpat-a_qxRwZSNcAq9CMoK2tA)](https://gitlab.hbp.link/hip/datahipy/-/commits/master) [![codecov](https://codecov.io/github/HIP-infrastructure/datahipy/branch/master/graph/badge.svg?token=F1CWBIGXJN)](https://codecov.io/github/HIP-infrastructure/datahipy)
-`bids_tools` is an open-source tool written in Python and encapsulated in a Docker image to handle neuroimaging data on the Human Intracranial EEG Platform (HIP) following Brain Imaging Data Structure ([BIDS](https://bids-specification.readthedocs.io)).
+`DataHIPy` is an open-source tool written in Python and encapsulated in a Docker image to handle neuroimaging data on the Human Intracranial EEG Platform (HIP) following Brain Imaging Data Structure ([BIDS](https://bids-specification.readthedocs.io)).
### Resources
-* **Documentation:** https://hip-infrastructure.github.io/bids-tools/
-* **Source:** https://github.com/HIP-infrastructure/bids-tools
-* **Bug reports:** https://github.com/HIP-infrastructure/bids-tools/issues
+* **Documentation:** https://hip-infrastructure.github.io/datahipy/
+* **Source:** https://github.com/HIP-infrastructure/datahipy
+* **Bug reports:** https://github.com/HIP-infrastructure/datahipy/issues
## Installation
-* Install Docker engine (See [instructions](https://hip-infrastructure.github.io/bids-tools/installation.html#installation-of-docker-engine))
+* Install Docker engine (See [instructions](https://hip-infrastructure.github.io/datahipy/installation.html#installation-of-docker-engine))
-* Clone this repository and go to the `bids-tools` directory:
+* Clone this repository and go to the `DataHIPy` directory:
```bash
-$ git clone https://github.com/HIP-infrastructure/bids-tools.git
-$ cd bids-tools
+$ git clone https://github.com/HIP-infrastructure/DataHIPy.git
+$ cd DataHIPy
```
* Check out submodules:
@@ -39,7 +39,7 @@ This software is distributed under the open-source Apache 2.0 license. See [LICE
$ make -B build-docker
```
-* You are ready to use `bids_tools` :rocket:!
+* You are ready to use `DataHIPy` :rocket:!
## Test
Run `test/run_tests.sh` in a terminal:
@@ -53,7 +53,7 @@ After completion, coverage report in HTML format can be found in ``test/report/c
The tool can be easily run as follows:
```output
-usage: bids_tools [-h]
+usage: datahipy [-h]
[--command {dataset.create,dataset.get,datasets.get,sub.get,sub.import,sub.edit.clinical,sub.delete,sub.delete.file}]
[--input_data INPUT_DATA]
[--output_file OUTPUT_FILE]
......
-from bids_tools.info import (
+from datahipy.info import (
__version__,
__author__,
__email__,
......
@@ -7,7 +7,7 @@ import os
import re
import json
-from bids_tools.bids.dataset import create_bids_layout
+from datahipy.bids.dataset import create_bids_layout
def post_import_bids_refinement(bids_dir):
......
# Copyright (C) 2022-2023, The HIP team and Contributors, All rights reserved.
# This software is distributed under the open-source Apache 2.0 license.
"""Define constants used by packages of `bids_tools.bids`."""
"""Define constants used by packages of `datahipy.bids`."""
BIDS_VERSION = "v1.7.0"
......
@@ -9,16 +9,17 @@ import subprocess
from concurrent.futures import ProcessPoolExecutor
from pkg_resources import resource_filename
from sre_constants import SUCCESS
+from datetime import date
from bids import BIDSLayout
-from bids_tools.bids.electrophy import get_ieeg_info
-from bids_tools.bids.participant import get_participants_info
-from bids_tools.bids.validation import (
+from datahipy.bids.electrophy import get_ieeg_info
+from datahipy.bids.participant import get_participants_info
+from datahipy.bids.validation import (
add_bidsignore_validation_rule,
get_bids_validator_output_info,
)
-from bids_tools.bids.version import determine_bids_schema_version
+from datahipy.bids.version import determine_bids_schema_version
# Set the number of threads to use for parallel processing
@@ -27,6 +28,86 @@ from bids_tools.bids.version import determine_bids_schema_version
NUM_THREADS = os.cpu_count() - 1 if os.cpu_count() > 1 else 1
+def create_initial_bids_readme(bids_dir, dataset_desc):
+    """Create an initial `README` file for a BIDS dataset.
+
+    Parameters
+    ----------
+    bids_dir : str
+        Path to the BIDS dataset.
+
+    dataset_desc : dict
+        Dictionary with the content of the dataset_description.json file.
+    """
+    with open(os.path.join(bids_dir, "README"), "w") as f:
+        f.writelines(
+            [
+                f'# {dataset_desc["Name"]}\n\n',
+                f"To be completed...\n\n",
+                f"Use it as the dataset landing page, "
+                "which should provide enough information "
+                "about the dataset and its creation context.",
+            ]
+        )
+
+
+def create_initial_bids_changes(bids_dir):
+    """Create an initial `CHANGES` file for a BIDS dataset.
+
+    Parameters
+    ----------
+    bids_dir : str
+        Path to the BIDS dataset.
+    """
+    with open(os.path.join(bids_dir, "CHANGES"), "w") as f:
+        f.writelines(
+            [
+                f"0.0.0 {date.today().strftime('%Y-%m-%d')}\n",
+                "\t- Creation of the dataset.",
+            ]
+        )
+
+
+def create_initial_participants_tsv(bids_dir):
+    """Create an initial `participants.tsv` file for a BIDS dataset.
+
+    Parameters
+    ----------
+    bids_dir : str
+        Path to the BIDS dataset.
+    """
+    with open(os.path.join(bids_dir, "participants.tsv"), "w") as f:
+        f.write("participant_id\tage\tsex\tgroup")
+
+
+def create_empty_bids_dataset(bids_dir=None, dataset_desc=None):
+    """Create an empty BIDS dataset.
+
+    Parameters
+    ----------
+    bids_dir : str
+        Path to the BIDS dataset.
+
+    dataset_desc : dict
+        Dictionary with the content of the dataset_description.json file.
+    """
+    print("> Creating an empty BIDS dataset at: ", bids_dir, "...")
+    # Create the BIDS dataset directory
+    os.makedirs(bids_dir, exist_ok=True)
+    # Create the dataset_description.json file
+    with open(os.path.join(bids_dir, "dataset_description.json"), "w") as f:
+        json.dump(dataset_desc, f, indent=4)
+    # Create initial README file
+    create_initial_bids_readme(bids_dir, dataset_desc)
+    # Create initial CHANGES file
+    create_initial_bids_changes(bids_dir)
+    # Create the .bidsignore file and add the line to ignore CT files
+    # (not yet supported by the validator)
+    add_bidsignore_validation_rule(bids_dir, "**/*_ct.*")
+    # Create an initial participants.tsv file
+    create_initial_participants_tsv(bids_dir)
def create_bids_layout(bids_dir=None, **kwargs):
"""Create a pybids representation of a BIDS dataset.
@@ -47,7 +128,7 @@ def create_bids_layout(bids_dir=None, **kwargs):
layout = BIDSLayout(
root=bids_dir,
validate=False,
-        config=resource_filename("bids_tools", "bids/config/bids.json"),
+        config=resource_filename("datahipy", "bids/config/bids.json"),
**kwargs,
)
return layout
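Similarly, a small usage sketch for the layout helper; the dataset path is hypothetical, and `get_subjects()` is standard pybids `BIDSLayout` API rather than something defined in this commit:

```python
from datahipy.bids.dataset import create_bids_layout

# Index the (hypothetical) dataset without validation, using the custom config
layout = create_bids_layout(bids_dir="/output/my-new-dataset")
print(layout.get_subjects())  # e.g. ['01', '02']
```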
@@ -101,9 +182,7 @@ def get_dataset_size(bids_dir=None):
"""
# Get total number of files and size
total_size_megabytes = (
-        subprocess.check_output(["du", "-sh", bids_dir])
-        .split()[0]
-        .decode("utf-8")
+        subprocess.check_output(["du", "-sh", bids_dir]).split()[0].decode("utf-8")
)
## Alternative: Count only files outside sourcedata/
# total_size_bytes = 0
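The commented-out alternative above is only stubbed in the source. A minimal sketch of what such a pure-Python count could look like, assuming the intent is to skip `sourcedata/` (hypothetical, not part of the commit):

```python
import os

def dataset_size_bytes(bids_dir, exclude=("sourcedata",)):
    """Sum file sizes under bids_dir, skipping excluded directories."""
    total_size_bytes = 0
    for root, dirs, files in os.walk(bids_dir):
        # Prune excluded directories in place so os.walk does not descend into them
        dirs[:] = [d for d in dirs if d not in exclude]
        for name in files:
            path = os.path.join(root, name)
            if not os.path.islink(path):  # avoid counting symlink targets twice
                total_size_bytes += os.path.getsize(path)
    return total_size_bytes
```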
@@ -146,9 +225,7 @@ def get_bidsdataset_content(bids_dir=None):
add_bidsignore_validation_rule(bids_dir, "**/*_ct.*")
# Run the bids-validator on the dataset with the specified schema version and
# update dataset_desc with the execution dictionary output
-    dataset_desc.update(
-        get_bids_validator_output_info(bids_dir, bids_schema_version)
-    )
+    dataset_desc.update(get_bids_validator_output_info(bids_dir, bids_schema_version))
# Add information retrieved with pybids to dataset_desc
dataset_desc.update(get_bids_layout_info(bids_dir))
# Return the created dataset_desc dictionary to be indexed
......
@@ -5,7 +5,7 @@
import pandas as pd
from os import path as op
-from bids_tools.bids.const import (
+from datahipy.bids.const import (
VALID_EXTENSIONS,
BIDS_ENTITY_MAP,
BIDSJSONFILE_DATATYPE_KEY_MAP,
@@ -30,8 +30,8 @@ def get_subject_bidsfile_info(bids_dir, **kwargs):
List of dictionaries with BIDS file information for a given subject.
"""
# Import the required functions
-    from bids_tools.bids.dataset import create_bids_layout
-    from bids_tools.bids.electrophy import get_channels_info
+    from datahipy.bids.dataset import create_bids_layout
+    from datahipy.bids.electrophy import get_channels_info
# Create a pybids representation of the dataset
layout = create_bids_layout(bids_dir)
......
@@ -6,7 +6,7 @@
import json
import subprocess
from os import path as op
-from bids_tools.bids.const import BIDS_VERSION
+from datahipy.bids.const import BIDS_VERSION
def validate_bids_dataset(container_dataset_path, *args):
......
@@ -4,7 +4,7 @@
"""Utility functions to retrieve version related information from a BIDS dataset."""
from packaging import version
-from bids_tools.bids.const import BIDS_VERSION
+from datahipy.bids.const import BIDS_VERSION
def determine_bids_schema_version(dataset_desc):
......