diff --git a/.github/workflows/compatibility-test copy.yaml b/.github/workflows/compatibility-test copy.yaml index 7fd1e7b..1c7cff8 100644 --- a/.github/workflows/compatibility-test copy.yaml +++ b/.github/workflows/compatibility-test copy.yaml @@ -15,13 +15,13 @@ jobs: uses: actions/checkout@v4 - name: Rename files - working-directory: ./dev + working-directory: . run: | cp template.env .env cp local_nb_nodes.template.json local_nb_nodes.json - name: Run docker compose - working-directory: ./dev + working-directory: . run: | docker compose --profile full_stack up -d @@ -34,9 +34,9 @@ jobs: run: npm install - name: wait for graph set up - working-directory: ./dev + working-directory: ./scripts run: | - while ! tail -n 0 -f ./DEPLOY.log | grep -q "Finished setting up the Neurobagel graph backend."; do + while ! tail -n 0 -f ./logs/DEPLOY.log | grep -q "Finished setting up the Neurobagel graph backend."; do : done echo "Finished setting up the Neurobagel graph backend." diff --git a/.github/workflows/compatibility-test.yaml b/.github/workflows/compatibility-test.yaml index f505dec..e7c6acc 100644 --- a/.github/workflows/compatibility-test.yaml +++ b/.github/workflows/compatibility-test.yaml @@ -12,14 +12,14 @@ jobs: uses: actions/checkout@v4 - name: Rename files - working-directory: ./dev + working-directory: . run: | cp template.env .env cp local_nb_nodes.template.json local_nb_nodes.json sed -i 's/latest/nightly/g' .env - name: Run docker compose - working-directory: ./dev + working-directory: . run: | docker compose --profile full_stack up -d @@ -32,9 +32,9 @@ jobs: run: npm install - name: wait for graph set up - working-directory: ./dev + working-directory: ./scripts run: | - while ! tail -n 0 -f ./DEPLOY.log | grep -q "Finished setting up the Neurobagel graph backend."; do + while ! tail -n 0 -f ./logs/DEPLOY.log | grep -q "Finished setting up the Neurobagel graph backend."; do : done echo "Finished setting up the Neurobagel graph backend." diff --git a/README.md b/README.md index 29446eb..fd5e8ae 100644 --- a/README.md +++ b/README.md @@ -7,28 +7,24 @@ For detailed instructions on the deployment options for Neurobagel, see the offi ### Using the full-stack Docker Compose file 1. Clone the repository -```bash -git clone https://github.com/neurobagel/recipes.git -``` + ```bash + git clone https://github.com/neurobagel/recipes.git + ``` + +2. Copy and rename the required template configuration files + ```bash + cp template.env .env -2. `cd` into the directory containing the Neurobagel deployment recipe - -```bash -cd recipes/dev -``` + # if also setting up local federation + cp local_nb_nodes.template.json local_nb_nodes.json + ``` -3. Copy and rename template files in the directory -```bash -cp template.env .env + Ensure to edit the configuration file(s) according to your deployment. -# if also setting up local federation -cp local_nb_nodes.template.json local_nb_nodes.json -``` -Ensure to edit the file(s) according to your deployment. + :warning: **Note**: You **must** change the value of the `NB_API_QUERY_URL` variable in the `.env` file before you can launch any service stack that includes a query tool (i.e., `local_node_query`, `full_stack`). +See comments in the `.env` file for more information. -4. Start the Docker Compose stack and specify your desired deployment profile +3. 
In the repository root, start the Docker Compose stack and specify your desired deployment profile **To set up only a local node:** ```bash @@ -48,30 +44,4 @@ Ensure to edit the file(s) according to your deployment. ```bash docker compose --profile full_stack up -d ``` -A log file `DEPLOY.log` will be automatically created in the current directory with a copy of the STDOUT from the automatic deployment process. - -### Using deployment type-specific Docker Compose files - -**Note: These instructions will soon be superceded by the full-stack Docker Compose file instructions.** - -1. Clone the repository -```bash -git clone https://github.com/neurobagel/recipes.git -``` - -2. `cd` into the directory containing the appropriate configuration files -for your deployment scenario. - -3. Depending on your deployment scenario, -copy and/or rename the template files in the directory -and edit them accordingly: - - `local_federation/` - - `local_nb_nodes.json` - - `.env` - - `local_node/` - - `.env` - - `local_node_with_query_tool/` - - `.env` +A log file `DEPLOY.log` will be automatically created under `scripts/logs/` with a copy of the STDOUT from the automatic deployment process. diff --git a/dev/data/example_synthetic_pheno-bids.jsonld b/data/example_synthetic_pheno-bids.jsonld similarity index 100% rename from dev/data/example_synthetic_pheno-bids.jsonld rename to data/example_synthetic_pheno-bids.jsonld diff --git a/dev/add_data_to_graph.sh b/dev/add_data_to_graph.sh deleted file mode 100755 index 869d13b..0000000 --- a/dev/add_data_to_graph.sh +++ /dev/null @@ -1,244 +0,0 @@ -#!/bin/bash -# -# ARG_HELP([Upload JSONLD and Turtle data to a Neurobagel graph]) -# ARG_POSITIONAL_SINGLE([dir],[Path to directory containing .jsonld and/or .ttl files. ALL .jsonld and .ttl files in this directory will be uploaded.]) -# ARG_POSITIONAL_SINGLE([graph-url],[Host and port at which to access the graph database to add data to (e.g., localhost:7200)]) -# ARG_POSITIONAL_SINGLE([graph-database],[Name of graph database to add data to]) -# ARG_POSITIONAL_SINGLE([user],[Username for graph database access]) -# ARG_POSITIONAL_SINGLE([password],[Password for graph database user]) -# ARG_OPTIONAL_BOOLEAN([clear-data],[],[Whether or not to first clear all existing data from the graph database],[off]) -# ARG_OPTIONAL_BOOLEAN([use-stardog-syntax],[],[Set to use Stardog API endpoints to update the specified graph database. If unset, assumes the graph database is a GraphDB database.],[off]) -# ARG_OPTIONAL_BOOLEAN([log-output],[],[Whether or not to write the output to a log file],[off]) -# ARG_OPTIONAL_SINGLE([log-file],[],[Path to the log file],[LOG.txt]) -# ARGBASH_GO() -# needed because of Argbash --> m4_ignore([ -### START OF CODE GENERATED BY Argbash v2.9.0 one line above ### -# Argbash is a bash code generator used to get arguments parsing right. 
-# Argbash is FREE SOFTWARE, see https://argbash.io for more info -# Generated online by https://argbash.io/generate - - -die() -{ - local _ret="${2:-1}" - test "${_PRINT_HELP:-no}" = yes && print_help >&2 - echo "$1" >&2 - exit "${_ret}" -} - - -begins_with_short_option() -{ - local first_option all_short_options='h' - first_option="${1:0:1}" - test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0 -} - -# THE DEFAULTS INITIALIZATION - POSITIONALS -_positionals=() -# THE DEFAULTS INITIALIZATION - OPTIONALS -_arg_clear_data="off" -_arg_use_stardog_syntax="off" - -_arg_log_output="off" -_arg_log_file="LOG.txt" - -print_help() -{ - printf '%s\n' "Upload JSONLD and Turtle data to a Neurobagel graph" - printf 'Usage: %s [-h|--help] [--(no-)clear-data] [--(no-)use-stardog-syntax] [--(no-)log-output] [--log-file ] \n' "$0" - printf '\t%s\n' ": Path to directory containing .jsonld and/or .ttl files. ALL .jsonld and .ttl files in this directory will be uploaded." - printf '\t%s\n' ": Host and port at which to access the graph database to add data to (e.g., localhost:7200)" - printf '\t%s\n' ": Name of graph database to add data to" - printf '\t%s\n' ": Username for graph database access" - printf '\t%s\n' ": Password for graph database user" - printf '\t%s\n' "-h, --help: Prints help" - printf '\t%s\n' "--clear-data, --no-clear-data: Whether or not to first clear all existing data from the graph database (off by default)" - printf '\t%s\n' "--use-stardog-syntax, --no-use-stardog-syntax: Set to use Stardog API endpoints to update the specified graph database. If unset, assumes the graph database is a GraphDB database. (off by default)" - printf '\t%s\n' "--log-output, --no-log-output: Whether or not to write the output to a log file (off by default)" - printf '\t%s\n' "--log-file: Path to the log file (default: 'LOG.txt')" -} - - -parse_commandline() -{ - _positionals_count=0 - while test $# -gt 0 - do - _key="$1" - case "$_key" in - -h|--help) - print_help - exit 0 - ;; - -h*) - print_help - exit 0 - ;; - --no-clear-data|--clear-data) - _arg_clear_data="on" - test "${1:0:5}" = "--no-" && _arg_clear_data="off" - ;; - --no-use-stardog-syntax|--use-stardog-syntax) - _arg_use_stardog_syntax="on" - test "${1:0:5}" = "--no-" && _arg_use_stardog_syntax="off" - ;; - --no-log-output|--log-output) - _arg_log_output="on" - test "${1:0:5}" = "--no-" && _arg_log_output="off" - ;; - --log-file) - test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 - _arg_log_file="$2" - shift - ;; - --log-file=*) - _arg_log_file="${_key##--log-file=}" - ;; - *) - _last_positional="$1" - _positionals+=("$_last_positional") - _positionals_count=$((_positionals_count + 1)) - ;; - esac - shift - done -} - - -handle_passed_args_count() -{ - local _required_args_string="'dir', 'graph-url', 'graph-database', 'user' and 'password'" - test "${_positionals_count}" -ge 5 || _PRINT_HELP=yes die "FATAL ERROR: Not enough positional arguments - we require exactly 5 (namely: $_required_args_string), but got only ${_positionals_count}." 1 - test "${_positionals_count}" -le 5 || _PRINT_HELP=yes die "FATAL ERROR: There were spurious positional arguments --- we expect exactly 5 (namely: $_required_args_string), but got ${_positionals_count} (the last one was: '${_last_positional}')." 
1 -} - - -assign_positional_args() -{ - local _positional_name _shift_for=$1 - _positional_names="_arg_dir _arg_graph_url _arg_graph_database _arg_user _arg_password " - - shift "$_shift_for" - for _positional_name in ${_positional_names} - do - test $# -gt 0 || break - eval "$_positional_name=\${1}" || die "Error during argument parsing, possibly an Argbash bug." 1 - shift - done -} - -parse_commandline "$@" -handle_passed_args_count -assign_positional_args 1 "${_positionals[@]}" - -# OTHER STUFF GENERATED BY Argbash - -### END OF CODE GENERATED BY Argbash (sortof) ### ]) -# [ <-- needed because of Argbash - - -# Reassign positional args to more readable named variables (https://argbash.readthedocs.io/en/latest/guide.html#using-parsing-results) -jsonld_dir=$_arg_dir -user=$_arg_user -password=$_arg_password -graph_db=$_arg_graph_database -graph_url=$_arg_graph_url -clear_data=$_arg_clear_data # value is either on or off (https://argbash.readthedocs.io/en/stable/guide.html#optional-arguments) -use_stardog_syntax=$_arg_use_stardog_syntax - -log_output=$_arg_log_output -log_file=$_arg_log_file - -DELETE_TRIPLES_QUERY=" -DELETE { - ?s ?p ?o . -} WHERE { - ?s ?p ?o . -}" - -# Depending on the graph backend used, set URLs for uploading data to and clearing data in graph database -base_url="http://${graph_url}/${graph_db}" -if [ "$use_stardog_syntax" = "on" ]; then - upload_data_url=$base_url - clear_data_url="${base_url}/update" -else - upload_data_url="${base_url}/statements" - clear_data_url=$upload_data_url -fi - -# Main logic -main() { - # Clear existing data in graph database if requested - if [ "$clear_data" = "on" ]; then - echo -e "\nCLEARING EXISTING DATA FROM ${graph_db}..." - - response=$(curl -u "${user}:${password}" -s -S -i -w "\n%{http_code}\n" \ - -X POST $clear_data_url \ - -H "Content-Type: application/sparql-update" \ - --data-binary "${DELETE_TRIPLES_QUERY}") - - # Extract and check status code outputted as final line of response - httpcode=$(tail -n1 <<< "$response") - if (( $httpcode < 200 || $httpcode >= 300 )); then - echo -e "\nERROR: Failed to clear ${graph_db}:" - echo "$(sed '$d' <<< "$response")" - echo -e "\nEXITING..." - exit 1 - fi - fi - - # Add data to specified graph database - echo -e "\nUPLOADING DATA FROM ${jsonld_dir} TO ${graph_db}...\n" - - upload_failed=() - - for db in ${jsonld_dir}/*.jsonld; do - # Prevent edge case where no matching files are present in directory and so loop executes once with glob pattern string itself - [ -e "$db" ] || continue - - echo "$(basename ${db}):" - response=$(curl -u "${user}:${password}" -s -S -i -w "\n%{http_code}\n" \ - -X POST $upload_data_url \ - -H "Content-Type: application/ld+json" \ - --data-binary @${db}) - - httpcode=$(tail -n1 <<< "$response") - if (( $httpcode < 200 || $httpcode >= 300 )); then - upload_failed+=("${db}") - fi - # Print rest of response to stdout - echo -e "$(sed '$d' <<< "$response")\n" - done - - for file in ${jsonld_dir}/*.ttl; do - [ -e "$file" ] || continue - - echo "$(basename ${file}):" - response=$(curl -u "${user}:${password}" -s -S -i -w "\n%{http_code}\n" \ - -X POST $upload_data_url \ - -H "Content-Type: text/turtle" \ - --data-binary @${file}) - - httpcode=$(tail -n1 <<< "$response") - if (( $httpcode < 200 || $httpcode >= 300 )); then - upload_failed+=("${file}") - fi - echo -e "$(sed '$d' <<< "$response")\n" - done - - echo "FINISHED UPLOADING DATA FROM ${jsonld_dir} TO ${graph_db}." 
- - if (( ${#upload_failed[@]} != 0 )); then - echo -e "\nERROR: Upload failed for these files:" - printf '%s\n' "${upload_failed[@]}" - fi -} - -# Call the main logic function with or without output redirection -if [ "$log_output" = "on" ]; then - main > "$log_file" -else - main -fi -# ] <-- needed because of Argbash \ No newline at end of file diff --git a/dev/data-config_template.ttl b/dev/data-config_template.ttl deleted file mode 100644 index 386b706..0000000 --- a/dev/data-config_template.ttl +++ /dev/null @@ -1,49 +0,0 @@ -# -# RDF4J configuration template for a GraphDB repository -# -@prefix rdfs: . -@prefix rep: . -@prefix sr: . -@prefix sail: . -@prefix graphdb: . - -[] a rep:Repository ; - rep:repositoryID "my_db" ; - rdfs:label "" ; - rep:repositoryImpl [ - rep:repositoryType "graphdb:SailRepository" ; - sr:sailImpl [ - sail:sailType "graphdb:Sail" ; - - graphdb:read-only "false" ; - - # Inference and Validation - graphdb:ruleset "rdfsplus-optimized" ; - graphdb:disable-sameAs "true" ; - graphdb:check-for-inconsistencies "false" ; - - # Indexing - graphdb:entity-id-size "32" ; - graphdb:enable-context-index "false" ; - graphdb:enablePredicateList "true" ; - graphdb:enable-fts-index "false" ; - graphdb:fts-indexes ("default" "iri") ; - graphdb:fts-string-literals-index "default" ; - graphdb:fts-iris-index "none" ; - - # Queries and Updates - graphdb:query-timeout "0" ; - graphdb:throw-QueryEvaluationException-on-timeout "false" ; - graphdb:query-limit-results "0" ; - - # Settable in the file but otherwise hidden in the UI and in the RDF4J console - graphdb:base-URL "http://example.org/owlim#" ; - graphdb:defaultNS "" ; - graphdb:imports "" ; - graphdb:repository-type "file-repository" ; - graphdb:storage-folder "storage" ; - graphdb:entity-index-size "10000000" ; - graphdb:in-memory-literal-properties "true" ; - graphdb:enable-literal-index "true" ; - ] - ]. \ No newline at end of file diff --git a/dev/graphdb_setup.sh b/dev/graphdb_setup.sh deleted file mode 100755 index 676d18f..0000000 --- a/dev/graphdb_setup.sh +++ /dev/null @@ -1,200 +0,0 @@ -#!/bin/bash -# -# ARG_POSITIONAL_SINGLE([admin-pass],[Password for the "admin" superuser that GraphDB creates. If running the first-time user setup, this will be the password set for the admin. The admin user will only be used to create and modify permissions of other database users.]) -# ARG_OPTIONAL_SINGLE([env-file-path],[],[Path to a .env file containing environment variables for Neurobagel node configuration.],[.env]) -# ARG_OPTIONAL_BOOLEAN([run-user-setup],[],[Whether or not to run the first-time GraphDB setup steps, including changing the admin password and creating a new database user.],[on]) -# ARG_HELP([Run first-time user setup for a new GraphDB instance and/or set up a new GraphDB graph database. This script will automatically create a GraphDB configuration file (data-config.ttl) for your newly created database in your current directory. For more information, see https://neurobagel.org/infrastructure/.]) -# ARGBASH_GO() -# needed because of Argbash --> m4_ignore([ -### START OF CODE GENERATED BY Argbash v2.9.0 one line above ### -# Argbash is a bash code generator used to get arguments parsing right. 
-# Argbash is FREE SOFTWARE, see https://argbash.io for more info -# Generated online by https://argbash.io/generate - - -die() -{ - local _ret="${2:-1}" - test "${_PRINT_HELP:-no}" = yes && print_help >&2 - echo "$1" >&2 - exit "${_ret}" -} - - -begins_with_short_option() -{ - local first_option all_short_options='h' - first_option="${1:0:1}" - test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0 -} - -# THE DEFAULTS INITIALIZATION - POSITIONALS -_positionals=() -# THE DEFAULTS INITIALIZATION - OPTIONALS -_arg_env_file_path=".env" -_arg_run_user_setup="on" - - -print_help() -{ - printf '%s\n' "Run first-time user setup for a new GraphDB instance and/or set up a new GraphDB graph database. This script will automatically create a GraphDB configuration file (data-config.ttl) for your newly created database in your current directory. For more information, see https://neurobagel.org/infrastructure/." - printf 'Usage: %s [--env-file-path ] [--(no-)run-user-setup] [-h|--help] \n' "$0" - printf '\t%s\n' ": Password for the \"admin\" superuser that GraphDB creates. If running the first-time user setup, this will be the password set for the admin. The admin user will only be used to create and modify permissions of other database users." - printf '\t%s\n' "--env-file-path: Path to a .env file containing environment variables for Neurobagel node configuration. (default: '.env')" - printf '\t%s\n' "--run-user-setup, --no-run-user-setup: Whether or not to run the first-time GraphDB setup steps, including changing the admin password and creating a new database user. (on by default)" - printf '\t%s\n' "-h, --help: Prints help" -} - - -parse_commandline() -{ - _positionals_count=0 - while test $# -gt 0 - do - _key="$1" - case "$_key" in - --env-file-path) - test $# -lt 2 && die "Missing value for the optional argument '$_key'." 1 - _arg_env_file_path="$2" - shift - ;; - --env-file-path=*) - _arg_env_file_path="${_key##--env-file-path=}" - ;; - --no-run-user-setup|--run-user-setup) - _arg_run_user_setup="on" - test "${1:0:5}" = "--no-" && _arg_run_user_setup="off" - ;; - -h|--help) - print_help - exit 0 - ;; - -h*) - print_help - exit 0 - ;; - *) - _last_positional="$1" - _positionals+=("$_last_positional") - _positionals_count=$((_positionals_count + 1)) - ;; - esac - shift - done -} - - -handle_passed_args_count() -{ - local _required_args_string="'admin-pass'" - test "${_positionals_count}" -ge 1 || _PRINT_HELP=yes die "FATAL ERROR: Not enough positional arguments - we require exactly 1 (namely: $_required_args_string), but got only ${_positionals_count}." 1 - test "${_positionals_count}" -le 1 || _PRINT_HELP=yes die "FATAL ERROR: There were spurious positional arguments --- we expect exactly 1 (namely: $_required_args_string), but got ${_positionals_count} (the last one was: '${_last_positional}')." 1 -} - - -assign_positional_args() -{ - local _positional_name _shift_for=$1 - _positional_names="_arg_admin_pass " - - shift "$_shift_for" - for _positional_name in ${_positional_names} - do - test $# -gt 0 || break - eval "$_positional_name=\${1}" || die "Error during argument parsing, possibly an Argbash bug." 
1 - shift - done -} - -parse_commandline "$@" -handle_passed_args_count -assign_positional_args 1 "${_positionals[@]}" - -# OTHER STUFF GENERATED BY Argbash - -### END OF CODE GENERATED BY Argbash (sortof) ### ]) -# [ <-- needed because of Argbash - -set -euo pipefail - -# Reassign command line args to more readable named variables -ENV_FILE_PATH=$_arg_env_file_path -ADMIN_PASS=$_arg_admin_pass -RUN_USER_SETUP=$_arg_run_user_setup - -# Set the environment variables in the shell, to use in the script -source "${ENV_FILE_PATH}" -echo "Environment variables have been set from ${ENV_FILE_PATH}." - -# Extract just the database name -DB_NAME="${NB_GRAPH_DB#repositories/}" -NB_GRAPH_PORT=${NB_GRAPH_PORT:-7200} - -# Get the directory of this script to be able to find the data-config_template.ttl file -SCRIPT_DIR=$(dirname "$0") - -echo "The GraphDB server is being accessed at http://localhost:${NB_GRAPH_PORT}." - -##### First time GraphDB setup ##### - -if [ "${RUN_USER_SETUP}" = "on" ]; then - echo "First time GraphDB user setup enabled." - - # 1. Change database admin password - echo "Changing the admin password (note: if you have previously set the admin password, this has no effect)..." - # TODO: To change a *previously set* admin password, we need to also provide the current password via -u - curl -X PATCH --header 'Content-Type: application/json' http://localhost:${NB_GRAPH_PORT}/rest/security/users/admin -d "{\"password\": \""${ADMIN_PASS}"\"}" - - # 2. If security is not enabled, enable it (i.e. allow only authenticated users access) - is_security_enabled=$(curl -s -X GET http://localhost:${NB_GRAPH_PORT}/rest/security) - if [ "${is_security_enabled}" = "false" ]; then - echo "Enabling password-based access control to all databases ..." - # NOTE: This command fails without credentials once security is enabled - curl -X POST --header 'Content-Type: application/json' -d true http://localhost:${NB_GRAPH_PORT}/rest/security - else - echo "Password-based access control has already been enabled." - fi - - # 3. Create a new database user - # TODO: Separate this out from the first-time setup? As this can technically be run at any time to create additional users. - # NOTE: If user already exists, response will be "An account with the given username already exists." OK for script. - echo "Creating a new database user ${NB_GRAPH_USERNAME}..." - curl -X POST --header 'Content-Type: application/json' -u "admin:${ADMIN_PASS}" -d @- http://localhost:${NB_GRAPH_PORT}/rest/security/users/${NB_GRAPH_USERNAME} < data-config.ttl - -# 5. Create a new database -# Assumes data-config.ttl is in the same directory as this script! -echo "Creating the GraphDB database ${DB_NAME}..." -curl -X PUT -u "admin:${ADMIN_PASS}" http://localhost:${NB_GRAPH_PORT}/${NB_GRAPH_DB} --data-binary "@data-config.ttl" -H "Content-Type: application/x-turtle" - -# 6. Grant newly created user access permission to the database -# Confirm user wants to proceed with changing user permissions -# while true; do -# read -p "WARNING: We will now give ${NB_GRAPH_USERNAME} read/write access to ${NB_GRAPH_DB}. This operation will REPLACE any existing permissions you have granted to user ${NB_GRAPH_USERNAME}, including any access to other databases. ${NB_GRAPH_USERNAME} may lose access to other databases as a result. Proceed? 
(y/n) " yn -# case $yn in -# [Yy]* ) break;; -# [Nn]* ) echo "Exiting..."; exit;; -# * ) echo "Please answer y or n.";; -# esac -# done - -echo "Granting user ${NB_GRAPH_USERNAME} read/write permissions to database ${DB_NAME}..." -curl -X PUT --header 'Content-Type: application/json' -d " -{\"grantedAuthorities\": [\"WRITE_REPO_${DB_NAME}\",\"READ_REPO_${DB_NAME}\"]}" http://localhost:${NB_GRAPH_PORT}/rest/security/users/${NB_GRAPH_USERNAME} -u "admin:${ADMIN_PASS}" - -echo "Done." - -# ] <-- needed because of Argbash diff --git a/dev/template.env b/dev/template.env deleted file mode 100644 index a9cd4cf..0000000 --- a/dev/template.env +++ /dev/null @@ -1,56 +0,0 @@ -# THIS IS A TEMPLATE .env FILE. MAKE A COPY OF THIS FILE NAMED .env -# AND MODIFY THE VALUES AS DESCRIBED IN THE DOCUMENTATION. - -# ---- CONFIGURATION FOR docker compose ---- -# Default service profile (deployment "flavour") when none are provided to the `docker compose` command -COMPOSE_PROFILES=local_node - -# ---- CONFIGURATION FOR graph ---- -NB_GRAPH_ADMIN_PASSWORD=ADMINPASSWORD - -# **REPLACE DBUSER** with your graph database username -NB_GRAPH_USERNAME=DBUSER - -# **REPLACE DBPASSWORD** with your graph database password -NB_GRAPH_PASSWORD=DBPASSWORD - -NB_GRAPH_DB=repositories/my_db -NB_RETURN_AGG=true -NB_NAPI_TAG=latest -NB_GRAPH_IMG=ontotext/graphdb:10.3.1 -LOCAL_GRAPH_DATA=./data # REPLACE WITH PATH TO YOUR JSONLD FILES - -# ---- CONFIGURATION FOR n-API ---- -## ADDITIONAL CONFIGURABLE PARAMETERS: Uncomment and modify values of the below variables as needed to use non-default values. -# Allow multiple origins of requests. e.g. For a query tool deployed locally using default ports, use: NB_API_ALLOWED_ORIGINS="http://localhost:3000 http://127.0.0.1:3000" -NB_NAPI_ALLOWED_ORIGINS="*" - -NB_NAPI_PORT_HOST=8000 -NB_NAPI_PORT=8000 -NB_GRAPH_ROOT_HOST=~/graphdb-home -NB_GRAPH_ROOT_CONT=/opt/graphdb/home -NB_GRAPH_ADDRESS=graph -NB_GRAPH_PORT_HOST=7200 -NB_GRAPH_PORT=7200 -NB_QUERY_TAG=latest -NB_QUERY_PORT_HOST=3000 - -# ---- CONFIGURATION FOR f-API ---- -# Port that the f-API will run on INSIDE the docker container (default 8000) -NB_FAPI_PORT=8000 -# Port that the f-API will be exposed on to the host computer (and likely the outside network) -NB_FAPI_PORT_HOST=8080 -# Docker image tag of the f-API (default latest) -NB_FAPI_TAG=latest - -# ---- CONFIGURATION FOR QUERY TOOL ---- -# URL of the n-API/f-API the query to will talk to, as it appears to a user -# **REPLACE http://XX.XX.XX.XX** with your API URL (and port, if applicable) as it appears on a user's machine, -# or localhost: if query tool will only be accessed from its host machine -NB_API_QUERY_URL=http://XX.XX.XX.XX - -# Docker image tag of the query tool (default latest) -NB_QUERY_TAG=latest - -# Port that the query tool will be exposed on the host and likely the network (default 3000) -NB_QUERY_PORT_HOST=3000 \ No newline at end of file diff --git a/dev/vocab/nb_vocab.ttl b/dev/vocab/nb_vocab.ttl deleted file mode 100644 index 2dd308b..0000000 --- a/dev/vocab/nb_vocab.ttl +++ /dev/null @@ -1,37 +0,0 @@ -@prefix nb: . -@prefix rdfs: . - -nb:Acquisition - a rdfs:Class. - -nb:Age - a rdfs:Class. - -nb:Assessment - a rdfs:Class ; - rdfs:subClassOf nb:ControlledTerm . - -nb:ControlledTerm - a rdfs:Class. - -nb:Diagnosis - a rdfs:Class ; - rdfs:subClassOf nb:ControlledTerm . - -nb:Image - a rdfs:Class ; - rdfs:subClassOf nb:ControlledTerm . - -nb:Session - a rdfs:Class. - -nb:Sex - a rdfs:Class ; - rdfs:subClassOf nb:ControlledTerm . - -nb:Subject - a rdfs:Class. 
- -nb:SubjectGroup - a rdfs:Class ; - rdfs:subClassOf nb:ControlledTerm . diff --git a/dev/docker-compose.yml b/docker-compose.yml similarity index 73% rename from dev/docker-compose.yml rename to docker-compose.yml index e5663e2..bedaf46 100644 --- a/dev/docker-compose.yml +++ b/docker-compose.yml @@ -28,14 +28,23 @@ services: - "full_stack" volumes: - "${NB_GRAPH_ROOT_HOST:-~/graphdb-home}:${NB_GRAPH_ROOT_CONT:-/opt/graphdb/home}" - - "${PWD}:/usr/src" + - "./scripts:/usr/src/neurobagel/scripts" + - ".env:/usr/src/neurobagel/.env" + - "./vocab:/usr/src/neurobagel/vocab" - "${LOCAL_GRAPH_DATA:-./data}:/data" ports: - "${NB_GRAPH_PORT_HOST:-7200}:${NB_GRAPH_PORT:-7200}" - env_file: .env + environment: + NB_GRAPH_ROOT_CONT: ${NB_GRAPH_ROOT_CONT:-/opt/graphdb/home} + NB_GRAPH_ADMIN_PASSWORD: ${NB_GRAPH_ADMIN_PASSWORD} + NB_GRAPH_USERNAME: ${NB_GRAPH_USERNAME} + NB_GRAPH_PASSWORD: ${NB_GRAPH_PASSWORD} + NB_GRAPH_ADDRESS: ${NB_GRAPH_ADDRESS:-graph} + NB_GRAPH_PORT: ${NB_GRAPH_PORT:-7200} + NB_GRAPH_DB: ${NB_GRAPH_DB:-repositories/my_db} entrypoint: - - "/usr/src/setup.sh" - working_dir: "/usr/src" + - "/usr/src/neurobagel/scripts/setup.sh" + working_dir: "/usr/src/neurobagel/scripts" federation: image: "neurobagel/federation_api:${NB_FAPI_TAG:-latest}" @@ -45,7 +54,7 @@ services: ports: - "${NB_FAPI_PORT_HOST:-8080}:${NB_FAPI_PORT:-8000}" volumes: - - "${PWD}/local_nb_nodes.json:/usr/src/local_nb_nodes.json:ro" + - "./local_nb_nodes.json:/usr/src/local_nb_nodes.json:ro" environment: NB_API_PORT: ${NB_FAPI_PORT:-8000} diff --git a/docs/neurobagel_environment_variables.tsv b/docs/neurobagel_environment_variables.tsv index b04ace0..201db03 100644 --- a/docs/neurobagel_environment_variables.tsv +++ b/docs/neurobagel_environment_variables.tsv @@ -1,18 +1,23 @@ -Environment variable Required in .env? Description Default value if not set Relevant installation mode(s) -`NB_GRAPH_USERNAME` Yes Username to access graph database that API will communicate with - Docker, Python -`NB_GRAPH_PASSWORD` Yes Password to access graph database that API will communicate with - Docker, Python -`NB_GRAPH_ADDRESS` No IP address for the graph database (or container name, if graph is hosted locally) `206.12.99.17 (graph)` ** Docker, Python -`NB_GRAPH_DB` No Name of graph database endpoint to query (e.g., for a GraphDB database, this will take the format of `repositories/{database_name}`) `repositories/my_db` Docker, Python -`NB_RETURN_AGG` No Whether to return only dataset-level query results (including data locations) and exclude subject-level attributes. One of [true, false] `true` Docker, Python -`NB_API_TAG` No Docker image tag for the API `latest` Docker -`NB_API_PORT_HOST` No Port number on the _host machine_ to map the API container port to `8000` Docker -`NB_API_PORT` No Port number on which to run the API `8000` Docker, Python +Environment variable Set manually in .env? Description Default value if not set Used in these installation modes +`NB_GRAPH_ADMIN_PASSWORD` Yes Secure password to set for the admin user. - Docker +`NB_GRAPH_USERNAME` Yes Username to set for the graph database user. - Docker, Python +`NB_GRAPH_PASSWORD` Yes Secure password to set for the graph database user. 
- Docker, Python +`NB_GRAPH_DB` Yes Name to give your graph database (e.g., for a GraphDB database, use the format `repositories/{database_name}`) `repositories/my_db` Docker, Python +`LOCAL_GRAPH_DATA` Yes Path on your filesystem to the JSONLD files you want to upload to the graph database `./data` Docker `NB_API_ALLOWED_ORIGINS` Yes, if using a frontend query tool ‡ Origins allowed to make cross-origin resource sharing requests. Multiple origins must be separated with spaces in a single string enclosed in quotes. See ‡ for more info "`""""`" Docker, Python +`NB_API_QUERY_URL` Yes URL (and port number, if needed) of the Neurobagel API that the query tool will send its requests to. The query tool sends requests from a user's machine, so ensure the API URL is provided *as a user would access it from their own machine*. See also the [query tool README](https://github.com/neurobagel/query-tool?tab=readme-ov-file#set-the-environment-variables). - Docker +`NB_RETURN_AGG` Yes Whether to return only aggregate, dataset-level query results (excluding subject/session-level attributes). One of [true, false] `true` Docker, Python +`NB_NAPI_TAG` No Docker image tag for the Neurobagel node API `latest` Docker +`NB_NAPI_PORT_HOST` No Port number on the _host machine_ to map the Neurobagel node API container port to `8000` Docker +`NB_NAPI_PORT` No Port number on which to run the Neurobagel node API _in the API container_ `8000` Docker +`NB_FAPI_TAG` No Docker image tag for the Neurobagel federation API `latest` Docker +`NB_FAPI_PORT_HOST` No Port number on the _host machine_ to map the Neurobagel federation API container port to `8080` Docker +`NB_FAPI_PORT` No Port number on which to run the Neurobagel federation API _in the API container_ `8000` Docker `NB_GRAPH_IMG` No Graph server Docker image `ontotext/graphdb:10.3.1` Docker -`NB_GRAPH_ROOT_HOST` No Path to directory on the _host machine_ to store graph database files and data (the directory does not have to exist beforehand). `~/graphdb-home` Docker +`NB_GRAPH_ADDRESS` No IP address for the graph database (or container name, if graph is hosted locally) `206.12.99.17 (graph)` ** Docker, Python +`NB_GRAPH_ROOT_HOST` No Path to directory on the _host machine_ to store graph database files and data (directory does not have to exist beforehand). `~/graphdb-home` Docker `NB_GRAPH_ROOT_CONT` No Path to directory for graph databases in the _graph server container_ `/opt/graphdb/home` * Docker -`NB_GRAPH_PORT_HOST` No Port number on the _host machine_ to map the graph server container port to `7200` Docker, Python -`NB_GRAPH_PORT` No Port number used by the _graph server container_ `7200` * Docker +`NB_GRAPH_PORT_HOST` No Port number on the _host machine_ to map the graph server container port to `7200` Docker +`NB_GRAPH_PORT` No Port number used by the _graph server container_ `7200` * Docker, Python `NB_QUERY_TAG` No Docker image tag for the query tool `latest` Docker -`NB_QUERY_PORT_HOST` No Port number used by the `query_tool` on the host machine `3000` Docker -`NB_API_QUERY_URL` Yes, unless default is correct URL of the API that the query tool will send its requests to. The port number in the URL **must** correspond to `NB_API_PORT_HOST`. See also the [query tool README](https://github.com/neurobagel/query-tool?tab=readme-ov-file#set-the-environment-variables). **Must** end in a forward slash `/`! 
`http://localhost:8000/` Docker \ No newline at end of file +`NB_QUERY_PORT_HOST` No Port number used by the `query_tool` on the host machine `3000` Docker \ No newline at end of file diff --git a/local_federation/docker-compose.yml b/local_federation/docker-compose.yml deleted file mode 100644 index 32d32dd..0000000 --- a/local_federation/docker-compose.yml +++ /dev/null @@ -1,19 +0,0 @@ -version: "3.8" - -services: - federation: - image: "neurobagel/federation_api:${NB_API_TAG:-latest}" - ports: - - "${NB_API_PORT_HOST:-8000}:${NB_API_PORT:-8000}" - volumes: - - "${PWD}/local_nb_nodes.json:/usr/src/local_nb_nodes.json:ro" - environment: - - NB_API_PORT=${NB_API_PORT:-8000} - extra_hosts: - - "host.docker.internal:host-gateway" - query: - image: "neurobagel/query_tool:${NB_QUERY_TAG:-latest}" - ports: - - "${NB_QUERY_PORT_HOST:-3000}:5173" - environment: - - NB_API_QUERY_URL=${NB_API_QUERY_URL:-http://localhost:8000/} \ No newline at end of file diff --git a/local_federation/local_nb_nodes.template.json b/local_federation/local_nb_nodes.template.json deleted file mode 100644 index 9a07778..0000000 --- a/local_federation/local_nb_nodes.template.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "NodeName": "Local graph 1", - "ApiURL": "http://host.docker.internal:8000" - }, - { - "NodeName": "Local graph 2", - "ApiURL": "http://192.168.0.1" - } -] diff --git a/local_federation/template.env b/local_federation/template.env deleted file mode 100644 index f6c6e60..0000000 --- a/local_federation/template.env +++ /dev/null @@ -1,18 +0,0 @@ -# THIS IS A TEMPLATE .env FILE. MAKE A COPY OF THIS FILE NAMED .env -# AND MODIFY THE VALUES AS DESCRIBED IN THE DOCUMENTATION. - -# ---- CONFIGURATION FOR f-API ---- -# Port that the f-API will run on INSIDE the docker container (default 8000) -NB_API_PORT=8000 -# Port that the f-API will be exposed on to the host computer (and likely the outside network) -NB_API_PORT_HOST=8080 -# Docker image tag of the f-API (default latest) -NB_API_TAG=latest - -# ---- CONFIGURATION FOR QUERY TOOL ---- -# URL of the f-API as it will appear to a user -NB_API_QUERY_URL=http://localhost:8080 -# Docker image tag of the query tool (default latest) -NB_QUERY_TAG=latest -# Port that the query tool will be exposed on the host and likely the network (default 3000) -NB_QUERY_PORT_HOST=3000 diff --git a/dev/local_nb_nodes.template.json b/local_nb_nodes.template.json similarity index 100% rename from dev/local_nb_nodes.template.json rename to local_nb_nodes.template.json diff --git a/local_node/docker-compose.yml b/local_node/docker-compose.yml deleted file mode 100644 index 7c0506b..0000000 --- a/local_node/docker-compose.yml +++ /dev/null @@ -1,22 +0,0 @@ -version: "3.8" - -services: - api: - image: "neurobagel/api:${NB_API_TAG:-latest}" - ports: - - "${NB_API_PORT_HOST:-8000}:${NB_API_PORT:-8000}" - environment: - NB_GRAPH_USERNAME: ${NB_GRAPH_USERNAME} - NB_GRAPH_PASSWORD: ${NB_GRAPH_PASSWORD} - NB_GRAPH_ADDRESS: ${NB_GRAPH_ADDRESS:-graph} - NB_GRAPH_PORT: ${NB_GRAPH_PORT:-7200} - NB_GRAPH_DB: ${NB_GRAPH_DB:-repositories/my_db} - NB_RETURN_AGG: ${NB_RETURN_AGG:-true} - NB_API_PORT: ${NB_API_PORT:-8000} - NB_API_ALLOWED_ORIGINS: ${NB_API_ALLOWED_ORIGINS} - graph: - image: "${NB_GRAPH_IMG:-ontotext/graphdb:10.3.1}" - volumes: - - "${NB_GRAPH_ROOT_HOST:-~/graphdb-home}:${NB_GRAPH_ROOT_CONT:-/opt/graphdb/home}" - ports: - - "${NB_GRAPH_PORT_HOST:-7200}:${NB_GRAPH_PORT:-7200}" \ No newline at end of file diff --git a/local_node/template.env b/local_node/template.env deleted file mode 100644 
index e3f8bd2..0000000 --- a/local_node/template.env +++ /dev/null @@ -1,22 +0,0 @@ -# THIS IS A TEMPLATE .env FILE. MAKE A COPY OF THIS FILE NAMED .env -# AND MODIFY THE VALUES AS DESCRIBED IN THE DOCUMENTATION. -# For more info on each variable, see https://neurobagel.org/infrastructure/#set-the-environment-variables -NB_GRAPH_USERNAME=DBUSER # REPLACE DBUSER WITH YOUR GRAPH DATABASE USERNAME -NB_GRAPH_PASSWORD=DBPASSWORD # REPLACE DBPASSWORD WITH YOUR GRAPH DATABASE PASSWORD -NB_GRAPH_DB=repositories/my_db -NB_RETURN_AGG=true -NB_API_TAG=latest -NB_GRAPH_IMG=ontotext/graphdb:10.3.1 - -## ADDITIONAL CONFIGURABLE PARAMETERS: Uncomment and modify values of the below variables as needed to use non-default values. -# NB_API_ALLOWED_ORIGINS="" # Allow multiple origins of requests. e.g. For a query tool deployed locally using default ports, use: NB_API_ALLOWED_ORIGINS="http://localhost:3000 http://127.0.0.1:3000" -# NB_API_PORT_HOST=8000 -# NB_API_PORT=8000 -# NB_GRAPH_ROOT_HOST=~/graphdb-home -# NB_GRAPH_ROOT_CONT=/opt/graphdb/home -# NB_GRAPH_ADDRESS=graph -# NB_GRAPH_PORT_HOST=7200 -# NB_GRAPH_PORT=7200 -# NB_QUERY_TAG=latest -# NB_QUERY_PORT_HOST=3000 -# NB_API_QUERY_URL=http://localhost:8000/ diff --git a/local_node_with_query_tool/docker-compose.yml b/local_node_with_query_tool/docker-compose.yml deleted file mode 100644 index c622c27..0000000 --- a/local_node_with_query_tool/docker-compose.yml +++ /dev/null @@ -1,28 +0,0 @@ -version: "3.8" - -services: - api: - image: "neurobagel/api:${NB_API_TAG:-latest}" - ports: - - "${NB_API_PORT_HOST:-8000}:${NB_API_PORT:-8000}" - environment: - NB_GRAPH_USERNAME: ${NB_GRAPH_USERNAME} - NB_GRAPH_PASSWORD: ${NB_GRAPH_PASSWORD} - NB_GRAPH_ADDRESS: ${NB_GRAPH_ADDRESS:-graph} - NB_GRAPH_PORT: ${NB_GRAPH_PORT:-7200} - NB_GRAPH_DB: ${NB_GRAPH_DB:-repositories/my_db} - NB_RETURN_AGG: ${NB_RETURN_AGG:-true} - NB_API_PORT: ${NB_API_PORT:-8000} - NB_API_ALLOWED_ORIGINS: ${NB_API_ALLOWED_ORIGINS} - graph: - image: "${NB_GRAPH_IMG:-ontotext/graphdb:10.3.1}" - volumes: - - "${NB_GRAPH_ROOT_HOST:-~/graphdb-home}:${NB_GRAPH_ROOT_CONT:-/opt/graphdb/home}" - ports: - - "${NB_GRAPH_PORT_HOST:-7200}:${NB_GRAPH_PORT:-7200}" - query: - image: "neurobagel/query_tool:${NB_QUERY_TAG:-latest}" - ports: - - "${NB_QUERY_PORT_HOST:-3000}:5173" - environment: - NB_API_QUERY_URL: "${NB_API_QUERY_URL:-http://localhost:8000/}" \ No newline at end of file diff --git a/local_node_with_query_tool/template.env b/local_node_with_query_tool/template.env deleted file mode 100644 index 7d1a1c5..0000000 --- a/local_node_with_query_tool/template.env +++ /dev/null @@ -1,22 +0,0 @@ -# THIS IS A TEMPLATE .env FILE. MAKE A COPY OF THIS FILE NAMED .env -# AND MODIFY THE VALUES AS DESCRIBED IN THE DOCUMENTATION. -# For more info on each variable, see https://neurobagel.org/infrastructure/#set-the-environment-variables -NB_GRAPH_USERNAME=DBUSER # REPLACE DBUSER WITH YOUR GRAPH DATABASE USERNAME -NB_GRAPH_PASSWORD=DBPASSWORD # REPLACE DBPASSWORD WITH YOUR GRAPH DATABASE PASSWORD -NB_GRAPH_DB=repositories/my_db -NB_RETURN_AGG=true -NB_API_TAG=latest -NB_API_ALLOWED_ORIGINS="https://localhost:3000 http://localhost:3000" # e.g., allow multiple origins from a query tool deployed locally with default port settings; edit as needed for your setup -NB_GRAPH_IMG=ontotext/graphdb:10.3.1 -NB_API_QUERY_URL=http://localhost:8000/ - -## ADDITIONAL CONFIGURABLE PARAMETERS: Uncomment and modify values of the below variables as needed to use non-default values. 
-# NB_API_PORT_HOST=8000 -# NB_API_PORT=8000 -# NB_GRAPH_ROOT_HOST=~/graphdb-home -# NB_GRAPH_ROOT_CONT=/opt/graphdb/home -# NB_GRAPH_ADDRESS=graph -# NB_GRAPH_PORT_HOST=7200 -# NB_GRAPH_PORT=7200 -# NB_QUERY_TAG=latest -# NB_QUERY_PORT_HOST=3000 diff --git a/scripts/graphdb_setup.sh b/scripts/graphdb_setup.sh old mode 100644 new mode 100755 index 86254f4..676d18f --- a/scripts/graphdb_setup.sh +++ b/scripts/graphdb_setup.sh @@ -128,12 +128,12 @@ echo "Environment variables have been set from ${ENV_FILE_PATH}." # Extract just the database name DB_NAME="${NB_GRAPH_DB#repositories/}" -NB_GRAPH_PORT_HOST=${NB_GRAPH_PORT_HOST:-7200} +NB_GRAPH_PORT=${NB_GRAPH_PORT:-7200} # Get the directory of this script to be able to find the data-config_template.ttl file SCRIPT_DIR=$(dirname "$0") -echo "The GraphDB server is being accessed at http://localhost:${NB_GRAPH_PORT_HOST}." +echo "The GraphDB server is being accessed at http://localhost:${NB_GRAPH_PORT}." ##### First time GraphDB setup ##### @@ -143,14 +143,14 @@ if [ "${RUN_USER_SETUP}" = "on" ]; then # 1. Change database admin password echo "Changing the admin password (note: if you have previously set the admin password, this has no effect)..." # TODO: To change a *previously set* admin password, we need to also provide the current password via -u - curl -X PATCH --header 'Content-Type: application/json' http://localhost:${NB_GRAPH_PORT_HOST}/rest/security/users/admin -d "{\"password\": \""${ADMIN_PASS}"\"}" + curl -X PATCH --header 'Content-Type: application/json' http://localhost:${NB_GRAPH_PORT}/rest/security/users/admin -d "{\"password\": \""${ADMIN_PASS}"\"}" # 2. If security is not enabled, enable it (i.e. allow only authenticated users access) - is_security_enabled=$(curl -s -X GET http://localhost:${NB_GRAPH_PORT_HOST}/rest/security) + is_security_enabled=$(curl -s -X GET http://localhost:${NB_GRAPH_PORT}/rest/security) if [ "${is_security_enabled}" = "false" ]; then echo "Enabling password-based access control to all databases ..." # NOTE: This command fails without credentials once security is enabled - curl -X POST --header 'Content-Type: application/json' -d true http://localhost:${NB_GRAPH_PORT_HOST}/rest/security + curl -X POST --header 'Content-Type: application/json' -d true http://localhost:${NB_GRAPH_PORT}/rest/security else echo "Password-based access control has already been enabled." fi @@ -159,7 +159,7 @@ if [ "${RUN_USER_SETUP}" = "on" ]; then # TODO: Separate this out from the first-time setup? As this can technically be run at any time to create additional users. # NOTE: If user already exists, response will be "An account with the given username already exists." OK for script. echo "Creating a new database user ${NB_GRAPH_USERNAME}..." - curl -X POST --header 'Content-Type: application/json' -u "admin:${ADMIN_PASS}" -d @- http://localhost:${NB_GRAPH_PORT_HOST}/rest/security/users/${NB_GRAPH_USERNAME} <? SCRIPT_DIR=$(dirname "$0") +mkdir -p ${SCRIPT_DIR}/logs # Logic for main setup main() { @@ -17,7 +18,7 @@ main() { echo -e "(The GraphDB server is being accessed inside the GraphDB container at http://localhost:${NB_GRAPH_PORT}.)\n" echo "Setting up GraphDB server..." - ./graphdb_setup.sh "${NB_GRAPH_ADMIN_PASSWORD}" + ./graphdb_setup.sh --env-file-path /usr/src/neurobagel/.env "${NB_GRAPH_ADMIN_PASSWORD}" echo "Finished server setup." echo "Adding datasets to the database..." @@ -25,13 +26,13 @@ main() { echo "Finished adding datasets to databases." 
echo "Adding Neurobagel vocabulary to the database" - ./add_data_to_graph.sh ./vocab localhost:${NB_GRAPH_PORT} ${NB_GRAPH_DB} "${NB_GRAPH_USERNAME}" "${NB_GRAPH_PASSWORD}" + ./add_data_to_graph.sh ../vocab localhost:${NB_GRAPH_PORT} ${NB_GRAPH_DB} "${NB_GRAPH_USERNAME}" "${NB_GRAPH_PASSWORD}" echo "Finished adding the Neurobagel vocabulary to the database." echo "Finished setting up the Neurobagel graph backend." } -main 2>&1 | tee -a ${SCRIPT_DIR}/DEPLOY.log +main 2>&1 | tee -a ${SCRIPT_DIR}/logs/DEPLOY.log # We don't have jobcontrol here, so can't bring GraphDB back to foreground # instead we'll wait diff --git a/template.env b/template.env new file mode 100644 index 0000000..4e121a6 --- /dev/null +++ b/template.env @@ -0,0 +1,70 @@ +# THIS IS A TEMPLATE .env FILE. +# MAKE A COPY OF THIS FILE NAMED .env, AND THEN MODIFY THE VALUES AS NEEDED. +# +# Tip: To verify your changes to the .env have taken effect, run the command: docker compose config +# For more information on each environment variable, see the Neurobagel documentation. + +# ---- CONFIGURATION FOR graph ---- +# Replace ADMINPASSWORD with the secure password you want to set for the admin user +NB_GRAPH_ADMIN_PASSWORD=ADMINPASSWORD +# Replace DBUSER with the username you want to set for your graph database user +NB_GRAPH_USERNAME=DBUSER +# Replace DBPASSWORD with the secure password you want to set for the created database user +NB_GRAPH_PASSWORD=DBPASSWORD +# Replace my_db with the name you want to give your graph database +NB_GRAPH_DB=repositories/my_db +# Replace ./data with the path to your JSONLD files +LOCAL_GRAPH_DATA=./data + +# Additional configurable parameters - uncomment to change the defaults +# Change NB_GRAPH_PORT_HOST if port 7200 is already in use on the machine +# NB_GRAPH_PORT_HOST=7200 +# NB_GRAPH_PORT=7200 +# NB_GRAPH_ROOT_HOST=~/graphdb-home +# NB_GRAPH_ROOT_CONT=/opt/graphdb/home +# NB_GRAPH_IMG=ontotext/graphdb:10.3.1 +# NB_GRAPH_ADDRESS=graph +# --------------------------------- + +# ---- CONFIGURATION FOR n-API ---- +# Origins to allow requests from. To limit to specific origins, replace * with URLs separated by a space. +NB_NAPI_ALLOWED_ORIGINS="*" +# Whether to return only aggregate, dataset-level query results (excluding subject/session-level attributes). +NB_RETURN_AGG=true + +# Additional configurable parameters - uncomment to change the defaults +# Change NB_NAPI_PORT_HOST if port 8000 is already in use on the machine +# NB_NAPI_PORT_HOST=8000 +# NB_NAPI_PORT=8000 +# NB_NAPI_TAG=latest +# --------------------------------- + +# ---- CONFIGURATION FOR f-API ---- +# Additional configurable parameters - uncomment to change the defaults +# Change NB_FAPI_PORT_HOST if port 8080 is already in use on the machine +# NB_FAPI_PORT_HOST=8080 +# NB_FAPI_PORT=8000 +# NB_FAPI_TAG=latest +# --------------------------------- + +# ---- CONFIGURATION FOR QUERY TOOL ---- +# You MUST replace http://XX.XX.XX.XX with the URL (and port, if needed) of the Neurobagel API the query tool should send requests to. +# The query tool sends requests from a user's machine, so ensure you provide the URL of the API *as a user would access it from their own (external) machine*, +# even if the query tool and the Neurobagel API are hosted on the same physical machine. +# e.g., https://neurobagel.myInstitute.edu/federation +# +# Exception: If you are testing your setup locally and will only access the query tool from the SAME machine +# that your API is hosted on, you can use localhost as the URL. 
+# e.g., http://localhost:8000 (if NB_API_PORT_HOST=8000): +NB_API_QUERY_URL=http://XX.XX.XX.XX + +# Additional configurable parameters - uncomment to change the defaults +# Change NB_QUERY_PORT_HOST if port 3000 is already in use on the machine +# NB_QUERY_PORT_HOST=3000 +# NB_QUERY_TAG=latest +# -------------------------------------- + +# ---- CONFIGURATION FOR docker compose ---- +# Default service profile (deployment "flavour") for `docker compose` command +COMPOSE_PROFILES=local_node +# ------------------------------------------
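
For convenience, a minimal quick-start sketch of the deployment flow this diff sets up. The file paths, the `full_stack` and `local_node_query` profile names, and the `scripts/logs/DEPLOY.log` location are taken from the README and workflow changes above; the log-wait loop simply mirrors the CI "wait for graph set up" step and is an illustrative sketch, not an official helper script.

```bash
# Run from the repository root (config files now live at the top level, not in dev/).
cp template.env .env
cp local_nb_nodes.template.json local_nb_nodes.json   # only needed for local federation

# Edit .env before launching any profile that includes the query tool
# (local_node_query, full_stack): NB_API_QUERY_URL must be set to the API URL
# as a user would reach it from their own machine.

docker compose --profile full_stack up -d

# Block until the automated graph setup has finished, the same way the
# compatibility-test workflows do.
while ! tail -n 0 -f ./scripts/logs/DEPLOY.log | grep -q "Finished setting up the Neurobagel graph backend."; do
    :
done
echo "Neurobagel graph backend is ready."
```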