19
19
20
20
# Exit immediately if any command exits with a non-zero status.
set -e
21
21
22
# Print usage information for this script and exit successfully.
help() {
    echo -n "
    Initialize folders and databases for Spot in Hadoop.

    Options:
        --no-sudo   Do not use sudo with hdfs commands.
        -c          Specify config file (default = /etc/spot.conf)
        -d          Override databases
        -h, --help  Display this help and exit
    "
    exit 0
}
34
+
22
35
# Log a message to stdout, prefixed with the script name.
# $1 - message text; passed to printf as DATA, never as the format string,
#      so '%' or backslash sequences in the message cannot be misinterpreted.
function log() {
    printf "hdfs_setup.sh:\n %s\n\n" "$1"
}
25
38
26
39
# Create an HDFS directory only if it does not already exist.
# Keeps the script (running under 'set -e') from exiting on existing folders.
#   $1 - hdfs command to use (may be multi-word, e.g. "sudo -u hdfs hdfs")
#   $2 - HDFS directory to create
function safe_mkdir() {
    local hdfs_cmd=$1
    local dir=$2
    # Use ${hdfs_cmd} for the existence test too (not a bare 'hdfs'), so the
    # test runs with the same privileges as the mkdir below.
    # ${hdfs_cmd} is intentionally unquoted: it may contain "sudo -u hdfs".
    if ${hdfs_cmd} dfs -test -d "${dir}"; then
        log "${dir} already exists"
    else
        log "running mkdir on ${dir}"
        ${hdfs_cmd} dfs -mkdir "${dir}"
    fi
}
39
52
@@ -74,12 +87,15 @@ for arg in "$@"; do
74
87
db_override=$1
75
88
shift
76
89
;;
90
+ " -h" |" --help" )
91
+ help
92
+ ;;
77
93
esac
78
94
done
79
95
80
96
# Load the Spot configuration variables (HUSER, DBNAME, DBENGINE, DSOURCES,
# DFOLDERS, ...) into the current shell.
log "Sourcing ${SPOTCONF}"
source "$SPOTCONF"
83
99
84
100
if [[ ${no_sudo} == " true" ]]; then
85
101
hdfs_cmd=" hdfs"
@@ -95,10 +111,10 @@ else
95
111
fi
96
112
97
113
# Normalize the database engine name to lower case; a -d override, when
# present, takes precedence over the value sourced from the config file.
if [[ -n "${db_override}" ]]; then
    DBENGINE=$(echo "${db_override}" | tr '[:upper:]' '[:lower:]')
    log "setting database engine to $db_override"
else
    DBENGINE=$(echo "${DBENGINE}" | tr '[:upper:]' '[:lower:]')
    log "setting database engine to ${DBENGINE}"
fi
104
120
@@ -112,7 +128,11 @@ case ${DBENGINE} in
112
128
db_script=" ${db_shell} --var=huser=${HUSER} --var=dbname=${DBNAME} -c -f"
113
129
;;
114
130
hive)
115
- db_shell=" hive"
131
+ if [[ ${no_sudo} == " true" ]]; then
132
+ db_shell=" hive"
133
+ else
134
+ db_shell=" sudo -u hive hive"
135
+ fi
116
136
db_query=" ${db_shell} -e"
117
137
db_script=" ${db_shell} -hiveconf huser=${HUSER} -hiveconf dbname=${DBNAME} -f"
118
138
;;
@@ -128,33 +148,35 @@ case ${DBENGINE} in
128
148
esac
129
149
130
150
# Creating HDFS user's folder
safe_mkdir "${hdfs_cmd}" "${HUSER}"
${hdfs_cmd} dfs -chown "${USER}":supergroup "${HUSER}"
${hdfs_cmd} dfs -chmod 775 "${HUSER}"

# Creating HDFS paths for each use case
for d in "${DSOURCES[@]}"; do
    echo "creating /$d"
    safe_mkdir "${hdfs_cmd}" "${HUSER}/$d"
    for f in "${DFOLDERS[@]}"; do
        echo "creating $d/$f"
        safe_mkdir "${hdfs_cmd}" "${HUSER}/$d/$f"
    done

    # Modifying permission on HDFS folders to allow Impala to read/write
    # NOTE(review): ${db_override} may be empty when -d was not given, which
    # yields the ACL spec "user::rwx" (the owner entry) — confirm intended.
    ${hdfs_cmd} dfs -chmod -R 775 "${HUSER}/$d"
    ${hdfs_cmd} dfs -setfacl -R -m user:"${db_override}":rwx "${HUSER}/$d"
    ${hdfs_cmd} dfs -setfacl -R -m user:"${USER}":rwx "${HUSER}/$d"
done
151
171
152
172
153
173
# Creating Spot Database
154
- ${db_query} " CREATE DATABASE IF NOT EXISTS ${DBNAME} " ;
174
+ log " Creating Spot Database"
175
+ ${db_query} " CREATE DATABASE IF NOT EXISTS ${DBNAME} " ;
155
176
156
177
157
178
# Creating tables
179
+ log " Creating Database tables"
158
180
for d in " ${DSOURCES[@]} "
159
181
do
160
182
${db_script} " ./${DBENGINE} /create_${d} _parquet.hql"
0 commit comments