Statistics
| Branch: | Tag: | Revision:

one / src / datastore_mad / remotes / downloader.sh @ dde9cf31

History | View | Annotate | Download (8.08 KB)

1
#!/bin/bash
2

    
3
# -------------------------------------------------------------------------- #
4
# Copyright 2002-2017, OpenNebula Project, OpenNebula Systems                #
5
#                                                                            #
6
# Licensed under the Apache License, Version 2.0 (the "License"); you may    #
7
# not use this file except in compliance with the License. You may obtain    #
8
# a copy of the License at                                                   #
9
#                                                                            #
10
# http://www.apache.org/licenses/LICENSE-2.0                                 #
11
#                                                                            #
12
# Unless required by applicable law or agreed to in writing, software        #
13
# distributed under the License is distributed on an "AS IS" BASIS,          #
14
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
15
# See the License for the specific language governing permissions and        #
16
# limitations under the License.                                             #
17
#--------------------------------------------------------------------------- #
18

    
19
# Resolve installation paths: a self-contained install rooted at
# $ONE_LOCATION, otherwise the system-wide defaults.
if [ -n "${ONE_LOCATION}" ]; then
    LIB_LOCATION=$ONE_LOCATION/lib
    VAR_LOCATION=$ONE_LOCATION/var
else
    LIB_LOCATION=/usr/lib/one
    VAR_LOCATION=/var/lib/one
fi
26

    
27
# Load the logging/helper functions shared by all datastore drivers.
# Quoted so an ONE_LOCATION containing spaces does not break the path.
. "$LIB_LOCATION/sh/scripts_common.sh"

# Directory this script lives in; used to locate the xpath.rb/url.rb helpers.
DRIVER_PATH=$(dirname "$0")
30

    
31
# Print the MIME type of the stream produced by running the command given
# as the first parameter; only the first 1024 lines of output are fed to
# file(1). When NO_DECOMPRESS is "yes" the stream is reported as raw binary.
function get_type
{
    if [ "$NO_DECOMPRESS" = "yes" ]; then
        echo "application/octet-stream"
        return
    fi

    local probe_cmd=$1

    # Run unquoted on purpose: the command string must word-split into the
    # program and its arguments. Errors (e.g. SIGPIPE once head has read
    # enough) are irrelevant here, only the detected type matters.
    ( $probe_cmd | head -n 1024 | file -b --mime-type - ) 2>/dev/null
}
43

    
44
# Map a MIME type (first parameter) to the command that decompresses such a
# stream from stdin to stdout. Types with no known decompressor pass the
# data through unchanged via `cat`.
function get_decompressor
{
    case "$1" in
    "application/x-gzip"|"application/gzip")
        echo "gunzip -c -"
        ;;
    "application/x-bzip2")
        echo "bunzip2 -c -"
        ;;
    *)
        echo "cat"
        ;;
    esac
}
61

    
62
# Filter stdin through a decompression command. First parameter is the
# command (expanded unquoted so it word-splits into program + arguments);
# second parameter is the output file, or "-" to write to stdout.
function decompress
{
    local dcmp="$1"
    local dst="$2"

    case "$dst" in
    -)
        # Stream straight through to stdout
        $dcmp
        ;;
    *)
        # Write the decompressed data into the target file
        $dcmp > "$dst"
        ;;
    esac
}
76

    
77
# Hash stdin with the digest algorithm named by the first parameter
# (md5, sha1, ...), writing the hex digest to $HASH_FILE. With no
# algorithm the stream is still consumed so the calling pipeline does
# not receive SIGPIPE.
function hasher
{
    if [ -n "$1" ]; then
        # openssl prints "(stdin)= <hash>"; keep only the last field.
        # Expansions quoted so a HASH_FILE path with spaces/globs works.
        openssl dgst -"$1" | awk '{print $NF}' > "$HASH_FILE"
    else
        # Needs something consuming stdin or the pipe will break
        cat >/dev/null
    fi
}
87

    
88
# Unarchives a tar or a zip file to a directory with the same name.
# $1 - path of the downloaded file; if it is a tar/zip archive, after the
#      call that path is a directory holding the extracted contents.
#      Non-archive files are left untouched.
function unarchive
{
    TO="$1"

    # NOTE(review): get_type word-splits its argument, so a $TO containing
    # spaces still breaks the type probe — confirm callers pass safe paths.
    file_type=$(get_type "cat $TO")

    tmp="$TO"

    # Add full path if it is relative (quoted: unquoted test breaks on
    # empty/whitespace values)
    if [ "${tmp:0:1}" != "/" ]; then
        tmp="$PWD/$tmp"
    fi

    IN="$tmp.tmp"
    OUT="$tmp"

    # Build the extraction command as an array so paths survive
    # word-splitting and globbing
    case "$file_type" in
    "application/x-tar")
        command=(tar -xf "$IN" -C "$OUT")
        ;;
    "application/zip")
        command=(unzip -d "$OUT" "$IN")
        ;;
    *)
        command=()
        ;;
    esac

    if [ "${#command[@]}" -gt 0 ]; then
        # Move the archive aside, then extract it into a fresh directory
        # that takes over the original path
        mv "$OUT" "$IN"
        mkdir "$OUT"

        "${command[@]}"

        if [ "$?" != "0" ]; then
            echo "Error uncompressing archive" >&2
            exit -1
        fi

        rm "$IN"
    fi
}
131

    
132
# Reads the S3 marketplace credentials and endpoint from the driver action
# document into S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY and S3_ENDPOINT.
# Globals read: DRIVER_PATH, DRV_ACTION (presumably the encoded driver
# action XML consumed by xpath.rb -b — TODO confirm encoding).
# The helper prints the requested values NUL-delimited, so they may safely
# contain spaces or newlines.
function s3_env
{
    XPATH="$DRIVER_PATH/xpath.rb -b $DRV_ACTION"

    # Reset counters/array: i indexes writes in the read loop, j indexes
    # the ordered reads below
    unset i j XPATH_ELEMENTS

    # NOTE: $XPATH is expanded unquoted on purpose so the script path, the
    # -b flag and its argument are passed as separate words
    while IFS= read -r -d '' element; do
        XPATH_ELEMENTS[i++]="$element"
    done < <($XPATH     /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/ACCESS_KEY_ID \
                        /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/SECRET_ACCESS_KEY \
                        /DS_DRIVER_ACTION_DATA/MARKETPLACE/TEMPLATE/ENDPOINT)

    # Same order as the XPath queries above
    S3_ACCESS_KEY_ID="${XPATH_ELEMENTS[j++]}"
    S3_SECRET_ACCESS_KEY="${XPATH_ELEMENTS[j++]}"
    S3_ENDPOINT="${XPATH_ELEMENTS[j++]}"
}
148

    
149
# Builds the curl arguments (AWS Signature Version 2 headers plus URL)
# needed to GET an S3 object.
# $1 - s3://.../bucket/object pseudo-url; the object is the last path
#      component and the bucket the one before it.
# Globals read: S3_ACCESS_KEY_ID, S3_SECRET_ACCESS_KEY, S3_ENDPOINT
# (optional, defaults to the AWS endpoint). See s3_env.
function s3_curl_args
{
    FROM="$1"

    ENDPOINT=${S3_ENDPOINT:-https://s3.amazonaws.com}
    OBJECT=$(basename "$FROM")
    BUCKET=$(basename "$(dirname "$FROM")")

    # Signature v2 string-to-sign: "GET\n\n\nDATE\n/bucket/object",
    # HMAC-SHA1 signed with the secret key and base64-encoded.
    DATE="$(date -u +'%a, %d %b %Y %H:%M:%S GMT')"
    AUTH_STRING="GET\n\n\n${DATE}\n/${BUCKET}/${OBJECT}"

    # printf '%b' expands the embedded \n escapes without adding a trailing
    # newline (equivalent to echo -en, but well-defined). The secret key is
    # quoted so keys with glob/space characters do not break the command.
    SIGNED_AUTH_STRING=$(printf '%b' "$AUTH_STRING" | \
                         openssl sha1 -hmac "${S3_SECRET_ACCESS_KEY}" -binary | \
                         base64)

    echo " -H \"Date: ${DATE}\"" \
         " -H \"Authorization: AWS ${S3_ACCESS_KEY_ID}:${SIGNED_AUTH_STRING}\"" \
         " ${ENDPOINT}/${BUCKET}/${OBJECT}"
}
168

    
169
# Builds the shell command that streams an rbd:// image to stdout by
# ssh-ing into the target host and running '$RBD export <image> -'.
# $1 - rbd pseudo-url, parsed by the url.rb helper next to this script,
#      which prints the requested fields NUL-delimited.
# NOTE(review): $RBD is read from the environment (the rbd command to run
# on the remote host) — confirm the caller exports it.
function get_rbd_cmd
{
    local i j URL_ELEMENTS

    FROM="$1"

    URL_RB="$DRIVER_PATH/url.rb"

    # Collect the NUL-delimited fields; i indexes writes, j the reads below
    while IFS= read -r -d '' element; do
        URL_ELEMENTS[i++]="$element"
    done < <($URL_RB    $FROM \
                        USER \
                        HOST \
                        SOURCE \
                        PARAM_DS \
                        PARAM_CEPH_USER \
                        PARAM_CEPH_KEY \
                        PARAM_CEPH_CONF)

    # Same order as the field names passed to url.rb above
    USER="${URL_ELEMENTS[j++]}"
    DST_HOST="${URL_ELEMENTS[j++]}"
    SOURCE="${URL_ELEMENTS[j++]}"
    DS="${URL_ELEMENTS[j++]}"
    CEPH_USER="${URL_ELEMENTS[j++]}"
    CEPH_KEY="${URL_ELEMENTS[j++]}"
    CEPH_CONF="${URL_ELEMENTS[j++]}"

    # Remove leading '/'
    SOURCE="${SOURCE#/}"

    # Prefix the user for the ssh connection when one was given
    if [ -n "$USER" ]; then
        DST_HOST="$USER@$DST_HOST"
    fi

    # Append optional Ceph authentication/configuration flags to the
    # remote rbd invocation
    if [ -n "$CEPH_USER" ]; then
        RBD="$RBD --id ${CEPH_USER}"
    fi

    if [ -n "$CEPH_KEY" ]; then
        RBD="$RBD --keyfile ${CEPH_KEY}"
    fi

    if [ -n "$CEPH_CONF" ]; then
        RBD="$RBD --conf ${CEPH_CONF}"
    fi

    echo "ssh $DST_HOST $RBD export $SOURCE -"
}
217

    
218
# ---------------------------------------------------------------------------
# Command-line parsing. Usage:
#   downloader.sh [-m md5 | -s sha1] [-l rate] [-n] SOURCE DESTINATION
#
#   -m/--md5, -s/--sha1  expected hash of the download, verified afterwards
#   -l/--limit           bandwidth limit (passed to curl --limit-rate)
#   -n/--nodecomp        do not decompress the downloaded stream
# NOTE(review): long options rely on util-linux enhanced getopt, which is
# not available on BSD/macOS — confirm the target platforms are Linux.
# ---------------------------------------------------------------------------
TEMP=`getopt -o m:s:l:n -l md5:,sha1:,limit:,nodecomp -- "$@"`

if [ $? != 0 ] ; then
    echo "Arguments error"
    exit -1
fi

# Re-set the positional parameters to getopt's normalized option list
eval set -- "$TEMP"

while true; do
    case "$1" in
        -m|--md5)
            HASH_TYPE=md5
            HASH=$2
            shift 2
            ;;
        -s|--sha1)
            HASH_TYPE=sha1
            HASH=$2
            shift 2
            ;;
        -n|--nodecomp)
            # get_type checks NO_DECOMPRESS to skip MIME detection
            export NO_DECOMPRESS="yes"
            shift
            ;;
        -l|--limit)
            export LIMIT_RATE="$2"
            shift 2
            ;;
        --)
            shift
            break
            ;;
        *)
            shift
            ;;
    esac
done
256

    
257
# Positional arguments: SOURCE (url or local path) and DESTINATION file
# ('-' means write the data to stdout)
FROM="$1"
TO="$2"

# File used by the hasher function to store the resulting hash
export HASH_FILE="/tmp/downloader.hash.$$"

GLOBAL_CURL_ARGS="--fail -sS -k -L"

# Build the shell command that writes the source data to stdout, depending
# on the URL scheme; it is eval'ed later and piped into hasher/decompress
case "$FROM" in
http://*|https://*)
    # -k  so it does not check the certificate
    # -L  to follow redirects
    # -sS to hide output except on failure
    # --limit-rate to limit the bw
    curl_args="$GLOBAL_CURL_ARGS $FROM"

    if [ -n "$LIMIT_RATE" ]; then
        curl_args="--limit-rate $LIMIT_RATE $curl_args"
    fi

    command="curl $curl_args"
    ;;
ssh://*)
    # pseudo-url for ssh transfers ssh://user@host:path
    # NOTE(review): the -l/--limit rate option is NOT applied to ssh
    # transfers despite what the original comment implied
    ssh_src=${FROM#ssh://}
    # Split "user@host:path" into host and path at the first ':'
    ssh_arg=(${ssh_src/:/ })

    rmt_cmd="'cat ${ssh_arg[1]}'"

    command="ssh ${ssh_arg[0]} $rmt_cmd"
    ;;
s3://*)

    # Read s3 environment
    s3_env

    if [ -z "$S3_ACCESS_KEY_ID" -o -z "$S3_SECRET_ACCESS_KEY" ]; then
        echo "S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY are required" >&2
        exit -1
    fi

    curl_args="$(s3_curl_args $FROM)"

    command="curl $GLOBAL_CURL_ARGS $curl_args"
    ;;
rbd://*)
    command="$(get_rbd_cmd $FROM)"
    ;;
vcenter://*)
    command="$VAR_LOCATION/remotes/datastore/vcenter_downloader.rb \"$FROM\""
    ;;
*)
    # Anything else is treated as a readable local file
    if [ ! -r $FROM ]; then
        echo "Cannot read from $FROM" >&2
        exit -1
    fi
    command="cat $FROM"
    ;;
esac
317

    
318
file_type=$(get_type "$command")
319
decompressor=$(get_decompressor "$file_type")
320

    
321
eval "$command" | tee >( hasher $HASH_TYPE) | decompress "$decompressor" "$TO"
322

    
323
if [ "$?" != "0" -o "$PIPESTATUS" != "0" ]; then
324
    echo "Error copying" >&2
325
    exit -1
326
fi
327

    
328
if [ -n "$HASH_TYPE" ]; then
329
    HASH_RESULT=$( cat $HASH_FILE)
330
    rm $HASH_FILE
331
    if [ "$HASH_RESULT" != "$HASH" ]; then
332
        echo "Hash does not match" >&2
333
        exit -1
334
    fi
335
fi
336

    
337
# Unarchive only if the destination is filesystem
338
if [ "$TO" != "-" ]; then
339
    unarchive "$TO"
340
fi
341