path: root/google-startup-scripts/usr/share/google/fetch_script
#! /bin/bash
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Fetch a script from metadata and store it in the /var/run directory.
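#
# Example invocation (the destination path here is illustrative; the real
# caller supplies both arguments):
#   /usr/share/google/fetch_script /var/run/google.startup.script startup
# This would look up the startup-script-url and startup-script metadata
# attributes and write the resulting script to the given destination.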
declare -r LOGFILE=/var/log/google.log

if [[ -x /usr/bin/logger ]]; then
    declare -r LOGGER=/usr/bin/logger
else
    declare -r LOGGER=/bin/logger
fi

declare -r CURL_RETRY_LIMIT=10
declare -r CURL_TIMEOUT=10

function log() {
  echo "$@" | ${LOGGER} -t google -p daemon.info
  echo "$@" >> ${LOGFILE}
}

function download_url_with_logfile() {
  local -r url=$1
  local -r dest=$2
  local -r logfile=$3

  if [[ "$url" =~ gs://* ]]; then
    log "Downloading url from ${url} to ${dest} using gsutil"
    gsutil cp "${url}" "${dest}" > "${logfile}" 2>&1 && return 0
    log "Failed to download $url"
    return 1
  fi

  # Several Google Storage URL formats are supported below.
  # It is preferred that customers specify their object using
  # its gs://<bucket>/<object> URL.

  bucket="[a-z0-9][-_.a-z0-9]*[a-z0-9]"

  # Accept any non-empty string that doesn't contain a wildcard character.
  # gsutil interprets some characters as wildcards; these characters in
  # object names make it difficult or impossible to perform various
  # wildcard operations using gsutil.
  # For a complete list, use "gsutil help naming".
  object="[^\*\?][^\*\?]*"

  # For all validation tests:
  # alphanumeric ranges should only include ascii characters
  export LC_COLLATE=C

  # Check for the Google Storage URLs:
  # http://<bucket>.storage.googleapis.com/<object>/
  # https://<bucket>.storage.googleapis.com/<object>/
  if [[ "$url" =~ http[s]?://${bucket}\.storage\.googleapis\.com/${object} ]]; then
    log "Downloading url from ${url} to ${dest} using gsutil"
    # Create a url that can be interpreted by gsutil
    gsurl=$(echo "$url" | sed "s/^https\?:\/\/\($bucket\)\.storage\.googleapis\.com\/\($object\)$/gs:\/\/\1\/\2/")
    gsutil cp "${gsurl}" "${dest}" 2> "${logfile}" && return 0
  # Check for the other possible Google Storage URLS:
  # http://storage.googleapis.com/<bucket>/<object>/
  # https://storage.googleapis.com/<bucket>/<object>/
  #
  # The following are deprecated but checked
  # http://commondatastorage.googleapis.com/<bucket>/<object>/
  # https://commondatastorage.googleapis.com/<bucket>/<object>/
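  #
  # e.g. (illustrative names): https://storage.googleapis.com/my-bucket/scripts/a.sh
  # is rewritten by the sed expression below to gs://my-bucket/scripts/a.sh.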
  elif [[ "$url" =~ http[s]?://(commondata)?storage\.googleapis\.com/${bucket}/${object} ]]; then
    log "Downloading url from ${url} to ${dest} using gsutil"
    # Create a url that can be interpreted by gsutil
    gsurl=$(echo "$url" | sed "s/^https\?:\/\/\(commondata\|\)storage\.googleapis\.com\/\($bucket\)\/\($object\)$/gs:\/\/\2\/\3/")
    gsutil cp "${gsurl}" "${dest}" 2> "${logfile}" && return 0
  else
    log "URL ${url} is not located in Google Storage"
  fi

  # Unauthenticated download of the object.
  log "Downloading url from ${url} to ${dest} using curl"
  curl --max-time "${CURL_TIMEOUT}" --retry "${CURL_RETRY_LIMIT}" \
    2>> "${logfile}" -o "${dest}" -L -- "${url}" && return 0;

  log "Failed to download $url"
  return 1
}

function download_url() {
  local -r url=$1
  local -r dest=$2
  local -r logfile=$(mktemp)
  download_url_with_logfile "${url}" "${dest}" "${logfile}"
  return_code=$?
  # If the script was unable to download then report to the syslog.
  if [[ "${return_code}" != "0" ]]; then
    log "$(<"${logfile}")"
  else
    rm -f "${logfile}"
  fi
  return "${return_code}"
}

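# Look up a single instance attribute via the get_metadata_value helper
# installed alongside this script, e.g.
# get_metadata_attribute "startup-script-url" (attribute name illustrative).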
function get_metadata_attribute() {
  local -r varname=$1
  /usr/share/google/get_metadata_value "attributes/${varname}"
  return $?
}

function fetch_script() {
  # Try the <type>-script-url metadata attribute first, then fall back
  # to the <type>-script attribute itself.
  local script=$1
  local script_type=$2
  local url_type="${script_type}-script"
  local url="${url_type}-url"

  local -r script_url="$(get_metadata_attribute "${url}")"
  if [[ -n "${script_url}" ]]; then
    log "${url} metadata flag: ${script_url}"
    download_url "${script_url}" "${script}"
    if [[ $? != 0 ]]; then
      log "Could not download ${script_type} script ${script_url}."
    else
      log "Successfully downloaded ${script_type} script ${script_url}."
    fi
  else
    local -r metadata_script="$(get_metadata_attribute "${url_type}")"
    if [[ -n "${metadata_script}" ]]; then
      echo "${metadata_script}" > "${script}"
      log "${script_type} script found in metadata."
    else
      log "$(curl "http://metadata.google.internal/computeMetadata/v1/instance/?recursive=True" -H "Metadata-Flavor: Google")"
      log "No ${script_type} script found in metadata."
    fi
  fi
  [[ -e "${script}" ]] && chmod 700 "${script}"

  return 0
}

fetch_script "$1" "$2"