Skip retry and continued fetch of userdata when NOT_FOUND
When a 404 HTTP code comes back while fetching EC2 data, stop the fetch immediately instead of retrying, and have the userdata-fetching function treat this as the special case of no userdata being available, returning an empty string.
parent 144fb6fe7d · commit d01bb4f143
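Before the diff, here is a minimal, self-contained sketch of the pattern this commit introduces: a retry loop consults an optional exception callback after each failure, a functools.partial-bound helper answers False for status codes in a skip set, and the userdata fetcher translates a skipped NOT_FOUND into an empty string. All names below (fetch_with_retries, fetch_userdata, FakeUrlError) are illustrative stand-ins, not the cloud-init API; the real code lives in cloudinit.url_helper.readurl and cloudinit.ec2_utils.

import functools

NOT_FOUND = 404
SKIP_USERDATA_CODES = frozenset([NOT_FOUND])


class FakeUrlError(Exception):
    """Stand-in for url_helper.UrlError: an error carrying an HTTP code."""
    def __init__(self, code):
        Exception.__init__(self, "HTTP %s" % code)
        self.code = code


def _skip_retry_on_codes(status_codes, request_args, cause):
    # Mirror of the helper added by this commit: returning False tells the
    # retry loop to stop instead of trying again.
    return cause.code not in status_codes


def fetch_with_retries(fetcher, retries, exception_cb=None):
    # Simplified analogue of readurl()'s manual retry loop: on each failure
    # the optional exception_cb is consulted; a False result breaks out early.
    last_error = None
    for attempt in range(retries + 1):
        try:
            return fetcher()
        except FakeUrlError as e:
            last_error = e
            if exception_cb and not exception_cb({'attempt': attempt}, e):
                break
    raise last_error


def fetch_userdata(fetcher, retries=5):
    # Analogue of get_instance_userdata(): a NOT_FOUND simply means there is
    # no userdata, so swallow it and return an empty string.
    skip_cb = functools.partial(_skip_retry_on_codes, SKIP_USERDATA_CODES)
    try:
        return fetch_with_retries(fetcher, retries, exception_cb=skip_cb)
    except FakeUrlError as e:
        if e.code in SKIP_USERDATA_CODES:
            return ''
        raise


if __name__ == '__main__':
    def always_404():
        raise FakeUrlError(NOT_FOUND)

    # One request, no further retries, empty string back -- the behaviour the
    # commit's new test (test_userdata_fetch_fail_server_not_found) asserts.
    print(fetch_userdata(always_404))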
cloudinit/ec2_utils.py

@@ -16,6 +16,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 
+import httplib
 from urlparse import (urlparse, urlunparse)
 
 import functools
@@ -23,9 +24,11 @@ import json
 import urllib
 
 from cloudinit import log as logging
+from cloudinit import url_helper
 from cloudinit import util
 
 LOG = logging.getLogger(__name__)
+SKIP_USERDATA_CODES = frozenset([httplib.NOT_FOUND])
 
 
 def maybe_json_object(text):
@@ -138,20 +141,38 @@ class MetadataMaterializer(object):
         return joined
 
 
+def _skip_retry_on_codes(status_codes, request_args, cause):
+    """Returns if a request should retry based on a given set of codes that
+    case retrying to be stopped/skipped.
+    """
+    if cause.code in status_codes:
+        return False
+    return True
+
+
 def get_instance_userdata(api_version='latest',
                           metadata_address='http://169.254.169.254',
                           ssl_details=None, timeout=5, retries=5):
     ud_url = combine_url(metadata_address, api_version)
     ud_url = combine_url(ud_url, 'user-data')
+    user_data = ''
     try:
+        # It is ok for userdata to not exist (thats why we are stopping if
+        # NOT_FOUND occurs) and just in that case returning an empty string.
+        exception_cb = functools.partial(_skip_retry_on_codes,
+                                         SKIP_USERDATA_CODES)
        response = util.read_file_or_url(ud_url,
                                         ssl_details=ssl_details,
                                         timeout=timeout,
-                                        retries=retries)
-        return str(response)
+                                        retries=retries,
+                                        exception_cb=exception_cb)
+        user_data = str(response)
+    except url_helper.UrlError as e:
+        if e.code not in SKIP_USERDATA_CODES:
+            util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
     except Exception:
         util.logexc(LOG, "Failed fetching userdata from url %s", ud_url)
-        return ''
+    return user_data
 
 
 def get_instance_metadata(api_version='latest',
cloudinit/url_helper.py

@@ -103,7 +103,7 @@ class UrlError(IOError):
 
 def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
             headers=None, headers_cb=None, ssl_details=None,
-            check_status=True, allow_redirects=True):
+            check_status=True, allow_redirects=True, exception_cb=None):
     url = _cleanurl(url)
     req_args = {
         'url': url,
@@ -163,14 +163,13 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
     # Handle retrying ourselves since the built-in support
     # doesn't handle sleeping between tries...
     for i in range(0, manual_tries):
+        req_args['headers'] = headers_cb(url)
+        filtered_req_args = {}
+        for (k, v) in req_args.items():
+            if k == 'data':
+                continue
+            filtered_req_args[k] = v
         try:
-            req_args['headers'] = headers_cb(url)
-            filtered_req_args = {}
-            for (k, v) in req_args.items():
-                if k == 'data':
-                    continue
-                filtered_req_args[k] = v
-
             LOG.debug("[%s/%s] open '%s' with %s configuration", i,
                       manual_tries, url, filtered_req_args)
 
@@ -196,6 +195,8 @@ def readurl(url, data=None, timeout=None, retries=0, sec_between=1,
                     # ssl exceptions are not going to get fixed by waiting a
                     # few seconds
                     break
+            if exception_cb and not exception_cb(filtered_req_args, e):
+                break
             if i + 1 < manual_tries and sec_between > 0:
                 LOG.debug("Please wait %s seconds while we wait to try again",
                           sec_between)
cloudinit/util.py

@@ -691,7 +691,7 @@ def fetch_ssl_details(paths=None):
 
 def read_file_or_url(url, timeout=5, retries=10,
                      headers=None, data=None, sec_between=1, ssl_details=None,
-                     headers_cb=None):
+                     headers_cb=None, exception_cb=None):
     url = url.lstrip()
     if url.startswith("/"):
         url = "file://%s" % url
@@ -708,7 +708,8 @@ def read_file_or_url(url, timeout=5, retries=10,
                                  headers_cb=headers_cb,
                                  data=data,
                                  sec_between=sec_between,
-                                 ssl_details=ssl_details)
+                                 ssl_details=ssl_details,
+                                 exception_cb=exception_cb)
 
 
 def load_yaml(blob, default=None, allowed=(dict,)):
tests/unittests/test_datasource/test_maas.py

@@ -119,7 +119,8 @@ class TestMAASDataSource(mocker.MockerTestCase):
             mock_request(url, headers=None, timeout=mocker.ANY,
                          data=mocker.ANY, sec_between=mocker.ANY,
                          ssl_details=mocker.ANY, retries=mocker.ANY,
-                         headers_cb=my_headers_cb)
+                         headers_cb=my_headers_cb,
+                         exception_cb=mocker.ANY)
             resp = valid.get(key)
             self.mocker.result(util.StringResponse(resp))
         self.mocker.replay()
tests/unittests/test_ec2_util.py

@@ -33,6 +33,14 @@ class TestEc2Util(helpers.TestCase):
         userdata = eu.get_instance_userdata(self.VERSION, retries=0)
         self.assertEquals('', userdata)
 
+    @hp.activate
+    def test_userdata_fetch_fail_server_not_found(self):
+        hp.register_uri(hp.GET,
+                        'http://169.254.169.254/%s/user-data' % (self.VERSION),
+                        status=404)
+        userdata = eu.get_instance_userdata(self.VERSION)
+        self.assertEquals('', userdata)
+
     @hp.activate
     def test_metadata_fetch_no_keys(self):
         base_url = 'http://169.254.169.254/%s/meta-data' % (self.VERSION)
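Usage note (illustrative only, grounded in the signature changes above): callers of url_helper.readurl or util.read_file_or_url can now pass exception_cb, a callable that receives the filtered request arguments and the caught exception; returning False aborts the remaining retries, while returning True preserves the old retry-until-exhausted behaviour. A hypothetical caller might look like this (the _give_up_on_client_errors helper is an assumption for the example, not part of the commit):

from cloudinit import url_helper


def _give_up_on_client_errors(request_args, error):
    # Stop retrying on any 4xx the error happens to carry; anything else
    # (timeouts, 5xx, errors without a code) keeps the normal retry loop.
    return not (400 <= getattr(error, 'code', 0) < 500)


response = url_helper.readurl('http://169.254.169.254/latest/meta-data/',
                              retries=5, sec_between=1,
                              exception_cb=_give_up_on_client_errors)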