from decimal import Decimal
from unittest import TestCase
from bluechips import model
from bluechips.model import meta
from bluechips.model.types import Currency
class TestExpenditure(TestCase):
def setUp(self):
self.u = model.User(u'chaz', u'Charles Root', False)
self.e = model.Expenditure(self.u, Currency('444.88'),
u'chaz buys lunch')
meta.Session.add(self.u)
meta.Session.add(self.e)
meta.Session.commit()
def test_constructor(self):
assert self.e.spender == self.u
assert self.e.amount == Currency('444.88')
assert self.e.description == u'chaz buys lunch'
def test_repr(self):
assert (repr(self.e) ==
'<Expenditure: spender: Charles Root spent: $444.88>')
def test_even_split(self):
self.e.even_split()
meta.Session.commit()
for sp in self.e.splits:
assert sp.share == Currency('111.22')
def test_split_change_to_zero(self):
self.e.even_split()
meta.Session.commit()
users = meta.Session.query(model.User).all()
split_dict = dict((user, Decimal('0')) for user in users)
split_dict[self.u] = Decimal(1)
self.e.split(split_dict)
def _two_way_split_test(self, amount, min, max):
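        # Splits ``amount`` evenly between two users and asserts that the
        # second user's share lands between ``min`` and ``max`` after
        # penny rounding.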
e2 = model.Expenditure(self.u, amount,
u'testing splits')
u2 = model.User(u'bo', u'Bo Jangles', False)
meta.Session.add(u2)
meta.Session.add(e2)
meta.Session.commit()
split_dict = {}
split_dict[self.u] = Decimal(1)
split_dict[u2] = Decimal(1)
e2.split(split_dict)
assert min <= e2.share(u2) <= max
meta.Session.delete(e2)
meta.Session.delete(u2)
meta.Session.commit()
def test_split_rounds_down(self):
self._two_way_split_test(Currency('40.01'),
Currency('20.00'),
Currency('20.01'))
def test_split_rounds_up(self):
self._two_way_split_test(Currency('39.99'),
Currency('19.99'),
Currency('20.00'))
def test_split_small(self):
self._two_way_split_test(Currency('0.01'),
Currency('0.00'),
Currency('0.01'))
def test_split_small_negative(self):
self._two_way_split_test(Currency('-0.01'),
Currency('-0.01'),
Currency('-0.00'))
def test_split_irrational_rounding(self):
e2 = model.Expenditure(self.u, Decimal('2375.00'),
u'rounding test')
u2 = model.User(u'rat', u'Irrational Rat', False)
meta.Session.add(u2)
meta.Session.add(e2)
meta.Session.commit()
split_dict = {}
split_dict[u2] = Decimal('750.00')
split_dict[self.u] = Decimal('4000.00')
e2.split(split_dict)
assert e2.share(u2) == Decimal('375.00')
meta.Session.delete(e2)
meta.Session.delete(u2)
meta.Session.commit()
def tearDown(self):
meta.Session.delete(self.e)
meta.Session.delete(self.u)
meta.Session.commit()
|
from unittest import TestCase
from bluechips import model
class TestTransfer(TestCase):
def setUp(self):
self.u1 = model.User('chaz', u'Charles Root', False)
self.u2 = model.User('boo', u'Boo Ghost', True)
self.t = model.Transfer(self.u1, self.u2, 1234)
def test_constructor(self):
assert self.t.debtor == self.u1
assert self.t.creditor == self.u2
assert self.t.amount == 1234
def test_repr(self):
assert (repr(self.t) ==
'<Transfer: from Charles Root to Boo Ghost for 1234>')
def test_involves(self):
other_u = model.User('jim', u'Jimbo James', True)
assert self.t.involves(other_u) == False
assert self.t.involves(self.u1)
assert self.t.involves(self.u2)
|
from unittest import TestCase
from formencode import Invalid
from bluechips.model import types
class TestCurrencyValidator(TestCase):
def setUp(self):
self.v = types.CurrencyValidator()
def test_currency_validator_good(self):
assert (self.v.to_python('12.34') ==
types.Currency('12.34'))
def test_currency_validator_nonzero(self):
        try:
            self.v.to_python('0')
        except Invalid:
            pass
        else:
            raise AssertionError
def test_currency_validator_precision(self):
        try:
            self.v.to_python('12.345')
        except Invalid:
            pass
        else:
            raise AssertionError
def test_currency_validator_amount(self):
        try:
            self.v.to_python('foo')
        except Invalid:
            pass
        else:
            raise AssertionError
|
from unittest import TestCase
from bluechips.lib import totals
class TestReorderingSettle(TestCase):
def test_transfer_minimized(self):
"""
Test that the number of transfers is actually minimized.
        This case comes from a real-world situation, where we discovered
        that failing to re-order the debt lists after every transfer
        could lead to extra, unnecessary transfers.
"""
self.assertEqual(len(totals.settle({'Alice': 100,
'Bob': -85,
'Charlie': 35,
'Dave': -35,
'Eve': -15})),
3)
def test_settle_even(self):
transfers = totals.settle({'Alice': 0,
'Bob': 0,
'Charlie': 0})
assert transfers == []
def test_settle_positive(self):
transfers = totals.settle({'Alice': -50,
'Bob': 100,
'Charlie': -50})
assert transfers == [('Bob', 'Charlie', 50),
('Bob', 'Alice', 50)]
def test_settle_uneven_positive(self):
        try:
            transfers = totals.settle({'Alice': -50,
                                       'Bob': -50,
                                       'Charlie': -50})
        except totals.DirtyBooks:
            pass
        else:
            raise AssertionError
def test_settle_uneven_negative(self):
        try:
            transfers = totals.settle({'Alice': 50,
                                       'Bob': 50,
                                       'Charlie': 50})
        except totals.DirtyBooks:
            pass
        else:
            raise AssertionError
|
from unittest import TestCase
from pylons import request
from datetime import date
from bluechips.lib import helpers as h
class TestHelpers(TestCase):
def test_grab_real_object(self):
class Foo(object):
pass
foo = Foo()
foo.bar = 'some string'
assert h.grab(foo, 'bar') == 'some string'
try:
h.grab(foo, 'baz')
except AttributeError:
pass
else:
raise AssertionError
def test_grab_any_fake(self):
assert h.grab(None, 'nonexistent') == ''
assert h.grab('', 'nonexistent') == ''
def test_grab_date(self):
assert h.grab(None, 'date') == date.today()
def test_grab_user(self):
class FakeRequest(object):
pass
class FakeUser(object):
pass
class SomeObject(object):
pass
req = FakeRequest()
req.environ = {}
req.environ['user'] = FakeUser()
test_obj = SomeObject()
req.environ['user'].id = test_obj
request._push_object(req)
assert h.grab(None, 'spender_id') == test_obj
assert h.grab(None, 'creditor_id') == test_obj
assert h.grab(None, 'debtor_id') == test_obj
request._pop_object()
def test_grab_amount(self):
assert h.grab(None, 'amount') == 0
|
from unittest import TestCase
from bluechips.lib import permissions
class TestPermissions(TestCase):
def test_authenticate(self):
assert permissions.authenticate({}, u'root', u'charliepass')
assert not permissions.authenticate({}, u'root', u'blah')
assert not permissions.authenticate({}, u'blah', u'charliepass')
assert not permissions.authenticate({}, u'blah', u'blah')
|
from datetime import date
from formencode import Invalid
from webhelpers.html.secure_form import token_key
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
from bluechips.model.types import Currency
from bluechips.controllers.spend import ExpenditureSchema
class TestSpendController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='spend'))
# Test response...
response.mustcontain('Add a New Expenditure')
form = response.form
user = meta.Session.query(model.User).\
filter_by(name=u'Charlie Root').one()
form['spender_id'] = user.id
form['amount'] = '74.04'
# Make sure date is today.
today = date.today()
assert form['date'].value == today.strftime('%m/%d/%Y')
form['description'] = 'A test expenditure'
form['shares-0.amount'] = '1'
form['shares-1.amount'] = '2'
form['shares-2.amount'] = '2'
form['shares-3.amount'] = '1'
for ii in range(4):
assert int(form['shares-%d.user_id' % ii].value) == ii + 1
response = form.submit()
response = response.follow()
response.mustcontain('Expenditure', 'created.')
e = meta.Session.query(model.Expenditure).\
order_by(model.Expenditure.id.desc()).first()
assert e.spender.name == u'Charlie Root'
assert e.amount == 7404
assert e.date == today
assert e.description == u'A test expenditure'
# Test the split.
shares = dict(((sp.user_id, sp.share)
for sp in e.splits))
assert shares[1] == Currency('12.34')
assert shares[2] == Currency('24.68')
assert shares[3] == Currency('24.68')
assert shares[4] == Currency('12.34')
def test_edit_and_delete(self):
user = meta.Session.query(model.User).\
filter_by(name=u'Charlie Root').one()
e = model.Expenditure(user, 53812, u'Lemon bundt cake', None)
e.even_split()
meta.Session.add(e)
meta.Session.commit()
response = self.app.get(url_for(controller='spend',
action='edit',
id=e.id))
response.mustcontain('Edit an Expenditure')
form = response.form
assert int(form['spender_id'].value) == user.id
assert form['amount'].value == '538.12'
assert form['date'].value == date.today().strftime('%m/%d/%Y')
assert form['description'].value == u'Lemon bundt cake'
form['description'] = u'Updated bundt cake'
# Update the split too.
response = form.submit()
response = response.follow()
response.mustcontain('Expenditure', 'updated.')
e = meta.Session.query(model.Expenditure).\
order_by(model.Expenditure.id.desc()).first()
assert e.description == u'Updated bundt cake'
response = self.app.get(url_for(controller='spend',
action='delete',
id=e.id))
response = response.form.submit('delete').follow()
response.mustcontain('Expenditure', 'deleted')
def test_delete_nonexistent(self):
self.app.get(url_for(controller='spend',
action='delete',
id=124344),
status=404)
def test_destroy_nonexistent(self):
response = self.app.get(url_for(controller='spend',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='spend',
action='destroy',
id=124344),
params=params,
status=404)
def test_delete_xsrf_protection(self):
self.app.post(url_for(controller='spend',
action='destroy',
id=1),
params={'delete': 'Delete'},
status=403)
def test_edit_zero_value(self):
user = meta.Session.query(model.User).\
filter_by(name=u'Charlie Root').one()
e = model.Expenditure(user, 0, u'A zero value expenditure', None)
e.even_split()
meta.Session.add(e)
meta.Session.commit()
response = self.app.get(url_for(controller='spend',
action='edit',
id=e.id))
response.mustcontain('Edit an Expenditure')
form = response.form
assert int(form['spender_id'].value) == user.id
assert form['amount'].value == '0.00'
assert form['date'].value == date.today().strftime('%m/%d/%Y')
assert form['description'].value == u'A zero value expenditure'
for ii in range(4):
assert form['shares-%d.amount' % ii].value == '0'
def test_edit_nonexistent(self):
response = self.app.get(url_for(controller='spend',
action='edit',
id=124234), status=404)
def test_update_nonexistent(self):
response = self.app.get(url_for(controller='spend',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='spend',
action='update',
id=14234),
params=params,
status=404)
def test_xsrf_protection(self):
self.app.post(url_for(controller='spend',
action='update'),
params=self.sample_params,
status=403)
def test_all_zero_shares_fails(self):
params = self.sample_params.copy()
for ii in range(4):
params['shares-%d.amount' % ii] = '0'
v = ExpenditureSchema()
        try:
            v.to_python(params)
        except Invalid:
            pass
        else:
            raise AssertionError
def setUp(self):
self.sample_params = {
'spender_id': '1',
'amount': '44.12',
'date': '10/5/2008',
'description': 'Example expenditure post data.',
'shares-0.user_id': '1',
'shares-0.amount': '1',
'shares-1.user_id': '2',
'shares-1.amount': '1',
'shares-2.user_id': '3',
'shares-2.amount': '1',
'shares-3.user_id': '4',
'shares-3.amount': '1'}
def tearDown(self):
expenditures = meta.Session.query(model.Expenditure).all()
for e in expenditures:
meta.Session.delete(e)
meta.Session.commit()
|
from bluechips.tests import *
class TestMobileController(TestController):
def setUp(self):
self.ua = ('Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) '
'AppleWebKit/420+ (KHTML, like Gecko) Version/3.0 '
'Mobile/1A543a Safari/419.3')
self.app.extra_environ['HTTP_USER_AGENT'] = self.ua
def test_switch_interfaces(self):
response = self.app.get('/')
response.mustcontain('apple-touch-icon')
response.mustcontain('Use non mobile interface')
response = response.click('Use non mobile interface')
response.mustcontain('Use mobile interface')
response = response.click('Use mobile interface')
def test_view_nonmobile(self):
self.app.get(url_for(controller='history'))
|
from pylons import config
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
class TestUserController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='user'))
# Test response...
response.mustcontain('Email Notifications', 'User Settings')
form = response.form
form['new_email'] = '[email protected]'
response = form.submit().follow()
response.mustcontain('Updated email address')
user = meta.Session.query(model.User).\
filter_by(username=unicode(config['fake_username'])).one()
assert user.email == '[email protected]'
def test_clear_email(self):
response = self.app.get(url_for(controller='user'))
form = response.form
form['new_email'] = ''
response = form.submit().follow()
response.mustcontain('Removed email address')
user = meta.Session.query(model.User).\
filter_by(username=unicode(config['fake_username'])).one()
assert user.email == None
def test_xsrf_protection(self):
self.app.post(url_for(controller='user',
action='update'),
{'new_email': '[email protected]'},
status=403)
|
from bluechips.tests import *
class TestStatusController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='status'))
# Test response...
|
from datetime import date
from decimal import Decimal
from webhelpers.html.secure_form import token_key
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
class TestTransferController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='transfer'))
# Test response...
response.mustcontain('Add a New Transfer')
form = response.form
user_rich = meta.Session.query(model.User).\
filter_by(name=u'Rich Scheme').one()
user_ben = meta.Session.query(model.User).\
filter_by(name=u'Ben Bitdiddle').one()
form['debtor_id'] = user_rich.id
form['creditor_id'] = user_ben.id
form['amount'] = '123.45'
# Make sure date is today.
today = date.today()
assert form['date'].value == today.strftime('%m/%d/%Y')
form['description'] = 'A test transfer from Rich to Ben'
response = form.submit()
response = response.follow()
response.mustcontain('Transfer', 'created.')
t = meta.Session.query(model.Transfer).\
order_by(model.Transfer.id.desc()).first()
assert t.debtor.name == u'Rich Scheme'
assert t.creditor.name == u'Ben Bitdiddle'
assert t.amount == 12345
assert t.date == today
assert t.description == u'A test transfer from Rich to Ben'
def test_edit_and_delete(self):
user_rich = meta.Session.query(model.User).\
filter_by(name=u'Rich Scheme').one()
user_ben = meta.Session.query(model.User).\
filter_by(name=u'Ben Bitdiddle').one()
t = model.Transfer(user_rich, user_ben, 12345)
t.description = u'Test transfer'
meta.Session.add(t)
meta.Session.commit()
response = self.app.get(url_for(controller='transfer',
action='edit',
id=t.id))
response.mustcontain('Edit a Transfer')
form = response.form
assert int(form['debtor_id'].value) == t.debtor_id
assert int(form['creditor_id'].value) == t.creditor_id
assert Decimal(form['amount'].value) * 100 == t.amount
assert form['date'].value == t.date.strftime('%m/%d/%Y')
assert form['description'].value == t.description
form['description'] = u'A new description'
response = form.submit()
response = response.follow()
response.mustcontain('Transfer', 'updated.')
t = meta.Session.query(model.Transfer).\
order_by(model.Transfer.id.desc()).first()
assert t.description == u'A new description'
response = self.app.get(url_for(controller='transfer',
action='delete',
id=t.id))
response = response.form.submit('delete').follow()
response.mustcontain('Transfer', 'deleted')
def test_edit_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit',
id=21424), status=404)
def test_update_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='transfer',
action='update',
id=21424),
params=params,
status=404)
def test_xsrf_protection(self):
self.app.post(url_for(controller='transfer',
action='update'),
params=self.sample_params,
status=403)
def test_update_get_redirects(self):
response = self.app.get(url_for(controller='transfer',
action='update'),
status=302)
assert (dict(response.headers)['location'] ==
url_for(controller='transfer', action='edit', qualified=True))
def test_delete_nonexistent(self):
self.app.get(url_for(controller='transfer',
action='delete',
id=124244),
status=404)
def test_destroy_nonexistent(self):
response = self.app.get(url_for(controller='transfer',
action='edit'))
params = self.sample_params.copy()
params[token_key] = response.form[token_key].value
self.app.post(url_for(controller='transfer',
action='destroy',
id=124344),
params=params,
status=404)
def test_delete_xsrf_protection(self):
self.app.post(url_for(controller='transfer',
action='destroy',
id=1),
params={'delete': 'Delete'},
status=403)
def setUp(self):
self.sample_params = {
'debtor_id': '1',
'creditor_id': '2',
'amount': '33.98',
'date': '4/1/2007',
'description': 'Example transfer params.'}
def tearDown(self):
transfers = meta.Session.query(model.Transfer).all()
for t in transfers:
meta.Session.delete(t)
meta.Session.commit()
|
from bluechips.tests import *
class TestHistoryController(TestController):
def test_index(self):
response = self.app.get(url_for(controller='history'))
# Test response...
|
from unittest import TestCase
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
from bluechips.model.types import Currency
from decimal import Decimal
class TestSplitFixed(TestCase):
def test_simpleSplit(self):
"""
Test simply splitting a $100 expenditure amongst 4 people
"""
createUsers(4)
e = model.Expenditure(meta.Session.query(model.User).first(),
Currency("100"))
meta.Session.add(e)
e.even_split()
meta.Session.commit()
for s in meta.Session.query(model.Split).\
filter(model.Split.expenditure==e):
self.assertEqual(s.share, Currency("25.00"))
deleteExpenditures()
deleteUsers()
def test_uneven(self):
"""
Test that expenditures can be split non-evenly
"""
createUsers(2)
users = meta.Session.query(model.User).all()
e = model.Expenditure(users[0], Currency("100"))
meta.Session.add(e)
split_dict = {users[0]: Decimal("20"),
users[1]: Decimal("80")}
amount_dict = {users[0]: Currency("20"),
users[1]: Currency("80")}
e.split(split_dict)
meta.Session.commit()
for s in meta.Session.query(model.Split):
self.assertEqual(s.share, amount_dict[s.user])
deleteExpenditures()
deleteUsers()
def test_unevenBadTotal(self):
"""
Test that transactions get split up properly when the uneven
split shares don't add to 100%
"""
createUsers(2)
users = meta.Session.query(model.User).all()
e = model.Expenditure(users[0], Currency("100.00"))
meta.Session.add(e)
split_dict = {users[0]: Decimal(10),
users[1]: Decimal(15)}
amount_dict = {users[0]: Currency("40"),
users[1]: Currency("60")}
e.split(split_dict)
meta.Session.commit()
for s in meta.Session.query(model.Split):
self.assertEqual(s.share, amount_dict[s.user])
deleteExpenditures()
deleteUsers()
def test_negativeExpenditure(self):
"""
Test that negative expenditures get split correctly
"""
createUsers(2)
users = meta.Session.query(model.User).all()
        e = model.Expenditure(users[0], Currency("-100.00"))
meta.Session.add(e)
# Force a split that will result in needing to distribute
# pennies
split_dict = {users[0]: Decimal(1),
users[1]: Decimal(2)}
e.split(split_dict)
meta.Session.commit()
self.assertEqual(e.amount, sum(s.share for s in meta.Session.query(model.Split)))
deleteExpenditures()
deleteUsers()
|
from unittest import TestCase
from bluechips.tests import *
from bluechips import model
from bluechips.model import meta
from webhelpers.number import standard_deviation as std_dev
class TestSplitRandom(TestCase):
@classmethod
def setUpClass(cls):
createUsers()
@classmethod
def tearDownClass(cls):
deleteUsers()
def setUp(self):
createExpenditures()
def tearDown(self):
deleteExpenditures()
def test_splitTotal(self):
for e in meta.Session.query(model.Expenditure):
self.assertEqual(sum(s.share for s in e.splits), e.amount)
def test_splitDistribution(self):
user_count = meta.Session.query(model.User).count()
for e in meta.Session.query(model.Expenditure):
even_total = (e.amount / user_count) * user_count
difference = abs(even_total - e.amount)
self.assert_(std_dev(list(int(s.share) for s in e.splits)) <= difference, \
"Expenditure doesn't appear to be evenly distributed")
|
class User(object):
def __init__(self, username, name=u"", resident=True):
self.username = username
self.name = name
self.resident = resident
def __repr__(self):
return '<User: %s>' % (self.username)
def __str__(self):
return self.name
__all__ = ['User']
|
"""The application's model objects"""
import sqlalchemy as sa
from sqlalchemy import orm
from bluechips.model.user import User
from bluechips.model.expenditure import Expenditure
from bluechips.model.split import Split
from bluechips.model.subitem import Subitem
from bluechips.model.transfer import Transfer
from bluechips.model import meta
from bluechips.model import types
from datetime import datetime
def init_model(engine):
"""Call me before using any of the tables or classes in the model"""
sm = orm.sessionmaker(autoflush=True, bind=engine)
meta.engine = engine
meta.Session = orm.scoped_session(sm)
### Database Schemas ###
users = sa.Table('users', meta.metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('username', sa.types.Unicode(32), nullable=False),
sa.Column('name', sa.types.Unicode(64)),
sa.Column('resident', sa.types.Boolean, default=True),
sa.Column('email', sa.types.Unicode(64)),
sa.Column('password', sa.types.Unicode(64)),
)
expenditures = sa.Table('expenditures', meta.metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('spender_id', sa.types.Integer,
sa.ForeignKey('users.id'), nullable=False),
sa.Column('amount', types.DBCurrency, nullable=False),
sa.Column('description', sa.types.Text),
sa.Column('date', sa.types.Date, default=datetime.now),
sa.Column('entered_time', sa.types.DateTime,
default=datetime.utcnow)
)
splits = sa.Table('splits', meta.metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('expenditure_id', sa.types.Integer,
sa.ForeignKey('expenditures.id'), nullable=False),
sa.Column('user_id', sa.types.Integer,
sa.ForeignKey('users.id'), nullable=False),
sa.Column('share', types.DBCurrency, nullable=False)
)
subitems = sa.Table('subitems', meta.metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('expenditure_id', sa.types.Integer,
sa.ForeignKey('expenditures.id'), nullable=False),
sa.Column('user_id', sa.types.Integer,
sa.ForeignKey('users.id'), nullable=False),
sa.Column('amount', types.DBCurrency, nullable=False)
)
transfers = sa.Table('transfers', meta.metadata,
sa.Column('id', sa.types.Integer, primary_key=True),
sa.Column('debtor_id', sa.types.Integer,
sa.ForeignKey('users.id'), nullable=False),
sa.Column('creditor_id', sa.types.Integer,
sa.ForeignKey('users.id'), nullable=False),
sa.Column('amount', types.DBCurrency, nullable=False),
sa.Column('description', sa.Text, default=None),
sa.Column('date', sa.types.Date, default=datetime.now),
sa.Column('entered_time', sa.types.DateTime,
default=datetime.utcnow)
)
### DB/Class Mapping ###
orm.mapper(User, users,
properties={
'expenditures': orm.relation(Expenditure,
backref='spender')
})
orm.mapper(Expenditure, expenditures,
order_by=[expenditures.c.date.desc(), expenditures.c.entered_time.desc()],
properties={
'splits': orm.relation(Split, backref='expenditure',
cascade='all, delete'),
'subitems': orm.relation(Subitem, backref='expenditure',
cascade='all, delete')
})
orm.mapper(Split, splits, properties={
'user': orm.relation(User)
})
orm.mapper(Subitem, subitems, properties={
'user': orm.relation(User)
})
orm.mapper(Transfer, transfers,
order_by=[transfers.c.date.desc(), transfers.c.entered_time.desc()],
properties={
'debtor': orm.relation(User,
primaryjoin=(transfers.c.debtor_id==\
users.c.id)),
'creditor': orm.relation(User,
primaryjoin=(transfers.c.creditor_id==\
users.c.id))
})
__all__ = ['users', 'expenditures', 'splits', 'subitems', 'transfers',
'User', 'Expenditure', 'Split', 'Subitem', 'Transfer',
'meta']
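
if __name__ == '__main__':
    # A minimal usage sketch (assumes the bluechips package and an SQLite
    # driver are importable; not part of the original module): bind an
    # in-memory engine, create the schema, and round-trip a User.
    engine = sa.create_engine('sqlite://')
    init_model(engine)
    meta.metadata.create_all(bind=engine)
    meta.Session.add(User(u'demo', u'Demo User'))
    meta.Session.commit()
    print meta.Session.query(User).count()   # -> 1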
|
"""
Define special types used in BlueChips
"""
import locale
from decimal import Decimal, InvalidOperation
import sqlalchemy as sa
from formencode import validators, Invalid
from bluechips.lib.subclass import SmartSubclass
from weakref import WeakValueDictionary
def localeconv():
"Manually install en_US for systems that don't have it."
d = {'currency_symbol': '$',
'decimal_point': '.',
'frac_digits': 2,
'grouping': [3, 3, 0],
'int_curr_symbol': 'USD ',
'int_frac_digits': 2,
'mon_decimal_point': '.',
'mon_grouping': [3, 3, 0],
'mon_thousands_sep': ',',
'n_cs_precedes': 1,
'n_sep_by_space': 0,
'n_sign_posn': 1,
'negative_sign': '-',
'p_cs_precedes': 1,
'p_sep_by_space': 0,
'p_sign_posn': 1,
'positive_sign': '',
'thousands_sep': ','}
return d
locale.localeconv = localeconv
class CurrencyValidator(validators.FancyValidator):
"A validator to convert to Currency objects."
messages = {'amount': "Please enter a valid currency amount",
'precision': "Only two digits after the decimal, please",
'nonzero': "Please enter a non-zero amount"}
def _to_python(self, value, state):
try:
dec = Decimal(value)
except InvalidOperation:
raise Invalid(self.message('amount', state),
value, state)
else:
ret = dec.quantize(Decimal('1.00'))
if ret == 0:
raise Invalid(self.message('nonzero', state),
value, state)
elif ret != dec:
raise Invalid(self.message('precision', state),
value, state)
else:
return Currency(int(ret * 100))
class Currency(object):
"""
Store currency values as an integral number of cents
"""
__metaclass__ = SmartSubclass(int)
__old_values__ = WeakValueDictionary()
def __new__(cls, value):
if value is None:
value = 0
        elif isinstance(value, str):
            # Parse via Decimal to avoid binary-float rounding errors
            # (e.g. int(float('0.29') * 100) == 28).
            value = int(Decimal(value) * 100)
else:
value = int(value)
if value not in cls.__old_values__:
new_object = super(cls, cls).__new__(cls)
new_object.value = value
cls.__old_values__[value] = new_object
return new_object
else:
return cls.__old_values__[value]
def __int__(self):
"""
If I don't define this, SmartSubclass will return
Currency(int(self.value))
"""
return self.value
def __float__(self):
"""
If I don't define this, SmartSubclass will return
Currency(float(self.value))
"""
return float(self.value)
def __long__(self):
"""
If I don't define this, SmartSubclass will return
Currency(long(self.value))
"""
return long(self.value)
def __cmp__(self, other):
"""
This is overridden for when validators compare a Currency to
''
"""
if other == '':
return 1
else:
return self.value.__cmp__(int(other))
def __mul__(self, other):
"""
If I don't define this, SmartSubclass will convert the other
argument to an int
"""
return Currency(self.value * other)
def __rmul__(self, other):
"""
If I don't define this, SmartSubclass will convert the other
argument to an int
"""
return self.__mul__(other)
def __repr__(self):
return '%s("%s")' % (self.__class__.__name__, str(self))
def __str__(self):
return locale.currency(self.value / 100., grouping=True)
class DBCurrency(sa.types.TypeDecorator):
"""
A type which represents monetary amounts internally as integers.
This avoids binary/decimal float conversion issues
"""
impl = sa.types.Integer
def process_bind_param(self, value, engine):
return int(value)
def convert_result_value(self, value, engine):
return Currency(value)
process_result_value = convert_result_value
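
if __name__ == '__main__':
    # Illustrative sketch of Currency semantics (not part of the original
    # module): amounts are interned integer cents, and arithmetic stays in
    # the Currency type.
    a = Currency('1.50')              # 150 cents
    b = Currency('0.75')              # 75 cents
    assert int(a + b) == 225
    assert a is Currency('1.50')      # equal values share one interned object
    print a + b                       # -> $2.25, via the patched localeconv
    print CurrencyValidator().to_python('12.34')  # -> $12.34, i.e. 1234 cents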
|
from types import Currency
class Subitem(object):
def __init__(self, expenditure=None, user=None, amount=Currency(0)):
self.expenditure = expenditure # pragma: nocover
self.user = user # pragma: nocover
        self.amount = amount # pragma: nocover
def __repr__(self):
        return ('<Subitem: expense: %s user: %s cost: %s>' %
                (self.expenditure, self.user, self.amount)) # pragma: nocover
__all__ = ['Subitem']
|
from types import Currency
class Transfer(object):
def __init__(self, debtor=None, creditor=None, amount=Currency(0)):
self.debtor = debtor
self.creditor = creditor
self.amount = amount
def __repr__(self):
return '<Transfer: from %s to %s for %s>' % (self.debtor,
self.creditor,
self.amount)
def involves(self, user):
return (self.debtor == user) or (self.creditor == user)
__all__ = ['Transfer']
|
from types import Currency
class Split(object):
def __init__(self, expenditure=None, user=None, share=Currency(0)):
self.expenditure = expenditure
self.user = user
self.share = share
def __repr__(self):
return '<Split: expense: %s user: %s share: %s>' % (self.expenditure,
self.user,
self.share)
__all__ = ['Split']
|
from bluechips.model.user import User
from bluechips.model.split import Split
from bluechips.model import meta
from bluechips.model.types import Currency
from decimal import Decimal
from datetime import datetime
import random
class Expenditure(object):
def __init__(self, spender=None, amount=Currency(0), description=u"",
date=None):
self.spender = spender
self.amount = amount
self.description = description
        self.date = date
        if self.date is None:
            self.date = datetime.now()
def __repr__(self):
return '<Expenditure: spender: %s spent: %s>' % (self.spender,
self.amount)
def even_split(self):
"""
Split up an expenditure evenly among the resident users
"""
residents = meta.Session.query(User).filter(User.resident==True)
split_percentage = Decimal(100) / Decimal(residents.count())
self.split(dict((resident, split_percentage) for resident in residents))
def split(self, split_dict):
"""
Split up an expenditure.
split_dict should be a dict mapping from bluechips.model:User
objects to a decimal:Decimal object representing the percentage
that user is responsible for.
Percentages will be normalized to sum to 100%.
If the split leaks or gains money due to rounding errors, the
pennies will be randomly distributed to one of the users.
I mean, come on. You're already living together. Are you really
going to squabble over a few pennies?
"""
map(meta.Session.delete, meta.Session.query(Split).\
filter_by(expenditure_id=self.id))
total = sum(split_dict.itervalues())
for user, share in split_dict.items():
if share == 0:
del split_dict[user]
amounts_dict = dict()
for user, share in split_dict.iteritems():
amounts_dict[user] = Currency((share * self.amount) / total)
difference = self.amount - sum(amounts_dict.itervalues())
if difference > 0:
for i in xrange(difference):
winner = random.choice(amounts_dict.keys())
amounts_dict[winner] += Currency(1)
elif difference < 0:
for i in xrange(-difference):
winner = random.choice(amounts_dict.keys())
amounts_dict[winner] -= Currency(1)
for user, share in amounts_dict.iteritems():
s = Split(self, user, share)
meta.Session.add(s)
def involves(self, user):
"Returns True if ``user`` is involved in this expenditure."
return (any((split.user == user) and (split.share != 0)
for split in self.splits) or
(self.spender == user))
def share(self, user):
"Return the share corresponding to ``user``."
shares = dict((split.user, split.share)
for split in self.splits)
return shares.get(user, Currency(0))
__all__ = ['Expenditure']
|
"""SQLAlchemy Metadata and Session object"""
from sqlalchemy import MetaData
from sqlalchemy.orm import scoped_session, sessionmaker
# SQLAlchemy database engine. Updated by model.init_model()
engine = None
# SQLAlchemy session manager. Updated by model.init_model()
Session = None
# Global metadata. If you have multiple databases with overlapping table
# names, you'll need a metadata for each database
metadata = MetaData()
__all__ = ['engine', 'Session', 'metadata']
|
"""
Create subclasses that call out to their "superclass" for all methods
but return the "subclass's" type
"""
def wrapper(cls, func):
return (lambda self, *args: cls(getattr(self.value, func)(*map(self.value.__class__, args))))
class SmartSubclass(object):
def __init__(self, superclass, exclude=None):
if exclude is None:
exclude = []
self.superclass = superclass
self.exclude = exclude
def __call__(self, name, bases, dict):
c = type(name, bases, dict)
for func in dir(self.superclass):
if func not in dir(c) and \
callable(getattr(self.superclass, func)) and \
func not in self.exclude:
setattr(c, func, wrapper(c, func))
return c
__all__ = ['SmartSubclass']
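
if __name__ == '__main__':
    # Minimal sketch with a hypothetical Cents class (for illustration
    # only): methods inherited from int come back wrapped in Cents.
    class Cents(object):
        __metaclass__ = SmartSubclass(int)
        def __init__(self, value):
            self.value = int(value)
        def __int__(self):
            return self.value
        def __repr__(self):
            return 'Cents(%d)' % self.value
    print Cents(150) + Cents(25)   # -> Cents(175), not a plain int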
|
"""The application's Globals object"""
import logging
from pylons import config, request
from paste.deploy.converters import asbool
from mailer import Message
log = logging.getLogger(__name__)
class Globals(object):
"""Globals acts as a container for objects available throughout the
life of the application
"""
def __init__(self):
"""One instance of Globals is created during application
initialization and is available during requests via the 'g'
variable
"""
pass
def send_message(self, msg):
"""
Wrap the call to mailer.send() so that we can do stuff like defer mail
sending, wrap in a test fixture, selectively disable mailing in certain
environments, etc.
"""
if asbool(config.get('testing')) or asbool(config.get('network_free')):
if 'mailer.messages' not in request.environ:
request.environ['mailer.messages'] = []
request.environ['mailer.messages'].append(msg)
log.info("From: %s\nTo: %s\nSubject: %s\n\n%s",
msg.From, msg.To, msg.Subject, msg.Body)
else:
self.mailer.send(msg) # pragma: nocover
def handle_notification(self, users, subject, body):
"Send a notification email."
recipients = [u.email for u in users if u.email is not None]
if len(recipients) > 0:
msg = Message(From=config.get('mailer.from',
'root@localhost'),
To=recipients)
msg.Subject = "BlueChips: %s" % subject
msg.Body = body
self.send_message(msg)
|
"""
authkit authorization permission objects for BlueChips
"""
from authkit.authenticate import AddDictToEnviron
from authkit.authorize import NotAuthenticatedError, NotAuthorizedError
from authkit.permissions import RequestPermission
from bluechips import model
from bluechips.model import meta
class BlueChipUser(RequestPermission):
def check(self, app, environ, start_response):
if 'REMOTE_USER' not in environ:
raise NotAuthenticatedError('Not Authenticated') # pragma: nocover
environ['user'] = meta.Session.query(model.User).\
filter_by(username=unicode(environ['REMOTE_USER'])).\
first()
        if environ['user'] is None:
raise NotAuthorizedError('You are not allowed access.') # pragma: nocover
return app(environ, start_response)
class BlueChipResident(RequestPermission):
def check(self, app, environ, start_response):
if 'user' not in environ:
raise NotAuthenticatedError('Not Authenticated')
if not getattr(environ['user'], 'resident', False):
raise NotAuthorizedError('You are not allowed access.')
return app(environ, start_response)
class DummyAuthenticate(AddDictToEnviron):
"""
Set the authkit.authenticate environment variable so
authkit.authorize shuts up
"""
def __init__(self, app, app_conf):
newenv = {}
newenv['authkit.authenticate'] = True
newenv['authkit.config'] = {'setup.enable': True}
if 'fake_username' in app_conf:
newenv['REMOTE_USER'] = app_conf['fake_username']
super(DummyAuthenticate, self).__init__(app, newenv)
def authenticate(environ, username, password):
user = meta.Session.query(model.User).\
filter_by(username=unicode(username),
password=unicode(password)).first()
return (user is not None)
__all__ = ['BlueChipUser', 'DummyAuthenticate', 'authenticate']
|
"""
Calculate the total state of the books
"""
from bluechips import model
from bluechips.model import meta
from bluechips.model.types import Currency
import sqlalchemy
class DirtyBooks(Exception):
"""
If the books don't work out, raise this
"""
pass
def debts():
# In this scheme, negative numbers represent money the house owes
# the user, and positive numbers represent money the user owes the
# house
users = meta.Session.query(model.User)
debts_dict = dict((u, Currency(0)) for u in users)
# First, credit everyone for expenditures they've made
total_expenditures = meta.Session.query(model.Expenditure).\
add_column(sqlalchemy.func.sum(model.Expenditure.amount).label('total_spend')).\
group_by(model.Expenditure.spender_id)
for expenditure, total_spend in total_expenditures:
debts_dict[expenditure.spender] -= total_spend
# Next, debit everyone for expenditures that they have an
# investment in (i.e. splits)
total_splits = meta.Session.query(model.Split).\
add_column(sqlalchemy.func.sum(model.Split.share).label('total_split')).\
group_by(model.Split.user_id)
for split, total_cents in total_splits:
debts_dict[split.user] += total_cents
# Finally, move transfers around appropriately
#
# To keep this from getting to be expensive, have SQL sum up
# transfers for us
transfer_q = meta.Session.query(model.Transfer).\
add_column(sqlalchemy.func.sum(model.Transfer.amount).label('total_amount'))
total_debits = transfer_q.group_by(model.Transfer.debtor_id)
total_credits = transfer_q.group_by(model.Transfer.creditor_id)
for transfer, total_amount in total_debits:
debts_dict[transfer.debtor] -= total_amount
for transfer, total_amount in total_credits:
debts_dict[transfer.creditor] += total_amount
return debts_dict
def settle(debts_dict):
# This algorithm has been shamelessly stolen from Nelson Elhage's
# <[email protected]> implementation for our 2008 summer apartment.
debts_list = [dict(who=user, amount=amount) for user, amount in \
debts_dict.iteritems()]
#debts_list.sort(reverse=True, key=(lambda x: abs(x['amount'])))
owes_list = [debt for debt in debts_list if debt['amount'] > 0]
owed_list = [debt for debt in debts_list if debt['amount'] < 0]
settle_list = []
while len(owes_list) > 0 and len(owed_list) > 0:
owes_list.sort(reverse=True, key=(lambda x: abs(x['amount'])))
owed_list.sort(reverse=True, key=(lambda x: abs(x['amount'])))
owes = owes_list[0]
owed = owed_list[0]
sum = owes['amount'] + owed['amount']
if sum == 0:
# Perfect balance!
owes_list.pop(0)
owed_list.pop(0)
val = owes['amount']
elif sum > 0:
# person in owes still owes money
owes['amount'] += owed['amount']
owed_list.pop(0)
val = -owed['amount']
else:
# person in owed is owed more than owes has to give
owed['amount'] += owes['amount']
owes_list.pop(0)
val = owes['amount']
settle_list.append((owes['who'], owed['who'], val))
if len(owes_list) > 0:
raise DirtyBooks, ("People still owe money", owes_list)
if len(owed_list) > 0:
raise DirtyBooks, ("People are still owed money", owed_list)
return settle_list
__all__ = ['debts', 'settle']
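
if __name__ == '__main__':
    # Quick illustration on plain integer balances (positive = owes the
    # house, negative = is owed): Bob owes 100, Alice and Charlie are
    # each owed 50, so Bob pays both (transfer order may vary).
    for debtor, creditor, amount in settle({'Alice': -50,
                                            'Bob': 100,
                                            'Charlie': -50}):
        print '%s pays %s %s' % (debtor, creditor, amount)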
|
"""Helper functions
Consists of functions to typically be used within templates, but also
available to Controllers. This module is available to both as 'h'.
"""
from datetime import date
from decimal import Decimal
from pylons import request
from routes import url_for, redirect_to
from webhelpers.html import escape, literal, url_escape
from webhelpers.html.tags import *
from webhelpers.html.secure_form import *
from webhelpers.pylonslib import Flash as _Flash
def currency(name, value, *args, **kwargs):
if 'class_' not in kwargs:
kwargs['class_'] = ''
kwargs['class_'] += 'currency'
value = "%0.2f" % (int(value) / 100.)
return text(name, value, *args, **kwargs)
def grab(obj, attr):
if obj:
return getattr(obj, attr)
else:
if attr == 'date':
return date.today()
elif attr in ('spender_id', 'creditor_id', 'debtor_id'):
return request.environ['user'].id
elif attr == 'amount':
return 0
else:
return ''
flash = _Flash()
|
"""The base Controller API
Provides the BaseController class for subclassing.
"""
from decorator import decorator
from pylons import request, session, tmpl_context as c
from pylons.controllers import WSGIController
from pylons.i18n import _, ungettext, N_
from pylons.templating import render_mako
from mako.exceptions import TopLevelLookupException
import bluechips.lib.helpers as h
from bluechips import model
from bluechips.model import meta
class BaseController(WSGIController):
def __call__(self, environ, start_response):
"""Invoke the Controller"""
# WSGIController.__call__ dispatches to the Controller method
# the request is routed to. This routing information is
# available in environ['pylons.routes_dict']
try:
return WSGIController.__call__(self, environ, start_response)
finally:
meta.Session.remove()
def update_sar(record, form_result):
"""
Update a SQLAlchemy record with the results of a validated form submission
"""
for key, value in form_result.items():
setattr(record, key, value)
def redirect_on_get(action):
"""
Decorator for a controller action. If the action is called with a GET
method, 302 redirect to the action specified.
"""
@decorator
def redirect_on_get_wrap(func, *args, **kwargs):
if request.method == 'GET':
controller = request.environ['pylons.routes_dict']['controller']
return h.redirect_to(controller=controller, action=action)
else:
return func(*args, **kwargs)
return redirect_on_get_wrap
def render(name, *args, **kwargs):
if 'iPhone' in request.user_agent:
if 'use_non_mobile' in request.params:
session['use_non_mobile'] = (request.params['use_non_mobile'] ==
'yes')
if session.get('use_non_mobile'):
c.mobile_client = True
else:
try:
return render_mako('/mobile' + name, *args, **kwargs)
except TopLevelLookupException:
# If a mobile template doesn't exist for this page, don't show
# the 'use mobile interface' link.
c.mobile_client = False
return render_mako(name, *args, **kwargs)
__all__ = ['c', 'h', 'render', 'model', 'meta', '_', 'ungettext', 'N_',
'BaseController', 'update_sar', 'redirect_on_get']
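
if __name__ == '__main__':
    # Tiny illustration of update_sar() with a plain object standing in
    # for a mapped record (hypothetical _Record class, demonstration only).
    class _Record(object):
        pass
    r = _Record()
    update_sar(r, {'description': u'groceries', 'amount': 1234})
    assert r.description == u'groceries' and r.amount == 1234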
|
"""
Calculate the current state of the books
"""
import logging
from bluechips.lib.base import *
from bluechips.lib.permissions import BlueChipResident
import sqlalchemy
from sqlalchemy import orm
from authkit.authorize.pylons_adaptors import authorize
from pylons import request
from pylons.decorators import validate
from pylons.decorators.secure import authenticate_form
from formencode import validators, Schema, FancyValidator, Invalid
log = logging.getLogger(__name__)
class EmailSchema(Schema):
"Validate email updates."
allow_extra_fields = False
new_email = validators.Email()
class UniqueUsername(FancyValidator):
def _to_python(self, value, state):
u = meta.Session.query(model.User).\
filter(model.User.username == value).\
first()
if u:
raise Invalid(
'That username already exists',
value, state)
return value
class NewUserSchema(Schema):
"Validate new users."
allow_extra_fields = False
username = UniqueUsername(not_empty=True)
password = validators.String(if_missing=None)
confirm_password = validators.String(if_missing=None)
name = validators.String(not_empty=False)
resident = validators.StringBoolean(not_empty=True)
chained_validators = [
validators.FieldsMatch('password', 'confirm_password'),
]
class UserController(BaseController):
def index(self):
c.title = 'User Settings'
return render('/user/index.mako')
def email(self):
c.title = 'User Settings'
return render('/user/email.mako')
@authenticate_form
@validate(schema=EmailSchema(), form='index')
def update(self):
new_email = self.form_result['new_email']
request.environ['user'].email = new_email
meta.Session.commit()
if new_email is None:
h.flash("Removed email address.")
else:
h.flash("Updated email address to '%s'." % new_email)
return h.redirect_to('/')
@authorize(BlueChipResident())
def new(self):
c.title = 'Register a New User'
return render('/user/new.mako')
@authenticate_form
@authorize(BlueChipResident())
@validate(schema=NewUserSchema(), form='new')
def create(self):
u = model.User(username=self.form_result['username'],
resident=self.form_result['resident'])
if self.form_result['name']:
u.name = self.form_result['name']
else:
u.name = self.form_result['username']
if self.form_result['password'] is not None:
u.password = self.form_result['password']
meta.Session.add(u)
meta.Session.commit()
h.flash('Successfully created new user %s' % u.username)
return h.redirect_to('/')
|
import cgi
from pylons import request, tmpl_context as c
from bluechips.lib.base import BaseController, render
class ErrorController(BaseController):
"""Generates error documents as and when they are required.
The ErrorDocuments middleware forwards to ErrorController when error
related status codes are returned from the application.
This behaviour can be altered by changing the parameters to the
ErrorDocuments middleware in your config/middleware.py file.
"""
def document(self):
"""Render the error document"""
resp = request.environ.get('pylons.original_response')
c.code = cgi.escape(request.GET.get('code', resp.status))
return render('/error.mako')
|
"""
Handle transfers
"""
import logging
from datetime import date
from bluechips.lib.base import *
from pylons import request, app_globals as g
from pylons.decorators import validate
from pylons.decorators.secure import authenticate_form
from pylons.controllers.util import abort
from formencode import Schema, validators
from mailer import Message
log = logging.getLogger(__name__)
class TransferSchema(Schema):
"Validate a transfer."
allow_extra_fields = False
debtor_id = validators.Int(not_empty=True)
creditor_id = validators.Int(not_empty=True)
amount = model.types.CurrencyValidator(not_empty=True)
description = validators.UnicodeString()
date = validators.DateConverter()
class TransferController(BaseController):
def index(self):
return self.edit()
def edit(self, id=None):
c.users = meta.Session.query(model.User.id, model.User.name)
if id is None:
c.title = 'Add a New Transfer'
c.transfer = model.Transfer()
c.transfer.debtor_id = request.environ['user'].id
c.transfer.date = date.today()
else:
c.title = 'Edit a Transfer'
c.transfer = meta.Session.query(model.Transfer).get(id)
if c.transfer is None:
abort(404)
return render('/transfer/index.mako')
@redirect_on_get('edit')
@authenticate_form
@validate(schema=TransferSchema(), form='edit')
def update(self, id=None):
if id is None:
t = model.Transfer()
meta.Session.add(t)
op = 'created'
else:
t = meta.Session.query(model.Transfer).get(id)
if t is None:
abort(404)
op = 'updated'
update_sar(t, self.form_result)
meta.Session.commit()
show = ('Transfer of %s from %s to %s %s.' %
(t.amount, t.debtor, t.creditor, op))
h.flash(show)
# Send email notification to involved users if they have an email set.
body = render('/emails/transfer.txt', extra_vars={'transfer': t,
'op': op})
g.handle_notification((t.debtor, t.creditor), show, body)
return h.redirect_to('/')
def delete(self, id):
c.title = 'Delete a Transfer'
c.transfer = meta.Session.query(model.Transfer).get(id)
if c.transfer is None:
abort(404)
return render('/transfer/delete.mako')
@redirect_on_get('delete')
@authenticate_form
def destroy(self, id):
t = meta.Session.query(model.Transfer).get(id)
if t is None:
abort(404)
if 'delete' in request.params:
meta.Session.delete(t)
meta.Session.commit()
show = ("Transfer of %s from %s to %s deleted." %
(t.amount, t.debtor, t.creditor))
h.flash(show)
body = render('/emails/transfer.txt',
extra_vars={'transfer': t,
'op': 'deleted'})
g.handle_notification((t.debtor, t.creditor), show, body)
return h.redirect_to('/')
|
"""
Handle expenditures
"""
import logging
from decimal import Decimal, InvalidOperation
from bluechips.lib.base import *
from pylons import request, app_globals as g
from pylons.decorators import validate
from pylons.decorators.secure import authenticate_form
from pylons.controllers.util import abort
from formencode import validators, Schema
from formencode.foreach import ForEach
from formencode.variabledecode import NestedVariables
from formencode.schema import SimpleFormValidator
from mailer import Message
log = logging.getLogger(__name__)
class ShareSchema(Schema):
"Validate individual user shares."
allow_extra_fields = False
user_id = validators.Int(not_empty=True)
amount = validators.Number(not_empty=True)
def validate_state(value_dict, state, validator):
if all(s['amount'] == 0 for s in value_dict['shares']):
return {'shares-0.amount': 'Need at least one non-zero share'}
ValidateNotAllZero = SimpleFormValidator(validate_state)
class ExpenditureSchema(Schema):
"Validate an expenditure."
allow_extra_fields = False
pre_validators = [NestedVariables()]
spender_id = validators.Int(not_empty=True)
amount = model.types.CurrencyValidator(not_empty=True)
description = validators.UnicodeString()
date = validators.DateConverter()
shares = ForEach(ShareSchema)
chained_validators = [ValidateNotAllZero]
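    # NestedVariables decodes flat POST keys such as 'shares-0.user_id'
    # and 'shares-0.amount' into shares=[{'user_id': ..., 'amount': ...}]
    # before ForEach(ShareSchema) and ValidateNotAllZero run.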
class SpendController(BaseController):
def index(self):
return self.edit()
def edit(self, id=None):
c.users = meta.Session.query(model.User.id, model.User)
if id is None:
c.title = 'Add a New Expenditure'
c.expenditure = model.Expenditure()
c.expenditure.spender_id = request.environ['user'].id
num_residents = meta.Session.query(model.User).\
filter_by(resident=True).count()
# Pre-populate split percentages for an even split.
c.values = {}
for ii, user_row in enumerate(c.users):
user_id, user = user_row
val = 0
if user.resident:
val = Decimal(100) / Decimal(num_residents)
c.values['shares-%d.amount' % ii] = val
else:
c.title = 'Edit an Expenditure'
c.expenditure = meta.Session.query(model.Expenditure).get(id)
if c.expenditure is None:
abort(404)
c.values = {}
for ii, user_row in enumerate(c.users):
user_id, user = user_row
shares_by_user = dict(((sp.user, sp.share) for sp
in c.expenditure.splits))
share = shares_by_user.get(user, 0)
if c.expenditure.amount == 0:
percent = 0
else:
percent = (Decimal(100) * Decimal(int(share)) /
Decimal(int(c.expenditure.amount))).\
quantize(Decimal("0.001"))
c.values['shares-%d.amount' % ii] = percent
return render('/spend/index.mako')
@redirect_on_get('edit')
@authenticate_form
@validate(schema=ExpenditureSchema(), form='edit', variable_decode=True)
def update(self, id=None):
# Either create a new object, or, if we're editing, get the
# old one
if id is None:
e = model.Expenditure()
meta.Session.add(e)
op = 'created'
else:
e = meta.Session.query(model.Expenditure).get(id)
if e is None:
abort(404)
op = 'updated'
# Set the fields that were submitted
shares = self.form_result.pop('shares')
update_sar(e, self.form_result)
users = dict(meta.Session.query(model.User.id, model.User).all())
split_dict = {}
for share_params in shares:
user = users[share_params['user_id']]
split_dict[user] = Decimal(str(share_params['amount']))
e.split(split_dict)
meta.Session.commit()
show = ("Expenditure of %s paid for by %s %s." %
(e.amount, e.spender, op))
h.flash(show)
# Send email notification to involved users if they have an email set.
involved_users = set(sp.user for sp in e.splits if sp.share != 0)
involved_users.add(e.spender)
body = render('/emails/expenditure.txt',
extra_vars={'expenditure': e,
'op': op})
g.handle_notification(involved_users, show, body)
return h.redirect_to('/')
def delete(self, id):
c.title = 'Delete an Expenditure'
c.expenditure = meta.Session.query(model.Expenditure).get(id)
if c.expenditure is None:
abort(404)
return render('/spend/delete.mako')
@redirect_on_get('delete')
@authenticate_form
def destroy(self, id):
e = meta.Session.query(model.Expenditure).get(id)
if e is None:
abort(404)
if 'delete' in request.params:
meta.Session.delete(e)
meta.Session.commit()
show = ("Expenditure of %s paid for by %s deleted." %
(e.amount, e.spender))
h.flash(show)
involved_users = set(sp.user for sp in e.splits if sp.share != 0)
involved_users.add(e.spender)
body = render('/emails/expenditure.txt',
extra_vars={'expenditure': e,
'op': 'deleted'})
g.handle_notification(involved_users, show, body)
return h.redirect_to('/')
|
"""
Calculate the current state of the books
"""
import logging
from bluechips.lib.base import *
from bluechips.lib.totals import *
import sqlalchemy
from sqlalchemy import orm
from datetime import date, timedelta
from bluechips.model.types import Currency
from pylons import request
log = logging.getLogger(__name__)
class StatusController(BaseController):
def index(self):
c.debts = debts()
c.settle = settle(c.debts)
c.net = 0
for from_user, to_user, amount in c.settle:
if from_user == request.environ['user']:
c.net -= amount
elif to_user == request.environ['user']:
c.net += amount
periods = {}
periods['Total'] = (None, None)
periods['Past year'] = (date.today() - timedelta(days=365), None)
periods['Year to date'] = (date.today().replace(month=1, day=1), None)
periods['Month to date'] = (date.today().replace(day=1), None)
periods['Last month'] = ((date.today() -
timedelta(days=30)).replace(day=1),
periods['Month to date'][0])
c.totals = {}
for period in periods.keys():
c.totals[period] = {}
start, end = periods[period]
conds = []
if start is not None:
conds.append(model.Expenditure.date >= start)
if end is not None:
conds.append(model.Expenditure.date < end)
if len(conds) > 1:
conds = sqlalchemy.and_(*conds)
elif len(conds) > 0:
conds = conds[0]
else:
conds = None
for scope in ('all', 'mine'):
meth = getattr(self, '_total_%s' % scope)
c.totals[period][scope] = meth(conds)
c.expenditures = meta.Session.query(model.Expenditure).\
filter(sqlalchemy.or_(
model.Expenditure.spender == request.environ['user'],
model.Expenditure.splits.any(
sqlalchemy.and_(
model.Split.user == request.environ['user'],
model.Split.share != 0)))).\
options(orm.eagerload('splits')).\
limit(10).all()
c.transfers = meta.Session.query(model.Transfer).\
filter(sqlalchemy.or_(
model.Transfer.debtor==request.environ['user'],
model.Transfer.creditor==request.environ['user'])).\
limit(10).all()
c.users = meta.Session.query(model.User.id, model.User)
return render('/status/index.mako')
def _total_all(self, conditions=None):
q = meta.Session.query(sqlalchemy.func.SUM(
model.Expenditure.amount))
if conditions is not None:
q = q.filter(conditions)
return q.scalar()
def _total_mine(self, conditions=None):
q = meta.Session.query(sqlalchemy.func.SUM(
model.Split.share)).join(model.Split.expenditure).\
filter(model.Split.user == request.environ['user'])
if conditions is not None:
q = q.filter(conditions)
return q.scalar()
|
"""
Display old transactions
"""
import logging
from bluechips.lib.base import *
from bluechips.lib.totals import *
import sqlalchemy
from sqlalchemy import orm
log = logging.getLogger(__name__)
class HistoryController(BaseController):
def index(self):
c.title = 'History'
c.expenditures = meta.Session.query(model.Expenditure).\
options(orm.eagerload('splits')).all()
c.transfers = meta.Session.query(model.Transfer).all()
return render('/history/index.mako')
|
import os
from setuptools import find_packages, setup
REQUIRED_PKGS = ["torch", "transformers", "gitpython", "seaborn"]
QUALITY_REQUIRE = ["black~=22.0", "flake8>=3.8.3", "isort>=5.0.0", "pyyaml>=5.3.1"]
setup(
name="trfs_fast",
version="0.0.1.dev0",
    description="HuggingFace community-driven open-source library for evaluation",
long_description=open("README.md", encoding="utf-8").read(),
long_description_content_type="text/markdown",
author="HuggingFace Inc.",
author_email="[email protected]",
url="https://github.com/fxmarty/accelerated-pytorch-transformers-generation",
download_url="https://github.com/fxmarty/accelerated-pytorch-transformers-generation",
license="MIT",
package_dir={"": "src"},
packages=find_packages("src"),
install_requires=REQUIRED_PKGS,
python_requires=">=3.8.0",
)
|
import argparse
import copy
import contextlib
import hashlib
import os
import traceback
from typing import Dict
from tqdm import tqdm
import pandas as pd
import torch
import git
from torch.profiler import ProfilerActivity, profile, tensorboard_trace_handler
from transformers import AutoModelForCausalLM, AutoTokenizer
from trfs_fast.llama import LlamaForCausalLM
from trfs_fast.utils import recurse_getattr, recurse_hasattr, recurse_setattr, recurse_delattr
# Default case
BATCH_SIZES = [1]
PROMPT_LENGTHS = [1000]
NEW_TOKENS = [200]
WARMUP_RUNS = 2
NUM_RUNS = 5
# Modifiers for profiling (we want a short run)
PROFILE_NEW_TOKENS = [10]
PROFILE_NUM_RUNS = 1
# Modifiers for parameter sweeps
SWEEP_BATCH_SIZES = [1, 2, 4, 8, 16, 32, 64, 128]
SWEEP_PROMPT_LENGTHS = [100, 200, 400, 800, 1600]
SWEEP_NUM_RUNS = 10
parser = argparse.ArgumentParser()
# TODO: support other archs than llama
parser.add_argument(
"--model",
type=str,
default="huggingface/llama-7b",
help="Name of the weights on the Hub",
)
parser.add_argument(
"--dtype",
type=str,
default="fp16",
help="Type of the weights that will be used at test time",
)
parser.add_argument(
"--preallocate",
action='store_true',
help="[TRIGGERS NEW CODE PATH] Whether to preallocate internal model tensors",
)
parser.add_argument(
"--profile",
action='store_true',
help="Does a storter run for profiling purposes",
)
parser.add_argument(
"--compile",
type=str,
choices=["no", "static", "dynamic", "fullgraph"],
default="no",
help="If (and how) to compile the model forward pass with torch.compile",
)
parser.add_argument(
"--sweep",
type=str,
choices=["", "batch", "length"],
required=False,
default="",
help="Select which type of sweep to gather data for",
)
def timing_cuda(
tokenizer,
generate_method,
num_runs: int,
inputs: Dict,
max_new_tokens: int,
device: torch.device,
cache_length: int,
preallocate: bool,
do_profile: bool,
):
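    # Timing uses CUDA events: record() enqueues markers on the current
    # stream, and elapsed_time() is only meaningful once
    # torch.cuda.synchronize() guarantees the recorded work has finished.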
warmup_start_event = torch.cuda.Event(enable_timing=True)
warmup_end_event = torch.cuda.Event(enable_timing=True)
if preallocate:
inputs["cache_length"] = cache_length
with torch.no_grad():
print(f"Warming up ({WARMUP_RUNS} runs)...")
warmup_start_event.record()
for _ in range(WARMUP_RUNS):
res = generate_method(
**inputs,
min_new_tokens=max_new_tokens,
max_new_tokens=max_new_tokens,
)
warmup_end_event.record()
torch.cuda.synchronize()
print(f"Warmup/compilation time: {warmup_start_event.elapsed_time(warmup_end_event) * 1.0e-3:.2f} seconds ({WARMUP_RUNS} generate calls)")
start_event = torch.cuda.Event(enable_timing=True)
end_event = torch.cuda.Event(enable_timing=True)
torch.cuda.reset_peak_memory_stats(device)
torch.cuda.empty_cache()
torch.cuda.synchronize()
if do_profile:
profile_dir = "./tb_logs"
print("Profiling and writing to", profile_dir)
cm = profile(
activities=[ProfilerActivity.CPU, ProfilerActivity.CUDA],
record_shapes=True,
profile_memory=True,
with_stack=True,
on_trace_ready=tensorboard_trace_handler(dir_name=profile_dir),
)
else:
cm = contextlib.nullcontext()
with cm:
start_event.record()
for _ in tqdm(range(num_runs), desc="Measuring generate"):
res = generate_method(
**inputs,
min_new_tokens=max_new_tokens,
max_new_tokens=max_new_tokens,
)
end_event.record()
torch.cuda.synchronize()
max_memory = torch.cuda.max_memory_allocated(device)
h = hashlib.new('sha256')
h.update(str(tokenizer.batch_decode(res)).encode())
sha_hash = h.hexdigest()
return (start_event.elapsed_time(end_event) * 1.0e-3) / num_runs, max_memory * 1e-6, sha_hash
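# Timing notes for the helper above (descriptive only): CUDA kernels launch
# asynchronously, so host-side wall clocks would mis-measure GPU work. The
# torch.cuda.Event pairs record markers on the CUDA stream, torch.cuda.synchronize()
# runs before they are read, and elapsed_time() returns milliseconds -- hence the
# 1.0e-3 scaling to seconds, averaged over num_runs.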
args = parser.parse_args()
torch.manual_seed(42)
if args.dtype == "fp16":
dtype = torch.float16
elif args.dtype == "fp32":
dtype = torch.float32
else:
raise ValueError("Choose fp16 or fp32 dtype")
device = torch.device("cuda")
tokenizer = AutoTokenizer.from_pretrained(args.model)
tokenizer.pad_token = tokenizer.eos_token
if args.preallocate:
with device:
original_model = AutoModelForCausalLM.from_pretrained(args.model, torch_dtype=dtype)
with torch.device("meta"):
model = LlamaForCausalLM.from_pretrained(args.model, torch_dtype=dtype)
# replace back parameters and buffers that were untouched by the bettertransformer transform
for path, param in model.state_dict().items():
if "k_proj" not in path and "v_proj" not in path and "q_proj" not in path and "min_allowed" not in path:
recurse_setattr(model, path, copy.deepcopy(recurse_getattr(original_model, path)))
recurse_delattr(original_model, path) # save mem
# some buffers may be non-persistent, hence not in the state_dict (as token_type_ids for some models)
for path, param in model.named_buffers():
if "k_proj" not in path and "v_proj" not in path and "q_proj" not in path and "min_allowed" not in path:
if recurse_hasattr(original_model, path):
recurse_setattr(model, path, copy.deepcopy(recurse_getattr(original_model, path)))
recurse_delattr(original_model, path) # save mem
if "min_allowed" in path:
recurse_setattr(model, path, torch.tensor(torch.finfo(dtype).min, device=device))
    for name, param in model.named_parameters():
if "qkv_proj" in name:
base_root_query = ".".join(name.split(".")[:-2]) + ".q_proj.weight"
base_root_key = ".".join(name.split(".")[:-2]) + ".k_proj.weight"
base_root_value = ".".join(name.split(".")[:-2]) + ".v_proj.weight"
root = ".".join(name.split(".")[:-1]) + ".weight"
weight = torch.nn.Parameter(torch.cat([
copy.deepcopy(recurse_getattr(original_model, base_root_query)),
copy.deepcopy(recurse_getattr(original_model, base_root_key)),
copy.deepcopy(recurse_getattr(original_model, base_root_value))
], dim=0))
recurse_setattr(model, name, weight)
del original_model
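    # Why the concatenation above is exact (sketch): nn.Linear computes y = x @ W.T,
    # so row-stacking cat([Wq, Wk, Wv], dim=0) makes the fused qkv_proj emit
    # [q; k; v] in a single matmul; LlamaAttention later views that output as
    # (3, bsz, heads, seq, head_dim) and indexes q/k/v back out.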
else:
with device:
model = AutoModelForCausalLM.from_pretrained(args.model, torch_dtype=dtype)
if args.compile != "no":
dynamic = args.compile == "dynamic"
fullgraph = args.compile == "fullgraph"
mode = "reduce-overhead" if not args.sweep else "max-autotune"
model.forward = torch.compile(model.forward, mode=mode, fullgraph=fullgraph, dynamic=dynamic)
if model.config.model_type != "llama":
raise ValueError("This script currently only supports LLAMA")
if args.profile and args.sweep:
raise ValueError("Cannot profile and sweep at the same time")
batch_sizes = BATCH_SIZES if args.sweep != "batch" else SWEEP_BATCH_SIZES
prompt_lengths = PROMPT_LENGTHS if args.sweep != "length" else SWEEP_PROMPT_LENGTHS
new_tokens = NEW_TOKENS if not args.profile else PROFILE_NEW_TOKENS
num_runs = NUM_RUNS
if args.profile:
num_runs = PROFILE_NUM_RUNS
elif args.sweep:
num_runs = SWEEP_NUM_RUNS
stats = {}
for batch_size in tqdm(batch_sizes):
for prompt_length in tqdm(prompt_lengths):
for max_new_tokens in tqdm(new_tokens):
cache_length = 1 * (prompt_length + max_new_tokens)
inp = {
"input_ids": torch.randint(low=1, high=10, size=(batch_size, prompt_length)).to("cuda"),
"attention_mask": torch.ones(batch_size, prompt_length, dtype=torch.int32).to("cuda")
}
if batch_size > 1:
inp["input_ids"][0, :10] = tokenizer.pad_token_id
inp["attention_mask"][0, :10] = 0
h = hashlib.new('sha256')
h.update(str(inp).encode())
print("\nInput hash:", h.hexdigest()[:8])
print("Cache preallocation:", args.preallocate)
generate_method = model.generate if not args.preallocate else model.generate_minimal
try:
time_per_generation, max_memory, sha_hash = timing_cuda(
tokenizer=tokenizer,
num_runs=num_runs,
inputs=inp,
device=device,
max_new_tokens=max_new_tokens,
cache_length=cache_length,
generate_method=generate_method,
preallocate=args.preallocate,
do_profile=args.profile,
)
            except Exception:
traceback.print_exc()
break # in a sweep, might get OOM
tok_per_s = (max_new_tokens * batch_size) / time_per_generation
stats[(batch_size, prompt_length, max_new_tokens)] = {
"cache_length": cache_length,
"tok_per_s": tok_per_s,
"hash": sha_hash[:8],
"max_mem": max_memory
}
# build dataframe with the results and store it
rows = []
repo = git.Repo(search_parent_directories=True)
current_git_hash = repo.git.rev_parse(repo.head, short=True)
for key, value in stats.items():
batch_size, prompt_length, new_tokens = key
rows.append({
'Preallocate': args.preallocate,
'Compile': args.compile,
'Batch size': str(batch_size),
'Prompt length': str(prompt_length),
'New tokens': str(new_tokens),
'Cache length': str(value["cache_length"]),
'Weights dtype': args.dtype,
'Tokens per second': f"{value['tok_per_s']:.3f}",
'Max GPU memory (MB)': f"{value['max_mem']:.2f}",
'Results hash': value["hash"],
'Git hash': current_git_hash,
})
df = pd.DataFrame(rows)
print(df)
os.makedirs("./results", exist_ok=True)
output_path = "./results/results_llama.csv"
df.to_csv(output_path, mode='a', header=not os.path.exists(output_path))
print(f"Results also appended to {output_path}")
|
"""
Plots the results of a sweep for the current git hash.
"""
import argparse
import git
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
DEFAULT_BATCH_SIZE = 1
DEFAULT_PROMPT_LENGTH = 1000
parser = argparse.ArgumentParser()
parser.add_argument(
"--sweep",
type=str,
choices=["batch", "length"],
required=True,
help="Select which type of sweep to plot"
)
args = parser.parse_args()
# 1. Read file and retrieve relevant data
results_file = "./results/results_llama.csv"
df = pd.read_csv(results_file)
repo = git.Repo(search_parent_directories=True)
current_git_hash = repo.git.rev_parse(repo.head, short=True)
df = df[df["Git hash"] == current_git_hash]
if df.empty:
raise ValueError(f"No results found for current git hash ({current_git_hash})")
if args.sweep == "batch":
df = df[df["Prompt length"] == DEFAULT_PROMPT_LENGTH]
else:
df = df[df["Batch size"] == DEFAULT_BATCH_SIZE]
df = df[df["Prompt length"] != DEFAULT_PROMPT_LENGTH]
if df.empty:
raise ValueError("Something went wrong -- no results in the filtered dataframe")
# 2. Plot -- we expect 3 series: original model, preallocated, and preallocated + compiled
if args.sweep == "batch":
x_col_name = "Batch size"
else:
x_col_name = "Prompt length"
df["Type"] = df["Preallocate"].astype("str") + df["Compile"]
df["Type"] = df["Type"].replace({"Falseno": "original", "Trueno": "Preallocate", "Truestatic": "Pre + comp."})
g = sns.catplot(
data=df,
kind="bar",
x=x_col_name,
y="Tokens per second",
hue="Type",
palette={"original": "blue", "Preallocate": "orange", "Pre + comp.": "red"},
alpha=.9,
)
g.despine(left=True)
g.set_axis_labels("Batch size" if args.sweep == "batch" else "Prompt length", "Tokens per second")
g.legend.set_title("LLaMA code version")
plt.setp(g.legend.get_texts(), fontsize='7')  # for legend text
title_constant = f'{"Batch size = " + str(DEFAULT_BATCH_SIZE) if args.sweep == "length" else "Prompt length = " + str(DEFAULT_PROMPT_LENGTH)}'
g.set(title=f'LLaMA sweep ({title_constant})')
# Add the number to the top of each bar
ax = g.facet_axis(0, 0)
for i in ax.containers:
ax.bar_label(i, fontsize=7)
g.tight_layout()
plt_path = f"./results/llama_sweep_{current_git_hash}_{args.sweep}.png"
plt.savefig(plt_path, dpi=300)
print(f"Plot stored at {plt_path}")
|
# coding=utf-8
# Copyright 2022 HuggingFace Inc. team. All rights reserved.
#
# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
# and OPT implementations in this library. It has been modified from its
# original forms to accommodate minor architectural differences compared
# to GPT-NeoX and OPT used by the Meta AI team that trained the model.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import inspect
from typing import Optional, TYPE_CHECKING, Dict, Any
from transformers.utils import logging, ModelOutput
from transformers import GenerationConfig
if TYPE_CHECKING:
from transformers.generation.streamers import BaseStreamer
logger = logging.get_logger(__name__)
class GenerationPrefill:
@torch.no_grad()
def generate_minimal(
self,
min_new_tokens: int,
max_new_tokens: int,
inputs: Optional[torch.Tensor] = None,
streamer: Optional["BaseStreamer"] = None,
cache_length: Optional[int] = None,
**model_kwargs
) -> torch.LongTensor:
r"""
Generates sequences of token ids for models with a language modeling head with greedy search.
Parameters:
inputs (`torch.Tensor` of varying shape depending on the modality, *optional*):
The sequence used as a prompt for the generation or as model inputs to the encoder. If `None` the
method initializes it with `bos_token_id` and a batch size of 1. For decoder-only models `inputs`
should of in the format of `input_ids`. For encoder-decoder models *inputs* can represent any of
`input_ids`, `input_values`, `input_features`, or `pixel_values`.
streamer (`BaseStreamer`, *optional*):
Streamer object that will be used to stream the generated sequences. Generated tokens are passed
through `streamer.put(token_ids)` and the streamer is responsible for any further processing.
"""
# 1. Handle `generation_config` and kwargs that might update it, and validate the `.generate()` call
self._validate_model_class()
generation_config = GenerationConfig.from_model_config(self.config)
self._validate_model_kwargs(model_kwargs.copy())
if generation_config.pad_token_id is None and generation_config.eos_token_id is not None:
if model_kwargs.get("attention_mask", None) is None:
logger.warning(
"The attention mask and the pad token id were not set. As a consequence, you may observe "
"unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results."
)
eos_token_id = generation_config.eos_token_id
if isinstance(eos_token_id, list):
eos_token_id = eos_token_id[0]
logger.warning(f"Setting `pad_token_id` to `eos_token_id`:{eos_token_id} for open-end generation.")
generation_config.pad_token_id = eos_token_id
# 3. Define model inputs
# inputs_tensor has to be defined
# model_input_name is defined if model-specific keyword input is passed
# otherwise model_input_name is None
# all model-specific keyword inputs are removed from `model_kwargs`
inputs_tensor, model_input_name, model_kwargs = self._prepare_model_inputs(
inputs, generation_config.bos_token_id, model_kwargs
)
accepts_attention_mask = "attention_mask" in set(inspect.signature(self.forward).parameters.keys())
requires_attention_mask = "encoder_outputs" not in model_kwargs
if model_kwargs.get("attention_mask", None) is None and requires_attention_mask and accepts_attention_mask:
model_kwargs["attention_mask"] = self._prepare_attention_mask_for_generation(
inputs_tensor, generation_config.pad_token_id, generation_config.eos_token_id
)
# decoder-only models should use left-padding for generation
# If `input_ids` was given, check if the last id in any sequence is `pad_token_id`
        # Note: if `inputs_embeds` is used, this check does not work, because we want to be more hands-off.
if (
generation_config.pad_token_id is not None
and len(inputs_tensor.shape) == 2
and torch.sum(inputs_tensor[:, -1] == generation_config.pad_token_id) > 0
):
logger.warning(
"A decoder-only architecture is being used, but right-padding was detected! For correct "
"generation results, please set `padding_side='left'` when initializing the tokenizer."
)
# 5. Prepare `input_ids` which will be used for auto-regressive generation
if self.config.is_encoder_decoder:
raise NotImplementedError("encoder-decoder not supported yet")
else:
input_ids = inputs_tensor if model_input_name == "input_ids" else model_kwargs.pop("input_ids")
if streamer is not None:
streamer.put(input_ids.cpu())
batch_size, context_length = input_ids.shape
cache_length = cache_length if cache_length is not None else max_new_tokens
model_kwargs["valid_past_index"] = torch.tensor(0, dtype=torch.int64)
model_kwargs["past_key_values"] = self.get_empty_kv_cache(batch_size=batch_size, cache_length=cache_length, device=input_ids.device, dtype=self.dtype)
model_kwargs["attention_mask"] = self.get_preallocated_attention_mask(attention_mask=model_kwargs["attention_mask"], batch_size=batch_size, cache_length=cache_length, device=input_ids.device, context_length=context_length)
# 11. run greedy search
return self.greedy_search_minimal(
input_ids,
pad_token_id=generation_config.pad_token_id,
eos_token_id=generation_config.eos_token_id,
output_scores=generation_config.output_scores,
return_dict_in_generate=generation_config.return_dict_in_generate,
streamer=streamer,
min_new_tokens=min_new_tokens,
max_new_tokens=max_new_tokens,
**model_kwargs,
)
def greedy_search_minimal(
self,
input_ids: torch.LongTensor,
pad_token_id: int,
eos_token_id: int,
min_new_tokens: int,
max_new_tokens: int,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
streamer: Optional["BaseStreamer"] = None,
**model_kwargs,
) -> torch.LongTensor:
r"""
Generates sequences of token ids for models with a language modeling head using **greedy decoding** and can be
used for text-decoder, text-to-text, speech-to-text, and vision-to-text models.
Parameters:
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
The sequence used as a prompt for the generation.
logits_processor (`LogitsProcessorList`, *optional*):
An instance of [`LogitsProcessorList`]. List of instances of class derived from [`LogitsProcessor`]
used to modify the prediction scores of the language modeling head applied at each generation step.
max_new_tokens (`int`, *optional*, defaults to 20):
The maximum length of the sequence to be generated.
pad_token_id (`int`):
The id of the *padding* token.
eos_token_id (`int`):
The id of the *end-of-sequence* token.
streamer (`BaseStreamer`, *optional*):
Streamer object that will be used to stream the generated sequences. Generated tokens are passed
through `streamer.put(token_ids)` and the streamer is responsible for any further processing.
model_kwargs:
Additional model specific keyword arguments will be forwarded to the `forward` function of the model.
If model is an encoder-decoder model the kwargs should include `encoder_outputs`.
Return:
[`~generation.GreedySearchDecoderOnlyOutput`], [`~generation.GreedySearchEncoderDecoderOutput`] or
`torch.LongTensor`: A `torch.LongTensor` containing the generated tokens (default behaviour) or a
[`~generation.GreedySearchDecoderOnlyOutput`] if `model.config.is_encoder_decoder=False` and
`return_dict_in_generate=True` or a [`~generation.GreedySearchEncoderDecoderOutput`] if
`model.config.is_encoder_decoder=True`.
"""
eos_token_id_tensor = torch.tensor([eos_token_id]).to(input_ids.device).unsqueeze(1)
n_eos_tokens = eos_token_id_tensor.shape[0]
# keep track of which sequences are already finished
unfinished_sequences = torch.ones((input_ids.shape[0], 1), dtype=torch.long, device=input_ids.device)
counter = 0
result = input_ids
while True:
# prepare model inputs
model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs)
# forward pass to get next token
outputs = self(
**model_inputs,
return_dict=True,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
)
counter += 1
# argmax
next_tokens = torch.argmax(outputs.logits[:, -1, :], dim=-1, keepdim=True)
# finished sentences should have their next token be a padding token
if eos_token_id is not None:
if pad_token_id is None:
raise ValueError("If `eos_token_id` is defined, make sure that `pad_token_id` is defined.")
next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences)
input_ids = next_tokens
# update generated ids, model inputs, and length for next step
result = torch.cat([result, next_tokens], dim=-1)
if streamer is not None:
streamer.put(next_tokens.cpu())
model_kwargs = self.__update_model_kwargs_for_generation(
outputs, model_kwargs, model_inputs
)
# TODO: not sure this is correct anymore with the keepdim=True
# if eos_token was found in one sentence, set sentence to finished
if eos_token_id_tensor is not None:
unfinished_sequences = unfinished_sequences.mul(
next_tokens.tile(n_eos_tokens, 1).ne(eos_token_id_tensor).prod(dim=0)
)
# stop when each sentence is finished
if unfinished_sequences.max() == 0 and counter >= min_new_tokens:
break
# stop if we exceed the maximum length
if counter >= max_new_tokens:
break
if streamer is not None:
streamer.end()
return result
def __update_model_kwargs_for_generation(
self,
outputs: ModelOutput,
model_kwargs: Dict[str, Any],
model_inputs: Dict[str, Any]
) -> Dict[str, Any]:
model_kwargs["valid_past_index"] += outputs.logits.shape[1]
if getattr(outputs, "state", None) is not None:
model_kwargs["state"] = outputs.state
        # update attention mask -- not needed here: generate_minimal preallocates the
        # mask over the full cache length (get_preallocated_attention_mask), so the
        # original per-step extension is left disabled for reference:
        # if "attention_mask" in model_kwargs:
        #     attention_mask = model_kwargs["attention_mask"]
        #     model_kwargs["attention_mask"] = torch.cat(
        #         [attention_mask, attention_mask.new_ones((attention_mask.shape[0], 1))], dim=-1
        #     )
position_ids = model_inputs["position_ids"]
if position_ids.shape[1] > 1:
model_kwargs["position_ids"] = position_ids[:, -1:] + 1
else:
model_kwargs["position_ids"] = position_ids + 1
# NOTE: token_type_ids is not used by llama so we don't care about this one for now
# update token_type_ids with last value
if "token_type_ids" in model_kwargs:
token_type_ids = model_kwargs["token_type_ids"]
model_kwargs["token_type_ids"] = torch.cat([token_type_ids, token_type_ids[:, -1].unsqueeze(-1)], dim=-1)
return model_kwargs
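# Hedged usage sketch (mirrors the benchmark script; assumes `model` is a trfs_fast
# LlamaForCausalLM on CUDA and `tokenizer` a matching tokenizer):
#
#     inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
#     out = model.generate_minimal(
#         **inputs,
#         min_new_tokens=20,
#         max_new_tokens=20,
#         cache_length=64,  # preallocated KV length >= prompt + new tokens
#     )
#     print(tokenizer.batch_decode(out))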
|
__version__ = "0.0.1.dev0"
|
# coding=utf-8
# Copyright 2022 EleutherAI and the HuggingFace Inc. team. All rights reserved.
#
# This code is based on EleutherAI's GPT-NeoX library and the GPT-NeoX
# and OPT implementations in this library. It has been modified from its
# original forms to accommodate minor architectural differences compared
# to GPT-NeoX and OPT used by the Meta AI team that trained the model.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch LLaMA model."""
from typing import List, Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
from transformers.activations import ACT2FN
from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
from transformers.modeling_utils import PreTrainedModel
from transformers.utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
from transformers.models.llama.configuration_llama import LlamaConfig
from .generation import GenerationPrefill
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "LlamaConfig"
# Copied from transformers.models.bart.modeling_bart._make_causal_mask
def _make_causal_mask(
input_ids_shape: torch.Size, dtype: torch.dtype, device: torch.device, past_key_values_length: int = 0
):
"""
Make causal mask used for bi-directional self-attention.
"""
bsz, tgt_len = input_ids_shape
mask = torch.full((tgt_len, tgt_len), torch.tensor(torch.finfo(dtype).min, device=device), device=device)
mask_cond = torch.arange(mask.size(-1), device=device)
mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)
mask = mask.to(dtype)
if past_key_values_length > 0:
mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype, device=device), mask], dim=-1)
return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)
# Copied from transformers.models.bart.modeling_bart._expand_mask
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
"""
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
"""
bsz, src_len = mask.size()
tgt_len = tgt_len if tgt_len is not None else src_len
expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)
inverted_mask = 1.0 - expanded_mask
return inverted_mask.masked_fill(inverted_mask.to(torch.bool), torch.finfo(dtype).min)
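# Shape sketch for _expand_mask: a padding mask [[1, 1, 0]] of shape [bsz=1, src_len=3]
# becomes an additive bias of shape [1, 1, tgt_len, 3] holding 0.0 where attention is
# allowed and torch.finfo(dtype).min where it is masked.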
class LlamaRMSNorm(nn.Module):
def __init__(self, hidden_size, eps=1e-6):
"""
LlamaRMSNorm is equivalent to T5LayerNorm
"""
super().__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.variance_epsilon = eps
def forward(self, hidden_states):
input_dtype = hidden_states.dtype
variance = hidden_states.to(torch.float32).pow(2).mean(-1, keepdim=True)
hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
return (self.weight * hidden_states).to(input_dtype)
class LlamaRotaryEmbedding(torch.nn.Module):
def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None):
super().__init__()
inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2).float().to(device) / dim))
self.register_buffer("inv_freq", inv_freq)
# Build here to make `torch.jit.trace` work.
self.max_seq_len_cached = max_position_embeddings
t = torch.arange(self.max_seq_len_cached, device=self.inv_freq.device, dtype=self.inv_freq.dtype)
freqs = torch.einsum("i,j->ij", t, self.inv_freq)
# Different from paper, but it uses a different permutation in order to obtain the same calculation
emb = torch.cat((freqs, freqs), dim=-1)
dtype = torch.get_default_dtype()
self.register_buffer("cos_cached", emb.cos()[None, None, :, :].to(dtype), persistent=False)
self.register_buffer("sin_cached", emb.sin()[None, None, :, :].to(dtype), persistent=False)
def forward(self, x, seq_len=None):
# x: [bs, num_attention_heads, seq_len, head_size]
        # The dynamic cache-growing `if` block below is disabled: its data-dependent
        # control flow breaks tracing / torch.compile, so the sin/cos caches built
        # once in `__init__` (sized to max_position_embeddings) are always used.
# if seq_len > self.max_seq_len_cached:
# self.max_seq_len_cached = seq_len
# t = torch.arange(self.max_seq_len_cached, device=x.device, dtype=self.inv_freq.dtype)
# freqs = torch.einsum("i,j->ij", t, self.inv_freq)
# # Different from paper, but it uses a different permutation in order to obtain the same calculation
# emb = torch.cat((freqs, freqs), dim=-1).to(x.device)
# self.register_buffer("cos_cached", emb.cos()[None, None, :, :].to(x.dtype), persistent=False)
# self.register_buffer("sin_cached", emb.sin()[None, None, :, :].to(x.dtype), persistent=False)
return (
self.cos_cached.to(dtype=x.dtype),
self.sin_cached.to(dtype=x.dtype),
)
def rotate_half(x):
"""Rotates half the hidden dims of the input."""
x = torch.roll(x, shifts=x.shape[-1] // 2, dims=-1)
x[..., : x.shape[-1] // 2] *= -1
return x
def apply_rotary_pos_emb_opt(q, key_states, cos, sin, position_ids):
# The first two dimensions of cos and sin are always 1, so we can `squeeze` them.
# TODO: can we remove some squeeze/unsqueeze?
# TODO: replace by squeeze(0, 1) once https://github.com/pytorch/pytorch/issues/103875 is fixed
cos = cos.squeeze(1).squeeze(0) # [seq_len, dim]
sin = sin.squeeze(1).squeeze(0) # [seq_len, dim]
cos = cos[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim]
sin = sin[position_ids].unsqueeze(1) # [bs, 1, seq_len, dim]
q_embed = (q * cos) + (rotate_half(q) * sin)
key_states.copy_((key_states * cos) + (rotate_half(key_states) * sin))
return q_embed
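# Sketch of the two helpers above: rotate_half via torch.roll is equivalent to the
# usual split form cat((-x2, x1), dim=-1) -- rolling right by half the head dim swaps
# the two halves, after which the first half is negated. apply_rotary_pos_emb_opt
# rotates the keys in place with copy_() (key_states is a view of the fused qkv
# output), so the later cache write in LlamaAttention stores already-rotated keys
# without an extra allocation.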
class LlamaMLP(nn.Module):
def __init__(
self,
hidden_size: int,
intermediate_size: int,
hidden_act: str,
):
super().__init__()
self.gate_proj = nn.Linear(hidden_size, intermediate_size, bias=False)
self.down_proj = nn.Linear(intermediate_size, hidden_size, bias=False)
self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=False)
self.act_fn = ACT2FN[hidden_act]
def forward(self, x):
return self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
class LlamaAttention(nn.Module):
"""Multi-headed attention from 'Attention Is All You Need' paper"""
def __init__(self, config: LlamaConfig):
super().__init__()
self.config = config
self.hidden_size = config.hidden_size
self.num_heads = config.num_attention_heads
self.head_dim = self.hidden_size // self.num_heads
self.max_position_embeddings = config.max_position_embeddings
if (self.head_dim * self.num_heads) != self.hidden_size:
raise ValueError(
f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}"
f" and `num_heads`: {self.num_heads})."
)
self.qkv_proj = nn.Linear(self.hidden_size, 3 * self.num_heads * self.head_dim, bias=False, device="meta")
self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False)
self.rotary_emb = LlamaRotaryEmbedding(self.head_dim, max_position_embeddings=self.max_position_embeddings)
self.register_buffer("min_allowed", torch.tensor(torch.finfo(self.o_proj.weight.dtype).min), persistent=False)
def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.Tensor] = None,
position_ids: Optional[torch.LongTensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
output_attentions: bool = False,
valid_past_index: torch.Tensor = torch.tensor(0, dtype=torch.int64),
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
if output_attentions is True:
raise ValueError("output_attentions=True can not be supported with BetterTransformer.")
bsz, q_len, _ = hidden_states.size()
# query_states = self.q_proj(hidden_states).view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2)
query_key_value_states = self.qkv_proj(hidden_states).view(bsz, q_len, 3, self.num_heads, self.head_dim).permute(2, 0, 3, 1, 4)
query_states = query_key_value_states[0]
key_value_states = query_key_value_states[1:]
# key_value_states used only for dtype here
cos, sin = self.rotary_emb(key_value_states, seq_len=valid_past_index + q_len)
query_states = apply_rotary_pos_emb_opt(query_states, key_value_states[0], cos, sin, position_ids)
# slice end is equivalent to "if valid_past_index > 0: = valid_past_index + 1; else: = q_len"
past_kv_slice_start = valid_past_index
past_kv_slice_end = torch.eq(valid_past_index, 0).int() * q_len + torch.ne(valid_past_index, 0).int() * (valid_past_index + 1)
past_state_slice_end = torch.eq(valid_past_index, 0).int() * key_value_states.shape[-2] + torch.ne(valid_past_index, 0).int() * (past_kv_slice_end)
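        # Worked example of the branchless slicing above: at prefill, valid_past_index=0
        # and q_len=N, so keys/values land in cache[..., 0:N, :] and all N positions are
        # read back; at a decode step with valid_past_index=t, the single new token lands
        # in cache[..., t:t+1, :] and positions [0:t+1] are read. Avoiding a Python `if`
        # on a tensor value keeps the forward traceable/compilable.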
past_key_value[..., past_kv_slice_start:past_kv_slice_end, :] = key_value_states
key_states, value_states = past_key_value[..., :past_state_slice_end, :]
if bsz == 1 or self.training:
# BEWARE: at this stage, attention_mask is not the same as in transformers llama
if query_states.shape[2] > 1:
attn_output = torch.nn.functional.scaled_dot_product_attention(
query_states, key_states, value_states, attn_mask=None, dropout_p=0.0, is_causal=True
)
else:
attn_output = torch.nn.functional.scaled_dot_product_attention(
query_states, key_states, value_states, attn_mask=None, dropout_p=0.0, is_causal=False
)
else:
# This line is necessary for numerical equivalence, although I'm not sure it is useful in any way.
attention_mask = torch.max(attention_mask, self.min_allowed)
attn_output = torch.nn.functional.scaled_dot_product_attention(
query_states, key_states, value_states, attn_mask=attention_mask, dropout_p=0.0, is_causal=False
)
attn_output = attn_output.transpose(1, 2)
attn_output = attn_output.reshape(bsz, q_len, self.hidden_size)
attn_output = self.o_proj(attn_output)
# TODO (felix) returning past_key_value with static cache is probably useless?
return attn_output, None, None
class LlamaDecoderLayer(nn.Module):
def __init__(self, config: LlamaConfig):
super().__init__()
self.hidden_size = config.hidden_size
self.self_attn = LlamaAttention(config=config)
self.mlp = LlamaMLP(
hidden_size=self.hidden_size,
intermediate_size=config.intermediate_size,
hidden_act=config.hidden_act,
)
self.input_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
self.post_attention_layernorm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.Tensor] = None,
position_ids: Optional[torch.LongTensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
output_attentions: Optional[bool] = False,
valid_past_index: torch.Tensor = torch.tensor(0, dtype=torch.int64),
) -> Tuple[torch.FloatTensor, Optional[Tuple[torch.FloatTensor, torch.FloatTensor]]]:
"""
Args:
hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)`
attention_mask (`torch.FloatTensor`, *optional*): attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under
returned tensors for more detail.
past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states
"""
residual = hidden_states
hidden_states = self.input_layernorm(hidden_states)
# Self Attention
hidden_states, self_attn_weights, present_key_value = self.self_attn(
hidden_states=hidden_states,
attention_mask=attention_mask,
position_ids=position_ids,
past_key_value=past_key_value,
output_attentions=output_attentions,
valid_past_index=valid_past_index,
)
hidden_states = residual + hidden_states
# Fully Connected
residual = hidden_states
hidden_states = self.post_attention_layernorm(hidden_states)
hidden_states = self.mlp(hidden_states)
hidden_states = residual + hidden_states
outputs = (hidden_states,)
if output_attentions:
outputs += (self_attn_weights,)
outputs += (present_key_value,)
return outputs
LLAMA_START_DOCSTRING = r"""
This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
etc.)
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
and behavior.
Parameters:
config ([`LlamaConfig`]):
Model configuration class with all the parameters of the model. Initializing with a config file does not
load the weights associated with the model, only the configuration. Check out the
[`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
@add_start_docstrings(
"The bare LLaMA Model outputting raw hidden-states without any specific head on top.",
LLAMA_START_DOCSTRING,
)
class LlamaPreTrainedModel(PreTrainedModel):
config_class = LlamaConfig
base_model_prefix = "model"
supports_gradient_checkpointing = True
_no_split_modules = ["LlamaDecoderLayer"]
_skip_keys_device_placement = "past_key_values"
_keys_to_ignore_on_load_unexpected = [r"decoder\.version"]
def _init_weights(self, module):
std = self.config.initializer_range
if isinstance(module, nn.Linear):
module.weight.data.normal_(mean=0.0, std=std)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=std)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
def _set_gradient_checkpointing(self, module, value=False):
if isinstance(module, LlamaModel):
module.gradient_checkpointing = value
LLAMA_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
it.
Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are input IDs?](../glossary#input-ids)
attention_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
Indices can be obtained using [`AutoTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
If `past_key_values` is used, optionally only the last `decoder_input_ids` have to be input (see
`past_key_values`).
If you want to change padding behavior, you should read [`modeling_opt._prepare_decoder_attention_mask`]
and modify to your needs. See diagram 1 in [the paper](https://arxiv.org/abs/1910.13461) for more
information on the default strategy.
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
config.n_positions - 1]`.
[What are position IDs?](../glossary#position-ids)
past_key_values (`tuple(tuple(torch.FloatTensor))`, *optional*, returned when `use_cache=True` is passed or when `config.use_cache=True`):
Tuple of `tuple(torch.FloatTensor)` of length `config.n_layers`, with each tuple having 2 tensors of shape
`(batch_size, num_heads, sequence_length, embed_size_per_head)`) and 2 additional tensors of shape
`(batch_size, num_heads, encoder_sequence_length, embed_size_per_head)`.
Contains pre-computed hidden-states (key and values in the self-attention blocks and in the cross-attention
blocks) that can be used (see `past_key_values` input) to speed up sequential decoding.
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
`decoder_input_ids` of shape `(batch_size, sequence_length)`.
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert `input_ids` indices into associated vectors than the
model's internal embedding lookup matrix.
use_cache (`bool`, *optional*):
If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
`past_key_values`).
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
"The bare LLaMA Model outputting raw hidden-states without any specific head on top.",
LLAMA_START_DOCSTRING,
)
class LlamaModel(LlamaPreTrainedModel):
"""
Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`LlamaDecoderLayer`]
Args:
config: LlamaConfig
"""
def __init__(self, config: LlamaConfig):
super().__init__(config)
self.padding_idx = config.pad_token_id
self.vocab_size = config.vocab_size
self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
self.layers = nn.ModuleList([LlamaDecoderLayer(config) for _ in range(config.num_hidden_layers)])
self.norm = LlamaRMSNorm(config.hidden_size, eps=config.rms_norm_eps)
self.gradient_checkpointing = False
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.embed_tokens
def set_input_embeddings(self, value):
self.embed_tokens = value
# Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask
def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length):
# create causal mask
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
combined_attention_mask = None
if input_shape[-1] > 1:
combined_attention_mask = _make_causal_mask(
input_shape,
inputs_embeds.dtype,
device=inputs_embeds.device,
past_key_values_length=past_key_values_length,
)
if attention_mask is not None:
attention_mask_buffer = attention_mask[:, :past_key_values_length + input_shape[-1]]
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
expanded_attn_mask = _expand_mask(attention_mask_buffer, inputs_embeds.dtype, tgt_len=input_shape[-1]).to(
inputs_embeds.device
)
combined_attention_mask = (
expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask
)
return combined_attention_mask
@add_start_docstrings_to_model_forward(LLAMA_INPUTS_DOCSTRING)
def forward(
self,
input_ids: torch.LongTensor = None,
attention_mask: Optional[torch.Tensor] = None,
position_ids: Optional[torch.LongTensor] = None,
past_key_values: Optional[List[torch.FloatTensor]] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
valid_past_index: torch.Tensor = torch.tensor(0, dtype=torch.int64),
) -> Union[Tuple, BaseModelOutputWithPast]:
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# retrieve input_ids and inputs_embeds
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
elif input_ids is not None:
batch_size, seq_length = input_ids.shape
elif inputs_embeds is not None:
batch_size, seq_length, _ = inputs_embeds.shape
else:
raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")
seq_length_with_past = seq_length
past_key_values_length = valid_past_index
if position_ids is None:
device = input_ids.device if input_ids is not None else inputs_embeds.device
position_ids = torch.arange(
past_key_values_length, seq_length + past_key_values_length, dtype=torch.long, device=device
)
position_ids = position_ids.unsqueeze(0).view(-1, seq_length)
else:
position_ids = position_ids.view(-1, seq_length).long()
if inputs_embeds is None:
inputs_embeds = self.embed_tokens(input_ids)
# embed positions
if attention_mask is None:
attention_mask = torch.ones(
(batch_size, seq_length_with_past), dtype=torch.bool, device=inputs_embeds.device
)
# As we use SDPA, we simply don't care about the attention mask in the batch size = 1 case
if batch_size > 1:
attention_mask = self._prepare_decoder_attention_mask(
attention_mask, (batch_size, seq_length), inputs_embeds, past_key_values_length
)
hidden_states = inputs_embeds
# decoder layers
all_hidden_states = () if output_hidden_states else None
all_self_attns = () if output_attentions else None
next_decoder_cache = ()
for idx, decoder_layer in enumerate(self.layers):
if output_hidden_states:
all_hidden_states += (hidden_states,)
past_key_value = past_key_values[idx] if past_key_values is not None else None
if self.gradient_checkpointing and self.training:
def create_custom_forward(module):
def custom_forward(*inputs):
# None for past_key_value
return module(*inputs, output_attentions, None)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(decoder_layer),
hidden_states,
attention_mask,
position_ids,
None,
)
else:
layer_outputs = decoder_layer(
hidden_states,
attention_mask=attention_mask,
position_ids=position_ids,
past_key_value=past_key_value,
output_attentions=output_attentions,
valid_past_index=valid_past_index,
)
hidden_states = layer_outputs[0]
next_decoder_cache += (layer_outputs[2 if output_attentions else 1],)
if output_attentions:
all_self_attns += (layer_outputs[1],)
hidden_states = self.norm(hidden_states)
# add hidden states from the last decoder layer
if output_hidden_states:
all_hidden_states += (hidden_states,)
next_cache = next_decoder_cache
if not return_dict:
return tuple(v for v in [hidden_states, next_cache, all_hidden_states, all_self_attns] if v is not None)
return BaseModelOutputWithPast(
last_hidden_state=hidden_states,
past_key_values=next_cache,
hidden_states=all_hidden_states,
attentions=all_self_attns,
)
class LlamaForCausalLM(LlamaPreTrainedModel, GenerationPrefill):
supports_static_kv_cache = True
def __init__(self, config):
super().__init__(config)
self.model = LlamaModel(config)
self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.model.embed_tokens
def set_input_embeddings(self, value):
self.model.embed_tokens = value
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
def set_decoder(self, decoder):
self.model = decoder
def get_decoder(self):
return self.model
def get_empty_kv_cache(self, batch_size: int, cache_length: int, dtype: torch.dtype, device: torch.device):
past_key_values = [torch.empty(
2,
batch_size,
self.config.num_attention_heads,
cache_length,
self.config.hidden_size // self.config.num_attention_heads, # head dimension
dtype=dtype,
device=device
)
for _ in range(self.config.num_hidden_layers)]
return past_key_values
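    # Cache layout (descriptive): one tensor per layer, shaped
    # [2, batch, num_heads, cache_length, head_dim], index 0 holding keys and index 1
    # holding values; `valid_past_index` tracks how much of the prefix is filled.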
def get_preallocated_attention_mask(self, attention_mask: torch.Tensor, batch_size: int, cache_length: int, device: torch.device, context_length: int):
attention_mask_buffer = torch.ones(batch_size, cache_length, dtype=torch.int64, device=device)
attention_mask_buffer[:, :context_length] = attention_mask
return attention_mask_buffer
@add_start_docstrings_to_model_forward(LLAMA_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=CausalLMOutputWithPast, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids: torch.LongTensor = None,
attention_mask: Optional[torch.Tensor] = None,
position_ids: Optional[torch.LongTensor] = None,
past_key_values: Optional[List[torch.FloatTensor]] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
labels: Optional[torch.LongTensor] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
valid_past_index: torch.Tensor = torch.tensor(0, dtype=torch.int64),
) -> Union[Tuple, CausalLMOutputWithPast]:
r"""
Args:
labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
(masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
Returns:
Example:
```python
>>> from transformers import AutoTokenizer, LlamaForCausalLM
>>> model = LlamaForCausalLM.from_pretrained(PATH_TO_CONVERTED_WEIGHTS)
>>> tokenizer = AutoTokenizer.from_pretrained(PATH_TO_CONVERTED_TOKENIZER)
>>> prompt = "Hey, are you consciours? Can you talk to me?"
>>> inputs = tokenizer(prompt, return_tensors="pt")
>>> # Generate
>>> generate_ids = model.generate(inputs.input_ids, max_length=30)
>>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
"Hey, are you consciours? Can you talk to me?\nI'm not consciours, but I can talk to you."
```"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
outputs = self.model(
input_ids=input_ids,
attention_mask=attention_mask,
position_ids=position_ids,
past_key_values=past_key_values,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
valid_past_index=valid_past_index,
)
hidden_states = outputs[0]
logits = self.lm_head(hidden_states)
loss = None
if labels is not None:
# Shift so that tokens < n predict n
shift_logits = logits[..., :-1, :].contiguous()
shift_labels = labels[..., 1:].contiguous()
# Flatten the tokens
loss_fct = CrossEntropyLoss()
shift_logits = shift_logits.view(-1, self.config.vocab_size)
shift_labels = shift_labels.view(-1)
# Enable model parallelism
shift_labels = shift_labels.to(shift_logits.device)
loss = loss_fct(shift_logits, shift_labels)
if not return_dict:
output = (logits,) + outputs[1:]
return (loss,) + output if loss is not None else output
return CausalLMOutputWithPast(
loss=loss,
logits=logits,
past_key_values=outputs.past_key_values,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
def prepare_inputs_for_generation(
self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs
):
valid_past_index = kwargs.get("valid_past_index", torch.tensor(0, dtype=torch.int64))
if valid_past_index > 0:
input_ids = input_ids[:, -1:]
position_ids = kwargs.get("position_ids", None)
# create position_ids
if position_ids is None:
attention_mask_slice = attention_mask[:, :input_ids.shape[1]]
position_ids = attention_mask_slice.long().cumsum(-1) - 1
position_ids.masked_fill_(attention_mask_slice == 0, 1)
# if `inputs_embeds` are passed, we only want to use them in the 1st generation step
if inputs_embeds is not None and past_key_values is None:
model_inputs = {"inputs_embeds": inputs_embeds}
else:
model_inputs = {"input_ids": input_ids}
model_inputs.update(
{
"position_ids": position_ids,
"past_key_values": past_key_values,
"attention_mask": attention_mask,
"valid_past_index": valid_past_index,
}
)
return model_inputs
|
import functools
def recurse_getattr(obj, attr: str):
def _getattr(obj, attr):
return getattr(obj, attr)
return functools.reduce(_getattr, [obj] + attr.split("."))
def recurse_hasattr(obj, attr):
    try:
        left, right = attr.split('.', 1)
    except ValueError:  # no dot left in the path
        return hasattr(obj, attr)
    return recurse_hasattr(getattr(obj, left), right)
def recurse_setattr(module, name, value):
if "." not in name:
setattr(module, name, value)
else:
name, rest = name.split(".", 1)
recurse_setattr(getattr(module, name), rest, value)
def recurse_delattr(obj, attr: str):
if "." not in attr:
delattr(obj, attr)
else:
root = ".".join(attr.split(".")[:-1])
end = attr.split(".")[-1]
    delattr(recurse_getattr(obj, root), end)
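# Minimal usage sketch of the helpers above (stdlib only):
if __name__ == "__main__":
    from types import SimpleNamespace

    outer = SimpleNamespace(layer=SimpleNamespace(weight=1))
    assert recurse_getattr(outer, "layer.weight") == 1
    assert recurse_hasattr(outer, "layer.weight")
    recurse_setattr(outer, "layer.weight", 2)
    assert outer.layer.weight == 2
    recurse_delattr(outer, "layer.weight")
    assert not recurse_hasattr(outer, "layer.weight")
|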
# reads the config.yaml file, and prints values for use in a bash eval $() scenario, like:
# eval $(python readconfig.py)
# echo $jenkinspassword
from __future__ import print_function
import yaml
from os import path
from os.path import join
script_dir = path.dirname(path.realpath(__file__))
with open(join(script_dir, 'config.yaml'), 'r') as f:
    config = yaml.safe_load(f)
for k, v in config.items():
print('%s=%s' % (k,v))
|
"""
Spin up an instance, run a single command, spin it down :-)
Usage:
run.py [options] -- <COMMAND> ...
run.py [options] <COMMAND> ...
Options:
--type TYPE type, eg ng0 for bfboost, or ngd3 for dual Titan X [default: ng0]
--image IMAGE image [default: s1]
"""
from __future__ import print_function
import sys
import yaml
import json
import requests
import time
from docopt import docopt
from util.logtailer import LogTailer
api_url = 'https://api.jarvice.com/jarvice'
args = docopt(__doc__)
instancetype = args['--type']
image = args['--image']
command = args['<COMMAND>']
print('command', command)
with open('nimbix.yaml', 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
launch_data = {
"machine": {
"nodes": "1",
"type": instancetype
},
"variables": {
"FOO": "BAR"
},
"vault": {
"readonly": False,
"force": False,
"name": "drop.jarvice.com"
},
"user": {
"username": username,
"apikey": apikey
},
"nae": {
"force": False,
"name": image,
# "geometry": "1904x881",
"command": " ".join(command),
"ephemeral": False,
"staging": True,
"interactive": False
}
}
res = requests.post('%s/submit' % api_url, json=launch_data)
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
jobnumber = res['number']
print('jobnumber %s' % jobnumber)
logtailer = LogTailer(username=username, apikey=apikey, jobnumber=jobnumber)
while True:
res = requests.get('%s/status?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, jobnumber))
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
status = res[str(jobnumber)]['job_status']
logtailer.updateFromTail()
if 'COMPLETED' in status:
break
time.sleep(1)
logtailer.updateFromOutput()
res = requests.get('%s/status?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, jobnumber))
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
print('wall time %s' % res[str(jobnumber)]['job_walltime'])
|
"""
Usage:
tail.py [options]
Options:
--image IMAGE image
"""
from __future__ import print_function
import sys, os, subprocess
import requests
import json
from docopt import docopt
import yaml
api_url = 'https://api.jarvice.com/jarvice'
args = docopt(__doc__)
image = args['--image']
with open('nimbix.yaml', 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
if not image:
print('please provide image name')
sys.exit(1)
res = requests.get('%s/jobs?username=%s&apikey=%s' % (api_url, username, apikey))
res = json.loads(res.content.decode('utf-8'))
target_jobnumber = None
for jobnumber, info in res.items():
if info['job_api_submission']['nae']['name'] == image:
target_jobnumber = jobnumber
break
assert target_jobnumber is not None
res = requests.get('%s/tail?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, target_jobnumber))
print(res.content.decode('utf-8'))
|
# based on https://bitbucket.org/cpbotha/indicator-cpuspeed/src
# work in progress...
# to run it, you'll need to, after installing your env3 virtualenv:
# pushd env3/lib/python3.5/site-packages
# ln -s /usr/lib/python3/dist-packages/gi/ .
# popd
from __future__ import print_function, division
from os import path
from os.path import join
import os.path
import traceback
import yaml
import sys
import argparse
import requests
import json
import subprocess
from gi.repository import Gtk, GLib
try:
from gi.repository import AppIndicator3 as AppIndicator
except ImportError:
from gi.repository import AppIndicator
import re
import jobs
import launch
script_dir = path.dirname(path.realpath(__file__))
api_url = 'https://api.jarvice.com/jarvice'
parser = argparse.ArgumentParser()
parser.add_argument('--configfile', default=join(script_dir, 'nimbix.yaml'))
parser.add_argument('--iconfile')
args = parser.parse_args()
with open(args.configfile, 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
ssh_command = config['ssh_command']
launch_profiles = config.get('launch_profiles', {})
print('launch_profiles', launch_profiles)
class IndicatorCPUSpeed(object):
def __init__(self):
# param1: identifier of this indicator
        # param2: name of icon. this will be searched for in the standard theme dirs
# finally, the category. We're monitoring CPUs, so HARDWARE.
self.ind = AppIndicator.Indicator.new(
"indicator-cpuspeed",
"onboard-mono",
AppIndicator.IndicatorCategory.HARDWARE)
if args.iconfile is not None:
theme_path = path.dirname(args.iconfile)
icon = path.basename(args.iconfile).split('.')[0]
print('theme_path', theme_path, 'icon', icon)
self.ind.set_icon_theme_path(theme_path)
self.ind.set_icon(icon)
# self.ind.set_icon_theme_path(join(script_dir, 'img'))
# self.ind.set_icon('nimbix')
# some more information about the AppIndicator:
# http://developer.ubuntu.com/api/ubuntu-12.04/python/AppIndicator3-0.1.html
# http://developer.ubuntu.com/resources/technologies/application-indicators/
# need to set this for indicator to be shown
        self.ind.set_status(AppIndicator.IndicatorStatus.ACTIVE)
# have to give indicator a menu
self.menu = Gtk.Menu()
# you can use this menu item for experimenting
item = Gtk.MenuItem()
item.set_label("Poll")
item.connect("activate", self.handler_menu_test)
item.show()
self.menu.append(item)
# this is for exiting the app
item = Gtk.MenuItem()
item.set_label("Exit")
item.connect("activate", self.handler_menu_exit)
item.show()
self.menu.append(item)
for launch_profile in launch_profiles:
image = launch_profile['image']
instancetype = launch_profile['type']
name = launch_profile['name']
item = Gtk.MenuItem()
item.set_label("Launch %s" % name)
item.target_image = image
item.target_type = instancetype
item.connect("activate", self.handler_instance_launch)
item.show()
self.menu.insert(item, 0)
self.menu.show()
self.ind.set_menu(self.menu)
# initialize cpu speed display
self.instance_items = []
self.update_cpu_speeds()
        # then start polling every 180 seconds
# http://developer.gnome.org/pygobject/stable/glib-functions.html#function-glib--timeout-add-seconds
GLib.timeout_add_seconds(180, self.handler_timeout)
def handler_poll_onetime(self):
self.update_cpu_speeds()
return False
def handler_menu_exit(self, evt):
Gtk.main_quit()
def handler_menu_test(self, evt):
# we can change the icon at any time
# self.ind.set_icon("indicator-messages-new")
self.update_cpu_speeds()
def handler_timeout(self):
"""This will be called every few seconds by the GLib.timeout.
"""
self.update_cpu_speeds()
# return True so that we get called again
# returning False will make the timeout stop
return True
def handler_instance_launch(self, evt):
self.instance_launch(evt.target_image, evt.target_type)
def handler_instance_ssh(self, evt):
self.instance_ssh(evt.job_number, evt.target_image)
def handler_instance_kill(self, evt):
self.instance_kill(evt.job_number, evt.target_image)
def instance_launch(self, image, instancetype):
launch.launch(config, image, instancetype)
GLib.timeout_add_seconds(10, self.handler_poll_onetime)
def instance_ssh(self, job_number, target_image):
res = requests.get('%s/connect?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, job_number))
res = json.loads(res.content.decode('utf-8'))
ip_address = res['address']
subprocess.Popen(ssh_command.format(
ip_address=ip_address,
image=target_image
).split())
def instance_kill(self, job_number, target_image):
res = requests.get('%s/shutdown?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, job_number))
res = json.loads(res.content.decode('utf-8'))
GLib.timeout_add_seconds(10, self.handler_poll_onetime)
def update_cpu_speeds(self):
label = 'failed'
try:
jobslist = jobs.get_jobs(config)
label = ''
for item in self.instance_items:
self.menu.remove(item)
self.instance_items.clear()
for job in jobslist:
if label != '':
label += ' '
if job['status'] in ['SUBMITTED']:
label += '(' + job['type'] + ')'
                    GLib.timeout_add_seconds(10, self.handler_poll_onetime)  # fast poll while waiting for it to start
else:
label += job['type']
item = Gtk.MenuItem()
item.set_label('ssh to %s' % job['image'])
item.connect("activate", self.handler_instance_ssh)
item.target_image = job['image']
item.job_number = job['number']
item.show()
self.menu.insert(item, 0)
self.instance_items.append(item)
item = Gtk.MenuItem()
item.set_label('kill %s' % job['image'])
item.connect("activate", self.handler_instance_kill)
item.target_image = job['image']
item.job_number = job['number']
item.show()
self.menu.insert(item, 0)
self.instance_items.append(item)
except Exception as e:
label = 'exception occurred'
try:
print(traceback.format_exc())
except:
print('exception in exception :-P')
self.ind.set_label(label, "")
def main(self):
Gtk.main()
if __name__ == "__main__":
ind = IndicatorCPUSpeed()
ind.main()
|
"""
Usage:
launch.py [options]
Options:
--image IMAGE image [default: ng0]
"""
from __future__ import print_function
import sys, os, subprocess
import requests
import yaml
import json
from docopt import docopt
api_url = 'https://api.jarvice.com/jarvice'
# ssh_path = '/usr/bin/ssh'
args = docopt(__doc__)
image = args['--image']
with open('nimbix.yaml', 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
if image == '' or image is None:
print('please provide image name')
sys.exit(1)
res = requests.get('%s/jobs?username=%s&apikey=%s' % (api_url, username, apikey))
#print(res.status_code)
res = json.loads(res.content.decode('utf-8'))
#print('res', res)
#print(res.content.decode('utf-8'))
target_jobnumber = None
for jobnumber, info in res.items():
# print('jobnumber', jobnumber)
if info['job_api_submission']['nae']['name'] == image:
target_jobnumber = jobnumber
break
assert target_jobnumber is not None
# print('image', image)
# if imag
res = requests.get('%s/shutdown?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, target_jobnumber))
res = json.loads(res.content.decode('utf-8'))
print('res', res)
|
"""
Usage:
launch.py
"""
from __future__ import print_function
import sys, os, subprocess
import requests
import json
from os import path
from os.path import join
from docopt import docopt
import yaml
api_url = 'https://api.jarvice.com/jarvice'
args = docopt(__doc__)
script_dir = path.dirname(path.realpath(__file__))
with open(join(script_dir, 'nimbix.yaml'), 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
def get_jobs():
res = requests.get('%s/jobs?username=%s&apikey=%s' % (api_url, username, apikey))
res = json.loads(res.content.decode('utf-8'))
jobs = []
for jobnumber, info in res.items():
# print(json.dumps(info, indent=2))
# print(jobnumber, info['job_api_submission']['nae']['name'])
job = {}
job['number'] = jobnumber
job['image'] = info['job_api_submission']['nae']['name']
job['type'] = info['job_api_submission']['machine']['type']
job['count'] = int(info['job_api_submission']['machine']['nodes'])
jobs.append(job)
return jobs
if __name__ == '__main__':
for job in get_jobs():
print(job['type'], job['image'], job['count'])
res = requests.get('%s/shutdown?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, job['number']))
res = json.loads(res.content.decode('utf-8'))
print('res', res)
# print(get_jobs())
|
from __future__ import print_function, unicode_literals
import sys
import os
from os import path
from os.path import join
import os.path
import yaml
#script_dir = path.dirname(path.realpath(__file__))
def checkChanges(filepath, data):
    newString = yaml.safe_dump(data, default_flow_style=False)
oldString = ''
if os.path.isfile(filepath):
f = open(filepath,'r')
oldString = f.read()
f.close()
changesString = ''
    if newString != oldString:
changesString += 'changed:\n'
changesString += 'old:\n'
changesString += oldString + '\n'
changesString += 'new:\n'
changesString += newString + '\n'
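    # write to a '~' sibling first, then rename over the target; the rename
    # is atomic on POSIX, so readers never see a half-written file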
with open('%s~' % filepath, 'w') as f:
f.write( newString )
os.rename('%s~' % filepath, filepath)
return changesString
|
from __future__ import print_function
import sys, os, subprocess
import requests
import json
import argparse
from os import path
from os.path import join
import yaml
api_url = 'https://api.jarvice.com/jarvice'
script_dir = path.dirname(path.realpath(__file__))
def get_jobs(config):
username = config['username']
apikey = config['apikey']
res = requests.get('%s/jobs?username=%s&apikey=%s' % (api_url, username, apikey))
res = json.loads(res.content.decode('utf-8'))
jobs = []
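    # each dict below ends up looking like (values illustrative):
    #   {'number': '12345', 'image': 'ng0', 'type': 'ng0',
    #    'count': 1, 'status': 'PROCESSING STARTING'}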
for jobnumber, info in res.items():
# print(json.dumps(info, indent=2))
# print(jobnumber, info['job_api_submission']['nae']['name'])
# job_start_time": 1467578395
job = {}
job['number'] = jobnumber
job['image'] = info['job_api_submission']['nae']['name']
job['type'] = info['job_api_submission']['machine']['type']
job['count'] = int(info['job_api_submission']['machine']['nodes'])
job['status'] = info['job_status']
jobs.append(job)
return jobs
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--configfile', default=join(script_dir, 'nimbix.yaml'))
args = parser.parse_args()
config_path = args.configfile
if not config_path.startswith('/'):
config_path = join(script_dir, config_path)
with open(config_path, 'r') as f:
        config = yaml.safe_load(f)
for job in get_jobs(config):
print(job['type'], job['image'], job['count'], job['status'])
# print(get_jobs())
|
from __future__ import print_function
import sys
import yaml
from os.path import join
from os import path
import argparse
import requests
import argparse
api_url = 'https://api.jarvice.com/jarvice'
script_dir = path.dirname(path.realpath(__file__))
def launch(config, image, instancetype):
username = config['username']
apikey = config['apikey']
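    # payload for POST <api_url>/submit: a single interactive node of the
    # requested machine type, with the user's drop.jarvice.com vault attached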
launch_data = {
"machine": {
"nodes": "1",
"type": instancetype
},
"vault": {
"readonly": False,
"force": False,
"name": "drop.jarvice.com"
},
"user": {
"username": username,
"apikey": apikey
},
"nae": {
"force": False,
"name": image,
"geometry": "1904x881",
"ephemeral": False,
"staging": True,
"interactive": True
}
}
res = requests.post('%s/submit' % api_url, json=launch_data)
return res.status_code, res.content
if __name__ == '__main__':
instancetype = None
config_path = 'nimbix.yaml'
if len(sys.argv) > 2 or (len(sys.argv) == 2 and sys.argv[1] in ['--help']):
parser = argparse.ArgumentParser()
parser.add_argument('--type', help='type, eg ng0 for bfboost, or ngd3 for dual Titan X')
parser.add_argument('--image', default='ng0', help='image name (basically, container name, more or less)')
parser.add_argument('--configfile', default=join(script_dir, 'nimbix.yaml'))
args = parser.parse_args()
instancetype = args.type
image = args.image
config_path = args.configfile
    else:
        if len(sys.argv) < 2:
            sys.exit('please provide an image name, or run with --help')
        image = sys.argv[1]
if not config_path.startswith('/'):
config_path = join(script_dir, config_path)
with open(config_path, 'r') as f:
        config = yaml.safe_load(f)
if instancetype is None:
instancetype = config['type_by_instance'].get(image, image)
print('instancetype: %s' % instancetype)
status_code, content = launch(config, image, instancetype)
print(status_code)
print(content)
|
import jobs
import checkchanges
from os.path import join
from os import path
script_dir = path.dirname(path.realpath(__file__))
current_jobs = jobs.get_jobs()
print(checkchanges.checkChanges(join(script_dir, 'nimbixinstances.txt'), current_jobs))
|
from __future__ import print_function
import sys, os, subprocess
import requests
import json
import argparse
import yaml
api_url = 'https://api.jarvice.com/jarvice'
# ssh_path = '/usr/bin/ssh'
with open('nimbix.yaml', 'r') as f:
    config = yaml.safe_load(f)
instancetype = None
if len(sys.argv) > 2:
parser = argparse.ArgumentParser()
parser.add_argument('--image', default='ng0', help='image name (basically, container name, more or less)')
args = parser.parse_args()
image = args.image
else:
    image = sys.argv[1] if len(sys.argv) > 1 else None
username = config['username']
apikey = config['apikey']
if image == '' or image is None:
print('please provide image name')
sys.exit(1)
res = requests.get('%s/jobs?username=%s&apikey=%s' % (api_url, username, apikey))
#print(res.status_code)
res = json.loads(res.content.decode('utf-8'))
#print('res', res)
#print(res.content.decode('utf-8'))
target_jobnumber = None
for jobnumber, info in res.items():
# print('jobnumber', jobnumber)
if info['job_api_submission']['nae']['name'] == image:
target_jobnumber = jobnumber
break
assert target_jobnumber is not None
# print('image', image)
# if imag
res = requests.get('%s/connect?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, target_jobnumber))
#print(res.status_code)
#print(res.content)
res = json.loads(res.content.decode('utf-8'))
ip_address = res['address']
#print('ip_address', ip_address)
print(ip_address)
|
"""
Spin up an instance, run a single script, spin it down :-)
Usage:
run.py [options] <SCRIPTPATH>
Options:
--type TYPE type, eg ng0 for bfboost, or ngd3 for dual Titan X [default: ng0]
--image IMAGE image [default: s1]
"""
from __future__ import print_function
import sys
import yaml
import json
import requests
import pysftp
import time
from docopt import docopt
from util.logtailer import LogTailer
api_url = 'https://api.jarvice.com/jarvice'
drop_host = 'drop.jarvice.com'
args = docopt(__doc__)
instancetype = args['--type']
image = args['--image']
scriptPath = args['<SCRIPTPATH>']
print('scriptPath', scriptPath)
#command = args['<COMMAND>']
#print('command', command)
with open('nimbix.yaml', 'r') as f:
    config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
scriptName = scriptPath.split('/')[-1]
print('scriptName', scriptName)
# need to sftp it up to data first...
with pysftp.Connection(drop_host, username=username, password=apikey) as sftp:
    try:
        sftp.mkdir('temp')
    except IOError:
        # directory already exists
        pass
# sftp.cd('temp'):
sftp.put(scriptPath, "temp/%s" % scriptName)
launch_data = {
"machine": {
"nodes": "1",
"type": instancetype
},
"variables": {
"FOO": "BAR"
},
"vault": {
"readonly": False,
"force": False,
"name": "drop.jarvice.com"
},
"user": {
"username": username,
"apikey": apikey
},
"nae": {
"force": False,
"name": image,
# "geometry": "1904x881",
"command": "bash /data/temp/%s" % scriptName,
"ephemeral": False,
"staging": True,
"interactive": False
}
}
res = requests.post('%s/submit' % api_url, json=launch_data)
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
jobnumber = res['number']
print('jobnumber %s' % jobnumber)
def get_last_nonblank_index(target):
index = len(target) - 1
while index > 0 and target[index] == '':
index -= 1
return index
logtailer = LogTailer(username=username, apikey=apikey, jobnumber=jobnumber)
while True:
res = requests.get('%s/status?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, jobnumber))
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
status = res[str(jobnumber)]['job_status']
    if str(status) == 'SUBMITTED':
time.sleep(1)
continue
logtailer.updateFromTail()
if 'COMPLETED' in status:
break
time.sleep(1)
logtailer.updateFromOutput()
res = requests.get('%s/status?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, jobnumber))
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
print('wall time %s' % res[str(jobnumber)]['job_walltime'])
|
import requests
import json
api_url = 'https://api.jarvice.com/jarvice'
class LogTailer(object):
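    """Incrementally echoes a Jarvice job's log.

    Keeps a count of lines printed so far, so successive calls to
    updateFromTail() / updateFromOutput() only print lines that have
    not been seen yet.
    """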
def __init__(self, username, apikey, jobnumber):
self.lines_printed = 0
self.jobnumber = jobnumber
self.username = username
self.apikey = apikey
@staticmethod
def get_last_nonblank_index(target):
index = len(target) - 1
while index > 0 and target[index] == '':
index -= 1
return index
def updateFromTail(self, echo=True):
res = requests.get('%s/tail?username=%s&apikey=%s&number=%s&lines=10000' % (
api_url, self.username, self.apikey, self.jobnumber))
if res.content.decode('utf-8') == '{\n "error": "Running job is not found"\n}\n':
return
full_log = res.content.decode('utf-8').split('\n')
last_nonblank_line = self.get_last_nonblank_index(full_log)
full_log = full_log[:last_nonblank_line + 1]
new_numlines = len(full_log)
res = None
if new_numlines != self.lines_printed:
res = '\n'.join(full_log[self.lines_printed:])
self.lines_printed = new_numlines
if res is not None and echo:
print(res)
return res
def updateFromOutput(self, echo=True):
res = requests.get('%s/output?username=%s&apikey=%s&number=%s' % (
api_url, self.username, self.apikey, self.jobnumber))
full_log = res.content.decode('utf-8').replace('\r', '').split('\n')
last_nonblank_line = self.get_last_nonblank_index(full_log)
full_log = full_log[:last_nonblank_line + 1]
new_numlines = len(full_log)
res = None
if new_numlines != self.lines_printed:
res = '\n'.join(full_log[self.lines_printed:])
# print('\n'.join(full_log[self.lines_printed:]))
self.lines_printed = new_numlines
if res is not None and echo:
print(res)
return res
|
"""
Thin webservice, that wraps nimbix api, is responsible for knowing the apikey,
but can only run very specific scripts in very specific ways.
Its use-case is to reduce the risk that someone spawns a zillion read-only images, running arbitrary scripts.
Jenkins then (for example), can then point at this service, not know the apikey itself.
Jenkins security doesnt have to then be quite so fort-knox tight
(although, if it has 'push' access to your repos, it should be fairly tight...),
since jenkins doesnt then have the power to bankrupt your overnight :-P
"""
from flask import Flask, request, Response
import os
from os import path
from os.path import join
import sys
import yaml
import requests
import json
import argparse
import time
import logging
import pysftp
from util.logtailer import LogTailer
logging.basicConfig()
logger = logging.getLogger(__name__)
script_dir = path.dirname(path.dirname(path.realpath(__file__)))
api_url = 'https://api.jarvice.com/jarvice'
drop_host = 'drop.jarvice.com'
parser = argparse.ArgumentParser()
parser.add_argument('--configfile', default=join(script_dir, 'nimbix.yaml'))
parser.add_argument('--loglevel', default='info')
args = parser.parse_args()
logger.setLevel(logging.__dict__[args.loglevel.upper()])
with open(args.configfile, 'r') as f:
    config = yaml.safe_load(f)
with open(join(script_dir, 'wrapper-service/config.yaml'), 'r') as f:
    wrapper_config = yaml.safe_load(f)
username = config['username']
apikey = config['apikey']
type_by_instance = config.get('type_by_instance', {})
app = Flask(__name__)
@app.route('/run', methods=['POST'])
def run():
try:
commit_hash = request.values.get('h', None)
secret = request.values.get('s', None)
branch = request.values.get('b', None)
project = request.values.get('p', None)
ghtoken = request.values.get('g', None)
pyversion = request.values.get('py', None)
# validation
client_ip = request.remote_addr
if client_ip not in ['127.0.0.1']: # 127.0.0.1 should be ok...
logger.info('client ip %s config ip %s' % (client_ip, '127.0.0.1'))
            raise Exception("client ip doesn't match that in config => ignoring")
if secret != wrapper_config['shared_secret']:
raise Exception('shared secret not correct, or absent => ignoring')
if commit_hash is None:
raise Exception('no commit_hash provided => ignoring')
if branch is None:
raise Exception('no branch provided => ignoring')
if project is None:
raise Exception('no project provided => ignoring')
commit_hash = str(commit_hash)
if len(commit_hash) > 40:
raise Exception('commit_hash exceeds length 40 => ignoring')
        for c in commit_hash:  # there is probably a faster way of doing this; anyway...
if c not in "abcdef0123456789":
raise Exception('illegal character found => ignoring')
# if we got here, we assume validation is ok
username = config['username']
apikey = config['apikey']
image = wrapper_config['image']
instancetype = wrapper_config['instance_type']
# ftp the script to drop host
scriptPath = '/tmp/~job.sh'
with open(scriptPath, 'w') as f:
f.write(wrapper_config['script'].format(commit_hash=commit_hash, project=project, branch=branch, ghtoken1=ghtoken, ghtoken2=ghtoken, pyversion=pyversion))
scriptName = '~job.sh'
logger.debug('doing ftp...')
with pysftp.Connection(drop_host, username=username, password=apikey) as sftp:
#try:
# sftp.mkdir('temp')
#except Exception as e:
# print('exception %s' % str(e))
# pass
sftp.put(scriptPath, "%s" % scriptName)
logger.debug('... ftp done')
# start the job...
launch_data = {
"machine": {
"nodes": "1",
"type": instancetype
},
"vault": {
"readonly": False,
"force": False,
"name": "drop.jarvice.com"
},
"user": {
"username": username,
"apikey": apikey
},
"nae": {
"force": False,
"name": image,
"command": "bash /data/%s" % scriptName,
"ephemeral": False, # this is important: means read/write, and can only launch this image once at a time
"staging": True,
"interactive": False,
"walltime": "0:%s:00" % wrapper_config['max_time_minutes'],
}
}
logger.debug('launch_data %s' % json.dumps(launch_data))
res = requests.post('%s/submit' % api_url, json=launch_data)
logger.info('%s %s' % (res.status_code, res.content))
res = json.loads(res.content.decode('utf-8'))
jobnumber = res['number']
logger.debug('jobnumber %s' % jobnumber)
def response_generator():
logtailer = LogTailer(username=username, apikey=apikey, jobnumber=jobnumber)
while True:
res = requests.get('%s/status?username=%s&apikey=%s&number=%s' % (api_url, username, apikey, jobnumber))
assert res.status_code == 200
res = json.loads(res.content.decode('utf-8'))
status = res[str(jobnumber)]['job_status']
                if str(status) == 'SUBMITTED':
time.sleep(1)
continue
res = logtailer.updateFromTail()
if res is not None:
yield res
if 'COMPLETED' in status:
break
time.sleep(1)
res = logtailer.updateFromOutput()
if res is not None:
yield res
return Response(response_generator())
except Exception as e:
logger.exception(e)
return ""
app.run(host='0.0.0.0', port=3237)
|
# Logic copied from PEP 513
def is_manylinux1_compatible():
# Only Linux, and only x86-64 / i686
from distutils.util import get_platform
if get_platform() not in ["linux-x86_64", "linux-i686"]:
return False
# Check for presence of _manylinux module
try:
import _manylinux
return bool(_manylinux.manylinux1_compatible)
except (ImportError, AttributeError):
# Fall through to heuristic check below
pass
# Check glibc version. CentOS 5 uses glibc 2.5.
return have_compatible_glibc(2, 5)
def have_compatible_glibc(major, minimum_minor):
import ctypes
process_namespace = ctypes.CDLL(None)
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return False
# Call gnu_get_libc_version, which returns a string like "2.5".
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
# Parse string and check against requested version.
version = [int(piece) for piece in version_str.split(".")]
assert len(version) == 2
if major != version[0]:
return False
if minimum_minor > version[1]:
return False
return True
import sys
if is_manylinux1_compatible():
print("%s is manylinux1 compatible" % (sys.executable,))
sys.exit(0)
else:
print("%s is NOT manylinux1 compatible" % (sys.executable,))
sys.exit(1)
|
# cf. https://github.com/pypa/manylinux/issues/53
GOOD_SSL = "https://google.com"
BAD_SSL = "https://self-signed.badssl.com"
import sys
print("Testing SSL certificate checking for Python:", sys.version)
if (sys.version_info[:2] < (2, 7)
        or (sys.version_info[0] == 3 and sys.version_info[:2] < (3, 4))):
print("This version never checks SSL certs; skipping tests")
sys.exit(0)
if sys.version_info[0] >= 3:
from urllib.request import urlopen
EXC = OSError
else:
from urllib import urlopen
EXC = IOError
print("Connecting to %s should work" % (GOOD_SSL,))
urlopen(GOOD_SSL)
print("...it did, yay.")
print("Connecting to %s should fail" % (BAD_SSL,))
try:
urlopen(BAD_SSL)
# If we get here then we failed:
print("...it DIDN'T!!!!!11!!1one!")
sys.exit(1)
except EXC:
print("...it did, yay.") |
# Utility script to print the python tag + the abi tag for a Python
# See PEP 425 for exactly what these are, but an example would be:
# cp27-cp27mu
from wheel.pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag
print("{0}{1}-{2}".format(get_abbr_impl(), get_impl_ver(), get_abi_tag())) |
import botocore
import boto3
import os
from os import walk
from tqdm import tqdm
import gzip
if not os.path.exists('cache'):
os.makedirs('cache')
s3 = boto3.resource('s3')
client = boto3.client('s3')
bucket = s3.Bucket('pytorch')
print('Downloading log files')
for key in tqdm(bucket.objects.filter(Prefix='cflogs')):
# print(key.key)
remote_fname = key.key
local_fname = os.path.join('cache', remote_fname)
if not os.path.exists(local_fname):
dirname = os.path.dirname(local_fname)
if not os.path.exists(dirname):
os.makedirs(dirname)
client.download_file("pytorch", remote_fname, local_fname)
size_cache = dict()
def get_size(name):
if name[0] == '/':
name = name[1:]
if name not in size_cache:
for key in bucket.objects.filter(Prefix=name):
size_cache[name] = key.size
return size_cache[name]
valid_cache = dict()
def is_valid(name):
if name not in valid_cache:
exists = False
try:
s3.Object('pytorch', name).load()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] == "404":
exists = False
else:
raise
else:
exists = True
valid_cache[name] = exists
return valid_cache[name]
# parse all files, read each line, add up all the bytes sizes
print('parsing log files')
bytes_cache = dict()
for (dirpath, dirnames, filenames) in walk('cache/cflogs/'):
for filename in tqdm(filenames):
f = gzip.open(os.path.join(dirpath, filename), 'r')
string = f.read().decode("utf-8")
f.close()
entries = string.splitlines()[2:]
        for entry in entries:
            columns = entry.split('\t')
            request_path = columns[7]
            if request_path[0] == '/':
                request_path = request_path[1:]
            bytes_sent = columns[3]
            if request_path not in bytes_cache:
                bytes_cache[request_path] = 0
            bytes_cache[request_path] += int(bytes_sent)
print('Filtering invalid entries')
final_list = dict()
for k, v in tqdm(bytes_cache.items()):
if '.whl' in k and is_valid(k):
final_list[k] = v
print('Counting downloads (bytes sent / filesize)')
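# each complete transfer of a wheel counts as one download: e.g. 5 GB sent
# for a 500 MB wheel counts as ~10 downloads; partial or resumed transfers
# therefore show up as fractional counts before rounding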
total_downloads = 0
for k, v in final_list.items():
sz = get_size(k)
downloads = v / sz
print(k, round(downloads))
total_downloads += downloads
print('')
print('')
print('Total PyTorch wheel downloads: ', round(total_downloads))
print('')
print('')
|
#!/usr/bin/env python
import codecs
import os.path
import re
import sys
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
requires = ['jmespath>=0.7.1,<1.0.0',
'python-dateutil>=2.1,<3.0.0',
'docutils>=0.10']
if sys.version_info[:2] == (2, 6):
# For python2.6 we have a few other dependencies.
# First we need an ordered dictionary so we use the
# 2.6 backport.
requires.append('ordereddict==1.1')
# Then we need simplejson. This is because we need
# a json version that allows us to specify we want to
# use an ordereddict instead of a normal dict for the
# JSON objects. The 2.7 json module has this. For 2.6
# we need simplejson.
requires.append('simplejson==3.3.0')
setup(
name='botocore',
version=find_version("botocore", "__init__.py"),
description='Low-level, data-driven core of boto 3.',
long_description=open('README.rst').read(),
author='Amazon Web Services',
url='https://github.com/boto/botocore',
scripts=[],
packages=find_packages(exclude=['tests*']),
package_data={'botocore': ['data/*.json', 'data/*/*.json'],
'botocore.vendored.requests': ['*.pem']},
include_package_data=True,
install_requires=requires,
extras_require={
':python_version=="2.6"': [
'ordereddict==1.1',
'simplejson==3.3.0',
]
},
license="Apache License 2.0",
classifiers=(
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
),
)
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
import sys
import mock
import time
import random
import shutil
import contextlib
import tempfile
import binascii
import platform
import select
import datetime
from subprocess import Popen, PIPE
from dateutil.tz import tzlocal
# The unittest module got a significant overhaul
# in 2.7, so if we're in 2.6 we can use the backported
# version unittest2.
if sys.version_info[:2] == (2, 6):
import unittest2 as unittest
else:
import unittest
from nose.tools import assert_equal
import botocore.loaders
import botocore.session
from botocore.compat import six
from botocore.compat import urlparse
from botocore.compat import parse_qs
from botocore import utils
from botocore import credentials
_LOADER = botocore.loaders.Loader()
def skip_unless_has_memory_collection(cls):
"""Class decorator to skip tests that require memory collection.
Any test that uses memory collection (such as the resource leak tests)
can decorate their class with skip_unless_has_memory_collection to
indicate that if the platform does not support memory collection
the tests should be skipped.
"""
if platform.system() not in ['Darwin', 'Linux']:
return unittest.skip('Memory tests only supported on mac/linux.')(cls)
return cls
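# e.g. (matches the usage on BaseClientDriverTest further down):
#
#   @skip_unless_has_memory_collection
#   class MyMemoryTest(unittest.TestCase):
#       ...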
def skip_if_windows(reason):
"""Decorator to skip tests that should not be run on windows.
Example usage:
@skip_if_windows("Not valid")
def test_some_non_windows_stuff(self):
self.assertEqual(...)
"""
def decorator(func):
return unittest.skipIf(
platform.system() not in ['Darwin', 'Linux'], reason)(func)
return decorator
def random_chars(num_chars):
"""Returns random hex characters.
Useful for creating resources with random names.
"""
return binascii.hexlify(os.urandom(int(num_chars / 2))).decode('ascii')
def create_session(**kwargs):
# Create a Session object. By default,
# the _LOADER object is used as the loader
    # so that we reuse the same models across tests.
session = botocore.session.Session(**kwargs)
session.register_component('data_loader', _LOADER)
session.set_config_variable('credentials_file', 'noexist/foo/botocore')
return session
@contextlib.contextmanager
def temporary_file(mode):
"""This is a cross platform temporary file creation.
    tempfile.NamedTemporaryFile on Windows creates a secure temp file
    that can't be read by other processes and can't be opened a second time.
For tests, we generally *want* them to be read multiple times.
The test fixture writes the temp file contents, the test reads the
temp file.
"""
temporary_directory = tempfile.mkdtemp()
basename = 'tmpfile-%s-%s' % (int(time.time()), random.randint(1, 1000))
full_filename = os.path.join(temporary_directory, basename)
open(full_filename, 'w').close()
try:
with open(full_filename, mode) as f:
yield f
finally:
shutil.rmtree(temporary_directory)
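# typical use (a sketch): the fixture writes through one handle, and the
# code under test reopens the file by name:
#
#   with temporary_file('w') as f:
#       f.write('fake credentials')
#       f.flush()
#       session.set_config_variable('credentials_file', f.name)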
class BaseEnvVar(unittest.TestCase):
def setUp(self):
# Automatically patches out os.environ for you
# and gives you a self.environ attribute that simulates
# the environment. Also will automatically restore state
# for you in tearDown()
self.environ = {}
self.environ_patch = mock.patch('os.environ', self.environ)
self.environ_patch.start()
def tearDown(self):
self.environ_patch.stop()
class BaseSessionTest(BaseEnvVar):
"""Base class used to provide credentials.
    This class can be used as a base class for tests that want to use a
    real Session object but be completely isolated from the
    external environment (including environment variables).
This class will also set credential vars so you can make fake
requests to services.
"""
def setUp(self, **environ):
super(BaseSessionTest, self).setUp()
self.environ['AWS_ACCESS_KEY_ID'] = 'access_key'
self.environ['AWS_SECRET_ACCESS_KEY'] = 'secret_key'
self.environ['AWS_CONFIG_FILE'] = 'no-exist-foo'
self.environ.update(environ)
self.session = create_session()
self.session.config_filename = 'no-exist-foo'
@skip_unless_has_memory_collection
class BaseClientDriverTest(unittest.TestCase):
INJECT_DUMMY_CREDS = False
def setUp(self):
self.driver = ClientDriver()
env = None
if self.INJECT_DUMMY_CREDS:
env = {'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar'}
self.driver.start(env=env)
def cmd(self, *args):
self.driver.cmd(*args)
def send_cmd(self, *args):
self.driver.send_cmd(*args)
def record_memory(self):
self.driver.record_memory()
@property
def memory_samples(self):
return self.driver.memory_samples
def tearDown(self):
self.driver.stop()
class ClientDriver(object):
CLIENT_SERVER = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'cmd-runner'
)
def __init__(self):
self._popen = None
self.memory_samples = []
def _get_memory_with_ps(self, pid):
# It would be better to eventually switch to psutil,
# which should allow us to test on windows, but for now
# we'll just use ps and run on POSIX platforms.
command_list = ['ps', '-p', str(pid), '-o', 'rss']
p = Popen(command_list, stdout=PIPE)
stdout = p.communicate()[0]
        if p.returncode != 0:
raise RuntimeError("Could not retrieve memory")
else:
# Get the RSS from output that looks like this:
# RSS
# 4496
return int(stdout.splitlines()[1].split()[0]) * 1024
def record_memory(self):
mem = self._get_memory_with_ps(self._popen.pid)
self.memory_samples.append(mem)
def start(self, env=None):
"""Start up the command runner process."""
self._popen = Popen([sys.executable, self.CLIENT_SERVER],
stdout=PIPE, stdin=PIPE, env=env)
def stop(self):
"""Shutdown the command runner process."""
self.cmd('exit')
self._popen.wait()
def send_cmd(self, *cmd):
"""Send a command and return immediately.
This is a lower level method than cmd().
This method will instruct the cmd-runner process
to execute a command, but this method will
immediately return. You will need to use
``is_cmd_finished()`` to check that the command
is finished.
This method is useful if you want to record attributes
about the process while an operation is occurring. For
example, if you want to instruct the cmd-runner process
to upload a 1GB file to S3 and you'd like to record
the memory during the upload process, you can use
send_cmd() instead of cmd().
"""
cmd_str = ' '.join(cmd) + '\n'
cmd_bytes = cmd_str.encode('utf-8')
self._popen.stdin.write(cmd_bytes)
self._popen.stdin.flush()
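    # sketch of the pattern described in send_cmd()'s docstring; the
    # command name and arguments sent to cmd-runner here are hypothetical:
    #
    #   driver.send_cmd('upload_file', 'mybucket', 'mykey', '/tmp/big')
    #   while not driver.is_cmd_finished():
    #       driver.record_memory()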
def is_cmd_finished(self):
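        # a 10ms select() on the child's stdout; if it is readable, the
        # status line for the previous command has arrived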
rlist = [self._popen.stdout.fileno()]
result = select.select(rlist, [], [], 0.01)
if result[0]:
return True
return False
def cmd(self, *cmd):
"""Send a command and block until it finishes.
This method will send a command to the cmd-runner process
to run. It will block until the cmd-runner process is
finished executing the command and sends back a status
response.
"""
self.send_cmd(*cmd)
result = self._popen.stdout.readline().strip()
if result != b'OK':
raise RuntimeError(
"Error from command '%s': %s" % (cmd, result))
# This is added to this file because it's used in both
# the functional and unit tests for cred refresh.
class IntegerRefresher(credentials.RefreshableCredentials):
"""Refreshable credentials to help with testing.
This class makes testing refreshable credentials easier.
It has the following functionality:
* A counter, self.refresh_counter, to indicate how many
times refresh was called.
* A way to specify how many seconds to make credentials
valid.
* Configurable advisory/mandatory refresh.
* An easy way to check consistency. Each time creds are
refreshed, all the cred values are set to the next
incrementing integer. Frozen credentials should always
have this value.
"""
_advisory_refresh_timeout = 2
_mandatory_refresh_timeout = 1
_credentials_expire = 3
def __init__(self, creds_last_for=_credentials_expire,
advisory_refresh=_advisory_refresh_timeout,
mandatory_refresh=_mandatory_refresh_timeout,
refresh_function=None):
expires_in = (
self._current_datetime() +
datetime.timedelta(seconds=creds_last_for))
if refresh_function is None:
refresh_function = self._do_refresh
super(IntegerRefresher, self).__init__(
'0', '0', '0', expires_in,
refresh_function, 'INTREFRESH')
self.creds_last_for = creds_last_for
self.refresh_counter = 0
self._advisory_refresh_timeout = advisory_refresh
self._mandatory_refresh_timeout = mandatory_refresh
def _do_refresh(self):
self.refresh_counter += 1
current = int(self._access_key)
next_id = str(current + 1)
return {
'access_key': next_id,
'secret_key': next_id,
'token': next_id,
'expiry_time': self._seconds_later(self.creds_last_for),
}
def _seconds_later(self, num_seconds):
# We need to guarantee at *least* num_seconds.
# Because this doesn't handle subsecond precision
# we'll round up to the next second.
num_seconds += 1
t = self._current_datetime() + datetime.timedelta(seconds=num_seconds)
return self._to_timestamp(t)
def _to_timestamp(self, datetime_obj):
obj = utils.parse_to_aware_datetime(datetime_obj)
return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
def _current_timestamp(self):
return self._to_timestamp(self._current_datetime())
def _current_datetime(self):
return datetime.datetime.now(tzlocal())
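# a minimal sketch of the consistency property described in the class
# docstring above (botocore's RefreshableCredentials.get_frozen_credentials()
# is assumed):
#
#   refresher = IntegerRefresher(creds_last_for=2)
#   frozen = refresher.get_frozen_credentials()
#   # all three values carry the same incrementing integer
#   assert frozen.access_key == frozen.secret_key == frozen.token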
def _urlparse(url):
if isinstance(url, six.binary_type):
# Not really necessary, but it helps to reduce noise on Python 2.x
url = url.decode('utf8')
return urlparse(url)
def assert_url_equal(url1, url2):
parts1 = _urlparse(url1)
parts2 = _urlparse(url2)
# Because the query string ordering isn't relevant, we have to parse
# every single part manually and then handle the query string.
assert_equal(parts1.scheme, parts2.scheme)
assert_equal(parts1.netloc, parts2.netloc)
assert_equal(parts1.path, parts2.path)
assert_equal(parts1.params, parts2.params)
assert_equal(parts1.fragment, parts2.fragment)
assert_equal(parts1.username, parts2.username)
assert_equal(parts1.password, parts2.password)
assert_equal(parts1.hostname, parts2.hostname)
assert_equal(parts1.port, parts2.port)
assert_equal(parse_qs(parts1.query), parse_qs(parts2.query))
|
from tests import unittest
from botocore import model
from botocore.compat import OrderedDict
def test_missing_model_attribute_raises_exception():
# We're using a nose test generator here to cut down
# on the duplication. The property names below
# all have the same test logic.
service_model = model.ServiceModel({'metadata': {'endpointPrefix': 'foo'}})
property_names = ['api_version', 'protocol']
def _test_attribute_raise_exception(attr_name):
try:
getattr(service_model, attr_name)
except model.UndefinedModelAttributeError:
# This is what we expect, so the test passes.
pass
except Exception as e:
raise AssertionError("Expected UndefinedModelAttributeError to "
"be raised, but %s was raised instead" %
(e.__class__))
else:
raise AssertionError(
"Expected UndefinedModelAttributeError to "
"be raised, but no exception was raised for: %s" % attr_name)
for name in property_names:
yield _test_attribute_raise_exception, name
class TestServiceModel(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'query',
'endpointPrefix': 'endpoint-prefix'},
'documentation': 'Documentation value',
'operations': {},
'shapes': {
'StringShape': {'type': 'string'}
}
}
self.service_model = model.ServiceModel(self.model)
def test_metadata_available(self):
# You should be able to access the metadata in a service description
# through the service model object.
self.assertEqual(self.service_model.metadata.get('protocol'), 'query')
def test_service_name_can_be_overriden(self):
service_model = model.ServiceModel(self.model,
service_name='myservice')
self.assertEqual(service_model.service_name, 'myservice')
def test_service_name_defaults_to_endpoint_prefix(self):
self.assertEqual(self.service_model.service_name, 'endpoint-prefix')
def test_operation_does_not_exist(self):
with self.assertRaises(model.OperationNotFoundError):
self.service_model.operation_model('NoExistOperation')
def test_signing_name_defaults_to_endpoint_prefix(self):
self.assertEqual(self.service_model.signing_name, 'endpoint-prefix')
def test_documentation_exposed_as_property(self):
self.assertEqual(self.service_model.documentation,
'Documentation value')
def test_shape_names(self):
self.assertEqual(self.service_model.shape_names, ['StringShape'])
class TestOperationModelFromService(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'query', 'endpointPrefix': 'foo'},
'documentation': '',
'operations': {
'OperationName': {
'http': {
'method': 'POST',
'requestUri': '/',
},
'name': 'OperationName',
'input': {
'shape': 'OperationNameRequest'
},
'output': {
'shape': 'OperationNameResponse',
},
'errors': [{'shape': 'NoSuchResourceException'}],
'documentation': 'Docs for OperationName',
'authtype': 'v4'
},
'OperationTwo': {
'http': {
'method': 'POST',
'requestUri': '/',
},
'name': 'OperationTwo',
'input': {
'shape': 'OperationNameRequest'
},
'output': {
'shape': 'OperationNameResponse',
},
'errors': [{'shape': 'NoSuchResourceException'}],
'documentation': 'Docs for OperationTwo',
}
},
'shapes': {
'OperationNameRequest': {
'type': 'structure',
'members': {
'Arg1': {'shape': 'stringType'},
'Arg2': {'shape': 'stringType'},
}
},
'OperationNameResponse': {
'type': 'structure',
'members': {
'String': {
'shape': 'stringType',
}
}
},
'NoSuchResourceException': {
'type': 'structure',
'members': {}
},
'stringType': {
'type': 'string',
}
}
}
self.service_model = model.ServiceModel(self.model)
def test_wire_name_always_matches_model(self):
service_model = model.ServiceModel(self.model)
operation = model.OperationModel(
self.model['operations']['OperationName'], service_model, 'Foo')
self.assertEqual(operation.name, 'Foo')
self.assertEqual(operation.wire_name, 'OperationName')
def test_operation_name_in_repr(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertIn('OperationName', repr(operation))
def test_name_and_wire_name_defaults_to_same_value(self):
service_model = model.ServiceModel(self.model)
operation = model.OperationModel(
self.model['operations']['OperationName'], service_model)
self.assertEqual(operation.name, 'OperationName')
self.assertEqual(operation.wire_name, 'OperationName')
def test_name_from_service(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertEqual(operation.name, 'OperationName')
def test_name_from_service_model_when_differs_from_name(self):
self.model['operations']['Foo'] = \
self.model['operations']['OperationName']
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('Foo')
self.assertEqual(operation.name, 'Foo')
def test_operation_input_model(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertEqual(operation.name, 'OperationName')
# Operations should also have a reference to the top level metadata.
self.assertEqual(operation.metadata['protocol'], 'query')
self.assertEqual(operation.http['method'], 'POST')
self.assertEqual(operation.http['requestUri'], '/')
shape = operation.input_shape
self.assertEqual(shape.name, 'OperationNameRequest')
self.assertEqual(list(sorted(shape.members)), ['Arg1', 'Arg2'])
def test_has_documentation_property(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertEqual(operation.documentation, 'Docs for OperationName')
def test_service_model_available_from_operation_model(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
# This is an identity comparison because we don't implement
# __eq__, so we may need to change this in the future.
self.assertEqual(
operation.service_model, service_model)
def test_operation_output_model(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
output = operation.output_shape
self.assertEqual(list(output.members), ['String'])
self.assertFalse(operation.has_streaming_output)
def test_operation_shape_not_required(self):
# It's ok if there's no output shape. We'll just get a return value of
# None.
del self.model['operations']['OperationName']['output']
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
output_shape = operation.output_shape
self.assertIsNone(output_shape)
def test_error_shapes(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
# OperationName only has a NoSuchResourceException
self.assertEqual(len(operation.error_shapes), 1)
self.assertEqual(
operation.error_shapes[0].name, 'NoSuchResourceException')
def test_has_auth_type(self):
operation = self.service_model.operation_model('OperationName')
self.assertEqual(operation.auth_type, 'v4')
def test_auth_type_not_set(self):
operation = self.service_model.operation_model('OperationTwo')
self.assertIsNone(operation.auth_type)
def test_deprecated_present(self):
self.model['operations']['OperationName']['deprecated'] = True
service_model = model.ServiceModel(self.model)
operation_name = service_model.operation_model('OperationName')
self.assertTrue(operation_name.deprecated)
def test_deprecated_present_false(self):
self.model['operations']['OperationName']['deprecated'] = False
service_model = model.ServiceModel(self.model)
operation_name = service_model.operation_model('OperationName')
self.assertFalse(operation_name.deprecated)
def test_deprecated_absent(self):
service_model = model.ServiceModel(self.model)
operation_two = service_model.operation_model('OperationTwo')
self.assertFalse(operation_two.deprecated)
class TestOperationModelStreamingTypes(unittest.TestCase):
def setUp(self):
super(TestOperationModelStreamingTypes, self).setUp()
self.model = {
'metadata': {'protocol': 'query', 'endpointPrefix': 'foo'},
'documentation': '',
'operations': {
'OperationName': {
'name': 'OperationName',
'input': {
'shape': 'OperationRequest',
},
'output': {
'shape': 'OperationResponse',
},
}
},
'shapes': {
'OperationRequest': {
'type': 'structure',
'members': {
'String': {
'shape': 'stringType',
},
"Body": {
'shape': 'blobType',
}
},
'payload': 'Body'
},
'OperationResponse': {
'type': 'structure',
'members': {
'String': {
'shape': 'stringType',
},
"Body": {
'shape': 'blobType',
}
},
'payload': 'Body'
},
'stringType': {
'type': 'string',
},
'blobType': {
'type': 'blob'
}
}
}
def remove_payload(self, type):
self.model['shapes']['Operation' + type].pop('payload')
def test_streaming_input_for_operation(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertTrue(operation.has_streaming_input)
self.assertEqual(operation.get_streaming_input().name, 'blobType')
def test_not_streaming_input_for_operation(self):
self.remove_payload('Request')
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertFalse(operation.has_streaming_input)
self.assertEqual(operation.get_streaming_input(), None)
def test_streaming_output_for_operation(self):
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertTrue(operation.has_streaming_output)
self.assertEqual(operation.get_streaming_output().name, 'blobType')
def test_not_streaming_output_for_operation(self):
self.remove_payload('Response')
service_model = model.ServiceModel(self.model)
operation = service_model.operation_model('OperationName')
self.assertFalse(operation.has_streaming_output)
self.assertEqual(operation.get_streaming_output(), None)
class TestDeepMerge(unittest.TestCase):
def setUp(self):
self.shapes = {
'SetQueueAttributes': {
'type': 'structure',
'members': {
'MapExample': {'shape': 'StrToStrMap',
'locationName': 'Attribute'},
}
},
'SetQueueAttributes2': {
'type': 'structure',
'members': {
'MapExample': {'shape': 'StrToStrMap',
'locationName': 'Attribute2'},
}
},
'StrToStrMap': {
'type': 'map',
'key': {'shape': 'StringType', 'locationName': 'Name'},
'value': {'shape': 'StringType', 'locationName': 'Value'},
'flattened': True,
'name': 'NotAttribute',
},
'StringType': {'type': 'string'}
}
self.shape_resolver = model.ShapeResolver(self.shapes)
def test_deep_merge(self):
shape = self.shape_resolver.get_shape_by_name('SetQueueAttributes')
map_merged = shape.members['MapExample']
# map_merged has a serialization as a member trait as well as
# in the StrToStrMap.
# The member trait should have precedence.
self.assertEqual(map_merged.serialization,
# member beats the definition.
{'name': 'Attribute',
# From the definition.
'flattened': True,})
# Ensure we don't merge/mutate the original dicts.
self.assertEqual(map_merged.key.serialization['name'], 'Name')
self.assertEqual(map_merged.value.serialization['name'], 'Value')
self.assertEqual(map_merged.key.serialization['name'], 'Name')
def test_merges_copy_dict(self):
shape = self.shape_resolver.get_shape_by_name('SetQueueAttributes')
map_merged = shape.members['MapExample']
self.assertEqual(map_merged.serialization.get('name'), 'Attribute')
shape2 = self.shape_resolver.get_shape_by_name('SetQueueAttributes2')
map_merged2 = shape2.members['MapExample']
self.assertEqual(map_merged2.serialization.get('name'), 'Attribute2')
class TestShapeResolver(unittest.TestCase):
def test_get_shape_by_name(self):
shape_map = {
'Foo': {
'type': 'structure',
'members': {
'Bar': {'shape': 'StringType'},
'Baz': {'shape': 'StringType'},
}
},
"StringType": {
"type": "string"
}
}
resolver = model.ShapeResolver(shape_map)
shape = resolver.get_shape_by_name('Foo')
self.assertEqual(shape.name, 'Foo')
self.assertEqual(shape.type_name, 'structure')
def test_resolve_shape_reference(self):
shape_map = {
'Foo': {
'type': 'structure',
'members': {
'Bar': {'shape': 'StringType'},
'Baz': {'shape': 'StringType'},
}
},
"StringType": {
"type": "string"
}
}
resolver = model.ShapeResolver(shape_map)
shape = resolver.resolve_shape_ref({'shape': 'StringType'})
self.assertEqual(shape.name, 'StringType')
self.assertEqual(shape.type_name, 'string')
def test_resolve_shape_references_with_member_traits(self):
shape_map = {
'Foo': {
'type': 'structure',
'members': {
'Bar': {'shape': 'StringType'},
'Baz': {'shape': 'StringType', 'locationName': 'other'},
}
},
"StringType": {
"type": "string"
}
}
resolver = model.ShapeResolver(shape_map)
shape = resolver.resolve_shape_ref({'shape': 'StringType',
'locationName': 'other'})
self.assertEqual(shape.serialization['name'], 'other')
self.assertEqual(shape.name, 'StringType')
def test_serialization_cache(self):
shape_map = {
'Foo': {
'type': 'structure',
'members': {
'Baz': {'shape': 'StringType', 'locationName': 'other'},
}
},
"StringType": {
"type": "string"
}
}
resolver = model.ShapeResolver(shape_map)
shape = resolver.resolve_shape_ref({'shape': 'StringType',
'locationName': 'other'})
self.assertEqual(shape.serialization['name'], 'other')
# serialization is computed on demand, and a cache is kept.
# This is just verifying that trying to access serialization again
# gives the same result. We don't actually care that it's cached,
# we just care that the cache doesn't mess with correctness.
self.assertEqual(shape.serialization['name'], 'other')
def test_shape_overrides(self):
shape_map = {
"StringType": {
"type": "string",
"documentation": "Original documentation"
}
}
resolver = model.ShapeResolver(shape_map)
shape = resolver.get_shape_by_name('StringType')
self.assertEqual(shape.documentation, 'Original documentation')
shape = resolver.resolve_shape_ref({'shape': 'StringType',
'documentation': 'override'})
self.assertEqual(shape.documentation, 'override')
def test_shape_type_structure(self):
shapes = {
'ChangePasswordRequest': {
'type': 'structure',
'members': {
'OldPassword': {'shape': 'passwordType'},
'NewPassword': {'shape': 'passwordType'},
}
},
'passwordType': {
"type":"string",
}
}
resolver = model.ShapeResolver(shapes)
shape = resolver.get_shape_by_name('ChangePasswordRequest')
self.assertEqual(shape.type_name, 'structure')
self.assertEqual(shape.name, 'ChangePasswordRequest')
self.assertEqual(list(sorted(shape.members)),
['NewPassword', 'OldPassword'])
self.assertEqual(shape.members['OldPassword'].name, 'passwordType')
self.assertEqual(shape.members['OldPassword'].type_name, 'string')
def test_shape_metadata(self):
shapes = {
'ChangePasswordRequest': {
'type': 'structure',
'required': ['OldPassword', 'NewPassword'],
'members': {
'OldPassword': {'shape': 'passwordType'},
'NewPassword': {'shape': 'passwordType'},
}
},
'passwordType': {
"type":"string",
"min":1,
"max":128,
"sensitive":True
}
}
resolver = model.ShapeResolver(shapes)
shape = resolver.get_shape_by_name('ChangePasswordRequest')
self.assertEqual(shape.metadata['required'],
['OldPassword', 'NewPassword'])
member = shape.members['OldPassword']
self.assertEqual(member.metadata['min'], 1)
self.assertEqual(member.metadata['max'], 128)
self.assertEqual(member.metadata['sensitive'], True)
def test_shape_list(self):
shapes = {
'mfaDeviceListType': {
"type":"list",
"member": {"shape": "MFADevice"},
},
'MFADevice': {
'type': 'structure',
'members': {
'UserName': {'shape': 'userNameType'}
}
},
'userNameType': {
'type': 'string'
}
}
resolver = model.ShapeResolver(shapes)
shape = resolver.get_shape_by_name('mfaDeviceListType')
self.assertEqual(shape.member.type_name, 'structure')
self.assertEqual(shape.member.name, 'MFADevice')
self.assertEqual(list(shape.member.members), ['UserName'])
def test_shape_does_not_exist(self):
resolver = model.ShapeResolver({})
with self.assertRaises(model.NoShapeFoundError):
resolver.get_shape_by_name('NoExistShape')
def test_missing_type_key(self):
shapes = {
'UnknownType': {
'NotTheTypeKey': 'someUnknownType'
}
}
resolver = model.ShapeResolver(shapes)
with self.assertRaises(model.InvalidShapeError):
resolver.get_shape_by_name('UnknownType')
def test_bad_shape_ref(self):
# This is an example of a denormalized model,
# which should raise an exception.
shapes = {
'Struct': {
'type': 'structure',
'members': {
'A': {'type': 'string'},
'B': {'type': 'string'},
}
}
}
resolver = model.ShapeResolver(shapes)
with self.assertRaises(model.InvalidShapeReferenceError):
struct = resolver.get_shape_by_name('Struct')
# Resolving the members will fail because
# the 'A' and 'B' members are not shape refs.
struct.members
def test_shape_name_in_repr(self):
shapes = {
'StringType': {
'type': 'string',
}
}
resolver = model.ShapeResolver(shapes)
self.assertIn('StringType',
repr(resolver.get_shape_by_name('StringType')))
class TestBuilders(unittest.TestCase):
def test_structure_shape_builder_with_scalar_types(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {'type': 'string'},
'B': {'type': 'integer'},
}).build_model()
self.assertIsInstance(shape, model.StructureShape)
self.assertEqual(sorted(list(shape.members)), ['A', 'B'])
self.assertEqual(shape.members['A'].type_name, 'string')
self.assertEqual(shape.members['B'].type_name, 'integer')
def test_structure_shape_with_structure_type(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'structure',
'members': {
'A-1': {'type': 'string'},
}
},
}).build_model()
self.assertIsInstance(shape, model.StructureShape)
self.assertEqual(list(shape.members), ['A'])
self.assertEqual(shape.members['A'].type_name, 'structure')
self.assertEqual(list(shape.members['A'].members), ['A-1'])
def test_structure_shape_with_list(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'list',
'member': {
'type': 'string'
}
},
}).build_model()
self.assertIsInstance(shape.members['A'], model.ListShape)
self.assertEqual(shape.members['A'].member.type_name, 'string')
def test_structure_shape_with_map_type(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'map',
'key': {'type': 'string'},
'value': {'type': 'string'},
}
}).build_model()
self.assertIsInstance(shape.members['A'], model.MapShape)
map_shape = shape.members['A']
self.assertEqual(map_shape.key.type_name, 'string')
self.assertEqual(map_shape.value.type_name, 'string')
def test_nested_structure(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'structure',
'members': {
'B': {
'type': 'structure',
'members': {
'C': {
'type': 'string',
}
}
}
}
}
}).build_model()
self.assertEqual(
shape.members['A'].members['B'].members['C'].type_name, 'string')
def test_enum_values_on_string_used(self):
b = model.DenormalizedStructureBuilder()
enum_values = ['foo', 'bar', 'baz']
shape = b.with_members({
'A': {
'type': 'string',
'enum': enum_values,
},
}).build_model()
self.assertIsInstance(shape, model.StructureShape)
string_shape = shape.members['A']
self.assertIsInstance(string_shape, model.StringShape)
self.assertEqual(string_shape.metadata['enum'], enum_values)
self.assertEqual(string_shape.enum, enum_values)
def test_documentation_on_shape_used(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'string',
'documentation': 'MyDocs',
},
}).build_model()
self.assertEqual(shape.members['A'].documentation,
'MyDocs')
def test_use_shape_name_when_provided(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members({
'A': {
'type': 'string',
'shape_name': 'MyStringShape',
},
}).build_model()
self.assertEqual(shape.members['A'].name, 'MyStringShape')
def test_unknown_shape_type(self):
b = model.DenormalizedStructureBuilder()
with self.assertRaises(model.InvalidShapeError):
b.with_members({
'A': {
'type': 'brand-new-shape-type',
},
}).build_model()
def test_ordered_shape_builder(self):
b = model.DenormalizedStructureBuilder()
shape = b.with_members(OrderedDict(
[
('A', {
'type': 'string'
}),
('B', {
'type': 'structure',
'members': OrderedDict(
[
('C', {
'type': 'string'
}),
('D', {
'type': 'string'
})
]
)
})
]
)).build_model()
# Members should be in order
self.assertEqual(['A', 'B'], list(shape.members.keys()))
# Nested structure members should *also* stay ordered
self.assertEqual(['C', 'D'], list(shape.members['B'].members.keys()))
if __name__ == '__main__':
unittest.main()
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import re
import mock
from botocore.handlers import generate_idempotent_uuid
class TestIdempotencyInjection(unittest.TestCase):
def setUp(self):
self.mock_model = mock.MagicMock()
self.mock_model.idempotent_members = ['RequiredKey']
self.uuid_pattern = re.compile(
'^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$',
re.I)
def test_injection(self):
# No parameters are provided, RequiredKey should be autofilled
params = {}
generate_idempotent_uuid(params, self.mock_model)
self.assertIn('RequiredKey', params)
self.assertIsNotNone(self.uuid_pattern.match(params['RequiredKey']))
def test_provided(self):
# RequiredKey is provided, should not be replaced
params = {'RequiredKey': 'already populated'}
generate_idempotent_uuid(params, self.mock_model)
self.assertEqual(params['RequiredKey'], 'already populated')
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from dateutil.tz import tzutc, tzoffset
import datetime
from botocore.compat import six
import copy
import mock
import botocore
from botocore import xform_name
from botocore.compat import OrderedDict, json
from botocore.awsrequest import AWSRequest
from botocore.exceptions import InvalidExpressionError, ConfigNotFound
from botocore.exceptions import ClientError
from botocore.exceptions import InvalidDNSNameError, MetadataRetrievalError
from botocore.model import ServiceModel
from botocore.vendored import requests
from botocore.utils import is_json_value_header
from botocore.utils import remove_dot_segments
from botocore.utils import normalize_url_path
from botocore.utils import validate_jmespath_for_set
from botocore.utils import set_value_from_jmespath
from botocore.utils import parse_key_val_file_contents
from botocore.utils import parse_key_val_file
from botocore.utils import parse_timestamp
from botocore.utils import parse_to_aware_datetime
from botocore.utils import datetime2timestamp
from botocore.utils import CachedProperty
from botocore.utils import ArgumentGenerator
from botocore.utils import calculate_tree_hash
from botocore.utils import calculate_sha256
from botocore.utils import is_valid_endpoint_url
from botocore.utils import fix_s3_host
from botocore.utils import switch_to_virtual_host_style
from botocore.utils import instance_cache
from botocore.utils import merge_dicts
from botocore.utils import get_service_module_name
from botocore.utils import percent_encode_sequence
from botocore.utils import percent_encode
from botocore.utils import switch_host_s3_accelerate
from botocore.utils import deep_merge
from botocore.utils import S3RegionRedirector
from botocore.utils import ContainerMetadataFetcher
from botocore.model import DenormalizedStructureBuilder
from botocore.model import ShapeResolver
from botocore.config import Config
class TestIsJSONValueHeader(unittest.TestCase):
def test_no_serialization_section(self):
shape = mock.Mock()
shape.type_name = 'string'
self.assertFalse(is_json_value_header(shape))
def test_non_jsonvalue_shape(self):
shape = mock.Mock()
shape.serialization = {
'location': 'header'
}
shape.type_name = 'string'
self.assertFalse(is_json_value_header(shape))
def test_non_header_jsonvalue_shape(self):
shape = mock.Mock()
shape.serialization = {
'jsonvalue': True
}
shape.type_name = 'string'
self.assertFalse(is_json_value_header(shape))
def test_non_string_jsonvalue_shape(self):
shape = mock.Mock()
shape.serialization = {
'location': 'header',
'jsonvalue': True
}
shape.type_name = 'integer'
self.assertFalse(is_json_value_header(shape))
def test_json_value_header(self):
shape = mock.Mock()
shape.serialization = {
'jsonvalue': True,
'location': 'header'
}
shape.type_name = 'string'
self.assertTrue(is_json_value_header(shape))
class TestURINormalization(unittest.TestCase):
def test_remove_dot_segments(self):
self.assertEqual(remove_dot_segments('../foo'), 'foo')
self.assertEqual(remove_dot_segments('../../foo'), 'foo')
self.assertEqual(remove_dot_segments('./foo'), 'foo')
self.assertEqual(remove_dot_segments('/./'), '/')
self.assertEqual(remove_dot_segments('/../'), '/')
self.assertEqual(remove_dot_segments('/foo/bar/baz/../qux'),
'/foo/bar/qux')
self.assertEqual(remove_dot_segments('/foo/..'), '/')
self.assertEqual(remove_dot_segments('foo/bar/baz'), 'foo/bar/baz')
self.assertEqual(remove_dot_segments('..'), '')
self.assertEqual(remove_dot_segments('.'), '')
self.assertEqual(remove_dot_segments('/.'), '/')
self.assertEqual(remove_dot_segments('/.foo'), '/.foo')
self.assertEqual(remove_dot_segments('/..foo'), '/..foo')
self.assertEqual(remove_dot_segments(''), '')
self.assertEqual(remove_dot_segments('/a/b/c/./../../g'), '/a/g')
self.assertEqual(remove_dot_segments('mid/content=5/../6'), 'mid/6')
        # Note: collapsing empty ('//') segments is not strictly RFC 3986,
        # but it matches the current implementation.
self.assertEqual(remove_dot_segments('//foo//'), '/foo/')
def test_empty_url_normalization(self):
self.assertEqual(normalize_url_path(''), '/')
class TestTransformName(unittest.TestCase):
def test_upper_camel_case(self):
self.assertEqual(xform_name('UpperCamelCase'), 'upper_camel_case')
self.assertEqual(xform_name('UpperCamelCase', '-'), 'upper-camel-case')
def test_lower_camel_case(self):
self.assertEqual(xform_name('lowerCamelCase'), 'lower_camel_case')
self.assertEqual(xform_name('lowerCamelCase', '-'), 'lower-camel-case')
def test_consecutive_upper_case(self):
self.assertEqual(xform_name('HTTPHeaders'), 'http_headers')
self.assertEqual(xform_name('HTTPHeaders', '-'), 'http-headers')
def test_consecutive_upper_case_middle_string(self):
self.assertEqual(xform_name('MainHTTPHeaders'), 'main_http_headers')
self.assertEqual(xform_name('MainHTTPHeaders', '-'),
'main-http-headers')
def test_s3_prefix(self):
self.assertEqual(xform_name('S3BucketName'), 's3_bucket_name')
def test_already_snake_cased(self):
self.assertEqual(xform_name('leave_alone'), 'leave_alone')
self.assertEqual(xform_name('s3_bucket_name'), 's3_bucket_name')
self.assertEqual(xform_name('bucket_s3_name'), 'bucket_s3_name')
def test_special_cases(self):
# Some patterns don't actually match the rules we expect.
self.assertEqual(xform_name('SwapEnvironmentCNAMEs'),
'swap_environment_cnames')
self.assertEqual(xform_name('SwapEnvironmentCNAMEs', '-'),
'swap-environment-cnames')
self.assertEqual(xform_name('CreateCachediSCSIVolume', '-'),
'create-cached-iscsi-volume')
self.assertEqual(xform_name('DescribeCachediSCSIVolumes', '-'),
'describe-cached-iscsi-volumes')
self.assertEqual(xform_name('DescribeStorediSCSIVolumes', '-'),
'describe-stored-iscsi-volumes')
self.assertEqual(xform_name('CreateStorediSCSIVolume', '-'),
'create-stored-iscsi-volume')
def test_special_case_ends_with_s(self):
self.assertEqual(xform_name('GatewayARNs', '-'), 'gateway-arns')
def test_partial_rename(self):
transformed = xform_name('IPV6', '-')
self.assertEqual(transformed, 'ipv6')
transformed = xform_name('IPV6', '_')
self.assertEqual(transformed, 'ipv6')
class TestValidateJMESPathForSet(unittest.TestCase):
def setUp(self):
super(TestValidateJMESPathForSet, self).setUp()
self.data = {
'Response': {
'Thing': {
'Id': 1,
'Name': 'Thing #1',
}
},
'Marker': 'some-token'
}
def test_invalid_exp(self):
with self.assertRaises(InvalidExpressionError):
validate_jmespath_for_set('Response.*.Name')
with self.assertRaises(InvalidExpressionError):
validate_jmespath_for_set('Response.Things[0]')
with self.assertRaises(InvalidExpressionError):
validate_jmespath_for_set('')
with self.assertRaises(InvalidExpressionError):
validate_jmespath_for_set('.')
class TestSetValueFromJMESPath(unittest.TestCase):
def setUp(self):
super(TestSetValueFromJMESPath, self).setUp()
self.data = {
'Response': {
'Thing': {
'Id': 1,
'Name': 'Thing #1',
}
},
'Marker': 'some-token'
}
def test_single_depth_existing(self):
set_value_from_jmespath(self.data, 'Marker', 'new-token')
self.assertEqual(self.data['Marker'], 'new-token')
def test_single_depth_new(self):
self.assertFalse('Limit' in self.data)
set_value_from_jmespath(self.data, 'Limit', 100)
self.assertEqual(self.data['Limit'], 100)
def test_multiple_depth_existing(self):
set_value_from_jmespath(self.data, 'Response.Thing.Name', 'New Name')
self.assertEqual(self.data['Response']['Thing']['Name'], 'New Name')
def test_multiple_depth_new(self):
self.assertFalse('Brand' in self.data)
set_value_from_jmespath(self.data, 'Brand.New', {'abc': 123})
self.assertEqual(self.data['Brand']['New']['abc'], 123)
class TestParseEC2CredentialsFile(unittest.TestCase):
def test_parse_ec2_content(self):
contents = "AWSAccessKeyId=a\nAWSSecretKey=b\n"
self.assertEqual(parse_key_val_file_contents(contents),
{'AWSAccessKeyId': 'a',
'AWSSecretKey': 'b'})
def test_parse_ec2_content_empty(self):
contents = ""
self.assertEqual(parse_key_val_file_contents(contents), {})
def test_key_val_pair_with_blank_lines(self):
        # The \n\n leaves an extra blank line between the access/secret keys.
contents = "AWSAccessKeyId=a\n\nAWSSecretKey=b\n"
self.assertEqual(parse_key_val_file_contents(contents),
{'AWSAccessKeyId': 'a',
'AWSSecretKey': 'b'})
def test_key_val_parser_lenient(self):
# Ignore any line that does not have a '=' char in it.
contents = "AWSAccessKeyId=a\nNOTKEYVALLINE\nAWSSecretKey=b\n"
self.assertEqual(parse_key_val_file_contents(contents),
{'AWSAccessKeyId': 'a',
'AWSSecretKey': 'b'})
def test_multiple_equals_on_line(self):
contents = "AWSAccessKeyId=a\nAWSSecretKey=secret_key_with_equals=b\n"
self.assertEqual(parse_key_val_file_contents(contents),
{'AWSAccessKeyId': 'a',
'AWSSecretKey': 'secret_key_with_equals=b'})
def test_os_error_raises_config_not_found(self):
mock_open = mock.Mock()
mock_open.side_effect = OSError()
with self.assertRaises(ConfigNotFound):
parse_key_val_file('badfile', _open=mock_open)
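# Editor's sketch of the parsing rule the cases above trace out (an
# illustration, not the actual botocore implementation): split each line on
# the first '=', skip lines without one, and keep any later '=' characters
# as part of the value.
def _sketch_parse_key_val_contents(contents):
    result = {}
    for line in contents.splitlines():
        if '=' not in line:
            continue  # lenient: ignore blank or malformed lines
        key, value = line.split('=', 1)
        result[key] = value
    return result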
class TestParseTimestamps(unittest.TestCase):
def test_parse_iso8601(self):
self.assertEqual(
parse_timestamp('1970-01-01T00:10:00.000Z'),
datetime.datetime(1970, 1, 1, 0, 10, tzinfo=tzutc()))
def test_parse_epoch(self):
self.assertEqual(
parse_timestamp(1222172800),
datetime.datetime(2008, 9, 23, 12, 26, 40, tzinfo=tzutc()))
def test_parse_epoch_zero_time(self):
self.assertEqual(
parse_timestamp(0),
datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc()))
def test_parse_epoch_as_string(self):
self.assertEqual(
parse_timestamp('1222172800'),
datetime.datetime(2008, 9, 23, 12, 26, 40, tzinfo=tzutc()))
def test_parse_rfc822(self):
self.assertEqual(
parse_timestamp('Wed, 02 Oct 2002 13:00:00 GMT'),
datetime.datetime(2002, 10, 2, 13, 0, tzinfo=tzutc()))
def test_parse_gmt_in_uk_time(self):
        # In the UK the time switches from GMT to BST and back as part of
        # daylight saving time. time.tzname will therefore report
# both time zones. dateutil sees that the time zone is a local time
# zone and so parses it as local time, but it ends up being BST
# instead of GMT. To remedy this issue we can provide a time zone
# context which will enforce GMT == UTC.
with mock.patch('time.tzname', ('GMT', 'BST')):
self.assertEqual(
parse_timestamp('Wed, 02 Oct 2002 13:00:00 GMT'),
datetime.datetime(2002, 10, 2, 13, 0, tzinfo=tzutc()))
def test_parse_invalid_timestamp(self):
with self.assertRaises(ValueError):
parse_timestamp('invalid date')
class TestDatetime2Timestamp(unittest.TestCase):
def test_datetime2timestamp_naive(self):
self.assertEqual(
datetime2timestamp(datetime.datetime(1970, 1, 2)), 86400)
def test_datetime2timestamp_aware(self):
tzinfo = tzoffset("BRST", -10800)
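        # BRST is UTC-3, so 1970-01-02T00:00:00-03:00 is 03:00 UTC,
        # i.e. 86400 + 3 * 3600 = 97200 seconds after the epoch.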
self.assertEqual(
datetime2timestamp(datetime.datetime(1970, 1, 2, tzinfo=tzinfo)),
97200)
class TestParseToUTCDatetime(unittest.TestCase):
def test_handles_utc_time(self):
original = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(parse_to_aware_datetime(original), original)
def test_handles_other_timezone(self):
tzinfo = tzoffset("BRST", -10800)
original = datetime.datetime(2014, 1, 1, 0, 0, 0, tzinfo=tzinfo)
self.assertEqual(parse_to_aware_datetime(original), original)
def test_handles_naive_datetime(self):
original = datetime.datetime(1970, 1, 1, 0, 0, 0)
expected = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(parse_to_aware_datetime(original), expected)
def test_handles_string_epoch(self):
expected = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(parse_to_aware_datetime('0'), expected)
def test_handles_int_epoch(self):
expected = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(parse_to_aware_datetime(0), expected)
def test_handles_full_iso_8601(self):
expected = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(
parse_to_aware_datetime('1970-01-01T00:00:00Z'),
expected)
def test_year_only_iso_8601(self):
expected = datetime.datetime(1970, 1, 1, 0, 0, 0, tzinfo=tzutc())
self.assertEqual(parse_to_aware_datetime('1970-01-01'), expected)
class TestCachedProperty(unittest.TestCase):
def test_cached_property_same_value(self):
class CacheMe(object):
@CachedProperty
def foo(self):
return 'foo'
c = CacheMe()
self.assertEqual(c.foo, 'foo')
self.assertEqual(c.foo, 'foo')
def test_cached_property_only_called_once(self):
# Note: you would normally never want to cache
# a property that returns a new value each time,
# but this is done to demonstrate the caching behavior.
class NoIncrement(object):
def __init__(self):
self.counter = 0
@CachedProperty
def current_value(self):
self.counter += 1
return self.counter
c = NoIncrement()
self.assertEqual(c.current_value, 1)
        # If the property wasn't cached, the next value would
        # be 2, but because it's cached, we know the value will be 1.
self.assertEqual(c.current_value, 1)
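# Editor's sketch of how a cached property descriptor of this kind works
# (an illustration, not botocore's actual CachedProperty): compute once in
# __get__, then store the result on the instance so the descriptor is
# never consulted again for that attribute.
class _SketchCachedProperty(object):
    def __init__(self, fget):
        self._fget = fget
        self.__name__ = fget.__name__
    def __get__(self, obj, cls):
        if obj is None:
            return self
        value = self._fget(obj)
        # Instance attributes shadow non-data descriptors, so subsequent
        # lookups hit obj.__dict__ directly.
        obj.__dict__[self.__name__] = value
        return value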
class TestArgumentGenerator(unittest.TestCase):
def setUp(self):
self.arg_generator = ArgumentGenerator()
def assert_skeleton_from_model_is(self, model, generated_skeleton):
shape = DenormalizedStructureBuilder().with_members(
model).build_model()
actual = self.arg_generator.generate_skeleton(shape)
self.assertEqual(actual, generated_skeleton)
def test_generate_string(self):
self.assert_skeleton_from_model_is(
model={
'A': {'type': 'string'}
},
generated_skeleton={
'A': ''
}
)
def test_generate_string_enum(self):
enum_values = ['A', 'B', 'C']
model = {
'A': {'type': 'string', 'enum': enum_values}
}
shape = DenormalizedStructureBuilder().with_members(
model).build_model()
actual = self.arg_generator.generate_skeleton(shape)
self.assertIn(actual['A'], enum_values)
def test_generate_scalars(self):
self.assert_skeleton_from_model_is(
model={
'A': {'type': 'string'},
'B': {'type': 'integer'},
'C': {'type': 'float'},
'D': {'type': 'boolean'},
'E': {'type': 'timestamp'}
},
generated_skeleton={
'A': '',
'B': 0,
'C': 0.0,
'D': True,
'E': datetime.datetime(1970, 1, 1, 0, 0, 0)
}
)
def test_will_use_member_names_for_string_values(self):
self.arg_generator = ArgumentGenerator(use_member_names=True)
self.assert_skeleton_from_model_is(
model={
'A': {'type': 'string'},
'B': {'type': 'integer'},
'C': {'type': 'float'},
'D': {'type': 'boolean'},
},
generated_skeleton={
'A': 'A',
'B': 0,
'C': 0.0,
'D': True,
}
)
def test_will_use_member_names_for_string_values_of_list(self):
self.arg_generator = ArgumentGenerator(use_member_names=True)
# We're not using assert_skeleton_from_model_is
        # because we can't really control the names of string shapes
# being used in the DenormalizedStructureBuilder. We can only
# control the name of structures and list shapes.
shape_map = ShapeResolver({
'InputShape': {
'type': 'structure',
'members': {
'StringList': {'shape': 'StringList'},
}
},
'StringList': {
'type': 'list',
'member': {'shape': 'StringType'},
},
'StringType': {
'type': 'string',
}
})
shape = shape_map.get_shape_by_name('InputShape')
actual = self.arg_generator.generate_skeleton(shape)
expected = {'StringList': ['StringType']}
self.assertEqual(actual, expected)
def test_generate_nested_structure(self):
self.assert_skeleton_from_model_is(
model={
'A': {
'type': 'structure',
'members': {
'B': {'type': 'string'},
}
}
},
generated_skeleton={
'A': {'B': ''}
}
)
def test_generate_scalar_list(self):
self.assert_skeleton_from_model_is(
model={
'A': {
'type': 'list',
'member': {
'type': 'string'
}
},
},
generated_skeleton={
'A': [''],
}
)
def test_generate_scalar_map(self):
self.assert_skeleton_from_model_is(
model={
'A': {
'type': 'map',
'key': {'type': 'string'},
'value': {'type': 'string'},
}
},
generated_skeleton={
'A': {
'KeyName': '',
}
}
)
def test_handles_recursive_shapes(self):
# We're not using assert_skeleton_from_model_is
# because we can't use a DenormalizedStructureBuilder,
# we need a normalized model to represent recursive
# shapes.
shape_map = ShapeResolver({
'InputShape': {
'type': 'structure',
'members': {
'A': {'shape': 'RecursiveStruct'},
'B': {'shape': 'StringType'},
}
},
'RecursiveStruct': {
'type': 'structure',
'members': {
'C': {'shape': 'RecursiveStruct'},
'D': {'shape': 'StringType'},
}
},
'StringType': {
'type': 'string',
}
})
shape = shape_map.get_shape_by_name('InputShape')
actual = self.arg_generator.generate_skeleton(shape)
expected = {
'A': {
'C': {
                # For recursive shapes, we'll just show
                # an empty dict.
},
'D': ''
},
'B': ''
}
self.assertEqual(actual, expected)
class TestChecksums(unittest.TestCase):
def test_empty_hash(self):
self.assertEqual(
calculate_sha256(six.BytesIO(b''), as_hex=True),
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
def test_as_hex(self):
self.assertEqual(
calculate_sha256(six.BytesIO(b'hello world'), as_hex=True),
'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9')
def test_as_binary(self):
self.assertEqual(
calculate_sha256(six.BytesIO(b'hello world'), as_hex=False),
(b"\xb9M'\xb9\x93M>\x08\xa5.R\xd7\xda}\xab\xfa\xc4\x84\xef"
b"\xe3zS\x80\xee\x90\x88\xf7\xac\xe2\xef\xcd\xe9"))
class TestTreeHash(unittest.TestCase):
# Note that for these tests I've independently verified
# what the expected tree hashes should be from other
# SDK implementations.
def test_empty_tree_hash(self):
self.assertEqual(
calculate_tree_hash(six.BytesIO(b'')),
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
def test_tree_hash_less_than_one_mb(self):
one_k = six.BytesIO(b'a' * 1024)
self.assertEqual(
calculate_tree_hash(one_k),
'2edc986847e209b4016e141a6dc8716d3207350f416969382d431539bf292e4a')
def test_tree_hash_exactly_one_mb(self):
one_meg_bytestring = b'a' * (1 * 1024 * 1024)
one_meg = six.BytesIO(one_meg_bytestring)
self.assertEqual(
calculate_tree_hash(one_meg),
'9bc1b2a288b26af7257a36277ae3816a7d4f16e89c1e7e77d0a5c48bad62b360')
def test_tree_hash_multiple_of_one_mb(self):
four_mb = six.BytesIO(b'a' * (4 * 1024 * 1024))
self.assertEqual(
calculate_tree_hash(four_mb),
'9491cb2ed1d4e7cd53215f4017c23ec4ad21d7050a1e6bb636c4f67e8cddb844')
def test_tree_hash_offset_of_one_mb_multiple(self):
offset_four_mb = six.BytesIO(b'a' * (4 * 1024 * 1024) + b'a' * 20)
self.assertEqual(
calculate_tree_hash(offset_four_mb),
'12f3cbd6101b981cde074039f6f728071da8879d6f632de8afc7cdf00661b08f')
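# Editor's sketch of the tree-hash scheme the expected values above were
# verified against (assumed algorithm, matching Glacier's documented one:
# SHA-256 over each 1 MB chunk, then pairwise hashing of concatenated
# digests until a single root digest remains).
def _sketch_tree_hash(data, chunk_size=1024 * 1024):
    import binascii
    import hashlib
    chunks = [data[i:i + chunk_size]
              for i in range(0, len(data), chunk_size)] or [b'']
    hashes = [hashlib.sha256(c).digest() for c in chunks]
    while len(hashes) > 1:
        # An odd trailing digest is carried up to the next level as-is.
        odd = [hashes[-1]] if len(hashes) % 2 else []
        hashes = [hashlib.sha256(a + b).digest()
                  for a, b in zip(hashes[::2], hashes[1::2])] + odd
    return binascii.hexlify(hashes[0]).decode('ascii')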
class TestIsValidEndpointURL(unittest.TestCase):
def test_dns_name_is_valid(self):
self.assertTrue(is_valid_endpoint_url('https://s3.amazonaws.com/'))
def test_ip_address_is_allowed(self):
self.assertTrue(is_valid_endpoint_url('https://10.10.10.10/'))
def test_path_component_ignored(self):
self.assertTrue(
is_valid_endpoint_url('https://foo.bar.com/other/path/'))
def test_can_have_port(self):
self.assertTrue(is_valid_endpoint_url('https://foo.bar.com:12345/'))
def test_ip_can_have_port(self):
self.assertTrue(is_valid_endpoint_url('https://10.10.10.10:12345/'))
def test_cannot_have_spaces(self):
self.assertFalse(is_valid_endpoint_url('https://my invalid name/'))
def test_missing_scheme(self):
self.assertFalse(is_valid_endpoint_url('foo.bar.com'))
def test_no_new_lines(self):
self.assertFalse(is_valid_endpoint_url('https://foo.bar.com\nbar/'))
def test_long_hostname(self):
        long_hostname = 'https://%s.com' % ('a' * 256)
self.assertFalse(is_valid_endpoint_url(long_hostname))
def test_hostname_can_end_with_dot(self):
self.assertTrue(is_valid_endpoint_url('https://foo.bar.com./'))
def test_hostname_no_dots(self):
self.assertTrue(is_valid_endpoint_url('https://foo/'))
class TestFixS3Host(unittest.TestCase):
def test_fix_s3_host_initial(self):
request = AWSRequest(
method='PUT', headers={},
url='https://s3-us-west-2.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name)
self.assertEqual(request.url,
'https://bucket.s3.amazonaws.com/key.txt')
self.assertEqual(request.auth_path, '/bucket/key.txt')
def test_fix_s3_host_only_applied_once(self):
request = AWSRequest(
method='PUT', headers={},
url='https://s3-us-west-2.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name)
# Calling the handler again should not affect the end result:
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name)
self.assertEqual(request.url,
'https://bucket.s3.amazonaws.com/key.txt')
# This was a bug previously. We want to make sure that
# calling fix_s3_host() again does not alter the auth_path.
# Otherwise we'll get signature errors.
self.assertEqual(request.auth_path, '/bucket/key.txt')
def test_dns_style_not_used_for_get_bucket_location(self):
original_url = 'https://s3-us-west-2.amazonaws.com/bucket?location'
request = AWSRequest(
method='GET', headers={},
url=original_url,
)
signature_version = 's3'
region_name = 'us-west-2'
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name)
# The request url should not have been modified because this is
# a request for GetBucketLocation.
self.assertEqual(request.url, original_url)
def test_can_provide_default_endpoint_url(self):
request = AWSRequest(
method='PUT', headers={},
url='https://s3-us-west-2.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name,
default_endpoint_url='foo.s3.amazonaws.com')
self.assertEqual(request.url,
'https://bucket.foo.s3.amazonaws.com/key.txt')
def test_no_endpoint_url_uses_request_url(self):
request = AWSRequest(
method='PUT', headers={},
url='https://s3-us-west-2.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
fix_s3_host(
request=request, signature_version=signature_version,
region_name=region_name,
# A value of None means use the url in the current request.
default_endpoint_url=None,
)
self.assertEqual(request.url,
'https://bucket.s3-us-west-2.amazonaws.com/key.txt')
class TestSwitchToVirtualHostStyle(unittest.TestCase):
def test_switch_to_virtual_host_style(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
self.assertEqual(request.url,
'https://bucket.foo.amazonaws.com/key.txt')
self.assertEqual(request.auth_path, '/bucket/key.txt')
def test_uses_default_endpoint(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name, default_endpoint_url='s3.amazonaws.com')
self.assertEqual(request.url,
'https://bucket.s3.amazonaws.com/key.txt')
self.assertEqual(request.auth_path, '/bucket/key.txt')
def test_throws_invalid_dns_name_error(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/mybucket.foo/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
with self.assertRaises(InvalidDNSNameError):
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
    def test_switch_to_virtual_host_style_only_applied_once(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
# Calling the handler again should not affect the end result:
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
self.assertEqual(request.url,
'https://bucket.foo.amazonaws.com/key.txt')
        # This was a bug previously. We want to make sure that
        # calling switch_to_virtual_host_style() again does not alter the
        # auth_path. Otherwise we'll get signature errors.
self.assertEqual(request.auth_path, '/bucket/key.txt')
def test_virtual_host_style_for_make_bucket(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/bucket'
)
region_name = 'us-west-2'
signature_version = 's3'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
self.assertEqual(request.url,
'https://bucket.foo.amazonaws.com/')
def test_virtual_host_style_not_used_for_get_bucket_location(self):
original_url = 'https://foo.amazonaws.com/bucket?location'
request = AWSRequest(
method='GET', headers={},
url=original_url,
)
signature_version = 's3'
region_name = 'us-west-2'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
# The request url should not have been modified because this is
# a request for GetBucketLocation.
self.assertEqual(request.url, original_url)
def test_virtual_host_style_not_used_for_list_buckets(self):
original_url = 'https://foo.amazonaws.com/'
request = AWSRequest(
method='GET', headers={},
url=original_url,
)
signature_version = 's3'
region_name = 'us-west-2'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name)
        # The request url should not have been modified because this is
        # a request for ListBuckets.
self.assertEqual(request.url, original_url)
def test_is_unaffected_by_sigv4(self):
request = AWSRequest(
method='PUT', headers={},
url='https://foo.amazonaws.com/bucket/key.txt'
)
region_name = 'us-west-2'
signature_version = 's3v4'
switch_to_virtual_host_style(
request=request, signature_version=signature_version,
region_name=region_name, default_endpoint_url='s3.amazonaws.com')
self.assertEqual(request.url,
'https://bucket.s3.amazonaws.com/key.txt')
class TestInstanceCache(unittest.TestCase):
class DummyClass(object):
def __init__(self, cache):
self._instance_cache = cache
@instance_cache
def add(self, x, y):
return x + y
@instance_cache
def sub(self, x, y):
return x - y
def setUp(self):
self.cache = {}
def test_cache_single_method_call(self):
adder = self.DummyClass(self.cache)
self.assertEqual(adder.add(2, 1), 3)
# This should result in one entry in the cache.
self.assertEqual(len(self.cache), 1)
# When we call the method with the same args,
# we should reuse the same entry in the cache.
self.assertEqual(adder.add(2, 1), 3)
self.assertEqual(len(self.cache), 1)
def test_can_cache_multiple_methods(self):
adder = self.DummyClass(self.cache)
adder.add(2, 1)
# A different method results in a new cache entry,
# so now there should be two elements in the cache.
self.assertEqual(adder.sub(2, 1), 1)
self.assertEqual(len(self.cache), 2)
self.assertEqual(adder.sub(2, 1), 1)
def test_can_cache_kwargs(self):
adder = self.DummyClass(self.cache)
adder.add(x=2, y=1)
self.assertEqual(adder.add(x=2, y=1), 3)
self.assertEqual(len(self.cache), 1)
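# Editor's sketch of the decorator contract exercised above (an assumed
# shape, not botocore's exact implementation): results are stored in the
# object's _instance_cache, keyed by method name plus args and kwargs.
def _sketch_instance_cache(method):
    import functools
    @functools.wraps(method)
    def _wrapper(self, *args, **kwargs):
        key = (method.__name__, args, tuple(sorted(kwargs.items())))
        if key not in self._instance_cache:
            self._instance_cache[key] = method(self, *args, **kwargs)
        return self._instance_cache[key]
    return _wrapper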
class TestMergeDicts(unittest.TestCase):
def test_merge_dicts_overrides(self):
first = {
'foo': {'bar': {'baz': {'one': 'ORIGINAL', 'two': 'ORIGINAL'}}}}
second = {'foo': {'bar': {'baz': {'one': 'UPDATE'}}}}
merge_dicts(first, second)
# The value from the second dict wins.
self.assertEqual(first['foo']['bar']['baz']['one'], 'UPDATE')
# And we still preserve the other attributes.
self.assertEqual(first['foo']['bar']['baz']['two'], 'ORIGINAL')
def test_merge_dicts_new_keys(self):
first = {
'foo': {'bar': {'baz': {'one': 'ORIGINAL', 'two': 'ORIGINAL'}}}}
second = {'foo': {'bar': {'baz': {'three': 'UPDATE'}}}}
merge_dicts(first, second)
self.assertEqual(first['foo']['bar']['baz']['one'], 'ORIGINAL')
self.assertEqual(first['foo']['bar']['baz']['two'], 'ORIGINAL')
self.assertEqual(first['foo']['bar']['baz']['three'], 'UPDATE')
def test_merge_empty_dict_does_nothing(self):
first = {'foo': {'bar': 'baz'}}
merge_dicts(first, {})
self.assertEqual(first, {'foo': {'bar': 'baz'}})
def test_more_than_one_sub_dict(self):
first = {'one': {'inner': 'ORIGINAL', 'inner2': 'ORIGINAL'},
'two': {'inner': 'ORIGINAL', 'inner2': 'ORIGINAL'}}
second = {'one': {'inner': 'UPDATE'}, 'two': {'inner': 'UPDATE'}}
merge_dicts(first, second)
self.assertEqual(first['one']['inner'], 'UPDATE')
self.assertEqual(first['one']['inner2'], 'ORIGINAL')
self.assertEqual(first['two']['inner'], 'UPDATE')
self.assertEqual(first['two']['inner2'], 'ORIGINAL')
def test_new_keys(self):
first = {'one': {'inner': 'ORIGINAL'}, 'two': {'inner': 'ORIGINAL'}}
second = {'three': {'foo': {'bar': 'baz'}}}
# In this case, second has no keys in common, but we'd still expect
# this to get merged.
merge_dicts(first, second)
self.assertEqual(first['three']['foo']['bar'], 'baz')
def test_list_values_no_append(self):
dict1 = {'Foo': ['old_foo_value']}
dict2 = {'Foo': ['new_foo_value']}
merge_dicts(dict1, dict2)
self.assertEqual(
dict1, {'Foo': ['new_foo_value']})
def test_list_values_append(self):
dict1 = {'Foo': ['old_foo_value']}
dict2 = {'Foo': ['new_foo_value']}
merge_dicts(dict1, dict2, append_lists=True)
self.assertEqual(
dict1, {'Foo': ['old_foo_value', 'new_foo_value']})
def test_list_values_mismatching_types(self):
dict1 = {'Foo': 'old_foo_value'}
dict2 = {'Foo': ['new_foo_value']}
merge_dicts(dict1, dict2, append_lists=True)
self.assertEqual(
dict1, {'Foo': ['new_foo_value']})
def test_list_values_missing_key(self):
dict1 = {}
dict2 = {'Foo': ['foo_value']}
merge_dicts(dict1, dict2, append_lists=True)
self.assertEqual(
dict1, {'Foo': ['foo_value']})
class TestGetServiceModuleName(unittest.TestCase):
def setUp(self):
self.service_description = {
'metadata': {
'serviceFullName': 'AWS MyService',
'apiVersion': '2014-01-01',
'endpointPrefix': 'myservice',
'signatureVersion': 'v4',
'protocol': 'query'
},
'operations': {},
'shapes': {},
}
self.service_model = ServiceModel(
self.service_description, 'myservice')
def test_default(self):
self.assertEqual(
get_service_module_name(self.service_model),
'MyService'
)
def test_client_name_with_amazon(self):
self.service_description['metadata']['serviceFullName'] = (
'Amazon MyService')
self.assertEqual(
get_service_module_name(self.service_model),
'MyService'
)
    def test_client_name_using_abbreviation(self):
self.service_description['metadata']['serviceAbbreviation'] = (
'Abbreviation')
self.assertEqual(
get_service_module_name(self.service_model),
'Abbreviation'
)
def test_client_name_with_non_alphabet_characters(self):
self.service_description['metadata']['serviceFullName'] = (
'Amazon My-Service')
self.assertEqual(
get_service_module_name(self.service_model),
'MyService'
)
def test_client_name_with_no_full_name_or_abbreviation(self):
del self.service_description['metadata']['serviceFullName']
self.assertEqual(
get_service_module_name(self.service_model),
'myservice'
)
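# Editor's sketch of the naming rule the cases above pin down (assumed
# order: serviceAbbreviation, then serviceFullName, then the service name
# itself, with AWS/Amazon prefixes and non-alphanumerics stripped).
def _sketch_service_module_name(metadata, service_name):
    import re
    name = metadata.get(
        'serviceAbbreviation',
        metadata.get('serviceFullName', service_name))
    name = re.sub(r'^(?:AWS|Amazon)\s*', '', name)
    return re.sub(r'\W+', '', name)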
class TestPercentEncodeSequence(unittest.TestCase):
def test_percent_encode_empty(self):
self.assertEqual(percent_encode_sequence({}), '')
def test_percent_encode_special_chars(self):
self.assertEqual(
percent_encode_sequence({'k1': 'with spaces++/'}),
'k1=with%20spaces%2B%2B%2F')
def test_percent_encode_string_string_tuples(self):
self.assertEqual(percent_encode_sequence([('k1', 'v1'), ('k2', 'v2')]),
'k1=v1&k2=v2')
def test_percent_encode_dict_single_pair(self):
self.assertEqual(percent_encode_sequence({'k1': 'v1'}), 'k1=v1')
def test_percent_encode_dict_string_string(self):
self.assertEqual(
percent_encode_sequence(OrderedDict([('k1', 'v1'), ('k2', 'v2')])),
'k1=v1&k2=v2')
def test_percent_encode_single_list_of_values(self):
self.assertEqual(percent_encode_sequence({'k1': ['a', 'b', 'c']}),
'k1=a&k1=b&k1=c')
def test_percent_encode_list_values_of_string(self):
self.assertEqual(
percent_encode_sequence(
OrderedDict([('k1', ['a', 'list']),
('k2', ['another', 'list'])])),
'k1=a&k1=list&k2=another&k2=list')
class TestPercentEncode(unittest.TestCase):
def test_percent_encode_obj(self):
self.assertEqual(percent_encode(1), '1')
def test_percent_encode_text(self):
self.assertEqual(percent_encode(u''), '')
self.assertEqual(percent_encode(u'a'), 'a')
self.assertEqual(percent_encode(u'\u0000'), '%00')
# Codepoint > 0x7f
self.assertEqual(percent_encode(u'\u2603'), '%E2%98%83')
# Codepoint > 0xffff
self.assertEqual(percent_encode(u'\U0001f32e'), '%F0%9F%8C%AE')
def test_percent_encode_bytes(self):
self.assertEqual(percent_encode(b''), '')
self.assertEqual(percent_encode(b'a'), u'a')
self.assertEqual(percent_encode(b'\x00'), u'%00')
# UTF-8 Snowman
self.assertEqual(percent_encode(b'\xe2\x98\x83'), '%E2%98%83')
# Arbitrary bytes (not valid UTF-8).
self.assertEqual(percent_encode(b'\x80\x00'), '%80%00')
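# Editor's sketch of the pair encoding asserted in the two classes above
# (an illustration only): every key and value is percent-encoded with no
# safe characters, and list values expand into repeated keys.
def _sketch_percent_encode_sequence(mapping):
    try:
        from urllib.parse import quote  # Python 3
    except ImportError:
        from urllib import quote  # Python 2
    pairs = mapping.items() if hasattr(mapping, 'items') else mapping
    encoded = []
    for key, value in pairs:
        values = value if isinstance(value, list) else [value]
        for item in values:
            encoded.append('%s=%s' % (quote(str(key), safe=''),
                                      quote(str(item), safe='')))
    return '&'.join(encoded)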
class TestSwitchHostS3Accelerate(unittest.TestCase):
def setUp(self):
self.original_url = 'https://s3.amazonaws.com/foo/key.txt'
self.request = AWSRequest(
method='PUT', headers={},
url=self.original_url
)
self.client_config = Config()
self.request.context['client_config'] = self.client_config
def test_switch_host(self):
switch_host_s3_accelerate(self.request, 'PutObject')
self.assertEqual(
self.request.url,
'https://s3-accelerate.amazonaws.com/foo/key.txt')
def test_do_not_switch_black_listed_operations(self):
# It should not get switched for ListBuckets, DeleteBucket, and
# CreateBucket
blacklist_ops = [
'ListBuckets',
'DeleteBucket',
'CreateBucket'
]
for op_name in blacklist_ops:
switch_host_s3_accelerate(self.request, op_name)
self.assertEqual(self.request.url, self.original_url)
def test_uses_original_endpoint_scheme(self):
self.request.url = 'http://s3.amazonaws.com/foo/key.txt'
switch_host_s3_accelerate(self.request, 'PutObject')
self.assertEqual(
self.request.url,
'http://s3-accelerate.amazonaws.com/foo/key.txt')
def test_uses_dualstack(self):
self.client_config.s3 = {'use_dualstack_endpoint': True}
self.original_url = 'https://s3.dualstack.amazonaws.com/foo/key.txt'
self.request = AWSRequest(
method='PUT', headers={},
url=self.original_url
)
self.request.context['client_config'] = self.client_config
switch_host_s3_accelerate(self.request, 'PutObject')
self.assertEqual(
self.request.url,
'https://s3-accelerate.dualstack.amazonaws.com/foo/key.txt')
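# Editor's sketch of the hostname rewrite asserted above (assumed rule:
# swap the leading 's3' label for 's3-accelerate', preserving the scheme,
# any '.dualstack' label, and the path).
def _sketch_accelerate_url(url):
    scheme, rest = url.split('://', 1)
    host, path = rest.split('/', 1)
    labels = host.split('.')
    labels[0] = 's3-accelerate'
    return '%s://%s/%s' % (scheme, '.'.join(labels), path)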
class TestDeepMerge(unittest.TestCase):
def test_simple_merge(self):
a = {'key': 'value'}
b = {'otherkey': 'othervalue'}
deep_merge(a, b)
expected = {'key': 'value', 'otherkey': 'othervalue'}
self.assertEqual(a, expected)
def test_merge_list(self):
# Lists are treated as opaque data and so no effort should be made to
# combine them.
a = {'key': ['original']}
b = {'key': ['new']}
deep_merge(a, b)
self.assertEqual(a, {'key': ['new']})
def test_merge_number(self):
# The value from b is always taken
a = {'key': 10}
b = {'key': 45}
deep_merge(a, b)
self.assertEqual(a, {'key': 45})
a = {'key': 45}
b = {'key': 10}
deep_merge(a, b)
self.assertEqual(a, {'key': 10})
def test_merge_boolean(self):
# The value from b is always taken
a = {'key': False}
b = {'key': True}
deep_merge(a, b)
self.assertEqual(a, {'key': True})
a = {'key': True}
b = {'key': False}
deep_merge(a, b)
self.assertEqual(a, {'key': False})
def test_merge_string(self):
a = {'key': 'value'}
b = {'key': 'othervalue'}
deep_merge(a, b)
self.assertEqual(a, {'key': 'othervalue'})
def test_merge_overrides_value(self):
# The value from b is always taken, even when it's a different type
a = {'key': 'original'}
b = {'key': {'newkey': 'newvalue'}}
deep_merge(a, b)
self.assertEqual(a, {'key': {'newkey': 'newvalue'}})
a = {'key': {'anotherkey': 'value'}}
b = {'key': 'newvalue'}
deep_merge(a, b)
self.assertEqual(a, {'key': 'newvalue'})
def test_deep_merge(self):
a = {
'first': {
'second': {
'key': 'value',
'otherkey': 'othervalue'
},
'key': 'value'
}
}
b = {
'first': {
'second': {
'otherkey': 'newvalue',
'yetanotherkey': 'yetanothervalue'
}
}
}
deep_merge(a, b)
expected = {
'first': {
'second': {
'key': 'value',
'otherkey': 'newvalue',
'yetanotherkey': 'yetanothervalue'
},
'key': 'value'
}
}
self.assertEqual(a, expected)
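# Editor's sketch of the merge rule the cases above establish (an
# illustration, not botocore's deep_merge itself): recurse only when both
# sides are dicts; otherwise the second mapping's value wins outright,
# lists included.
def _sketch_deep_merge(base, extra):
    for key, value in extra.items():
        if isinstance(value, dict) and isinstance(base.get(key), dict):
            _sketch_deep_merge(base[key], value)
        else:
            base[key] = value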
class TestS3RegionRedirector(unittest.TestCase):
def setUp(self):
self.endpoint_bridge = mock.Mock()
self.endpoint_bridge.resolve.return_value = {
'endpoint_url': 'https://eu-central-1.amazonaws.com'
}
self.client = mock.Mock()
self.cache = {}
self.redirector = S3RegionRedirector(self.endpoint_bridge, self.client)
self.set_client_response_headers({})
self.operation = mock.Mock()
self.operation.name = 'foo'
def set_client_response_headers(self, headers):
error_response = ClientError({
'Error': {
'Code': '',
'Message': ''
},
'ResponseMetadata': {
'HTTPHeaders': headers
}
}, 'HeadBucket')
success_response = {
'ResponseMetadata': {
'HTTPHeaders': headers
}
}
self.client.head_bucket.side_effect = [
error_response, success_response]
def test_set_request_url(self):
params = {'url': 'https://us-west-2.amazonaws.com/foo'}
context = {'signing': {
'endpoint': 'https://eu-central-1.amazonaws.com'
}}
self.redirector.set_request_url(params, context)
self.assertEqual(
params['url'], 'https://eu-central-1.amazonaws.com/foo')
def test_only_changes_request_url_if_endpoint_present(self):
params = {'url': 'https://us-west-2.amazonaws.com/foo'}
context = {}
self.redirector.set_request_url(params, context)
self.assertEqual(
params['url'], 'https://us-west-2.amazonaws.com/foo')
def test_set_request_url_keeps_old_scheme(self):
params = {'url': 'http://us-west-2.amazonaws.com/foo'}
context = {'signing': {
'endpoint': 'https://eu-central-1.amazonaws.com'
}}
self.redirector.set_request_url(params, context)
self.assertEqual(
params['url'], 'http://eu-central-1.amazonaws.com/foo')
def test_sets_signing_context_from_cache(self):
signing_context = {'endpoint': 'bar'}
self.cache['foo'] = signing_context
self.redirector = S3RegionRedirector(
self.endpoint_bridge, self.client, cache=self.cache)
params = {'Bucket': 'foo'}
context = {}
self.redirector.redirect_from_cache(params, context)
self.assertEqual(context.get('signing'), signing_context)
def test_only_changes_context_if_bucket_in_cache(self):
signing_context = {'endpoint': 'bar'}
self.cache['bar'] = signing_context
self.redirector = S3RegionRedirector(
self.endpoint_bridge, self.client, cache=self.cache)
params = {'Bucket': 'foo'}
context = {}
self.redirector.redirect_from_cache(params, context)
self.assertNotEqual(context.get('signing'), signing_context)
def test_redirect_from_error(self):
request_dict = {
'context': {'signing': {'bucket': 'foo'}},
'url': 'https://us-west-2.amazonaws.com/foo'
}
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo'
},
'ResponseMetadata': {
'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'}
}
})
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
        # The return value needs to be 0 so that there is no retry delay
self.assertEqual(redirect_response, 0)
self.assertEqual(
request_dict['url'], 'https://eu-central-1.amazonaws.com/foo')
expected_signing_context = {
'endpoint': 'https://eu-central-1.amazonaws.com',
'bucket': 'foo',
'region': 'eu-central-1'
}
signing_context = request_dict['context'].get('signing')
self.assertEqual(signing_context, expected_signing_context)
    def test_does_not_redirect_unless_permanentredirect_received(self):
request_dict = {}
response = (None, {})
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
self.assertIsNone(redirect_response)
self.assertEqual(request_dict, {})
def test_does_not_redirect_if_region_cannot_be_found(self):
request_dict = {'url': 'https://us-west-2.amazonaws.com/foo',
'context': {'signing': {'bucket': 'foo'}}}
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo'
},
'ResponseMetadata': {
'HTTPHeaders': {}
}
})
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
self.assertIsNone(redirect_response)
def test_redirects_301(self):
request_dict = {'url': 'https://us-west-2.amazonaws.com/foo',
'context': {'signing': {'bucket': 'foo'}}}
response = (None, {
'Error': {
'Code': '301',
'Message': 'Moved Permanently'
},
'ResponseMetadata': {
'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'}
}
})
self.operation.name = 'HeadObject'
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
self.assertEqual(redirect_response, 0)
self.operation.name = 'ListObjects'
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
self.assertIsNone(redirect_response)
def test_does_not_redirect_if_None_response(self):
request_dict = {'url': 'https://us-west-2.amazonaws.com/foo',
'context': {'signing': {'bucket': 'foo'}}}
response = None
redirect_response = self.redirector.redirect_from_error(
request_dict, response, self.operation)
self.assertIsNone(redirect_response)
def test_get_region_from_response(self):
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo'
},
'ResponseMetadata': {
'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'}
}
})
region = self.redirector.get_bucket_region('foo', response)
self.assertEqual(region, 'eu-central-1')
def test_get_region_from_response_error_body(self):
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo',
'Region': 'eu-central-1'
},
'ResponseMetadata': {
'HTTPHeaders': {}
}
})
region = self.redirector.get_bucket_region('foo', response)
self.assertEqual(region, 'eu-central-1')
def test_get_region_from_head_bucket_error(self):
self.set_client_response_headers(
{'x-amz-bucket-region': 'eu-central-1'})
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo',
},
'ResponseMetadata': {
'HTTPHeaders': {}
}
})
region = self.redirector.get_bucket_region('foo', response)
self.assertEqual(region, 'eu-central-1')
def test_get_region_from_head_bucket_success(self):
success_response = {
'ResponseMetadata': {
'HTTPHeaders': {'x-amz-bucket-region': 'eu-central-1'}
}
}
self.client.head_bucket.side_effect = None
self.client.head_bucket.return_value = success_response
response = (None, {
'Error': {
'Code': 'PermanentRedirect',
'Endpoint': 'foo.eu-central-1.amazonaws.com',
'Bucket': 'foo',
},
'ResponseMetadata': {
'HTTPHeaders': {}
}
})
region = self.redirector.get_bucket_region('foo', response)
self.assertEqual(region, 'eu-central-1')
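# Editor's sketch of the region lookup the three cases above trace out
# (assumed order, since the fixtures never conflict): the
# x-amz-bucket-region response header first, then the error body's
# 'Region' field, then a HeadBucket probe against the bucket.
def _sketch_bucket_region(error_response, head_bucket_headers):
    headers = error_response['ResponseMetadata']['HTTPHeaders']
    if 'x-amz-bucket-region' in headers:
        return headers['x-amz-bucket-region']
    if 'Region' in error_response.get('Error', {}):
        return error_response['Error']['Region']
    return head_bucket_headers.get('x-amz-bucket-region')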
class TestContainerMetadataFetcher(unittest.TestCase):
def setUp(self):
self.responses = []
self.http = mock.Mock()
self.sleep = mock.Mock()
def create_fetcher(self):
return ContainerMetadataFetcher(self.http, sleep=self.sleep)
def fake_response(self, status_code, body):
response = mock.Mock()
response.status_code = status_code
response.text = body
return response
def set_http_responses_to(self, *responses):
http_responses = []
for response in responses:
if isinstance(response, Exception):
# Simulating an error condition.
http_response = response
elif hasattr(response, 'status_code'):
# It's a precreated fake_response.
http_response = response
else:
http_response = self.fake_response(
status_code=200, body=json.dumps(response))
http_responses.append(http_response)
self.http.get.side_effect = http_responses
def assert_can_retrieve_metadata_from(self, full_uri):
response_body = {'foo': 'bar'}
self.set_http_responses_to(response_body)
fetcher = self.create_fetcher()
response = fetcher.retrieve_full_uri(full_uri)
self.assertEqual(response, response_body)
self.http.get.assert_called_with(
full_uri, headers={'Accept': 'application/json'},
timeout=fetcher.TIMEOUT_SECONDS,
)
def assert_host_is_not_allowed(self, full_uri):
response_body = {'foo': 'bar'}
self.set_http_responses_to(response_body)
fetcher = self.create_fetcher()
with self.assertRaisesRegexp(ValueError, 'Unsupported host'):
fetcher.retrieve_full_uri(full_uri)
self.assertFalse(self.http.get.called)
def test_can_specify_extra_headers_are_merged(self):
headers = {
# The 'Accept' header will override the
# default Accept header of application/json.
'Accept': 'application/not-json',
'X-Other-Header': 'foo',
}
self.set_http_responses_to({'foo': 'bar'})
fetcher = self.create_fetcher()
response = fetcher.retrieve_full_uri(
'http://localhost', headers)
self.http.get.assert_called_with(
'http://localhost', headers=headers,
timeout=fetcher.TIMEOUT_SECONDS,
)
def test_can_retrieve_uri(self):
json_body = {
"AccessKeyId" : "a",
"SecretAccessKey" : "b",
"Token" : "c",
"Expiration" : "d"
}
self.set_http_responses_to(json_body)
fetcher = self.create_fetcher()
response = fetcher.retrieve_uri('/foo?id=1')
self.assertEqual(response, json_body)
# Ensure we made calls to the right endpoint.
self.http.get.assert_called_with(
'http://169.254.170.2/foo?id=1',
headers={'Accept': 'application/json'},
timeout=fetcher.TIMEOUT_SECONDS,
)
def test_can_retry_requests(self):
success_response = {
"AccessKeyId" : "a",
"SecretAccessKey" : "b",
"Token" : "c",
"Expiration" : "d"
}
self.set_http_responses_to(
# First response is a connection error, should
# be retried.
requests.ConnectionError(),
# Second response is the successful JSON response
# with credentials.
success_response,
)
fetcher = self.create_fetcher()
response = fetcher.retrieve_uri('/foo?id=1')
self.assertEqual(response, success_response)
def test_propagates_credential_error_on_http_errors(self):
self.set_http_responses_to(
# In this scenario, we never get a successful response.
requests.ConnectionError(),
requests.ConnectionError(),
requests.ConnectionError(),
requests.ConnectionError(),
requests.ConnectionError(),
)
# As a result, we expect an appropriate error to be raised.
fetcher = self.create_fetcher()
with self.assertRaises(MetadataRetrievalError):
fetcher.retrieve_uri('/foo?id=1')
self.assertEqual(self.http.get.call_count, fetcher.RETRY_ATTEMPTS)
def test_error_raised_on_non_200_response(self):
self.set_http_responses_to(
self.fake_response(status_code=404, body='Error not found'),
self.fake_response(status_code=404, body='Error not found'),
self.fake_response(status_code=404, body='Error not found'),
)
fetcher = self.create_fetcher()
with self.assertRaises(MetadataRetrievalError):
fetcher.retrieve_uri('/foo?id=1')
# Should have tried up to RETRY_ATTEMPTS.
self.assertEqual(self.http.get.call_count, fetcher.RETRY_ATTEMPTS)
def test_error_raised_on_no_json_response(self):
        # If the service returns a success response but with a body that
        # does not contain JSON, we should still retry up to RETRY_ATTEMPTS,
        # but after exhausting retries we propagate the exception.
self.set_http_responses_to(
self.fake_response(status_code=200, body='Not JSON'),
self.fake_response(status_code=200, body='Not JSON'),
self.fake_response(status_code=200, body='Not JSON'),
)
fetcher = self.create_fetcher()
with self.assertRaises(MetadataRetrievalError):
fetcher.retrieve_uri('/foo?id=1')
# Should have tried up to RETRY_ATTEMPTS.
self.assertEqual(self.http.get.call_count, fetcher.RETRY_ATTEMPTS)
def test_can_retrieve_full_uri_with_fixed_ip(self):
self.assert_can_retrieve_metadata_from(
'http://%s/foo?id=1' % ContainerMetadataFetcher.IP_ADDRESS)
def test_localhost_http_is_allowed(self):
self.assert_can_retrieve_metadata_from('http://localhost/foo')
def test_localhost_with_port_http_is_allowed(self):
self.assert_can_retrieve_metadata_from('http://localhost:8000/foo')
def test_localhost_https_is_allowed(self):
self.assert_can_retrieve_metadata_from('https://localhost/foo')
def test_can_use_127_ip_addr(self):
self.assert_can_retrieve_metadata_from('https://127.0.0.1/foo')
def test_can_use_127_ip_addr_with_port(self):
self.assert_can_retrieve_metadata_from('https://127.0.0.1:8080/foo')
def test_link_local_http_is_not_allowed(self):
self.assert_host_is_not_allowed('http://169.254.0.1/foo')
def test_link_local_https_is_not_allowed(self):
self.assert_host_is_not_allowed('https://169.254.0.1/foo')
def test_non_link_local_nonallowed_url(self):
self.assert_host_is_not_allowed('http://169.1.2.3/foo')
def test_error_raised_on_nonallowed_url(self):
self.assert_host_is_not_allowed('http://somewhere.com/foo')
def test_external_host_not_allowed_if_https(self):
self.assert_host_is_not_allowed('https://somewhere.com/foo')
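# Editor's sketch of the host allowlist the cases above trace out (an
# assumed rule: only the fixed ECS task metadata IP and local loopback
# hosts may be queried for credentials).
def _sketch_metadata_host_allowed(hostname):
    return (hostname in ('169.254.170.2', 'localhost') or
            hostname.startswith('127.'))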
class TestUnsigned(unittest.TestCase):
def test_copy_returns_same_object(self):
self.assertIs(botocore.UNSIGNED, copy.copy(botocore.UNSIGNED))
def test_deepcopy_returns_same_object(self):
self.assertIs(botocore.UNSIGNED, copy.deepcopy(botocore.UNSIGNED))
if __name__ == '__main__':
unittest.main()
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from tests.unit import BaseResponseTest
import datetime
from dateutil.tz import tzutc
import botocore
from botocore import response
from botocore.compat import six
from botocore.exceptions import IncompleteReadError
from botocore.vendored.requests.models import Response, Request
XMLBODY1 = (b'<?xml version="1.0" encoding="UTF-8"?><Error>'
b'<Code>AccessDenied</Code>'
b'<Message>Access Denied</Message>'
b'<RequestId>XXXXXXXXXXXXXXXX</RequestId>'
b'<HostId>AAAAAAAAAAAAAAAAAAA</HostId>'
b'</Error>')
XMLBODY2 = (b'<?xml version="1.0" encoding="UTF-8"?>'
b'<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
b'<Name>mybucket</Name><Prefix></Prefix><Marker></Marker>'
b'<MaxKeys>1000</MaxKeys><IsTruncated>false</IsTruncated>'
b'<Contents><Key>test.png</Key><LastModified>2014-03-01T17:06:40.000Z</LastModified>'
b'<ETag>"00000000000000000000000000000000"</ETag><Size>6702</Size>'
b'<Owner><ID>AAAAAAAAAAAAAAAAAAA</ID>'
b'<DisplayName>dummy</DisplayName></Owner>'
b'<StorageClass>STANDARD</StorageClass></Contents></ListBucketResult>')
class TestStreamWrapper(unittest.TestCase):
def test_streaming_wrapper_validates_content_length(self):
body = six.BytesIO(b'1234567890')
stream = response.StreamingBody(body, content_length=10)
self.assertEqual(stream.read(), b'1234567890')
def test_streaming_body_with_invalid_length(self):
body = six.BytesIO(b'123456789')
stream = response.StreamingBody(body, content_length=10)
with self.assertRaises(IncompleteReadError):
self.assertEqual(stream.read(9), b'123456789')
            # The next read will return nothing and raise an
            # IncompleteReadError because we expected 10 bytes, not 9.
stream.read()
def test_streaming_body_with_zero_read(self):
body = six.BytesIO(b'1234567890')
stream = response.StreamingBody(body, content_length=10)
chunk = stream.read(0)
self.assertEqual(chunk, b'')
self.assertEqual(stream.read(), b'1234567890')
def test_streaming_body_with_single_read(self):
body = six.BytesIO(b'123456789')
stream = response.StreamingBody(body, content_length=10)
with self.assertRaises(IncompleteReadError):
stream.read()
def test_streaming_body_closes(self):
body = six.BytesIO(b'1234567890')
stream = response.StreamingBody(body, content_length=10)
self.assertFalse(body.closed)
stream.close()
self.assertTrue(body.closed)
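# Editor's sketch of the validation the tests above rely on (an assumed
# shape, not botocore's StreamingBody): count bytes as they are read and
# raise once the stream is exhausted short of the declared Content-Length.
class _SketchLengthCheckedStream(object):
    def __init__(self, raw, content_length):
        self._raw = raw
        self._expected = content_length
        self._seen = 0
    def read(self, amt=None):
        chunk = self._raw.read() if amt is None else self._raw.read(amt)
        self._seen += len(chunk)
        if not chunk and amt != 0 and self._seen != self._expected:
            # Nothing left to read, but fewer bytes arrived than the
            # declared Content-Length promised.
            raise ValueError('%s bytes read, %s expected'
                             % (self._seen, self._expected))
        return chunk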
class TestGetResponse(BaseResponseTest):
maxDiff = None
def test_get_response_streaming_ok(self):
http_response = Response()
http_response.headers = {
'content-type': 'image/png',
'server': 'AmazonS3',
'AcceptRanges': 'bytes',
'transfer-encoding': 'chunked',
'ETag': '"00000000000000000000000000000000"',
}
http_response.raw = six.BytesIO(b'\x89PNG\r\n\x1a\n\x00\x00')
http_response.status_code = 200
http_response.reason = 'OK'
session = botocore.session.get_session()
service_model = session.get_service_model('s3')
operation_model = service_model.operation_model('GetObject')
res = response.get_response(operation_model, http_response)
self.assertTrue(isinstance(res[1]['Body'], response.StreamingBody))
self.assertEqual(res[1]['ETag'],
'"00000000000000000000000000000000"')
def test_get_response_streaming_ng(self):
http_response = Response()
http_response.headers = {
'content-type': 'application/xml',
'date': 'Sat, 08 Mar 2014 12:05:44 GMT',
'server': 'AmazonS3',
'transfer-encoding': 'chunked',
'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
'x-amz-request-id': 'XXXXXXXXXXXXXXXX'}
http_response.raw = six.BytesIO(XMLBODY1)
http_response.status_code = 403
http_response.reason = 'Forbidden'
session = botocore.session.get_session()
service_model = session.get_service_model('s3')
operation_model = service_model.operation_model('GetObject')
self.assert_response_with_subset_metadata(
response.get_response(operation_model, http_response)[1],
{'Error': {'Message': 'Access Denied',
'Code': 'AccessDenied'},
'ResponseMetadata': {'HostId': 'AAAAAAAAAAAAAAAAAAA',
'RequestId': 'XXXXXXXXXXXXXXXX',
'HTTPStatusCode': 403},
}
)
def test_get_response_nonstreaming_ok(self):
http_response = Response()
http_response.headers = {
'content-type': 'application/xml',
'date': 'Sun, 09 Mar 2014 02:55:43 GMT',
'server': 'AmazonS3',
'transfer-encoding': 'chunked',
'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
'x-amz-request-id': 'XXXXXXXXXXXXXXXX'}
http_response.raw = six.BytesIO(XMLBODY1)
http_response.status_code = 403
http_response.reason = 'Forbidden'
http_response.request = Request()
session = botocore.session.get_session()
service_model = session.get_service_model('s3')
operation_model = service_model.operation_model('ListObjects')
self.assert_response_with_subset_metadata(
response.get_response(operation_model, http_response)[1],
{
'ResponseMetadata': {
'RequestId': 'XXXXXXXXXXXXXXXX',
'HostId': 'AAAAAAAAAAAAAAAAAAA',
'HTTPStatusCode': 403
},
'Error': {
'Message': 'Access Denied',
'Code': 'AccessDenied'
}
})
def test_get_response_nonstreaming_ng(self):
http_response = Response()
http_response.headers = {
'content-type': 'application/xml',
'date': 'Sat, 08 Mar 2014 12:05:44 GMT',
'server': 'AmazonS3',
'transfer-encoding': 'chunked',
'x-amz-id-2': 'AAAAAAAAAAAAAAAAAAA',
'x-amz-request-id': 'XXXXXXXXXXXXXXXX'}
http_response.raw = six.BytesIO(XMLBODY2)
http_response.status_code = 200
http_response.reason = 'ok'
http_response.request = Request()
session = botocore.session.get_session()
service_model = session.get_service_model('s3')
operation_model = service_model.operation_model('ListObjects')
self.assert_response_with_subset_metadata(
response.get_response(operation_model, http_response)[1],
{u'Contents': [{u'ETag': '"00000000000000000000000000000000"',
u'Key': 'test.png',
u'LastModified': datetime.datetime(2014, 3, 1, 17, 6, 40, tzinfo=tzutc()),
u'Owner': {u'DisplayName': 'dummy',
u'ID': 'AAAAAAAAAAAAAAAAAAA'},
u'Size': 6702,
u'StorageClass': 'STANDARD'}],
u'IsTruncated': False,
u'Marker': "",
u'MaxKeys': 1000,
u'Name': 'mybucket',
u'Prefix': "",
'ResponseMetadata': {
'RequestId': 'XXXXXXXXXXXXXXXX',
'HostId': 'AAAAAAAAAAAAAAAAAAA',
'HTTPStatusCode': 200,
}}
)
|
#!/usr/bin/env python
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from tests import BaseSessionTest
from mock import patch, Mock
from botocore.compat import OrderedDict
from botocore.handlers import set_list_objects_encoding_type_url
class TestS3Addressing(BaseSessionTest):
def setUp(self):
super(TestS3Addressing, self).setUp()
self.region_name = 'us-east-1'
self.signature_version = 's3'
self.mock_response = Mock()
self.mock_response.content = ''
self.mock_response.headers = {}
self.mock_response.status_code = 200
self.session.unregister('before-parameter-build.s3.ListObjects',
set_list_objects_encoding_type_url)
def get_prepared_request(self, operation, params,
force_hmacv1=False):
if force_hmacv1:
self.session.register('choose-signer', self.enable_hmacv1)
with patch('botocore.endpoint.BotocoreHTTPSession') as \
mock_http_session:
mock_send = mock_http_session.return_value.send
mock_send.return_value = self.mock_response
client = self.session.create_client('s3', self.region_name)
getattr(client, operation)(**params)
# Return the request that was sent over the wire.
return mock_send.call_args[0][0]
def enable_hmacv1(self, **kwargs):
return 's3'
def test_list_objects_dns_name(self):
params = {'Bucket': 'safename'}
prepared_request = self.get_prepared_request('list_objects', params,
force_hmacv1=True)
self.assertEqual(prepared_request.url,
'https://safename.s3.amazonaws.com/')
def test_list_objects_non_dns_name(self):
params = {'Bucket': 'un_safe_name'}
prepared_request = self.get_prepared_request('list_objects', params,
force_hmacv1=True)
self.assertEqual(prepared_request.url,
'https://s3.amazonaws.com/un_safe_name')
def test_list_objects_dns_name_non_classic(self):
self.region_name = 'us-west-2'
params = {'Bucket': 'safename'}
prepared_request = self.get_prepared_request('list_objects', params,
force_hmacv1=True)
self.assertEqual(prepared_request.url,
'https://safename.s3.amazonaws.com/')
def test_list_objects_unicode_query_string_eu_central_1(self):
self.region_name = 'eu-central-1'
params = OrderedDict([('Bucket', 'safename'),
('Marker', u'\xe4\xf6\xfc-01.txt')])
prepared_request = self.get_prepared_request('list_objects', params)
self.assertEqual(
prepared_request.url,
('https://s3.eu-central-1.amazonaws.com/safename'
'?marker=%C3%A4%C3%B6%C3%BC-01.txt')
)
def test_list_objects_in_restricted_regions(self):
self.region_name = 'us-gov-west-1'
params = {'Bucket': 'safename'}
prepared_request = self.get_prepared_request('list_objects', params)
        # Note how we keep the region-specific endpoint here.
self.assertEqual(prepared_request.url,
'https://s3.us-gov-west-1.amazonaws.com/safename')
def test_list_objects_in_fips(self):
self.region_name = 'fips-us-gov-west-1'
params = {'Bucket': 'safename'}
prepared_request = self.get_prepared_request('list_objects', params)
        # Note how we keep the region-specific endpoint here.
self.assertEqual(
prepared_request.url,
'https://s3-fips-us-gov-west-1.amazonaws.com/safename')
def test_list_objects_non_dns_name_non_classic(self):
self.region_name = 'us-west-2'
params = {'Bucket': 'un_safe_name'}
prepared_request = self.get_prepared_request('list_objects', params)
self.assertEqual(prepared_request.url,
'https://s3.us-west-2.amazonaws.com/un_safe_name')
def test_put_object_dns_name_non_classic(self):
self.region_name = 'us-west-2'
file_path = os.path.join(os.path.dirname(__file__),
'put_object_data')
with open(file_path, 'rb') as fp:
params = {
'Bucket': 'my.valid.name',
'Key': 'mykeyname',
'Body': fp,
'ACL': 'public-read',
'ContentLanguage': 'piglatin',
'ContentType': 'text/plain'
}
prepared_request = self.get_prepared_request('put_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.us-west-2.amazonaws.com/my.valid.name/mykeyname')
def test_put_object_dns_name_classic(self):
self.region_name = 'us-east-1'
file_path = os.path.join(os.path.dirname(__file__),
'put_object_data')
with open(file_path, 'rb') as fp:
params = {
'Bucket': 'my.valid.name',
'Key': 'mykeyname',
'Body': fp,
'ACL': 'public-read',
'ContentLanguage': 'piglatin',
'ContentType': 'text/plain'
}
prepared_request = self.get_prepared_request('put_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.amazonaws.com/my.valid.name/mykeyname')
def test_put_object_dns_name_single_letter_non_classic(self):
self.region_name = 'us-west-2'
file_path = os.path.join(os.path.dirname(__file__),
'put_object_data')
with open(file_path, 'rb') as fp:
params = {
'Bucket': 'a.valid.name',
'Key': 'mykeyname',
'Body': fp,
'ACL': 'public-read',
'ContentLanguage': 'piglatin',
'ContentType': 'text/plain'
}
prepared_request = self.get_prepared_request('put_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.us-west-2.amazonaws.com/a.valid.name/mykeyname')
def test_get_object_non_dns_name_non_classic(self):
self.region_name = 'us-west-2'
params = {
'Bucket': 'AnInvalidName',
'Key': 'mykeyname'
}
prepared_request = self.get_prepared_request('get_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.us-west-2.amazonaws.com/AnInvalidName/mykeyname')
def test_get_object_non_dns_name_classic(self):
self.region_name = 'us-east-1'
params = {
'Bucket': 'AnInvalidName',
'Key': 'mykeyname'
}
prepared_request = self.get_prepared_request('get_object', params)
self.assertEqual(prepared_request.url,
'https://s3.amazonaws.com/AnInvalidName/mykeyname')
def test_get_object_ip_address_name_non_classic(self):
self.region_name = 'us-west-2'
params = {
'Bucket': '192.168.5.4',
'Key': 'mykeyname'}
prepared_request = self.get_prepared_request('get_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.us-west-2.amazonaws.com/192.168.5.4/mykeyname')
def test_get_object_almost_an_ip_address_name_non_classic(self):
self.region_name = 'us-west-2'
params = {
'Bucket': '192.168.5.256',
'Key': 'mykeyname'}
prepared_request = self.get_prepared_request('get_object', params)
self.assertEqual(
prepared_request.url,
'https://s3.us-west-2.amazonaws.com/192.168.5.256/mykeyname')
def test_invalid_endpoint_raises_exception(self):
with self.assertRaisesRegexp(ValueError, 'Invalid endpoint'):
self.session.create_client('s3', 'Invalid region')
def test_non_existent_region(self):
# If I ask for a region that does not
# exist on a global endpoint, such as:
client = self.session.create_client('s3', 'us-west-111')
# Then the default endpoint heuristic will apply and we'll
# get the region name as specified.
self.assertEqual(client.meta.region_name, 'us-west-111')
        # Why not fix this? Well, backwards compatibility for one thing.
        # The other reason is that it was intended to accommodate this
        # use case. Let's say I have us-west-2 set as my default region,
        # possibly through an env var or config variable. Well, by default,
        # we'd make a call like:
client = self.session.create_client('iam', 'us-west-2')
        # Instead of giving the user an error, we instead give
        # them the partition-global endpoint.
self.assertEqual(client.meta.region_name, 'aws-global')
# But if they request an endpoint that we *do* know about, we use
# that specific endpoint.
client = self.session.create_client('iam', 'aws-us-gov-global')
self.assertEqual(client.meta.region_name, 'aws-us-gov-global')
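# A minimal sketch (not botocore's actual implementation) of the name-based
# part of the addressing rule the tests above exercise: over HTTPS,
# virtual-host style is only considered for lowercase DNS-compatible bucket
# names without dots (dots break wildcard certificate validation); everything
# else, including IP-like and uppercase names, is addressed path-style. As
# the restricted-region tests show, region and signature version factor in
# as well.
import re
def _uses_virtual_host_style(bucket):
    # Hypothetical helper for illustration only.
    return re.match(r'^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$', bucket) is not None
assert _uses_virtual_host_style('safename')
assert not _uses_virtual_host_style('un_safe_name')
assert not _uses_virtual_host_style('my.valid.name')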
|
#!/usr/bin/env python
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import os
import tempfile
import shutil
import io
import socket
import sys
from mock import Mock, patch
from botocore.exceptions import UnseekableStreamError
from botocore.awsrequest import AWSRequest, AWSPreparedRequest
from botocore.awsrequest import AWSHTTPConnection
from botocore.awsrequest import prepare_request_dict, create_request_object
from botocore.compat import file_type, six
class IgnoreCloseBytesIO(io.BytesIO):
def close(self):
pass
class FakeSocket(object):
def __init__(self, read_data, fileclass=IgnoreCloseBytesIO):
self.sent_data = b''
self.read_data = read_data
self.fileclass = fileclass
self._fp_object = None
def sendall(self, data):
self.sent_data += data
def makefile(self, mode, bufsize=None):
if self._fp_object is None:
self._fp_object = self.fileclass(self.read_data)
return self._fp_object
def close(self):
pass
class BytesIOWithLen(six.BytesIO):
def __len__(self):
return len(self.getvalue())
class Unseekable(file_type):
def __init__(self, stream):
self._stream = stream
def read(self):
return self._stream.read()
def seek(self, offset, whence):
# This is a case where seek() exists as part of the object's interface,
# but it doesn't actually work (for example socket.makefile(), which
# will raise an io.* error on python3).
raise ValueError("Underlying stream does not support seeking.")
class Seekable(object):
"""This class represents a bare-bones,seekable file-like object
Note it does not include some of the other attributes of other
file-like objects such as StringIO's getvalue() and file object's fileno
property. If the file-like object does not have either of these attributes
requests will not calculate the content length even though it is still
possible to calculate it.
"""
def __init__(self, stream):
self._stream = stream
def __iter__(self):
return iter(self._stream)
def read(self):
return self._stream.read()
def seek(self, offset, whence=0):
self._stream.seek(offset, whence)
def tell(self):
return self._stream.tell()
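# Unseekable and Seekable above deliberately expose only a minimal surface
# (read/seek, plus tell/__iter__ on Seekable) so that the code under test is
# forced through generic duck-typed stream handling instead of shortcuts
# based on getvalue(), fileno(), or __len__.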
class TestAWSRequest(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.request = AWSRequest(url='http://example.com')
self.prepared_request = self.request.prepare()
self.filename = os.path.join(self.tempdir, 'foo')
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_should_reset_stream(self):
with open(self.filename, 'wb') as f:
f.write(b'foobarbaz')
with open(self.filename, 'rb') as body:
self.prepared_request.body = body
# Now pretend we try to send the request.
# This means that we read the body:
body.read()
# And create a response object that indicates
# a redirect.
fake_response = Mock()
fake_response.status_code = 307
# Then requests calls our reset_stream hook.
self.prepared_request.reset_stream_on_redirect(fake_response)
# The stream should now be reset.
self.assertEqual(body.tell(), 0)
def test_cannot_reset_stream_raises_error(self):
with open(self.filename, 'wb') as f:
f.write(b'foobarbaz')
with open(self.filename, 'rb') as body:
self.prepared_request.body = Unseekable(body)
# Now pretend we try to send the request.
# This means that we read the body:
body.read()
# And create a response object that indicates
# a redirect
fake_response = Mock()
fake_response.status_code = 307
# Then requests calls our reset_stream hook.
with self.assertRaises(UnseekableStreamError):
self.prepared_request.reset_stream_on_redirect(fake_response)
def test_duck_type_for_file_check(self):
        # As part of determining whether or not we can rewind a stream,
        # we first need to determine if the thing is a file-like object.
        # We should not be using an isinstance check. Instead, we should
        # be using duck-type checks.
class LooksLikeFile(object):
def __init__(self):
self.seek_called = False
def read(self, amount=None):
pass
def seek(self, where):
self.seek_called = True
looks_like_file = LooksLikeFile()
self.prepared_request.body = looks_like_file
fake_response = Mock()
fake_response.status_code = 307
# Then requests calls our reset_stream hook.
self.prepared_request.reset_stream_on_redirect(fake_response)
# The stream should now be reset.
self.assertTrue(looks_like_file.seek_called)
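    # A sketch of the duck-type check the test above pins down, assuming
    # nothing beyond what the test itself asserts (illustrative only, not
    # botocore's actual code):
    #
    #     def can_be_rewound(body):
    #         return hasattr(body, 'read') and hasattr(body, 'seek')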
class TestAWSPreparedRequest(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.filename = os.path.join(self.tempdir, 'foo')
self.request = AWSRequest(url='http://example.com')
self.prepared_request = AWSPreparedRequest(self.request)
self.prepared_request.prepare_headers(self.request.headers)
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_prepare_body_content_adds_content_length(self):
content = b'foobarbaz'
with open(self.filename, 'wb') as f:
f.write(content)
with open(self.filename, 'rb') as f:
data = Seekable(f)
self.prepared_request.prepare_body(data=data, files=None)
self.assertEqual(
self.prepared_request.headers['Content-Length'],
str(len(content)))
def test_prepare_body_removes_transfer_encoding(self):
self.prepared_request.headers['Transfer-Encoding'] = 'chunked'
content = b'foobarbaz'
with open(self.filename, 'wb') as f:
f.write(content)
with open(self.filename, 'rb') as f:
data = Seekable(f)
self.prepared_request.prepare_body(data=data, files=None)
self.assertEqual(
self.prepared_request.headers['Content-Length'],
str(len(content)))
self.assertNotIn('Transfer-Encoding', self.prepared_request.headers)
def test_prepare_body_ignores_existing_transfer_encoding(self):
content = b'foobarbaz'
self.prepared_request.headers['Transfer-Encoding'] = 'chunked'
with open(self.filename, 'wb') as f:
f.write(content)
with open(self.filename, 'rb') as f:
self.prepared_request.prepare_body(data=f, files=None)
            # The Transfer-Encoding header should not be removed when the
            # custom logic in our ``prepare_body`` method did not add the
            # Content-Length itself. Here the ``data`` is a regular file
            # handle, so it is requests' own ``prepare_body`` that adds
            # the Content-Length header.
self.assertEqual(
self.prepared_request.headers['Transfer-Encoding'],
'chunked')
class TestAWSHTTPConnection(unittest.TestCase):
def create_tunneled_connection(self, url, port, response):
s = FakeSocket(response)
conn = AWSHTTPConnection(url, port)
conn.sock = s
conn._tunnel_host = url
conn._tunnel_port = port
conn._tunnel_headers = {'key': 'value'}
# Create a mock response.
self.mock_response = Mock()
self.mock_response.fp = Mock()
        # Imitate the readline function by feeding the mocked readline a
        # list of lines as its side effect, so we can track how the
        # response is processed in ``_tunnel()``.
        delimiter = b'\r\n'
        side_effect = []
        response_components = response.split(delimiter)
        for i in range(len(response_components)):
            new_component = response_components[i]
            # Only append the delimiter if this is not the last component,
            # which should be an empty string.
            if i != len(response_components) - 1:
                new_component += delimiter
            side_effect.append(new_component)
self.mock_response.fp.readline.side_effect = side_effect
response_components = response.split(b' ')
self.mock_response._read_status.return_value = (
response_components[0], int(response_components[1]),
response_components[2]
)
conn.response_class = Mock()
conn.response_class.return_value = self.mock_response
return conn
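    # For example, response=b'HTTP/1.1 200 OK\r\n\r\n' yields the readline
    # side effect [b'HTTP/1.1 200 OK\r\n', b'\r\n', b''], which is what lets
    # the _tunnel tests below assert on exact readline() call counts.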
def test_expect_100_continue_returned(self):
with patch('select.select') as select_mock:
# Shows the server first sending a 100 continue response
# then a 200 ok response.
s = FakeSocket(b'HTTP/1.1 100 Continue\r\n\r\nHTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
select_mock.return_value = ([s], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
# Now we should verify that our final response is the 200 OK
self.assertEqual(response.status, 200)
def test_handles_expect_100_with_different_reason_phrase(self):
with patch('select.select') as select_mock:
# Shows the server first sending a 100 continue response
# then a 200 ok response.
s = FakeSocket(b'HTTP/1.1 100 (Continue)\r\n\r\nHTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
select_mock.return_value = ([s], [], [])
conn.request('GET', '/bucket/foo', six.BytesIO(b'body'),
{'Expect': '100-continue', 'Content-Length': '4'})
response = conn.getresponse()
# Now we should verify that our final response is the 200 OK.
self.assertEqual(response.status, 200)
            # Verify that we sent the request body because we got a 100
            # continue.
self.assertIn(b'body', s.sent_data)
def test_expect_100_sends_connection_header(self):
# When using squid as an HTTP proxy, it will also send
# a Connection: keep-alive header back with the 100 continue
# response. We need to ensure we handle this case.
with patch('select.select') as select_mock:
# Shows the server first sending a 100 continue response
# then a 500 response. We're picking 500 to confirm we
# actually parse the response instead of getting the
# default status of 200 which happens when we can't parse
# the response.
s = FakeSocket(b'HTTP/1.1 100 Continue\r\n'
b'Connection: keep-alive\r\n'
b'\r\n'
b'HTTP/1.1 500 Internal Service Error\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
select_mock.return_value = ([s], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
self.assertEqual(response.status, 500)
def test_expect_100_continue_sends_307(self):
        # This is the case where we send an Expect: 100-continue header
        # and the server immediately sends a 307.
        with patch('select.select') as select_mock:
            # Shows the server responding with a 307 right away,
            # rather than a 100 continue response first.
s = FakeSocket(
b'HTTP/1.1 307 Temporary Redirect\r\n'
b'Location: http://example.org\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
select_mock.return_value = ([s], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
# Now we should verify that our final response is the 307.
self.assertEqual(response.status, 307)
def test_expect_100_continue_no_response_from_server(self):
with patch('select.select') as select_mock:
            # The canned response here is a 307; the point of this test
            # is that the server sends nothing while we wait for the
            # 100 continue (see select_mock below).
s = FakeSocket(
b'HTTP/1.1 307 Temporary Redirect\r\n'
b'Location: http://example.org\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
            # Setting select_mock to return empty lists indicates that
            # the server did not send any response. In this situation
            # we should just send the request anyway.
select_mock.return_value = ([], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
self.assertEqual(response.status, 307)
def test_message_body_is_file_like_object(self):
        # The body is a file-like object with a length; the server
        # replies with a plain 200 ok response.
body = BytesIOWithLen(b'body contents')
s = FakeSocket(b'HTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
conn.request('GET', '/bucket/foo', body)
response = conn.getresponse()
self.assertEqual(response.status, 200)
def test_no_expect_header_set(self):
        # No Expect header is set, so the request goes out directly
        # and gets a plain 200 ok response.
s = FakeSocket(b'HTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
conn.request('GET', '/bucket/foo', b'body')
response = conn.getresponse()
self.assertEqual(response.status, 200)
def test_tunnel_readline_none_bugfix(self):
        # Tests that the ``_tunnel`` function works around the py26 bug
        # where readline() returning nothing caused an infinite while loop.
conn = self.create_tunneled_connection(
url='s3.amazonaws.com',
port=443,
response=b'HTTP/1.1 200 OK\r\n',
)
conn._tunnel()
# Ensure proper amount of readline calls were made.
self.assertEqual(self.mock_response.fp.readline.call_count, 2)
def test_tunnel_readline_normal(self):
        # Tests that the ``_tunnel`` function behaves normally when it
        # comes across the usual http ending.
conn = self.create_tunneled_connection(
url='s3.amazonaws.com',
port=443,
response=b'HTTP/1.1 200 OK\r\n\r\n',
)
conn._tunnel()
# Ensure proper amount of readline calls were made.
self.assertEqual(self.mock_response.fp.readline.call_count, 2)
def test_tunnel_raises_socket_error(self):
        # Tests that the ``_tunnel`` function raises the appropriate error
        # when the status is not 200.
conn = self.create_tunneled_connection(
url='s3.amazonaws.com',
port=443,
response=b'HTTP/1.1 404 Not Found\r\n\r\n',
)
with self.assertRaises(socket.error):
conn._tunnel()
@unittest.skipIf(sys.version_info[:2] == (2, 6),
("``_tunnel()`` function defaults to standard "
"http library function when not py26."))
def test_tunnel_uses_std_lib(self):
s = FakeSocket(b'HTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
# Test that the standard library method was used by patching out
# the ``_tunnel`` method and seeing if the std lib method was called.
with patch('botocore.vendored.requests.packages.urllib3.connection.'
'HTTPConnection._tunnel') as mock_tunnel:
conn._tunnel()
self.assertTrue(mock_tunnel.called)
def test_encodes_unicode_method_line(self):
s = FakeSocket(b'HTTP/1.1 200 OK\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
# Note the combination of unicode 'GET' and
# bytes 'Utf8-Header' value.
conn.request(u'GET', '/bucket/foo', b'body',
headers={"Utf8-Header": b"\xe5\xb0\x8f"})
response = conn.getresponse()
self.assertEqual(response.status, 200)
def test_state_reset_on_connection_close(self):
# This simulates what urllib3 does with connections
# in its connection pool logic.
with patch('select.select') as select_mock:
# First fast fail with a 500 response when we first
# send the expect header.
s = FakeSocket(b'HTTP/1.1 500 Internal Server Error\r\n')
conn = AWSHTTPConnection('s3.amazonaws.com', 443)
conn.sock = s
select_mock.return_value = ([s], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
self.assertEqual(response.status, 500)
            # Now what happens in urllib3 is that when the next request
            # comes along, this connection gets checked out and we see
            # that the connection needs to be reset. So first the
            # connection is closed.
conn.close()
# And then a new connection is established.
new_conn = FakeSocket(
b'HTTP/1.1 100 (Continue)\r\n\r\nHTTP/1.1 200 OK\r\n')
conn.sock = new_conn
# And we make a request, we should see the 200 response
# that was sent back.
select_mock.return_value = ([new_conn], [], [])
conn.request('GET', '/bucket/foo', b'body',
{'Expect': '100-continue'})
response = conn.getresponse()
            # This should be 200. If it's a 500 then the
            # prior response was leaking into our
            # current response.
self.assertEqual(response.status, 200)
class TestPrepareRequestDict(unittest.TestCase):
def setUp(self):
self.user_agent = 'botocore/1.0'
self.endpoint_url = 'https://s3.amazonaws.com'
self.base_request_dict = {
'body': '',
'headers': {},
'method': u'GET',
'query_string': '',
'url_path': '/',
'context': {}
}
def prepare_base_request_dict(self, request_dict, endpoint_url=None,
user_agent=None, context=None):
self.base_request_dict.update(request_dict)
context = context or {}
if user_agent is None:
user_agent = self.user_agent
if endpoint_url is None:
endpoint_url = self.endpoint_url
prepare_request_dict(self.base_request_dict, endpoint_url=endpoint_url,
user_agent=user_agent, context=context)
def test_prepare_request_dict_for_get(self):
request_dict = {
'method': u'GET',
'url_path': '/'
}
self.prepare_base_request_dict(
request_dict, endpoint_url='https://s3.amazonaws.com')
self.assertEqual(self.base_request_dict['method'], 'GET')
self.assertEqual(self.base_request_dict['url'],
'https://s3.amazonaws.com/')
self.assertEqual(self.base_request_dict['headers']['User-Agent'],
self.user_agent)
def test_prepare_request_dict_for_get_no_user_agent(self):
self.user_agent = None
request_dict = {
'method': u'GET',
'url_path': '/'
}
self.prepare_base_request_dict(
request_dict, endpoint_url='https://s3.amazonaws.com')
self.assertNotIn('User-Agent', self.base_request_dict['headers'])
def test_prepare_request_dict_with_context(self):
context = {'foo': 'bar'}
self.prepare_base_request_dict({}, context=context)
self.assertEqual(self.base_request_dict['context'], context)
def test_query_string_serialized_to_url(self):
request_dict = {
'method': u'GET',
'query_string': {u'prefix': u'foo'},
'url_path': u'/mybucket'
}
self.prepare_base_request_dict(request_dict)
self.assertEqual(
self.base_request_dict['url'],
'https://s3.amazonaws.com/mybucket?prefix=foo')
def test_url_path_combined_with_endpoint_url(self):
        # This checks the case where a user specifies an
        # endpoint_url that has a path component, and the
# serializer gives us a request_dict that has a url
# component as well (say from a rest-* service).
request_dict = {
'query_string': {u'prefix': u'foo'},
'url_path': u'/mybucket'
}
endpoint_url = 'https://custom.endpoint/foo/bar'
self.prepare_base_request_dict(request_dict, endpoint_url)
self.assertEqual(
self.base_request_dict['url'],
'https://custom.endpoint/foo/bar/mybucket?prefix=foo')
def test_url_path_with_trailing_slash(self):
self.prepare_base_request_dict(
{'url_path': u'/mybucket'},
endpoint_url='https://custom.endpoint/foo/bar/')
self.assertEqual(
self.base_request_dict['url'],
'https://custom.endpoint/foo/bar/mybucket')
def test_url_path_is_slash(self):
self.prepare_base_request_dict(
{'url_path': u'/'},
endpoint_url='https://custom.endpoint/foo/bar/')
self.assertEqual(
self.base_request_dict['url'],
'https://custom.endpoint/foo/bar/')
def test_url_path_is_slash_with_endpoint_url_no_slash(self):
self.prepare_base_request_dict(
{'url_path': u'/'},
endpoint_url='https://custom.endpoint/foo/bar')
self.assertEqual(
self.base_request_dict['url'],
'https://custom.endpoint/foo/bar')
def test_custom_endpoint_with_query_string(self):
self.prepare_base_request_dict(
{'url_path': u'/baz', 'query_string': {'x': 'y'}},
endpoint_url='https://custom.endpoint/foo/bar?foo=bar')
self.assertEqual(
self.base_request_dict['url'],
'https://custom.endpoint/foo/bar/baz?foo=bar&x=y')
class TestCreateRequestObject(unittest.TestCase):
def setUp(self):
self.request_dict = {
'method': u'GET',
'query_string': {u'prefix': u'foo'},
'url_path': u'/mybucket',
'headers': {u'User-Agent': u'my-agent'},
'body': u'my body',
'url': u'https://s3.amazonaws.com/mybucket?prefix=foo',
'context': {'signing': {'region': 'us-west-2'}}
}
def test_create_request_object(self):
request = create_request_object(self.request_dict)
self.assertEqual(request.method, self.request_dict['method'])
self.assertEqual(request.url, self.request_dict['url'])
self.assertEqual(request.data, self.request_dict['body'])
self.assertEqual(request.context, self.request_dict['context'])
self.assertIn('User-Agent', request.headers)
if __name__ == "__main__":
unittest.main()
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest, BaseSessionTest
import base64
import mock
import copy
import os
import json
import botocore
import botocore.session
from botocore.compat import OrderedDict
from botocore.exceptions import ParamValidationError, MD5UnavailableError
from botocore.exceptions import AliasConflictParameterError
from botocore.awsrequest import AWSRequest
from botocore.compat import quote, six
from botocore.config import Config
from botocore.docs.bcdoc.restdoc import DocumentStructure
from botocore.docs.params import RequestParamsDocumenter
from botocore.docs.example import RequestExampleDocumenter
from botocore.hooks import HierarchicalEmitter
from botocore.model import OperationModel, ServiceModel
from botocore.model import DenormalizedStructureBuilder
from botocore.signers import RequestSigner
from botocore.credentials import Credentials
from botocore import handlers
class TestHandlers(BaseSessionTest):
def test_get_console_output(self):
parsed = {'Output': base64.b64encode(b'foobar').decode('utf-8')}
handlers.decode_console_output(parsed)
self.assertEqual(parsed['Output'], 'foobar')
def test_get_console_output_cant_be_decoded(self):
parsed = {'Output': 1}
handlers.decode_console_output(parsed)
self.assertEqual(parsed['Output'], 1)
def test_get_console_output_bad_unicode_errors(self):
original = base64.b64encode(b'before\xffafter').decode('utf-8')
parsed = {'Output': original}
handlers.decode_console_output(parsed)
self.assertEqual(parsed['Output'], u'before\ufffdafter')
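    # Taken together, the three tests above pin down the handler's contract:
    # the 'Output' value is base64-decoded and then decoded as UTF-8 with
    # errors='replace', while values that cannot be processed are returned
    # unchanged. Roughly (a sketch, not the actual implementation):
    #
    #     parsed['Output'] = base64.b64decode(
    #         parsed['Output']).decode('utf-8', 'replace')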
def test_noop_if_output_key_does_not_exist(self):
original = {'foo': 'bar'}
parsed = original.copy()
handlers.decode_console_output(parsed)
# Should be unchanged because the 'Output'
# key is not in the output.
self.assertEqual(parsed, original)
def test_decode_quoted_jsondoc(self):
value = quote('{"foo":"bar"}')
converted_value = handlers.decode_quoted_jsondoc(value)
self.assertEqual(converted_value, {'foo': 'bar'})
def test_cant_decode_quoted_jsondoc(self):
value = quote('{"foo": "missing end quote}')
converted_value = handlers.decode_quoted_jsondoc(value)
self.assertEqual(converted_value, value)
def test_disable_signing(self):
self.assertEqual(handlers.disable_signing(), botocore.UNSIGNED)
def test_only_quote_url_path_not_version_id(self):
params = {'CopySource': '/foo/bar++baz?versionId=123'}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'],
'/foo/bar%2B%2Bbaz?versionId=123')
def test_only_version_id_is_special_cased(self):
params = {'CopySource': '/foo/bar++baz?notVersion=foo+'}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'],
'/foo/bar%2B%2Bbaz%3FnotVersion%3Dfoo%2B')
def test_copy_source_with_multiple_questions(self):
params = {'CopySource': '/foo/bar+baz?a=baz+?versionId=a+'}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'],
'/foo/bar%2Bbaz%3Fa%3Dbaz%2B?versionId=a+')
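    # The quoting rule the three tests above exercise, roughly (a sketch,
    # not the actual implementation): split on the *last* '?versionId=' and
    # URL-quote only the part before it.
    #
    #     path, sep, version = value.rpartition('?versionId=')
    #     value = quote(path) + sep + version if sep else quote(value)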
def test_copy_source_supports_dict(self):
params = {
'CopySource': {'Bucket': 'foo', 'Key': 'keyname+'}
}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'], 'foo/keyname%2B')
def test_copy_source_ignored_if_not_dict(self):
params = {
'CopySource': 'stringvalue'
}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'], 'stringvalue')
def test_copy_source_supports_optional_version_id(self):
params = {
'CopySource': {'Bucket': 'foo',
'Key': 'keyname+',
'VersionId': 'asdf+'}
}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'],
                         # Note, versionId is not URL-encoded.
'foo/keyname%2B?versionId=asdf+')
def test_copy_source_has_validation_failure(self):
with self.assertRaisesRegexp(ParamValidationError, 'Key'):
handlers.handle_copy_source_param(
{'CopySource': {'Bucket': 'foo'}})
def test_quote_source_header_needs_no_changes(self):
params = {'CopySource': '/foo/bar?versionId=123'}
handlers.handle_copy_source_param(params)
self.assertEqual(params['CopySource'],
'/foo/bar?versionId=123')
def test_presigned_url_already_present_ec2(self):
operation_model = mock.Mock()
operation_model.name = 'CopySnapshot'
params = {'body': {'PresignedUrl': 'https://foo'}}
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'ec2', 'us-east-1', 'ec2', 'v4', credentials, event_emitter)
handlers.inject_presigned_url_ec2(
params, request_signer, operation_model)
self.assertEqual(params['body']['PresignedUrl'], 'https://foo')
def test_presigned_url_with_source_region_ec2(self):
operation_model = mock.Mock()
operation_model.name = 'CopySnapshot'
params = {
'body': {
'PresignedUrl': 'https://foo',
'SourceRegion': 'us-east-1'
}
}
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'ec2', 'us-east-1', 'ec2', 'v4', credentials, event_emitter)
handlers.inject_presigned_url_ec2(
params, request_signer, operation_model)
self.assertEqual(params['body']['PresignedUrl'], 'https://foo')
self.assertEqual(params['body']['SourceRegion'], 'us-east-1')
def test_presigned_url_already_present_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
params = {'body': {'PreSignedUrl': 'https://foo'}}
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
handlers.inject_presigned_url_rds(
params, request_signer, operation_model)
self.assertEqual(params['body']['PreSignedUrl'], 'https://foo')
def test_presigned_url_with_source_region_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
params = {
'body': {
'PreSignedUrl': 'https://foo',
'SourceRegion': 'us-east-1'
}
}
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
handlers.inject_presigned_url_rds(
params, request_signer, operation_model)
self.assertEqual(params['body']['PreSignedUrl'], 'https://foo')
self.assertNotIn('SourceRegion', params['body'])
def test_inject_presigned_url_ec2(self):
operation_model = mock.Mock()
operation_model.name = 'CopySnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'ec2', 'us-east-1', 'ec2', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://ec2.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_ec2(
request_dict, request_signer, operation_model)
self.assertIn('https://ec2.us-west-2.amazonaws.com?',
params['PresignedUrl'])
self.assertIn('X-Amz-Signature',
params['PresignedUrl'])
self.assertIn('DestinationRegion', params['PresignedUrl'])
# We should also populate the DestinationRegion with the
# region_name of the endpoint object.
self.assertEqual(params['DestinationRegion'], 'us-east-1')
def test_use_event_operation_name(self):
operation_model = mock.Mock()
operation_model.name = 'FakeOperation'
request_signer = mock.Mock()
request_signer._region_name = 'us-east-1'
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://myservice.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_ec2(
request_dict, request_signer, operation_model)
call_args = request_signer.generate_presigned_url.call_args
operation_name = call_args[1].get('operation_name')
self.assertEqual(operation_name, 'FakeOperation')
def test_destination_region_always_changed(self):
        # If the user provides a destination region, we will still
        # override the DestinationRegion with the region_name from
        # the endpoint object.
actual_region = 'us-west-1'
operation_model = mock.Mock()
operation_model.name = 'CopySnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'ec2', actual_region, 'ec2', 'v4', credentials, event_emitter)
request_dict = {}
params = {
'SourceRegion': 'us-west-2',
'DestinationRegion': 'us-east-1'}
request_dict['body'] = params
request_dict['url'] = 'https://ec2.us-west-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
        # The user provides us-east-1, but we will override this with
        # endpoint.region_name, 'us-west-1' in this case.
handlers.inject_presigned_url_ec2(
request_dict, request_signer, operation_model)
self.assertIn('https://ec2.us-west-2.amazonaws.com?',
params['PresignedUrl'])
# Always use the DestinationRegion from the endpoint, regardless of
# whatever value the user provides.
self.assertEqual(params['DestinationRegion'], actual_region)
def test_inject_presigned_url_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://rds.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_rds(
request_dict, request_signer, operation_model)
self.assertIn('https://rds.us-west-2.amazonaws.com?',
params['PreSignedUrl'])
self.assertIn('X-Amz-Signature',
params['PreSignedUrl'])
self.assertIn('DestinationRegion', params['PreSignedUrl'])
# We should not populate the destination region for rds
self.assertNotIn('DestinationRegion', params)
def test_source_region_removed(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://rds.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_rds(
params=request_dict,
request_signer=request_signer,
model=operation_model
)
self.assertNotIn('SourceRegion', params)
def test_source_region_removed_when_presigned_url_provided_for_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2', 'PreSignedUrl': 'https://foo'}
request_dict['body'] = params
request_dict['url'] = 'https://rds.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_rds(
params=request_dict,
request_signer=request_signer,
model=operation_model
)
self.assertNotIn('SourceRegion', params)
def test_dest_region_removed(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://rds.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_rds(
params=request_dict,
request_signer=request_signer,
model=operation_model
)
self.assertNotIn('DestinationRegion', params)
def test_presigned_url_already_present_for_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
params = {'body': {'PresignedUrl': 'https://foo'}}
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
handlers.inject_presigned_url_rds(
params=params,
request_signer=request_signer,
model=operation_model
)
self.assertEqual(params['body']['PresignedUrl'], 'https://foo')
def test_presigned_url_casing_changed_for_rds(self):
operation_model = mock.Mock()
operation_model.name = 'CopyDBSnapshot'
credentials = Credentials('key', 'secret')
event_emitter = HierarchicalEmitter()
request_signer = RequestSigner(
'rds', 'us-east-1', 'rds', 'v4', credentials, event_emitter)
request_dict = {}
params = {'SourceRegion': 'us-west-2'}
request_dict['body'] = params
request_dict['url'] = 'https://rds.us-east-1.amazonaws.com'
request_dict['method'] = 'POST'
request_dict['headers'] = {}
request_dict['context'] = {}
handlers.inject_presigned_url_rds(
params=request_dict,
request_signer=request_signer,
model=operation_model
)
self.assertNotIn('PresignedUrl', params)
self.assertIn('https://rds.us-west-2.amazonaws.com?',
params['PreSignedUrl'])
self.assertIn('X-Amz-Signature', params['PreSignedUrl'])
def test_500_status_code_set_for_200_response(self):
http_response = mock.Mock()
http_response.status_code = 200
http_response.content = """
<Error>
<Code>AccessDenied</Code>
<Message>Access Denied</Message>
<RequestId>id</RequestId>
<HostId>hostid</HostId>
</Error>
"""
handlers.check_for_200_error((http_response, {}))
self.assertEqual(http_response.status_code, 500)
def test_200_response_with_no_error_left_untouched(self):
http_response = mock.Mock()
http_response.status_code = 200
http_response.content = "<NotAnError></NotAnError>"
handlers.check_for_200_error((http_response, {}))
# We don't touch the status code since there are no errors present.
self.assertEqual(http_response.status_code, 200)
def test_500_response_can_be_none(self):
# A 500 response can raise an exception, which means the response
# object is None. We need to handle this case.
handlers.check_for_200_error(None)
def test_route53_resource_id(self):
event = 'before-parameter-build.route53.GetHostedZone'
params = {'Id': '/hostedzone/ABC123',
'HostedZoneId': '/hostedzone/ABC123',
'ResourceId': '/hostedzone/DEF456',
'DelegationSetId': '/hostedzone/GHI789',
'Other': '/hostedzone/foo'}
operation_def = {
'name': 'GetHostedZone',
'input': {
'shape': 'GetHostedZoneInput'
}
}
service_def = {
'metadata': {},
'shapes': {
'GetHostedZoneInput': {
'type': 'structure',
'members': {
'Id': {
'shape': 'ResourceId'
},
'HostedZoneId': {
'shape': 'ResourceId'
},
'ResourceId': {
'shape': 'ResourceId'
},
'DelegationSetId': {
'shape': 'DelegationSetId'
},
'Other': {
'shape': 'String'
}
}
},
'ResourceId': {
'type': 'string'
},
'DelegationSetId': {
'type': 'string'
},
'String': {
'type': 'string'
}
}
}
model = OperationModel(operation_def, ServiceModel(service_def))
self.session.emit(event, params=params, model=model)
self.assertEqual(params['Id'], 'ABC123')
self.assertEqual(params['HostedZoneId'], 'ABC123')
self.assertEqual(params['ResourceId'], 'DEF456')
self.assertEqual(params['DelegationSetId'], 'GHI789')
# This one should have been left alone
self.assertEqual(params['Other'], '/hostedzone/foo')
def test_route53_resource_id_missing_input_shape(self):
event = 'before-parameter-build.route53.GetHostedZone'
params = {'HostedZoneId': '/hostedzone/ABC123'}
operation_def = {
'name': 'GetHostedZone'
}
service_def = {
'metadata': {},
'shapes': {}
}
model = OperationModel(operation_def, ServiceModel(service_def))
self.session.emit(event, params=params, model=model)
self.assertEqual(params['HostedZoneId'], '/hostedzone/ABC123')
def test_run_instances_userdata(self):
user_data = 'This is a test'
b64_user_data = base64.b64encode(six.b(user_data)).decode('utf-8')
params = dict(ImageId='img-12345678',
MinCount=1, MaxCount=5, UserData=user_data)
handlers.base64_encode_user_data(params=params)
result = {'ImageId': 'img-12345678',
'MinCount': 1,
'MaxCount': 5,
'UserData': b64_user_data}
self.assertEqual(params, result)
def test_run_instances_userdata_blob(self):
# Ensure that binary can be passed in as user data.
# This is valid because you can send gzip compressed files as
# user data.
user_data = b'\xc7\xa9This is a test'
b64_user_data = base64.b64encode(user_data).decode('utf-8')
params = dict(ImageId='img-12345678',
MinCount=1, MaxCount=5, UserData=user_data)
handlers.base64_encode_user_data(params=params)
result = {'ImageId': 'img-12345678',
'MinCount': 1,
'MaxCount': 5,
'UserData': b64_user_data}
self.assertEqual(params, result)
def test_register_retry_for_handlers_with_no_endpoint_prefix(self):
no_endpoint_prefix = {'metadata': {}}
session = mock.Mock()
handlers.register_retries_for_service(service_data=no_endpoint_prefix,
session=mock.Mock(),
service_name='foo')
self.assertFalse(session.register.called)
def test_register_retry_handlers(self):
service_data = {
'metadata': {'endpointPrefix': 'foo'},
}
session = mock.Mock()
loader = mock.Mock()
session.get_component.return_value = loader
loader.load_data.return_value = {
'retry': {
'__default__': {
'max_attempts': 10,
'delay': {
'type': 'exponential',
'base': 2,
'growth_factor': 5,
},
},
},
}
handlers.register_retries_for_service(service_data=service_data,
session=session,
service_name='foo')
session.register.assert_called_with('needs-retry.foo', mock.ANY,
unique_id='retry-config-foo')
def test_get_template_has_error_response(self):
original = {'Error': {'Code': 'Message'}}
handler_input = copy.deepcopy(original)
handlers.json_decode_template_body(parsed=handler_input)
# The handler should not have changed the response because it's
# an error response.
self.assertEqual(original, handler_input)
def test_does_decode_template_body_in_order(self):
expected_ordering = OrderedDict([
('TemplateVersion', 1.0),
('APropertyOfSomeKind', 'a value'),
('list', [1, 2, 3]),
('nested', OrderedDict([('key', 'value'),
('foo', 'bar')]))
])
template_string = json.dumps(expected_ordering)
parsed_response = {'TemplateBody': template_string}
handlers.json_decode_template_body(parsed=parsed_response)
result = parsed_response['TemplateBody']
self.assertTrue(isinstance(result, OrderedDict))
for element, expected_element in zip(result, expected_ordering):
self.assertEqual(element, expected_element)
def test_decode_json_policy(self):
parsed = {
'Document': '{"foo": "foobarbaz"}',
'Other': 'bar',
}
service_def = {
'operations': {
'Foo': {
'output': {'shape': 'PolicyOutput'},
}
},
'shapes': {
'PolicyOutput': {
'type': 'structure',
'members': {
'Document': {
'shape': 'policyDocumentType'
},
'Other': {
'shape': 'stringType'
}
}
},
'policyDocumentType': {
'type': 'string'
},
'stringType': {
'type': 'string'
},
}
}
model = ServiceModel(service_def)
op_model = model.operation_model('Foo')
handlers.json_decode_policies(parsed, op_model)
self.assertEqual(parsed['Document'], {'foo': 'foobarbaz'})
no_document = {'Other': 'bar'}
handlers.json_decode_policies(no_document, op_model)
self.assertEqual(no_document, {'Other': 'bar'})
def test_inject_account_id(self):
params = {}
handlers.inject_account_id(params)
self.assertEqual(params['accountId'], '-')
def test_account_id_not_added_if_present(self):
params = {'accountId': 'foo'}
handlers.inject_account_id(params)
self.assertEqual(params['accountId'], 'foo')
def test_glacier_version_header_added(self):
request_dict = {
'headers': {}
}
model = ServiceModel({'metadata': {'apiVersion': '2012-01-01'}})
handlers.add_glacier_version(model, request_dict)
self.assertEqual(request_dict['headers']['x-amz-glacier-version'],
'2012-01-01')
def test_application_json_header_added(self):
request_dict = {
'headers': {}
}
handlers.add_accept_header(None, request_dict)
self.assertEqual(request_dict['headers']['Accept'], 'application/json')
def test_accept_header_not_added_if_present(self):
request_dict = {
'headers': {'Accept': 'application/yaml'}
}
handlers.add_accept_header(None, request_dict)
self.assertEqual(request_dict['headers']['Accept'], 'application/yaml')
def test_glacier_checksums_added(self):
request_dict = {
'headers': {},
'body': six.BytesIO(b'hello world'),
}
handlers.add_glacier_checksums(request_dict)
self.assertIn('x-amz-content-sha256', request_dict['headers'])
self.assertIn('x-amz-sha256-tree-hash', request_dict['headers'])
self.assertEqual(
request_dict['headers']['x-amz-content-sha256'],
'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9')
self.assertEqual(
request_dict['headers']['x-amz-sha256-tree-hash'],
'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9')
# And verify that the body can still be read.
self.assertEqual(request_dict['body'].read(), b'hello world')
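        # Both checksum headers carry the same value here because the body
        # is a single chunk: Glacier's tree hash is computed over 1 MiB
        # chunks, so for payloads of at most 1 MiB it reduces to the plain
        # SHA-256 of the payload.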
def test_tree_hash_added_only_if_not_exists(self):
request_dict = {
'headers': {
'x-amz-sha256-tree-hash': 'pre-exists',
},
'body': six.BytesIO(b'hello world'),
}
handlers.add_glacier_checksums(request_dict)
self.assertEqual(request_dict['headers']['x-amz-sha256-tree-hash'],
'pre-exists')
def test_checksum_added_only_if_not_exists(self):
request_dict = {
'headers': {
'x-amz-content-sha256': 'pre-exists',
},
'body': six.BytesIO(b'hello world'),
}
handlers.add_glacier_checksums(request_dict)
self.assertEqual(request_dict['headers']['x-amz-content-sha256'],
'pre-exists')
def test_glacier_checksums_support_raw_bytes(self):
request_dict = {
'headers': {},
'body': b'hello world',
}
handlers.add_glacier_checksums(request_dict)
self.assertEqual(
request_dict['headers']['x-amz-content-sha256'],
'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9')
self.assertEqual(
request_dict['headers']['x-amz-sha256-tree-hash'],
'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9')
def test_switch_host_with_param(self):
request = AWSRequest()
url = 'https://machinelearning.us-east-1.amazonaws.com'
new_endpoint = 'https://my-custom-endpoint.amazonaws.com'
data = '{"PredictEndpoint":"%s"}' % new_endpoint
request.data = data.encode('utf-8')
request.url = url
handlers.switch_host_with_param(request, 'PredictEndpoint')
self.assertEqual(request.url, new_endpoint)
def test_invalid_char_in_bucket_raises_exception(self):
params = {
'Bucket': 'bad/bucket/name',
'Key': 'foo',
'Body': b'asdf',
}
with self.assertRaises(ParamValidationError):
handlers.validate_bucket_name(params)
def test_bucket_too_long_raises_exception(self):
params = {
'Bucket': 'a' * 300,
'Key': 'foo',
'Body': b'asdf',
}
with self.assertRaises(ParamValidationError):
handlers.validate_bucket_name(params)
def test_not_dns_compat_but_still_valid_bucket_name(self):
params = {
'Bucket': 'foasdf......bar--baz-a_b_CD10',
'Key': 'foo',
'Body': b'asdf',
}
self.assertIsNone(handlers.validate_bucket_name(params))
def test_valid_bucket_name_hyphen(self):
self.assertIsNone(
handlers.validate_bucket_name({'Bucket': 'my-bucket-name'}))
def test_valid_bucket_name_underscore(self):
self.assertIsNone(
handlers.validate_bucket_name({'Bucket': 'my_bucket_name'}))
def test_valid_bucket_name_period(self):
self.assertIsNone(
handlers.validate_bucket_name({'Bucket': 'my.bucket.name'}))
def test_validation_is_noop_if_no_bucket_param_exists(self):
self.assertIsNone(handlers.validate_bucket_name(params={}))
def test_validate_non_ascii_metadata_values(self):
with self.assertRaises(ParamValidationError):
handlers.validate_ascii_metadata({'Metadata': {'foo': u'\u2713'}})
def test_validate_non_ascii_metadata_keys(self):
with self.assertRaises(ParamValidationError):
handlers.validate_ascii_metadata(
{'Metadata': {u'\u2713': 'bar'}})
def test_validate_non_triggered_when_no_md_specified(self):
original = {'NotMetadata': ''}
copied = original.copy()
handlers.validate_ascii_metadata(copied)
self.assertEqual(original, copied)
def test_validation_passes_when_all_ascii_chars(self):
original = {'Metadata': {'foo': 'bar'}}
copied = original.copy()
handlers.validate_ascii_metadata(original)
self.assertEqual(original, copied)
def test_set_encoding_type(self):
params = {}
context = {}
handlers.set_list_objects_encoding_type_url(params, context=context)
self.assertEqual(params['EncodingType'], 'url')
self.assertTrue(context['encoding_type_auto_set'])
params['EncodingType'] = 'new_value'
handlers.set_list_objects_encoding_type_url(params, context={})
self.assertEqual(params['EncodingType'], 'new_value')
def test_decode_list_objects(self):
parsed = {
'Contents': [{'Key': "%C3%A7%C3%B6s%25asd%08"}],
'EncodingType': 'url',
}
context = {'encoding_type_auto_set': True}
handlers.decode_list_object(parsed, context=context)
self.assertEqual(parsed['Contents'][0]['Key'], u'\xe7\xf6s%asd\x08')
def test_decode_list_objects_does_not_decode_without_context(self):
parsed = {
'Contents': [{'Key': "%C3%A7%C3%B6s%25asd"}],
'EncodingType': 'url',
}
handlers.decode_list_object(parsed, context={})
self.assertEqual(parsed['Contents'][0]['Key'], u'%C3%A7%C3%B6s%25asd')
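    # The decoding applied when 'encoding_type_auto_set' is present behaves
    # like urllib's unquote_plus on a UTF-8 string: percent escapes are
    # decoded and '+' becomes a space, e.g. in the tests below
    # unquote_plus('%C3%A7%C3%B6s%25%20asd%08+c') == u'\xe7\xf6s% asd\x08 c'.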
def test_decode_list_objects_with_marker(self):
parsed = {
'Marker': "%C3%A7%C3%B6s%25%20asd%08+c",
'EncodingType': 'url',
}
context = {'encoding_type_auto_set': True}
handlers.decode_list_object(parsed, context=context)
self.assertEqual(parsed['Marker'], u'\xe7\xf6s% asd\x08 c')
def test_decode_list_objects_with_nextmarker(self):
parsed = {
'NextMarker': "%C3%A7%C3%B6s%25%20asd%08+c",
'EncodingType': 'url',
}
context = {'encoding_type_auto_set': True}
handlers.decode_list_object(parsed, context=context)
self.assertEqual(parsed['NextMarker'], u'\xe7\xf6s% asd\x08 c')
def test_decode_list_objects_with_common_prefixes(self):
parsed = {
'CommonPrefixes': [{'Prefix': "%C3%A7%C3%B6s%25%20asd%08+c"}],
'EncodingType': 'url',
}
context = {'encoding_type_auto_set': True}
handlers.decode_list_object(parsed, context=context)
self.assertEqual(parsed['CommonPrefixes'][0]['Prefix'],
u'\xe7\xf6s% asd\x08 c')
def test_decode_list_objects_with_delimiter(self):
parsed = {
'Delimiter': "%C3%A7%C3%B6s%25%20asd%08+c",
'EncodingType': 'url',
}
context = {'encoding_type_auto_set': True}
handlers.decode_list_object(parsed, context=context)
self.assertEqual(parsed['Delimiter'], u'\xe7\xf6s% asd\x08 c')
def test_get_bucket_location_optional(self):
        # This handler should no-op if another hook (e.g. a stubber) has
        # already filled in the response.
        response = {"LocationConstraint": "eu-west-1"}
        handlers.parse_get_bucket_location(response, None)
self.assertEqual(response["LocationConstraint"], "eu-west-1")
def test_set_operation_specific_signer_no_auth_type(self):
signing_name = 'myservice'
context = {'auth_type': None}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertIsNone(response)
def test_set_operation_specific_signer_unsigned(self):
signing_name = 'myservice'
context = {'auth_type': 'none'}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertEqual(response, botocore.UNSIGNED)
def test_set_operation_specific_signer_v4(self):
signing_name = 'myservice'
context = {'auth_type': 'v4'}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertEqual(response, 'v4')
def test_set_operation_specific_signer_s3v4(self):
signing_name = 's3'
context = {'auth_type': 'v4'}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertEqual(response, 's3v4')
    def test_set_operation_specific_signer_v4_unsigned_payload(self):
signing_name = 'myservice'
context = {'auth_type': 'v4-unsigned-body'}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertEqual(response, 'v4')
self.assertEqual(context.get('payload_signing_enabled'), False)
def test_set_operation_specific_signer_s3v4_unsigned_payload(self):
signing_name = 's3'
context = {'auth_type': 'v4-unsigned-body'}
response = handlers.set_operation_specific_signer(
context=context, signing_name=signing_name)
self.assertEqual(response, 's3v4')
self.assertEqual(context.get('payload_signing_enabled'), False)
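    # Summarizing the mapping pinned down by the tests above: auth_type None
    # leaves the signer untouched, 'none' maps to botocore.UNSIGNED, 'v4'
    # stays 'v4' (upgraded to 's3v4' when the signing name is 's3'), and
    # 'v4-unsigned-body' does the same while also setting
    # payload_signing_enabled=False in the context.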
class TestConvertStringBodyToFileLikeObject(BaseSessionTest):
def assert_converts_to_file_like_object_with_bytes(self, body, body_bytes):
params = {'Body': body}
handlers.convert_body_to_file_like_object(params)
self.assertTrue(hasattr(params['Body'], 'read'))
contents = params['Body'].read()
self.assertIsInstance(contents, six.binary_type)
self.assertEqual(contents, body_bytes)
def test_string(self):
self.assert_converts_to_file_like_object_with_bytes('foo', b'foo')
def test_binary(self):
body = os.urandom(500)
body_bytes = body
self.assert_converts_to_file_like_object_with_bytes(body, body_bytes)
def test_file(self):
body = six.StringIO()
params = {'Body': body}
handlers.convert_body_to_file_like_object(params)
self.assertEqual(params['Body'], body)
def test_unicode(self):
self.assert_converts_to_file_like_object_with_bytes(u'bar', b'bar')
def test_non_ascii_characters(self):
self.assert_converts_to_file_like_object_with_bytes(
u'\u2713', b'\xe2\x9c\x93')
class TestRetryHandlerOrder(BaseSessionTest):
def get_handler_names(self, responses):
names = []
for response in responses:
handler = response[0]
if hasattr(handler, '__name__'):
names.append(handler.__name__)
elif hasattr(handler, '__class__'):
names.append(handler.__class__.__name__)
else:
names.append(str(handler))
return names
def test_s3_special_case_is_before_other_retry(self):
service_model = self.session.get_service_model('s3')
operation = service_model.operation_model('CopyObject')
responses = self.session.emit(
'needs-retry.s3.CopyObject',
response=(mock.Mock(), mock.Mock()), endpoint=mock.Mock(),
operation=operation, attempts=1, caught_exception=None)
        # This is implementation specific, but we're trying to verify
        # that check_for_200_error runs before any of the retry logic in
        # botocore.retryhandlers.
# Technically, as long as the relative order is preserved, we don't
# care about the absolute order.
names = self.get_handler_names(responses)
self.assertIn('check_for_200_error', names)
self.assertIn('RetryHandler', names)
s3_200_handler = names.index('check_for_200_error')
general_retry_handler = names.index('RetryHandler')
self.assertTrue(s3_200_handler < general_retry_handler,
"S3 200 error handler was supposed to be before "
"the general retry handler, but it was not.")
class BaseMD5Test(BaseSessionTest):
def setUp(self, **environ):
super(BaseMD5Test, self).setUp(**environ)
self.md5_object = mock.Mock()
self.md5_digest = mock.Mock(return_value=b'foo')
self.md5_object.digest = self.md5_digest
md5_builder = mock.Mock(return_value=self.md5_object)
self.md5_patch = mock.patch('hashlib.md5', md5_builder)
self.md5_patch.start()
self._md5_available_patch = None
self.set_md5_available()
def tearDown(self):
super(BaseMD5Test, self).tearDown()
self.md5_patch.stop()
if self._md5_available_patch:
self._md5_available_patch.stop()
def set_md5_available(self, is_available=True):
if self._md5_available_patch:
self._md5_available_patch.stop()
        self._md5_available_patch = mock.patch(
            'botocore.compat.MD5_AVAILABLE', is_available)
self._md5_available_patch.start()
class TestSSEMD5(BaseMD5Test):
def test_raises_error_when_md5_unavailable(self):
self.set_md5_available(False)
with self.assertRaises(MD5UnavailableError):
handlers.sse_md5({'SSECustomerKey': b'foo'})
with self.assertRaises(MD5UnavailableError):
handlers.copy_source_sse_md5({'CopySourceSSECustomerKey': b'foo'})
def test_sse_params(self):
for op in ('HeadObject', 'GetObject', 'PutObject', 'CopyObject',
'CreateMultipartUpload', 'UploadPart', 'UploadPartCopy'):
event = 'before-parameter-build.s3.%s' % op
params = {'SSECustomerKey': b'bar',
'SSECustomerAlgorithm': 'AES256'}
self.session.emit(event, params=params, model=mock.MagicMock())
self.assertEqual(params['SSECustomerKey'], 'YmFy')
self.assertEqual(params['SSECustomerKeyMD5'], 'Zm9v')
def test_sse_params_as_str(self):
event = 'before-parameter-build.s3.PutObject'
params = {'SSECustomerKey': 'bar',
'SSECustomerAlgorithm': 'AES256'}
self.session.emit(event, params=params, model=mock.MagicMock())
self.assertEqual(params['SSECustomerKey'], 'YmFy')
self.assertEqual(params['SSECustomerKeyMD5'], 'Zm9v')
def test_copy_source_sse_params(self):
for op in ['CopyObject', 'UploadPartCopy']:
event = 'before-parameter-build.s3.%s' % op
params = {'CopySourceSSECustomerKey': b'bar',
'CopySourceSSECustomerAlgorithm': 'AES256'}
self.session.emit(event, params=params, model=mock.MagicMock())
self.assertEqual(params['CopySourceSSECustomerKey'], 'YmFy')
self.assertEqual(params['CopySourceSSECustomerKeyMD5'], 'Zm9v')
def test_copy_source_sse_params_as_str(self):
event = 'before-parameter-build.s3.CopyObject'
params = {'CopySourceSSECustomerKey': 'bar',
'CopySourceSSECustomerAlgorithm': 'AES256'}
self.session.emit(event, params=params, model=mock.MagicMock())
self.assertEqual(params['CopySourceSSECustomerKey'], 'YmFy')
self.assertEqual(params['CopySourceSSECustomerKeyMD5'], 'Zm9v')
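    def test_expected_values_are_base64_sketch(self):
        # A hedged arithmetic check: the 'YmFy' and 'Zm9v' expectations used
        # above are assumed to be the base64 encodings of the customer key
        # (b'bar') and the mocked digest (b'foo'), respectively.
        import base64  # local import to keep the sketch self-contained
        self.assertEqual(base64.b64encode(b'bar'), b'YmFy')
        self.assertEqual(base64.b64encode(b'foo'), b'Zm9v')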
class TestAddMD5(BaseMD5Test):
def get_context(self, s3_config=None):
if s3_config is None:
s3_config = {}
return {
'client_config': Config(s3=s3_config)
}
def test_adds_md5_when_v4(self):
credentials = Credentials('key', 'secret')
request_signer = RequestSigner(
's3', 'us-east-1', 's3', 'v4', credentials, mock.Mock())
request_dict = {'body': b'bar',
'url': 'https://s3.us-east-1.amazonaws.com',
'method': 'PUT',
'headers': {}}
context = self.get_context()
handlers.conditionally_calculate_md5(
request_dict, request_signer=request_signer, context=context)
self.assertTrue('Content-MD5' in request_dict['headers'])
def test_adds_md5_when_s3v4(self):
credentials = Credentials('key', 'secret')
request_signer = RequestSigner(
's3', 'us-east-1', 's3', 's3v4', credentials, mock.Mock())
request_dict = {'body': b'bar',
'url': 'https://s3.us-east-1.amazonaws.com',
'method': 'PUT',
'headers': {}}
context = self.get_context({'payload_signing_enabled': False})
handlers.conditionally_calculate_md5(
request_dict, request_signer=request_signer, context=context)
self.assertTrue('Content-MD5' in request_dict['headers'])
def test_conditional_does_not_add_when_md5_unavailable(self):
credentials = Credentials('key', 'secret')
request_signer = RequestSigner(
's3', 'us-east-1', 's3', 's3', credentials, mock.Mock())
request_dict = {'body': b'bar',
'url': 'https://s3.us-east-1.amazonaws.com',
'method': 'PUT',
'headers': {}}
context = self.get_context()
self.set_md5_available(False)
with mock.patch('botocore.handlers.MD5_AVAILABLE', False):
handlers.conditionally_calculate_md5(
request_dict, request_signer=request_signer, context=context)
self.assertFalse('Content-MD5' in request_dict['headers'])
def test_add_md5_raises_error_when_md5_unavailable(self):
credentials = Credentials('key', 'secret')
request_signer = RequestSigner(
's3', 'us-east-1', 's3', 's3', credentials, mock.Mock())
request_dict = {'body': b'bar',
'url': 'https://s3.us-east-1.amazonaws.com',
'method': 'PUT',
'headers': {}}
self.set_md5_available(False)
with self.assertRaises(MD5UnavailableError):
handlers.calculate_md5(
request_dict, request_signer=request_signer)
def test_adds_md5_when_s3v2(self):
credentials = Credentials('key', 'secret')
request_signer = RequestSigner(
's3', 'us-east-1', 's3', 's3', credentials, mock.Mock())
request_dict = {'body': b'bar',
'url': 'https://s3.us-east-1.amazonaws.com',
'method': 'PUT',
'headers': {}}
context = self.get_context()
handlers.conditionally_calculate_md5(
request_dict, request_signer=request_signer, context=context)
self.assertTrue('Content-MD5' in request_dict['headers'])
def test_add_md5_with_file_like_body(self):
request_dict = {
'body': six.BytesIO(b'foobar'),
'headers': {}
}
self.md5_digest.return_value = b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
handlers.calculate_md5(request_dict)
self.assertEqual(request_dict['headers']['Content-MD5'],
'OFj2IjCsPJFfMAxmQxLGPw==')
def test_add_md5_with_bytes_object(self):
request_dict = {
'body': b'foobar',
'headers': {}
}
self.md5_digest.return_value = b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
handlers.calculate_md5(request_dict)
self.assertEqual(
request_dict['headers']['Content-MD5'],
'OFj2IjCsPJFfMAxmQxLGPw==')
def test_add_md5_with_bytearray_object(self):
request_dict = {
'body': bytearray(b'foobar'),
'headers': {}
}
self.md5_digest.return_value = b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
handlers.calculate_md5(request_dict)
self.assertEqual(
request_dict['headers']['Content-MD5'],
'OFj2IjCsPJFfMAxmQxLGPw==')
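    def test_content_md5_is_base64_of_digest_sketch(self):
        # A hedged sketch: the Content-MD5 value asserted above is assumed
        # to be the base64 encoding of the raw digest bytes (which here also
        # happen to be the real MD5 of b'foobar').
        import base64  # local import to keep the sketch self-contained
        digest = b'8X\xf6"0\xac<\x91_0\x0cfC\x12\xc6?'
        self.assertEqual(base64.b64encode(digest),
                         b'OFj2IjCsPJFfMAxmQxLGPw==')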
class TestParameterAlias(unittest.TestCase):
def setUp(self):
self.original_name = 'original'
self.alias_name = 'alias'
self.parameter_alias = handlers.ParameterAlias(
self.original_name, self.alias_name)
self.operation_model = mock.Mock()
request_shape = DenormalizedStructureBuilder().with_members(
{self.original_name: {'type': 'string'}}).build_model()
self.operation_model.input_shape = request_shape
self.sample_section = DocumentStructure('')
self.event_emitter = HierarchicalEmitter()
def test_alias_parameter_in_call(self):
value = 'value'
params = {self.alias_name: value}
self.parameter_alias.alias_parameter_in_call(
params, self.operation_model)
self.assertEqual(params, {self.original_name: value})
def test_alias_parameter_and_original_in_call(self):
params = {
            self.original_name: 'original_value',
self.alias_name: 'alias_value'
}
with self.assertRaises(AliasConflictParameterError):
self.parameter_alias.alias_parameter_in_call(
params, self.operation_model)
def test_alias_parameter_in_call_does_not_touch_original(self):
value = 'value'
params = {self.original_name: value}
self.parameter_alias.alias_parameter_in_call(
params, self.operation_model)
self.assertEqual(params, {self.original_name: value})
def test_does_not_alias_parameter_for_no_input_shape(self):
value = 'value'
params = {self.alias_name: value}
self.operation_model.input_shape = None
self.parameter_alias.alias_parameter_in_call(
params, self.operation_model)
self.assertEqual(params, {self.alias_name: value})
def test_does_not_alias_parameter_for_not_modeled_member(self):
value = 'value'
params = {self.alias_name: value}
request_shape = DenormalizedStructureBuilder().with_members(
{'foo': {'type': 'string'}}).build_model()
self.operation_model.input_shape = request_shape
self.parameter_alias.alias_parameter_in_call(
params, self.operation_model)
self.assertEqual(params, {self.alias_name: value})
def test_alias_parameter_in_documentation_request_params(self):
RequestParamsDocumenter(
'myservice', 'myoperation', self.event_emitter).document_params(
self.sample_section, self.operation_model.input_shape)
self.parameter_alias.alias_parameter_in_documentation(
'docs.request-params.myservice.myoperation.complete-section',
self.sample_section
)
contents = self.sample_section.flush_structure().decode('utf-8')
self.assertIn(':type ' + self.alias_name + ':', contents)
self.assertIn(':param ' + self.alias_name + ':', contents)
self.assertNotIn(':type ' + self.original_name + ':', contents)
self.assertNotIn(':param ' + self.original_name + ':', contents)
def test_alias_parameter_in_documentation_request_example(self):
RequestExampleDocumenter(
'myservice', 'myoperation', self.event_emitter).document_example(
self.sample_section, self.operation_model.input_shape)
self.parameter_alias.alias_parameter_in_documentation(
'docs.request-example.myservice.myoperation.complete-section',
self.sample_section
)
contents = self.sample_section.flush_structure().decode('utf-8')
self.assertIn(self.alias_name + '=', contents)
self.assertNotIn(self.original_name + '=', contents)
class TestCommandAlias(unittest.TestCase):
def test_command_alias(self):
alias = handlers.ClientMethodAlias('foo')
client = mock.Mock()
client.foo.return_value = 'bar'
response = alias(client=client)()
self.assertEqual(response, 'bar')
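    def test_command_alias_returns_bound_method_sketch(self):
        # A hedged sketch: the alias is assumed to hand back the client's
        # aliased attribute itself, so arguments passed to the returned
        # callable are forwarded to client.foo unchanged.
        alias = handlers.ClientMethodAlias('foo')
        client = mock.Mock()
        client.foo.return_value = 'bar'
        returned = alias(client=client)
        self.assertEqual(returned(1, x=2), 'bar')
        client.foo.assert_called_with(1, x=2)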
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from botocore import model
from botocore.paginate import Paginator
from botocore.paginate import PaginatorModel
from botocore.paginate import TokenDecoder
from botocore.paginate import TokenEncoder
from botocore.exceptions import PaginationError
from botocore.compat import six
import mock
def encode_token(token):
return TokenEncoder().encode(token)
class TestTokenDecoder(unittest.TestCase):
def setUp(self):
self.decoder = TokenDecoder()
def test_decode(self):
token = 'eyJmb28iOiAiYmFyIn0='
expected = {'foo': 'bar'}
self.assertEqual(self.decoder.decode(token), expected)
def test_decode_with_bytes(self):
token = (
'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyJdXSwgImZvbyI6ICJZbUZ5In0='
)
expected = {'foo': b'bar'}
self.assertEqual(self.decoder.decode(token), expected)
def test_decode_with_nested_bytes(self):
token = (
'eyJmb28iOiB7ImJhciI6ICJZbUY2In0sICJib3RvX2VuY29kZWRfa2V5cyI6'
'IFtbImZvbyIsICJiYXIiXV19'
)
expected = {'foo': {'bar': b'baz'}}
self.assertEqual(self.decoder.decode(token), expected)
def test_decode_with_listed_bytes(self):
token = (
'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiYXIiLCAxXV0sICJmb28i'
'OiB7ImJhciI6IFsiYmF6IiwgIlltbHUiXX19'
)
expected = {'foo': {'bar': ['baz', b'bin']}}
self.assertEqual(self.decoder.decode(token), expected)
def test_decode_with_multiple_bytes_values(self):
token = (
'eyJib3RvX2VuY29kZWRfa2V5cyI6IFtbImZvbyIsICJiaW4iXSwgWyJmb28iLCAi'
'YmFyIl1dLCAiZm9vIjogeyJiaW4iOiAiWW1GdCIsICJiYXIiOiAiWW1GNiJ9fQ=='
)
expected = {'foo': {'bar': b'baz', 'bin': b'bam'}}
self.assertEqual(self.decoder.decode(token), expected)
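    def test_encode_decode_round_trip_sketch(self):
        # A hedged round-trip sketch: TokenEncoder is assumed to be the
        # inverse of TokenDecoder, consistent with the fixed tokens above
        # (bytes values travel via the 'boto_encoded_keys' bookkeeping).
        data = {'foo': b'bar'}
        self.assertEqual(self.decoder.decode(encode_token(data)), data)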
class TestPaginatorModel(unittest.TestCase):
def setUp(self):
self.paginator_config = {}
self.paginator_config['pagination'] = {
'ListFoos': {
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'Foo'
}
}
self.paginator_model = PaginatorModel(self.paginator_config)
def test_get_paginator(self):
paginator_config = self.paginator_model.get_paginator('ListFoos')
self.assertEqual(
paginator_config,
{'output_token': 'NextToken', 'input_token': 'NextToken',
'result_key': 'Foo'}
)
def test_get_paginator_no_exists(self):
with self.assertRaises(ValueError):
paginator_config = self.paginator_model.get_paginator('ListBars')
class TestPagination(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'Foo',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_result_key_available(self):
self.assertEqual(
[rk.expression for rk in self.paginator.result_keys],
['Foo']
)
def test_no_next_token(self):
response = {'not_the_next_token': 'foobar'}
self.method.return_value = response
actual = list(self.paginator.paginate())
self.assertEqual(actual, [{'not_the_next_token': 'foobar'}])
def test_next_token_in_response(self):
responses = [{'NextToken': 'token1'},
{'NextToken': 'token2'},
{'not_next_token': 'foo'}]
self.method.side_effect = responses
actual = list(self.paginator.paginate())
self.assertEqual(actual, responses)
        # The first call has no next token; the second and third calls should
# have 'token1' and 'token2' respectively.
self.assertEqual(self.method.call_args_list,
[mock.call(), mock.call(NextToken='token1'),
mock.call(NextToken='token2')])
def test_next_token_is_string(self):
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users",
"limit_key": "MaxKeys",
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]}
]
self.method.side_effect = responses
result = self.paginator.paginate(PaginationConfig={'MaxItems': 1})
result = result.build_full_result()
token = result.get('NextToken')
self.assertIsInstance(token, six.string_types)
def test_any_passed_in_args_are_unmodified(self):
responses = [{'NextToken': 'token1'},
{'NextToken': 'token2'},
{'not_next_token': 'foo'}]
self.method.side_effect = responses
actual = list(self.paginator.paginate(Foo='foo', Bar='bar'))
self.assertEqual(actual, responses)
self.assertEqual(
self.method.call_args_list,
[mock.call(Foo='foo', Bar='bar'),
mock.call(Foo='foo', Bar='bar', NextToken='token1'),
mock.call(Foo='foo', Bar='bar', NextToken='token2')])
def test_exception_raised_if_same_next_token(self):
responses = [{'NextToken': 'token1'},
{'NextToken': 'token2'},
{'NextToken': 'token2'}]
self.method.side_effect = responses
with self.assertRaises(PaginationError):
list(self.paginator.paginate())
def test_next_token_with_or_expression(self):
self.pagination_config = {
'output_token': 'NextToken || NextToken2',
'input_token': 'NextToken',
'result_key': 'Foo',
}
self.paginator = Paginator(self.method, self.pagination_config, self.model)
# Verify that despite varying between NextToken and NextToken2
# we still can extract the right next tokens.
responses = [
{'NextToken': 'token1'},
{'NextToken2': 'token2'},
# The first match found wins, so because NextToken is
            # listed before NextToken2 in the 'output_token' config,
# 'token3' is chosen over 'token4'.
{'NextToken': 'token3', 'NextToken2': 'token4'},
{'not_next_token': 'foo'},
]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(NextToken='token1'),
mock.call(NextToken='token2'),
mock.call(NextToken='token3')])
def test_more_tokens(self):
        # Some pagination configs have a 'more_results' key that
        # indicates whether or not the results are being paginated.
self.paginate_config = {
'more_results': 'IsTruncated',
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'Foo',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{'Foo': [1], 'IsTruncated': True, 'NextToken': 'token1'},
{'Foo': [2], 'IsTruncated': True, 'NextToken': 'token2'},
{'Foo': [3], 'IsTruncated': False, 'NextToken': 'token3'},
{'Foo': [4], 'not_next_token': 'foo'},
]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(NextToken='token1'),
mock.call(NextToken='token2')])
def test_more_tokens_is_path_expression(self):
self.paginate_config = {
'more_results': 'Foo.IsTruncated',
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'Bar',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{'Foo': {'IsTruncated': True}, 'NextToken': 'token1'},
{'Foo': {'IsTruncated': False}, 'NextToken': 'token2'},
]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(NextToken='token1')])
def test_page_size(self):
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users",
"limit_key": "MaxKeys",
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
users = []
for page in self.paginator.paginate(PaginationConfig={'PageSize': 1}):
users += page['Users']
self.assertEqual(
self.method.call_args_list,
[mock.call(MaxKeys=1),
mock.call(Marker='m1', MaxKeys=1),
mock.call(Marker='m2', MaxKeys=1)]
)
def test_with_empty_markers(self):
responses = [
{"Users": ["User1"], "Marker": ""},
{"Users": ["User1"], "Marker": ""},
{"Users": ["User1"], "Marker": ""}
]
self.method.side_effect = responses
users = []
for page in self.paginator.paginate():
users += page['Users']
# We want to stop paginating if the next token is empty.
self.assertEqual(
self.method.call_args_list,
[mock.call()]
)
self.assertEqual(users, ['User1'])
def test_build_full_result_with_single_key(self):
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users",
"limit_key": "MaxKeys",
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete, {'Users': ['User1', 'User2', 'User3']})
def test_build_multiple_results(self):
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users",
"limit_key": "MaxKeys",
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
max_items = 3
page_size = 2
responses = [
{"Users": ["User1", "User2"], "Marker": "m1"},
{"Users": ["User3", "User4"], "Marker": "m2"},
{"Users": ["User3", "User4"], "Marker": "m2"},
{"Users": ["User5", "User6", "User7"], "Marker": "m3"},
]
self.method.side_effect = responses
pages = self.paginator.paginate(
PaginationConfig={
'PageSize': page_size,
'MaxItems': max_items
}
)
result = pages.build_full_result()
pages = self.paginator.paginate(
PaginationConfig={
'MaxItems': max_items,
'PageSize': page_size,
'StartingToken': result['NextToken']
}
)
result = pages.build_full_result()
expected_token = encode_token({
'Marker': 'm2',
'boto_truncate_amount': 2,
})
self.assertEqual(expected_token, result['NextToken'])
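    def test_next_token_is_decodable_sketch(self):
        # A hedged sketch: the opaque NextToken built above is assumed to be
        # a TokenEncoder payload carrying the service marker plus the
        # client-side truncation offset.
        token = encode_token({'Marker': 'm2', 'boto_truncate_amount': 2})
        self.assertEqual(
            TokenDecoder().decode(token),
            {'Marker': 'm2', 'boto_truncate_amount': 2})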
class TestPaginatorPageSize(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": ["Users", "Groups"],
'limit_key': 'MaxKeys',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
self.endpoint = mock.Mock()
def test_no_page_size(self):
kwargs = {'arg1': 'foo', 'arg2': 'bar'}
ref_kwargs = {'arg1': 'foo', 'arg2': 'bar'}
pages = self.paginator.paginate(**kwargs)
pages._inject_starting_params(kwargs)
self.assertEqual(kwargs, ref_kwargs)
def test_page_size(self):
kwargs = {'arg1': 'foo', 'arg2': 'bar',
'PaginationConfig': {'PageSize': 5}}
extracted_kwargs = {'arg1': 'foo', 'arg2': 'bar'}
        # Note that ``MaxKeys``, the ``limit_key`` from ``setUp()``, is the
        # parameter used to set the page size for pagination.
ref_kwargs = {'arg1': 'foo', 'arg2': 'bar', 'MaxKeys': 5}
pages = self.paginator.paginate(**kwargs)
pages._inject_starting_params(extracted_kwargs)
self.assertEqual(extracted_kwargs, ref_kwargs)
def test_page_size_incorrectly_provided(self):
kwargs = {'arg1': 'foo', 'arg2': 'bar',
'PaginationConfig': {'PageSize': 5}}
del self.paginate_config['limit_key']
paginator = Paginator(self.method, self.paginate_config, self.model)
with self.assertRaises(PaginationError):
paginator.paginate(**kwargs)
class TestPaginatorWithPathExpressions(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
# This is something we'd see in s3 pagination.
self.paginate_config = {
'output_token': [
'NextMarker || ListBucketResult.Contents[-1].Key'],
'input_token': 'next_marker',
'result_key': 'Contents',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_s3_list_objects(self):
responses = [
{'NextMarker': 'token1'},
{'NextMarker': 'token2'},
{'not_next_token': 'foo'}]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(next_marker='token1'),
mock.call(next_marker='token2')])
def test_s3_list_object_complex(self):
responses = [
{'NextMarker': 'token1'},
{'ListBucketResult': {
'Contents': [{"Key": "first"}, {"Key": "Last"}]}},
{'not_next_token': 'foo'}]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(next_marker='token1'),
mock.call(next_marker='Last')])
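    def test_or_expression_jmespath_semantics_sketch(self):
        # A hedged sketch: the output token is assumed to be evaluated as a
        # JMESPath expression, where '||' falls back to the last Contents
        # key when NextMarker is absent (matching the calls asserted above).
        import jmespath  # botocore dependency; local import for the sketch
        expression = 'NextMarker || ListBucketResult.Contents[-1].Key'
        page = {'ListBucketResult': {
            'Contents': [{'Key': 'first'}, {'Key': 'Last'}]}}
        self.assertEqual(jmespath.search(expression, page), 'Last')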
class TestBinaryTokens(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users"
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_build_full_result_with_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": b'\xff'},
{"Users": ["User3", "User4"], "Marker": b'\xfe'},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": b'\xff', "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_nested_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": {'key': b'\xff'}},
{"Users": ["User3", "User4"], "Marker": {'key': b'\xfe'}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': b'\xff'}, "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_listed_bytes(self):
responses = [
{"Users": ["User1", "User2"], "Marker": {'key': ['foo', b'\xff']}},
{"Users": ["User3", "User4"], "Marker": {'key': ['foo', b'\xfe']}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': ['foo', b'\xff']}, "boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_build_full_result_with_multiple_bytes_values(self):
responses = [
{
"Users": ["User1", "User2"],
"Marker": {'key': b'\xff', 'key2': b'\xef'}
},
{
"Users": ["User3", "User4"],
"Marker": {'key': b'\xfe', 'key2': b'\xee'}
},
{
"Users": ["User5"]
}
]
self.method.side_effect = responses
pages = self.paginator.paginate(PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token({
"Marker": {'key': b'\xff', 'key2': b'\xef'},
"boto_truncate_amount": 1,
})
expected_response = {
"Users": ["User1", "User2", "User3"],
"NextToken": expected_token
}
self.assertEqual(complete, expected_response)
def test_resume_with_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": b'\xfe'},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": b'\xff', "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker=b'\xff')
def test_resume_with_nested_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": {'key': b'\xfe'}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": {'key': b'\xff'}, "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker={'key': b'\xff'})
def test_resume_with_listed_bytes(self):
responses = [
{"Users": ["User3", "User4"], "Marker": {'key': ['bar', b'\xfe']}},
{"Users": ["User5"]}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": {'key': ['foo', b'\xff']}, "boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker={'key': ['foo', b'\xff']})
def test_resume_with_multiple_bytes_values(self):
responses = [
{
"Users": ["User3", "User4"],
"Marker": {'key': b'\xfe', 'key2': b'\xee'}
},
{
"Users": ["User5"]
}
]
self.method.side_effect = responses
starting_token = encode_token({
"Marker": {'key': b'\xff', 'key2': b'\xef'},
"boto_truncate_amount": 1,
})
pages = self.paginator.paginate(
PaginationConfig={'StartingToken': starting_token})
complete = pages.build_full_result()
expected_response = {
"Users": ["User4", "User5"]
}
self.assertEqual(complete, expected_response)
self.method.assert_any_call(Marker={'key': b'\xfe', 'key2': b'\xee'})
class TestMultipleTokens(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
# This is something we'd see in s3 pagination.
self.paginate_config = {
"output_token": ["ListBucketResults.NextKeyMarker",
"ListBucketResults.NextUploadIdMarker"],
"input_token": ["key_marker", "upload_id_marker"],
"result_key": 'Foo',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_s3_list_multipart_uploads(self):
responses = [
{"Foo": [1], "ListBucketResults": {"NextKeyMarker": "key1",
"NextUploadIdMarker": "up1"}},
{"Foo": [2], "ListBucketResults": {"NextKeyMarker": "key2",
"NextUploadIdMarker": "up2"}},
{"Foo": [3], "ListBucketResults": {"NextKeyMarker": "key3",
"NextUploadIdMarker": "up3"}},
{}
]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(key_marker='key1', upload_id_marker='up1'),
mock.call(key_marker='key2', upload_id_marker='up2'),
mock.call(key_marker='key3', upload_id_marker='up3'),
])
class TestOptionalTokens(unittest.TestCase):
"""
Tests a paginator with an optional output token.
The Route53 ListResourceRecordSets paginator includes three output tokens,
one of which only appears in certain records. If this gets left in the
request params from a previous page, the API will skip over a record.
"""
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
# This is based on Route53 pagination.
self.paginate_config = {
"output_token": ["NextRecordName",
"NextRecordType",
"NextRecordIdentifier"],
"input_token": ["StartRecordName",
"StartRecordType",
"StartRecordIdentifier"],
"result_key": 'Foo',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_clean_token(self):
responses = [
{"Foo": [1],
"IsTruncated": True,
"NextRecordName": "aaa.example.com",
"NextRecordType": "A",
"NextRecordIdentifier": "id"},
{"Foo": [2],
"IsTruncated": True,
"NextRecordName": "bbb.example.com",
"NextRecordType": "A"},
{"Foo": [3],
"IsTruncated": False},
]
self.method.side_effect = responses
list(self.paginator.paginate())
self.assertEqual(
self.method.call_args_list,
[mock.call(),
mock.call(StartRecordName='aaa.example.com', StartRecordType='A',
StartRecordIdentifier='id'),
mock.call(StartRecordName='bbb.example.com', StartRecordType='A')
])
class TestKeyIterators(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
        # A simple Marker-based pagination config.
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": "Users"
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_result_key_iters(self):
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate()
iterators = pages.result_key_iters()
self.assertEqual(len(iterators), 1)
self.assertEqual(list(iterators[0]),
["User1", "User2", "User3"])
def test_build_full_result_with_single_key(self):
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete, {'Users': ['User1', 'User2', 'User3']})
def test_max_items_can_be_specified(self):
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
expected_token = encode_token({"Marker": "m1"})
self.assertEqual(
paginator.paginate(
PaginationConfig={'MaxItems': 1}).build_full_result(),
{'Users': ['User1'], 'NextToken': expected_token})
def test_max_items_as_strings(self):
# Some services (route53) model MaxItems as a string type.
# We need to be able to handle this case.
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
expected_token = encode_token({"Marker": "m1"})
self.assertEqual(
# Note MaxItems is a string here.
paginator.paginate(
PaginationConfig={'MaxItems': '1'}).build_full_result(),
{'Users': ['User1'], 'NextToken': expected_token})
def test_next_token_on_page_boundary(self):
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
expected_token = encode_token({"Marker": "m2"})
self.assertEqual(
paginator.paginate(
PaginationConfig={'MaxItems': 2}).build_full_result(),
{'Users': ['User1', 'User2'], 'NextToken': expected_token})
def test_max_items_can_be_specified_truncates_response(self):
# We're saying we only want 4 items, but notice that the second
        # page of results returns users 4-6 so we have to truncate
# part of that second page.
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1", "User2", "User3"], "Marker": "m1"},
{"Users": ["User4", "User5", "User6"], "Marker": "m2"},
{"Users": ["User7"]},
]
self.method.side_effect = responses
expected_token = encode_token(
{"Marker": "m1", "boto_truncate_amount": 1})
self.assertEqual(
paginator.paginate(
PaginationConfig={'MaxItems': 4}).build_full_result(),
{'Users': ['User1', 'User2', 'User3', 'User4'],
'NextToken': expected_token})
def test_resume_next_marker_mid_page(self):
# This is a simulation of picking up from the response
        # from test_max_items_can_be_specified_truncates_response.
        # We got the first 4 users; when we pick up we should get
# User5 - User7.
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User4", "User5", "User6"], "Marker": "m2"},
{"Users": ["User7"]},
]
self.method.side_effect = responses
starting_token = encode_token(
{"Marker": "m1", "boto_truncate_amount": 1})
pagination_config = {'StartingToken': starting_token}
self.assertEqual(
paginator.paginate(
PaginationConfig=pagination_config).build_full_result(),
{'Users': ['User5', 'User6', 'User7']})
self.assertEqual(
self.method.call_args_list,
[mock.call(Marker='m1'),
mock.call(Marker='m2')])
def test_max_items_exceeds_actual_amount(self):
# Because MaxItems=10 > number of users (3), we should just return
# all of the users.
paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
self.assertEqual(
paginator.paginate(
PaginationConfig={'MaxItems': 10}).build_full_result(),
{'Users': ['User1', 'User2', 'User3']})
def test_bad_input_tokens(self):
responses = [
{"Users": ["User1"], "Marker": "m1"},
{"Users": ["User2"], "Marker": "m2"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
with self.assertRaisesRegexp(ValueError, 'Bad starting token'):
pagination_config = {'StartingToken': 'does___not___work'}
self.paginator.paginate(
PaginationConfig=pagination_config).build_full_result()
class TestMultipleResultKeys(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
        # A Marker-based pagination config with multiple result keys.
self.paginate_config = {
"output_token": "Marker",
"input_token": "Marker",
"result_key": ["Users", "Groups"],
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_build_full_result_with_multiple_result_keys(self):
responses = [
{"Users": ["User1"], "Groups": ["Group1"], "Marker": "m1"},
{"Users": ["User2"], "Groups": ["Group2"], "Marker": "m2"},
{"Users": ["User3"], "Groups": ["Group3"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete,
{"Users": ['User1', 'User2', 'User3'],
"Groups": ['Group1', 'Group2', 'Group3']})
def test_build_full_result_with_different_length_result_keys(self):
responses = [
{"Users": ["User1"], "Groups": ["Group1"], "Marker": "m1"},
# Then we stop getting "Users" output, but we get more "Groups"
{"Users": [], "Groups": ["Group2"], "Marker": "m2"},
{"Users": [], "Groups": ["Group3"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete,
{"Users": ['User1'],
"Groups": ['Group1', 'Group2', 'Group3']})
def test_build_full_result_with_zero_length_result_key(self):
responses = [
# In this case the 'Users' key is always empty but we should
            # have a 'Users' key in the output; it should just have an
# empty list for a value.
{"Users": [], "Groups": ["Group1"], "Marker": "m1"},
{"Users": [], "Groups": ["Group2"], "Marker": "m2"},
{"Users": [], "Groups": ["Group3"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete,
{"Users": [],
"Groups": ['Group1', 'Group2', 'Group3']})
def test_build_result_with_secondary_keys(self):
responses = [
{"Users": ["User1", "User2"],
"Groups": ["Group1", "Group2"],
"Marker": "m1"},
{"Users": ["User3"], "Groups": ["Group3"], "Marker": "m2"},
{"Users": ["User4"], "Groups": ["Group4"]},
]
self.method.side_effect = responses
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 1})
complete = pages.build_full_result()
expected_token = encode_token(
{"Marker": None, "boto_truncate_amount": 1})
self.assertEqual(complete,
{"Users": ["User1"], "Groups": ["Group1", "Group2"],
"NextToken": expected_token})
def test_resume_with_secondary_keys(self):
        # This is simulating a continuation of the previous test,
        # test_build_result_with_secondary_keys. We use the encoded
        # token from that test's "NextToken" to continue where we
# left off.
responses = [
{"Users": ["User1", "User2"],
"Groups": ["Group1", "Group2"],
"Marker": "m1"},
{"Users": ["User3"], "Groups": ["Group3"], "Marker": "m2"},
{"Users": ["User4"], "Groups": ["Group4"]},
]
self.method.side_effect = responses
starting_token = encode_token(
{"Marker": None, "boto_truncate_amount": 1})
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 1,
'StartingToken': starting_token})
complete = pages.build_full_result()
# Note that the secondary keys ("Groups") are all truncated because
# they were in the original (first) response.
expected_token = encode_token({"Marker": "m1"})
self.assertEqual(complete,
{"Users": ["User2"], "Groups": [],
"NextToken": expected_token})
def test_resume_with_secondary_result_as_string(self):
self.method.return_value = {"Users": ["User1", "User2"], "Groups": "a"}
starting_token = encode_token(
{"Marker": None, "boto_truncate_amount": 1})
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 1, 'StartingToken': starting_token})
complete = pages.build_full_result()
# Note that the secondary keys ("Groups") becomes empty string because
# they were in the original (first) response.
self.assertEqual(complete, {"Users": ["User2"], "Groups": ""})
def test_resume_with_secondary_result_as_integer(self):
self.method.return_value = {"Users": ["User1", "User2"], "Groups": 123}
starting_token = encode_token(
{"Marker": None, "boto_truncate_amount": 1})
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 1, 'StartingToken': starting_token})
complete = pages.build_full_result()
# Note that the secondary keys ("Groups") becomes zero because
# they were in the original (first) response.
self.assertEqual(complete, {"Users": ["User2"], "Groups": 0})
class TestMultipleInputKeys(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
# Probably the most complicated example we'll see:
# multiple input/output/result keys.
self.paginate_config = {
"output_token": ["Marker1", "Marker2"],
"input_token": ["InMarker1", "InMarker2"],
"result_key": ["Users", "Groups"],
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_build_full_result_with_multiple_input_keys(self):
responses = [
{"Users": ["User1", "User2"], "Groups": ["Group1"],
"Marker1": "m1", "Marker2": "m2"},
{"Users": ["User3", "User4"], "Groups": ["Group2"],
"Marker1": "m3", "Marker2": "m4"},
{"Users": ["User5"], "Groups": ["Group3"]}
]
self.method.side_effect = responses
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 3})
complete = pages.build_full_result()
expected_token = encode_token(
{"InMarker1": "m1", "InMarker2": "m2", "boto_truncate_amount": 1})
self.assertEqual(complete,
{"Users": ['User1', 'User2', 'User3'],
"Groups": ['Group1', 'Group2'],
"NextToken": expected_token})
def test_resume_with_multiple_input_keys(self):
responses = [
{"Users": ["User3", "User4"], "Groups": ["Group2"],
"Marker1": "m3", "Marker2": "m4"},
{"Users": ["User5"], "Groups": ["Group3"]},
]
self.method.side_effect = responses
starting_token = encode_token(
{"InMarker1": "m1", "InMarker2": "m2", "boto_truncate_amount": 1})
pages = self.paginator.paginate(
PaginationConfig={'MaxItems': 1,
'StartingToken': starting_token})
complete = pages.build_full_result()
expected_token = encode_token(
{"InMarker1": "m3", "InMarker2": "m4"})
self.assertEqual(complete,
{"Users": ['User4'],
"Groups": [],
"NextToken": expected_token})
self.assertEqual(
self.method.call_args_list,
[mock.call(InMarker1='m1', InMarker2='m2')])
def test_resume_encounters_an_empty_payload(self):
response = {"not_a_result_key": "it happens in some service"}
self.method.return_value = response
starting_token = encode_token(
{"Marker": None, "boto_truncate_amount": 1})
        complete = self.paginator.paginate(
            PaginationConfig={'StartingToken': starting_token}
        ).build_full_result()
self.assertEqual(complete, {})
def test_result_key_exposed_on_paginator(self):
self.assertEqual(
[rk.expression for rk in self.paginator.result_keys],
['Users', 'Groups']
)
def test_result_key_exposed_on_page_iterator(self):
pages = self.paginator.paginate(MaxItems=3)
self.assertEqual(
[rk.expression for rk in pages.result_keys],
['Users', 'Groups']
)
class TestExpressionKeyIterators(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
# This is something like what we'd see in RDS.
self.paginate_config = {
"input_token": "Marker",
"output_token": "Marker",
"limit_key": "MaxRecords",
"result_key": "EngineDefaults.Parameters"
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
self.responses = [
{"EngineDefaults": {"Parameters": ["One", "Two"]},
"Marker": "m1"},
{"EngineDefaults": {"Parameters": ["Three", "Four"]},
"Marker": "m2"},
{"EngineDefaults": {"Parameters": ["Five"]}}
]
def test_result_key_iters(self):
self.method.side_effect = self.responses
pages = self.paginator.paginate()
iterators = pages.result_key_iters()
self.assertEqual(len(iterators), 1)
self.assertEqual(list(iterators[0]),
['One', 'Two', 'Three', 'Four', 'Five'])
def test_build_full_result_with_single_key(self):
self.method.side_effect = self.responses
pages = self.paginator.paginate()
complete = pages.build_full_result()
self.assertEqual(complete, {
'EngineDefaults': {
'Parameters': ['One', 'Two', 'Three', 'Four', 'Five']
},
})
class TestIncludeResultKeys(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
'output_token': 'Marker',
'input_token': 'Marker',
'result_key': ['ResultKey', 'Count', 'Log'],
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_different_kinds_of_result_key(self):
self.method.side_effect = [
{'ResultKey': ['a'], 'Count': 1, 'Log': 'x', 'Marker': 'a'},
{'not_a_result_key': 'this page will be ignored', 'Marker': '_'},
{'ResultKey': ['b', 'c'], 'Count': 2, 'Log': 'y', 'Marker': 'b'},
{'ResultKey': ['d', 'e', 'f'], 'Count': 3, 'Log': 'z'},
]
pages = self.paginator.paginate()
expected = {
'ResultKey': ['a', 'b', 'c', 'd', 'e', 'f'],
'Count': 6,
'Log': 'xyz',
}
self.assertEqual(pages.build_full_result(), expected)
def test_result_key_is_missing(self):
self.method.side_effect = [
{'not_a_result_key': 'this page will be ignored', 'Marker': '_'},
{'neither_this_one': 'this page will be ignored, too'},
]
pages = self.paginator.paginate()
expected = {}
self.assertEqual(pages.build_full_result(), expected)
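    def test_aggregation_behaves_like_plus_sketch(self):
        # A hedged sketch of the aggregation rule implied above: combining
        # pages is assumed to behave like repeated '+', which extends
        # lists, sums numbers, and concatenates strings.
        self.assertEqual(['a'] + ['b', 'c'] + ['d', 'e', 'f'],
                         ['a', 'b', 'c', 'd', 'e', 'f'])
        self.assertEqual(1 + 2 + 3, 6)
        self.assertEqual('x' + 'y' + 'z', 'xyz')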
class TestIncludeNonResultKeys(unittest.TestCase):
maxDiff = None
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'ResultKey',
'non_aggregate_keys': ['NotResultKey'],
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_include_non_aggregate_keys(self):
self.method.side_effect = [
{'ResultKey': ['foo'], 'NotResultKey': 'a', 'NextToken': 't1'},
{'ResultKey': ['bar'], 'NotResultKey': 'a', 'NextToken': 't2'},
{'ResultKey': ['baz'], 'NotResultKey': 'a'},
]
pages = self.paginator.paginate()
actual = pages.build_full_result()
self.assertEqual(pages.non_aggregate_part, {'NotResultKey': 'a'})
expected = {
'ResultKey': ['foo', 'bar', 'baz'],
'NotResultKey': 'a',
}
self.assertEqual(actual, expected)
def test_include_with_multiple_result_keys(self):
self.paginate_config['result_key'] = ['ResultKey1', 'ResultKey2']
self.paginator = Paginator(self.method, self.paginate_config, self.model)
self.method.side_effect = [
{'ResultKey1': ['a', 'b'], 'ResultKey2': ['u', 'v'],
'NotResultKey': 'a', 'NextToken': 'token1'},
{'ResultKey1': ['c', 'd'], 'ResultKey2': ['w', 'x'],
'NotResultKey': 'a', 'NextToken': 'token2'},
{'ResultKey1': ['e', 'f'], 'ResultKey2': ['y', 'z'],
'NotResultKey': 'a'}
]
pages = self.paginator.paginate()
actual = pages.build_full_result()
expected = {
'ResultKey1': ['a', 'b', 'c', 'd', 'e', 'f'],
'ResultKey2': ['u', 'v', 'w', 'x', 'y', 'z'],
'NotResultKey': 'a',
}
self.assertEqual(actual, expected)
def test_include_with_nested_result_keys(self):
self.paginate_config['result_key'] = 'Result.Key'
self.paginate_config['non_aggregate_keys'] = [
'Outer', 'Result.Inner',
]
self.paginator = Paginator(self.method, self.paginate_config, self.model)
self.method.side_effect = [
            # The non-result keys show a hypothetical
# example. This doesn't actually happen,
# but in the case where the non result keys
# are different across pages, we use the values
# from the first page.
{'Result': {'Key': ['foo'], 'Inner': 'v1'},
'Outer': 'v2', 'NextToken': 't1'},
{'Result': {'Key': ['bar', 'baz'], 'Inner': 'v3'},
'Outer': 'v4', 'NextToken': 't2'},
{'Result': {'Key': ['qux'], 'Inner': 'v5'},
'Outer': 'v6', 'NextToken': 't3'},
]
pages = self.paginator.paginate()
actual = pages.build_full_result()
self.assertEqual(pages.non_aggregate_part,
{'Outer': 'v2', 'Result': {'Inner': 'v1'}})
expected = {
'Result': {'Key': ['foo', 'bar', 'baz', 'qux'], 'Inner': 'v1'},
'Outer': 'v2',
}
self.assertEqual(actual, expected)
class TestSearchOverResults(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
self.paginate_config = {
'more_results': 'IsTruncated',
'output_token': 'NextToken',
'input_token': 'NextToken',
'result_key': 'Foo',
}
self.paginator = Paginator(self.method, self.paginate_config, self.model)
responses = [
{'Foo': [{'a': 1}, {'b': 2}],
'IsTruncated': True, 'NextToken': '1'},
{'Foo': [{'a': 3}, {'b': 4}],
'IsTruncated': True, 'NextToken': '2'},
{'Foo': [{'a': 5}], 'IsTruncated': False, 'NextToken': '3'}
]
self.method.side_effect = responses
def test_yields_non_list_values(self):
result = list(self.paginator.paginate().search('Foo[0].a'))
self.assertEqual([1, 3, 5], result)
def test_yields_individual_list_values(self):
result = list(self.paginator.paginate().search('Foo[].*[]'))
self.assertEqual([1, 2, 3, 4, 5], result)
def test_empty_when_no_match(self):
result = list(self.paginator.paginate().search('Foo[].qux'))
self.assertEqual([], result)
def test_no_yield_when_no_match_on_page(self):
result = list(self.paginator.paginate().search('Foo[].b'))
self.assertEqual([2, 4], result)
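    def test_search_matches_jmespath_per_page_sketch(self):
        # A hedged sketch: .search() is assumed to apply a JMESPath
        # expression to each page in turn, so a single page behaves like a
        # direct jmespath.search call.
        import jmespath  # botocore dependency; local import for the sketch
        page = {'Foo': [{'a': 1}, {'b': 2}]}
        self.assertEqual(jmespath.search('Foo[].a', page), [1])
        self.assertEqual(jmespath.search('Foo[0].a', page), 1)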
class TestDeprecatedStartingToken(unittest.TestCase):
def setUp(self):
self.method = mock.Mock()
self.model = mock.Mock()
def create_paginator(self, multiple_tokens=False):
if multiple_tokens:
paginator_config = {
"output_token": ["Marker1", "Marker2"],
"input_token": ["InMarker1", "InMarker2"],
"result_key": ["Users", "Groups"],
}
else:
paginator_config = {
'output_token': 'Marker',
'input_token': 'Marker',
'result_key': 'Users',
}
return Paginator(self.method, paginator_config, self.model)
def assert_pagination_result(self, expected, pagination_config,
multiple_tokens=False):
paginator = self.create_paginator(multiple_tokens)
try:
actual = paginator.paginate(
PaginationConfig=pagination_config).build_full_result()
self.assertEqual(actual, expected)
except ValueError:
self.fail("Deprecated paginator failed.")
def test_deprecated_starting_token(self):
responses = [
{"Users": ["User1"], "Marker": "m2"},
{"Users": ["User2"], "Marker": "m3"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
pagination_config = {'StartingToken': 'm1___0'}
expected = {'Users': ['User1', 'User2', 'User3']}
self.assert_pagination_result(expected, pagination_config)
def test_deprecated_multiple_starting_token(self):
responses = [
{
"Users": ["User1", "User2"],
"Groups": ["Group1"],
"Marker1": "m1",
"Marker2": "m2"
},
{
"Users": ["User3", "User4"],
"Groups": ["Group2"],
"Marker1": "m3",
"Marker2": "m4"
},
{
"Users": ["User5"],
"Groups": ["Group3"]
}
]
self.method.side_effect = responses
pagination_config = {'StartingToken': 'm0___m0___1'}
expected = {
'Groups': ['Group2', 'Group3'],
'Users': ['User2', 'User3', 'User4', 'User5']
}
self.assert_pagination_result(
expected, pagination_config, multiple_tokens=True)
def test_deprecated_starting_token_returns_new_style_next_token(self):
responses = [
{"Users": ["User1"], "Marker": "m2"},
{"Users": ["User2"], "Marker": "m3"},
{"Users": ["User3"], "Marker": "m4"},
]
self.method.side_effect = responses
pagination_config = {'StartingToken': 'm1___0', 'MaxItems': 3}
expected = {
'Users': ['User1', 'User2', 'User3'],
'NextToken': encode_token({'Marker': 'm4'})
}
self.assert_pagination_result(expected, pagination_config)
def test_deprecated_starting_token_without_all_input_set_to_none(self):
responses = [
{
"Users": ["User1", "User2"],
"Groups": ["Group1"],
"Marker1": "m1",
"Marker2": "m2"
},
{
"Users": ["User3", "User4"],
"Groups": ["Group2"],
"Marker1": "m3",
"Marker2": "m4"
},
{
"Users": ["User5"],
"Groups": ["Group3"]
}
]
self.method.side_effect = responses
pagination_config = {'StartingToken': 'm0'}
expected = {
'Groups': ['Group2', 'Group3'],
'Users': ['User1', 'User2', 'User3', 'User4', 'User5']
}
self.assert_pagination_result(
expected, pagination_config, multiple_tokens=True)
def test_deprecated_starting_token_rejects_too_many_input_tokens(self):
responses = [
{"Users": ["User1"], "Marker": "m2"},
{"Users": ["User2"], "Marker": "m3"},
{"Users": ["User3"]},
]
self.method.side_effect = responses
pagination_config = {'StartingToken': 'm1___m4___0'}
expected = {'Users': ['User1', 'User2', 'User3']}
paginator = self.create_paginator()
with self.assertRaises(ValueError):
actual = paginator.paginate(
PaginationConfig=pagination_config).build_full_result()
self.assertEqual(actual, expected)
class TestStringPageSize(unittest.TestCase):
def setUp(self):
self.service_model = {
'metadata': {
'protocol': 'query',
'endpointPrefix': 'prefix'
},
'documentation': 'best service ever',
'operations': {
'ListStuff': {
'name': 'ListStuff',
'http': {
'method': 'GET',
'requestUri': '/things'
},
'input': {'shape': 'ListStuffInputShape'},
'output': {'shape': 'ListStuffOutputShape'},
'errors': [],
'documentation': 'Lists stuff'
}
},
'shapes': {
'String': {'type': 'string'},
'ListOfStuff': {
'type': 'list',
'member': {'type': 'string'}
},
'ListStuffInputShape': {
'type': 'structure',
'required': [],
'members': {
'NextToken': {'shape': 'String'},
'MaxItems': {'shape': 'String'}
}
},
'ListStuffOutputShape': {
'type': 'structure',
'required': [],
'members': {
'NextToken': {'shape': 'String'},
'Stuff': {'shape': 'ListOfStuff'},
'IsTruncated': {'type': 'boolean'}
},
}
}
}
self.paginate_config = {
'input_token': 'NextToken',
'output_token': 'NextToken',
'limit_key': 'MaxItems',
'result_key': 'Stuff',
}
self.service = model.ServiceModel(self.service_model)
self.model = self.service.operation_model('ListStuff')
self.method = mock.Mock()
        self.method.side_effect = [{}]
self.paginator = Paginator(self.method, self.paginate_config, self.model)
def test_int_page_size(self):
res = list(self.paginator.paginate(PaginationConfig={'PageSize': 1}))
self.method.assert_called_with(MaxItems='1')
def test_str_page_size(self):
res = list(self.paginator.paginate(PaginationConfig={'PageSize': '1'}))
self.method.assert_called_with(MaxItems='1')
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import botocore.config
from tests import unittest, create_session, temporary_file
import os
import logging
import tempfile
import shutil
import mock
import botocore.session
import botocore.exceptions
from botocore.model import ServiceModel
from botocore import client
from botocore.hooks import HierarchicalEmitter
from botocore.waiter import WaiterModel
from botocore.paginate import PaginatorModel
import botocore.loaders
class BaseSessionTest(unittest.TestCase):
def setUp(self):
self.env_vars = {
'profile': (None, 'FOO_PROFILE', None, None),
'region': ('foo_region', 'FOO_REGION', None, None),
'data_path': ('data_path', 'FOO_DATA_PATH', None, None),
'config_file': (None, 'FOO_CONFIG_FILE', None, None),
'credentials_file': (None, None, '/tmp/nowhere', None),
'ca_bundle': ('foo_ca_bundle', 'FOO_AWS_CA_BUNDLE', None, None),
'api_versions': ('foo_api_versions', None, {}, None)
}
self.environ = {}
self.environ_patch = mock.patch('os.environ', self.environ)
self.environ_patch.start()
self.environ['FOO_PROFILE'] = 'foo'
self.environ['FOO_REGION'] = 'us-west-11'
data_path = os.path.join(os.path.dirname(__file__), 'data')
self.environ['FOO_DATA_PATH'] = data_path
config_path = os.path.join(os.path.dirname(__file__), 'cfg',
'foo_config')
self.environ['FOO_CONFIG_FILE'] = config_path
self.session = create_session(session_vars=self.env_vars)
def tearDown(self):
self.environ_patch.stop()
class SessionTest(BaseSessionTest):
def close_log_file_handler(self, tempdir, filename):
logger = logging.getLogger('botocore')
handlers = logger.handlers
for handler in handlers[:]:
if hasattr(handler, 'stream') and handler.stream.name == filename:
handler.stream.close()
logger.removeHandler(handler)
os.remove(filename)
# logging has an atexit handler that will try to flush/close
# the file. By setting this flag to False, we'll prevent it
# from raising an exception, which is fine because we're
        # handling the closing of the file ourselves.
logging.raiseExceptions = False
shutil.rmtree(tempdir)
def test_supports_multiple_env_vars_for_single_logical_name(self):
env_vars = {
'profile': (None, ['BAR_DEFAULT_PROFILE', 'BAR_PROFILE'],
None, None),
}
session = create_session(session_vars=env_vars)
self.environ['BAR_DEFAULT_PROFILE'] = 'first'
self.environ['BAR_PROFILE'] = 'second'
self.assertEqual(session.get_config_variable('profile'), 'first')
def test_profile_when_set_explicitly(self):
session = create_session(session_vars=self.env_vars, profile='asdf')
self.assertEqual(session.profile, 'asdf')
def test_profile_when_pulled_from_env(self):
self.environ['FOO_PROFILE'] = 'bar'
# Even though we didn't explicitly pass in a profile, the
# profile property will still look this up for us.
self.assertEqual(self.session.profile, 'bar')
def test_multiple_env_vars_uses_second_var(self):
env_vars = {
'profile': (None, ['BAR_DEFAULT_PROFILE', 'BAR_PROFILE'],
None, None),
}
session = create_session(session_vars=env_vars)
self.environ.pop('BAR_DEFAULT_PROFILE', None)
self.environ['BAR_PROFILE'] = 'second'
self.assertEqual(session.get_config_variable('profile'), 'second')
def test_profile(self):
self.assertEqual(self.session.get_config_variable('profile'), 'foo')
self.assertEqual(self.session.get_config_variable('region'),
'us-west-11')
        self.assertNotEqual(self.session.get_config_variable('profile'),
                            'default')
saved_region = self.environ['FOO_REGION']
del self.environ['FOO_REGION']
saved_profile = self.environ['FOO_PROFILE']
del self.environ['FOO_PROFILE']
session = create_session(session_vars=self.env_vars)
self.assertEqual(session.get_config_variable('profile'), None)
self.assertEqual(session.get_config_variable('region'), 'us-west-1')
self.environ['FOO_REGION'] = saved_region
self.environ['FOO_PROFILE'] = saved_profile
def test_profile_does_not_exist_raises_exception(self):
# Given we have no profile:
self.environ['FOO_PROFILE'] = 'profile_that_does_not_exist'
session = create_session(session_vars=self.env_vars)
with self.assertRaises(botocore.exceptions.ProfileNotFound):
session.get_scoped_config()
def test_variable_does_not_exist(self):
session = create_session(session_vars=self.env_vars)
self.assertIsNone(session.get_config_variable('foo/bar'))
def test_get_aws_services_in_alphabetical_order(self):
session = create_session(session_vars=self.env_vars)
services = session.get_available_services()
self.assertEqual(sorted(services), services)
def test_profile_does_not_exist_with_default_profile(self):
session = create_session(session_vars=self.env_vars)
config = session.get_scoped_config()
        # We should have loaded this properly, and we'll check
        # that aws_access_key_id, which is defined in the config
        # file, is present in the loaded config dict.
self.assertIn('aws_access_key_id', config)
def test_type_conversions_occur_when_specified(self):
# Specify that we can retrieve the var from the
# FOO_TIMEOUT env var, with a conversion function
# of int().
self.env_vars['metadata_service_timeout'] = (
None, 'FOO_TIMEOUT', None, int)
# Environment variables are always strings.
self.environ['FOO_TIMEOUT'] = '10'
session = create_session(session_vars=self.env_vars)
        # But we should type convert this to an int.
self.assertEqual(
session.get_config_variable('metadata_service_timeout'), 10)
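    def test_session_var_tuple_shape_sketch(self):
        # A hedged sketch of the 4-tuple format assumed throughout these
        # tests: (config file key, env var name(s), default value,
        # conversion func). Here only the converter slot is exercised.
        config_key, env_var, default, converter = (
            None, 'FOO_TIMEOUT', None, int)
        self.assertEqual(converter('10'), 10)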
def test_default_profile_specified_raises_exception(self):
        # If you explicitly set the default profile and you don't
# have that in your config file, an exception is raised.
config_path = os.path.join(os.path.dirname(__file__), 'cfg',
'boto_config_empty')
self.environ['FOO_CONFIG_FILE'] = config_path
self.environ['FOO_PROFILE'] = 'default'
session = create_session(session_vars=self.env_vars)
# In this case, even though we specified default, because
# the boto_config_empty config file does not have a default
# profile, we should be raising an exception.
with self.assertRaises(botocore.exceptions.ProfileNotFound):
session.get_scoped_config()
def test_file_logger(self):
tempdir = tempfile.mkdtemp()
temp_file = os.path.join(tempdir, 'file_logger')
self.session.set_file_logger(logging.DEBUG, temp_file)
self.addCleanup(self.close_log_file_handler, tempdir, temp_file)
self.session.get_credentials()
self.assertTrue(os.path.isfile(temp_file))
with open(temp_file) as logfile:
s = logfile.read()
self.assertTrue('Looking for credentials' in s)
def test_full_config_property(self):
full_config = self.session.full_config
self.assertTrue('foo' in full_config['profiles'])
self.assertTrue('default' in full_config['profiles'])
def test_full_config_merges_creds_file_data(self):
with temporary_file('w') as f:
self.session.set_config_variable('credentials_file', f.name)
f.write('[newprofile]\n')
f.write('aws_access_key_id=FROM_CREDS_FILE_1\n')
f.write('aws_secret_access_key=FROM_CREDS_FILE_2\n')
f.flush()
full_config = self.session.full_config
self.assertEqual(full_config['profiles']['newprofile'],
{'aws_access_key_id': 'FROM_CREDS_FILE_1',
'aws_secret_access_key': 'FROM_CREDS_FILE_2'})
def test_path_not_in_available_profiles(self):
with temporary_file('w') as f:
self.session.set_config_variable('credentials_file', f.name)
f.write('[newprofile]\n')
f.write('aws_access_key_id=FROM_CREDS_FILE_1\n')
f.write('aws_secret_access_key=FROM_CREDS_FILE_2\n')
f.flush()
profiles = self.session.available_profiles
self.assertEqual(
set(profiles),
set(['foo', 'default', 'newprofile']))
def test_emit_delegates_to_emitter(self):
calls = []
handler = lambda **kwargs: calls.append(kwargs)
self.session.register('foo', handler)
self.session.emit('foo')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['event_name'], 'foo')
def test_emitter_can_be_passed_in(self):
events = HierarchicalEmitter()
session = create_session(session_vars=self.env_vars,
event_hooks=events)
calls = []
handler = lambda **kwargs: calls.append(kwargs)
events.register('foo', handler)
session.emit('foo')
self.assertEqual(len(calls), 1)
def test_emit_first_non_none(self):
session = create_session(session_vars=self.env_vars)
session.register('foo', lambda **kwargs: None)
session.register('foo', lambda **kwargs: 'first')
session.register('foo', lambda **kwargs: 'second')
response = session.emit_first_non_none_response('foo')
self.assertEqual(response, 'first')
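# A rough sketch of first-non-none emission, assuming handlers run in
# registration order and the first non-None return short-circuits:
def _emit_first_non_none(handlers, **kwargs):
    for handler in handlers:
        response = handler(**kwargs)
        if response is not None:
            return response
    return None
# Handlers returning None, 'first', 'second' (in that order) yield
# 'first', matching the assertion above.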
@mock.patch('logging.getLogger')
@mock.patch('logging.FileHandler')
def test_logger_name_can_be_passed_in(self, file_handler, get_logger):
self.session.set_debug_logger('botocore.hooks')
get_logger.assert_called_with('botocore.hooks')
self.session.set_file_logger('DEBUG', 'debuglog', 'botocore.service')
get_logger.assert_called_with('botocore.service')
file_handler.assert_called_with('debuglog')
@mock.patch('logging.getLogger')
@mock.patch('logging.StreamHandler')
@mock.patch('logging.Formatter')
def test_general_purpose_logger(self, formatter, stream_handler, get_logger):
self.session.set_stream_logger('foo.bar', 'ERROR', format_string='foo')
get_logger.assert_called_with('foo.bar')
get_logger.return_value.setLevel.assert_called_with(logging.DEBUG)
formatter.assert_called_with('foo')
def test_register_with_unique_id(self):
calls = []
handler = lambda **kwargs: calls.append(kwargs)
self.session.register('foo', handler, unique_id='bar')
self.session.emit('foo')
self.assertEqual(calls[0]['event_name'], 'foo')
calls = []
self.session.unregister('foo', unique_id='bar')
self.session.emit('foo')
self.assertEqual(calls, [])
class TestBuiltinEventHandlers(BaseSessionTest):
def setUp(self):
super(TestBuiltinEventHandlers, self).setUp()
self.builtin_handlers = [
('foo', self.on_foo),
]
self.foo_called = False
self.handler_patch = mock.patch('botocore.handlers.BUILTIN_HANDLERS',
self.builtin_handlers)
self.handler_patch.start()
def on_foo(self, **kwargs):
self.foo_called = True
def tearDown(self):
super(TestBuiltinEventHandlers, self).tearDown()
self.handler_patch.stop()
def test_registered_builtin_handlers(self):
session = botocore.session.Session(self.env_vars, None,
include_builtin_handlers=True)
session.emit('foo')
self.assertTrue(self.foo_called)
class TestSessionConfigurationVars(BaseSessionTest):
def test_per_session_config_vars(self):
self.session.session_var_map['foobar'] = (None, 'FOOBAR',
'default', None)
# Default value.
self.assertEqual(self.session.get_config_variable('foobar'), 'default')
# Retrieve from os environment variable.
self.environ['FOOBAR'] = 'fromenv'
self.assertEqual(self.session.get_config_variable('foobar'), 'fromenv')
# Explicit override.
self.session.set_config_variable('foobar', 'session-instance')
self.assertEqual(self.session.get_config_variable('foobar'),
'session-instance')
# The sources consulted can be restricted via the ``methods`` arg.
del self.environ['FOOBAR']
self.assertEqual(self.session.get_config_variable(
'foobar', methods=('env', 'config')), 'default')
def test_default_value_can_be_overridden(self):
self.session.session_var_map['foobar'] = (None, 'FOOBAR', 'default',
None)
self.assertEqual(self.session.get_config_variable('foobar'), 'default')
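# A simplified sketch of the precedence exercised above, assuming the
# resolution order: instance override, then environment, then config
# file, then the declared default. The ``methods`` tuple names which
# sources are consulted (illustrative only, not botocore's resolver).
def _resolve_config_variable(instance_vars, environ, config, logical_name,
                             env_var, default,
                             methods=('instance', 'env', 'config')):
    if 'instance' in methods and logical_name in instance_vars:
        return instance_vars[logical_name]
    if 'env' in methods and env_var in environ:
        return environ[env_var]
    if 'config' in methods and logical_name in config:
        return config[logical_name]
    return default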
class TestSessionPartitionFiles(BaseSessionTest):
def test_lists_partitions_on_disk(self):
mock_resolver = mock.Mock()
mock_resolver.get_available_partitions.return_value = ['foo']
self.session.register_component('endpoint_resolver', mock_resolver)
self.assertEqual(['foo'], self.session.get_available_partitions())
def test_proxies_list_endpoints_to_resolver(self):
resolver = mock.Mock()
resolver.get_available_endpoints.return_value = ['a', 'b']
self.session.register_component('endpoint_resolver', resolver)
self.session.get_available_regions('foo', 'bar', True)
def test_provides_empty_list_for_unknown_service_regions(self):
regions = self.session.get_available_regions('__foo__')
self.assertEqual([], regions)
class TestSessionUserAgent(BaseSessionTest):
def test_can_change_user_agent_name(self):
self.session.user_agent_name = 'something-else'
self.assertTrue(self.session.user_agent().startswith('something-else'))
def test_can_change_user_agent_version(self):
self.session.user_agent_version = '24.0'
self.assertTrue(self.session.user_agent().startswith('Botocore/24.0'))
def test_can_append_to_user_agent(self):
self.session.user_agent_extra = 'custom-thing/other'
self.assertTrue(
self.session.user_agent().endswith('custom-thing/other'))
def test_execution_env_not_set(self):
self.assertFalse(self.session.user_agent().endswith('FooEnv'))
def test_execution_env_set(self):
self.environ['AWS_EXECUTION_ENV'] = 'FooEnv'
self.assertTrue(self.session.user_agent().endswith(' exec-env/FooEnv'))
def test_agent_extra_and_exec_env(self):
self.session.user_agent_extra = 'custom-thing/other'
self.environ['AWS_EXECUTION_ENV'] = 'FooEnv'
user_agent = self.session.user_agent()
self.assertTrue(user_agent.endswith('custom-thing/other'))
self.assertIn('exec-env/FooEnv', user_agent)
class TestConfigLoaderObject(BaseSessionTest):
def test_config_loader_delegation(self):
session = create_session(session_vars=self.env_vars,
profile='credfile-profile')
with temporary_file('w') as f:
f.write('[credfile-profile]\naws_access_key_id=a\n')
f.write('aws_secret_access_key=b\n')
f.flush()
session.set_config_variable('credentials_file', f.name)
# Now trying to retrieve the scoped config should pull in
# values from the shared credentials file.
self.assertEqual(session.get_scoped_config(),
{'aws_access_key_id': 'a',
'aws_secret_access_key': 'b'})
class TestGetServiceModel(BaseSessionTest):
def test_get_service_model(self):
loader = mock.Mock()
loader.load_service_model.return_value = {}
self.session.register_component('data_loader', loader)
model = self.session.get_service_model('made_up')
self.assertIsInstance(model, ServiceModel)
self.assertEqual(model.service_name, 'made_up')
class TestGetPaginatorModel(BaseSessionTest):
def test_get_paginator_model(self):
loader = mock.Mock()
loader.load_service_model.return_value = {"pagination": {}}
self.session.register_component('data_loader', loader)
model = self.session.get_paginator_model('foo')
# Verify we get a PaginatorModel back
self.assertIsInstance(model, PaginatorModel)
# Verify we called the loader correctly.
loader.load_service_model.assert_called_with(
'foo', 'paginators-1', None)
class TestGetWaiterModel(BaseSessionTest):
def test_get_waiter_model(self):
loader = mock.Mock()
loader.load_service_model.return_value = {"version": 2, "waiters": {}}
self.session.register_component('data_loader', loader)
model = self.session.get_waiter_model('foo')
# Verify we (1) get the expected return data,
self.assertIsInstance(model, WaiterModel)
self.assertEqual(model.waiter_names, [])
# and (2) call the loader correctly.
loader.load_service_model.assert_called_with(
'foo', 'waiters-2', None)
class TestCreateClient(BaseSessionTest):
def test_can_create_client(self):
sts_client = self.session.create_client('sts', 'us-west-2')
self.assertIsInstance(sts_client, client.BaseClient)
def test_credential_provider_not_called_when_creds_provided(self):
cred_provider = mock.Mock()
self.session.register_component(
'credential_provider', cred_provider)
self.session.create_client(
'sts', 'us-west-2',
aws_access_key_id='foo',
aws_secret_access_key='bar',
aws_session_token='baz')
self.assertFalse(cred_provider.load_credentials.called,
"Credential provider was called even though "
"explicit credentials were provided to the "
"create_client call.")
def test_cred_provider_called_when_partial_creds_provided(self):
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
self.session.create_client(
'sts', 'us-west-2',
aws_access_key_id='foo',
aws_secret_access_key=None
)
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
self.session.create_client(
'sts', 'us-west-2',
aws_access_key_id=None,
aws_secret_access_key='foo',
)
@mock.patch('botocore.client.ClientCreator')
def test_config_passed_to_client_creator(self, client_creator):
# Make sure there is no default set
self.assertEqual(self.session.get_default_client_config(), None)
# The config passed to the client should be the one that is used
# in creating the client.
config = botocore.config.Config(region_name='us-west-2')
self.session.create_client('sts', config=config)
client_creator.return_value.create_client.assert_called_with(
service_name=mock.ANY, region_name=mock.ANY, is_secure=mock.ANY,
endpoint_url=mock.ANY, verify=mock.ANY, credentials=mock.ANY,
scoped_config=mock.ANY, client_config=config,
api_version=mock.ANY)
@mock.patch('botocore.client.ClientCreator')
def test_create_client_with_default_client_config(self, client_creator):
config = botocore.config.Config()
self.session.set_default_client_config(config)
self.session.create_client('sts')
client_creator.return_value.create_client.assert_called_with(
service_name=mock.ANY, region_name=mock.ANY, is_secure=mock.ANY,
endpoint_url=mock.ANY, verify=mock.ANY, credentials=mock.ANY,
scoped_config=mock.ANY, client_config=config,
api_version=mock.ANY)
@mock.patch('botocore.client.ClientCreator')
def test_create_client_with_merging_client_configs(self, client_creator):
config = botocore.config.Config(region_name='us-west-2')
other_config = botocore.config.Config(region_name='us-east-1')
self.session.set_default_client_config(config)
self.session.create_client('sts', config=other_config)
# Grab the client config used in creating the client
used_client_config = (
client_creator.return_value.create_client.call_args[1][
'client_config'])
# Check that the client configs were merged
self.assertEqual(used_client_config.region_name, 'us-east-1')
# Make sure that the client config used is not the default client
# config or the one passed in. It should be a new config.
self.assertIsNot(used_client_config, config)
self.assertIsNot(used_client_config, other_config)
def test_create_client_with_region(self):
ec2_client = self.session.create_client(
'ec2', 'us-west-2')
self.assertEqual(ec2_client.meta.region_name, 'us-west-2')
def test_create_client_with_region_and_client_config(self):
config = botocore.config.Config()
# Use a client config with no region configured.
ec2_client = self.session.create_client(
'ec2', region_name='us-west-2', config=config)
self.assertEqual(ec2_client.meta.region_name, 'us-west-2')
# Changing the region on the config afterwards should not
# change the region of the already-created client.
config.region_name = 'us-east-1'
self.assertEqual(ec2_client.meta.region_name, 'us-west-2')
# Now make a new client with the updated client config.
ec2_client = self.session.create_client(
'ec2', config=config)
self.assertEqual(ec2_client.meta.region_name, 'us-east-1')
def test_create_client_no_region_and_no_client_config(self):
ec2_client = self.session.create_client('ec2')
self.assertEqual(ec2_client.meta.region_name, 'us-west-11')
@mock.patch('botocore.client.ClientCreator')
def test_create_client_with_ca_bundle_from_config(self, client_creator):
with temporary_file('w') as f:
del self.environ['FOO_PROFILE']
self.environ['FOO_CONFIG_FILE'] = f.name
self.session = create_session(session_vars=self.env_vars)
f.write('[default]\n')
f.write('foo_ca_bundle=config-certs.pem\n')
f.flush()
self.session.create_client('ec2', 'us-west-2')
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
self.assertEqual(call_kwargs['verify'], 'config-certs.pem')
@mock.patch('botocore.client.ClientCreator')
def test_create_client_with_ca_bundle_from_env_var(self, client_creator):
self.environ['FOO_AWS_CA_BUNDLE'] = 'env-certs.pem'
self.session.create_client('ec2', 'us-west-2')
call_kwargs = client_creator.return_value.create_client.call_args[1]
self.assertEqual(call_kwargs['verify'], 'env-certs.pem')
@mock.patch('botocore.client.ClientCreator')
def test_create_client_with_verify_param(self, client_creator):
self.session.create_client(
'ec2', 'us-west-2', verify='verify-certs.pem')
call_kwargs = client_creator.return_value.create_client.call_args[1]
self.assertEqual(call_kwargs['verify'], 'verify-certs.pem')
@mock.patch('botocore.client.ClientCreator')
def test_create_client_verify_param_overrides_all(self, client_creator):
with temporary_file('w') as f:
# Set the ca cert using the config file
del self.environ['FOO_PROFILE']
self.environ['FOO_CONFIG_FILE'] = f.name
self.session = create_session(session_vars=self.env_vars)
f.write('[default]\n')
f.write('foo_ca_bundle=config-certs.pem\n')
f.flush()
# Set the ca cert with an environment variable
self.environ['FOO_AWS_CA_BUNDLE'] = 'env-certs.pem'
# Set the ca cert using the verify parameter
self.session.create_client(
'ec2', 'us-west-2', verify='verify-certs.pem')
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
# The verify parameter should override all the other
# configurations
self.assertEqual(call_kwargs['verify'], 'verify-certs.pem')
@mock.patch('botocore.client.ClientCreator')
def test_create_client_uses_no_api_version_by_default(self, client_creator):
self.session.create_client('myservice', 'us-west-2')
call_kwargs = client_creator.return_value.create_client.call_args[1]
self.assertEqual(call_kwargs['api_version'], None)
@mock.patch('botocore.client.ClientCreator')
def test_create_client_uses_api_version_from_config(self, client_creator):
config_api_version = '2012-01-01'
with temporary_file('w') as f:
del self.environ['FOO_PROFILE']
self.environ['FOO_CONFIG_FILE'] = f.name
self.session = create_session(session_vars=self.env_vars)
f.write('[default]\n')
f.write('foo_api_versions =\n'
' myservice = %s\n' % config_api_version)
f.flush()
self.session.create_client('myservice', 'us-west-2')
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
self.assertEqual(call_kwargs['api_version'], config_api_version)
@mock.patch('botocore.client.ClientCreator')
def test_can_specify_multiple_versions_from_config(self, client_creator):
config_api_version = '2012-01-01'
second_config_api_version = '2013-01-01'
with temporary_file('w') as f:
del self.environ['FOO_PROFILE']
self.environ['FOO_CONFIG_FILE'] = f.name
self.session = create_session(session_vars=self.env_vars)
f.write('[default]\n')
f.write('foo_api_versions =\n'
' myservice = %s\n'
' myservice2 = %s\n' % (
config_api_version, second_config_api_version)
)
f.flush()
self.session.create_client('myservice', 'us-west-2')
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
self.assertEqual(call_kwargs['api_version'], config_api_version)
self.session.create_client('myservice2', 'us-west-2')
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
self.assertEqual(
call_kwargs['api_version'], second_config_api_version)
@mock.patch('botocore.client.ClientCreator')
def test_param_api_version_overrides_config_value(self, client_creator):
config_api_version = '2012-01-01'
override_api_version = '2014-01-01'
with temporary_file('w') as f:
del self.environ['FOO_PROFILE']
self.environ['FOO_CONFIG_FILE'] = f.name
self.session = create_session(session_vars=self.env_vars)
f.write('[default]\n')
f.write('foo_api_versions =\n'
' myservice = %s\n' % config_api_version)
f.flush()
self.session.create_client(
'myservice', 'us-west-2', api_version=override_api_version)
call_kwargs = client_creator.return_value.\
create_client.call_args[1]
self.assertEqual(call_kwargs['api_version'], override_api_version)
class TestComponentLocator(unittest.TestCase):
def setUp(self):
self.components = botocore.session.ComponentLocator()
def test_unknown_component_raises_exception(self):
with self.assertRaises(ValueError):
self.components.get_component('unknown-component')
def test_can_register_and_retrieve_component(self):
component = object()
self.components.register_component('foo', component)
self.assertIs(self.components.get_component('foo'), component)
def test_last_registration_wins(self):
first = object()
second = object()
self.components.register_component('foo', first)
self.components.register_component('foo', second)
self.assertIs(self.components.get_component('foo'), second)
def test_can_lazy_register_a_component(self):
component = object()
lazy = lambda: component
self.components.lazy_register_component('foo', lazy)
self.assertIs(self.components.get_component('foo'), component)
def test_latest_registration_wins_even_if_lazy(self):
first = object()
second = object()
lazy_second = lambda: second
self.components.register_component('foo', first)
self.components.lazy_register_component('foo', lazy_second)
self.assertIs(self.components.get_component('foo'), second)
def test_latest_registration_overrides_lazy(self):
first = object()
second = object()
lazy_first = lambda: first
self.components.lazy_register_component('foo', lazy_first)
self.components.register_component('foo', second)
self.assertIs(self.components.get_component('foo'), second)
def test_lazy_registration_factory_does_not_remove_from_list_on_error(self):
class ArbitraryError(Exception):
pass
def bad_factory():
raise ArbitraryError("Factory raises an exception.")
self.components.lazy_register_component('foo', bad_factory)
with self.assertRaises(ArbitraryError):
self.components.get_component('foo')
# Trying again should raise the same exception,
# not a ValueError("Unknown component").
with self.assertRaises(ArbitraryError):
self.components.get_component('foo')
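# A sketch of lazy registration with caching, assuming (as the last test
# pins down) that a factory which raises stays registered and is retried
# on the next lookup rather than being dropped.
class _LazyComponents(object):
    def __init__(self):
        self._components = {}
        self._factories = {}

    def register(self, name, component):
        self._components[name] = component
        self._factories.pop(name, None)

    def lazy_register(self, name, factory):
        self._factories[name] = factory
        self._components.pop(name, None)

    def get(self, name):
        if name in self._factories:
            # Call the factory first; only on success do we cache the
            # result and discard the factory. A raising factory is left
            # in place so the same error surfaces on the next call.
            component = self._factories[name]()
            del self._factories[name]
            self._components[name] = component
        try:
            return self._components[name]
        except KeyError:
            raise ValueError('Unknown component: %s' % name)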
class TestDefaultClientConfig(BaseSessionTest):
def test_new_session_has_no_default_client_config(self):
self.assertEqual(self.session.get_default_client_config(), None)
def test_set_and_get_client_config(self):
client_config = botocore.config.Config()
self.session.set_default_client_config(client_config)
self.assertIs(self.session.get_default_client_config(), client_config)
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import datetime
import collections
from dateutil.tz import tzutc
from nose.tools import assert_equal
from botocore import parsers
from botocore import model
from botocore.compat import json
# HTTP response headers are typically returned as a custom
# dict-like object. We want to ensure we're able to work with
# any kind of mutable mapping implementation.
class CustomHeaderDict(collections.MutableMapping):
def __init__(self, original_dict):
self._d = original_dict
def __getitem__(self, item):
return self._d[item]
def __setitem__(self, item, value):
self._d[item] = value
def __delitem__(self, item):
del self._d[item]
def __iter__(self):
return iter(self._d)
def __len__(self):
return len(self._d)
# These tests contain botocore specific tests that either
# don't make sense in the protocol tests or haven't been added
# yet.
class TestResponseMetadataParsed(unittest.TestCase):
def test_response_metadata_parsed_for_query_service(self):
parser = parsers.QueryParser()
response = (
'<OperationNameResponse>'
' <OperationNameResult><Str>myname</Str></OperationNameResult>'
' <ResponseMetadata>'
' <RequestId>request-id</RequestId>'
' </ResponseMetadata>'
'</OperationNameResponse>').encode('utf-8')
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'resultWrapper': 'OperationNameResult',
'members': {
'Str': {
'shape': 'StringType',
},
'Num': {
'shape': 'IntegerType',
}
}
},
model.ShapeResolver({
'StringType': {
'type': 'string',
},
'IntegerType': {
'type': 'integer',
}
})
)
parsed = parser.parse(
{'body': response,
'headers': {},
'status_code': 200}, output_shape)
self.assertEqual(
parsed, {'Str': 'myname',
'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': {}}})
def test_metadata_always_exists_for_query(self):
# ResponseMetadata is used for more than just the request id. It
# should always get populated, even if the request doesn't seem to
# have an id.
parser = parsers.QueryParser()
response = (
'<OperationNameResponse>'
' <OperationNameResult><Str>myname</Str></OperationNameResult>'
'</OperationNameResponse>').encode('utf-8')
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'resultWrapper': 'OperationNameResult',
'members': {
'Str': {
'shape': 'StringType',
},
'Num': {
'shape': 'IntegerType',
}
}
},
model.ShapeResolver({
'StringType': {
'type': 'string',
},
'IntegerType': {
'type': 'integer',
}
})
)
parsed = parser.parse(
{'body': response, 'headers': {}, 'status_code': 200},
output_shape)
expected = {
'Str': 'myname',
'ResponseMetadata': {
'HTTPStatusCode': 200,
'HTTPHeaders': {}
}
}
self.assertEqual(parsed, expected)
def test_response_metadata_parsed_for_ec2(self):
parser = parsers.EC2QueryParser()
response = (
'<OperationNameResponse>'
' <Str>myname</Str>'
' <requestId>request-id</requestId>'
'</OperationNameResponse>').encode('utf-8')
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse({'headers': {},
'body': response,
'status_code': 200}, output_shape)
# Note that the response metadata is normalized to match the query
# protocol, even though this is not how it appears in the output.
self.assertEqual(
parsed, {'Str': 'myname',
'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': {}}})
def test_metadata_always_exists_for_ec2(self):
# ResponseMetadata is used for more than just the request id. It
# should always get populated, even if the request doesn't seem to
# have an id.
parser = parsers.EC2QueryParser()
response = (
'<OperationNameResponse>'
' <Str>myname</Str>'
'</OperationNameResponse>').encode('utf-8')
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse(
{'headers': {}, 'body': response, 'status_code': 200},
output_shape)
expected = {
'Str': 'myname',
'ResponseMetadata': {
'HTTPStatusCode': 200,
'HTTPHeaders': {}
}
}
self.assertEqual(
parsed, expected)
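# What "normalized to match the query protocol" means above, sketched:
# EC2 reports its request id as <requestId> in the body, but the parsed
# output reshapes it into the common ResponseMetadata form so callers
# see one consistent key across protocols (illustrative only).
def _normalize_ec2_metadata(parsed_body, status_code, headers):
    metadata = {'HTTPStatusCode': status_code, 'HTTPHeaders': headers}
    request_id = parsed_body.pop('requestId', None)
    if request_id is not None:
        metadata['RequestId'] = request_id
    parsed_body['ResponseMetadata'] = metadata
    return parsed_body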
def test_response_metadata_on_json_request(self):
parser = parsers.JSONParser()
response = b'{"Str": "mystring"}'
headers = {'x-amzn-requestid': 'request-id'}
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse({'body': response, 'headers': headers,
'status_code': 200}, output_shape)
# Note that the response metadata is normalized to match the query
# protocol, even though this is not how it appears in the output.
self.assertEqual(
parsed, {'Str': 'mystring',
'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
def test_metadata_always_exists_for_json(self):
# ResponseMetadata is used for more than just the request id. It
# should always get populated, even if the request doesn't seem to
# have an id.
parser = parsers.JSONParser()
response = b'{"Str": "mystring"}'
headers = {}
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse(
{'body': response, 'headers': headers, 'status_code': 200},
output_shape)
expected = {
'Str': 'mystring',
'ResponseMetadata': {
'HTTPStatusCode': 200,
'HTTPHeaders': headers
}
}
self.assertEqual(parsed, expected)
def test_response_metadata_on_rest_json_response(self):
parser = parsers.RestJSONParser()
response = b'{"Str": "mystring"}'
headers = {'x-amzn-requestid': 'request-id'}
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse({'body': response, 'headers': headers,
'status_code': 200}, output_shape)
# Note that the response metadata is normalized to match the query
# protocol, even though this is not how it appears in the output.
self.assertEqual(
parsed, {'Str': 'mystring',
'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
def test_metadata_always_exists_on_rest_json_response(self):
# ResponseMetadata is used for more than just the request id. It
# should always get populated, even if the request doesn't seem to
# have an id.
parser = parsers.RestJSONParser()
response = b'{"Str": "mystring"}'
headers = {}
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
parsed = parser.parse(
{'body': response, 'headers': headers, 'status_code': 200},
output_shape)
expected = {
'Str': 'mystring',
'ResponseMetadata': {
'HTTPStatusCode': 200,
'HTTPHeaders': headers
}
}
self.assertEqual(parsed, expected)
def test_response_metadata_from_s3_response(self):
# Even though s3 is a rest-xml service, its response metadata
# is slightly different. It has two request ids, both of which
# come from the response headers, and both are named differently
# from other rest-xml responses.
headers = {
'x-amz-id-2': 'second-id',
'x-amz-request-id': 'request-id'
}
parser = parsers.RestXMLParser()
parsed = parser.parse(
{'body': '', 'headers': headers, 'status_code': 200}, None)
self.assertEqual(
parsed,
{'ResponseMetadata': {'RequestId': 'request-id',
'HostId': 'second-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
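# A sketch of the S3-specific mapping asserted above, assuming both ids
# come straight from the response headers: x-amz-request-id becomes
# RequestId and x-amz-id-2 becomes HostId.
def _s3_response_metadata(headers, status_code):
    metadata = {'HTTPStatusCode': status_code, 'HTTPHeaders': headers}
    if 'x-amz-request-id' in headers:
        metadata['RequestId'] = headers['x-amz-request-id']
    if 'x-amz-id-2' in headers:
        metadata['HostId'] = headers['x-amz-id-2']
    return metadata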
def test_metadata_always_exists_on_rest_xml_response(self):
# ResponseMetadata is used for more than just the request id. It
# should always get populated, even if the request doesn't seem to
# have an id.
headers = {}
parser = parsers.RestXMLParser()
parsed = parser.parse(
{'body': '', 'headers': headers, 'status_code': 200}, None)
expected = {
'ResponseMetadata': {
'HTTPStatusCode': 200,
'HTTPHeaders': headers
}
}
self.assertEqual(parsed, expected)
class TestHeaderResponseInclusion(unittest.TestCase):
def create_parser(self):
return parsers.JSONParser()
def create_arbitrary_output_shape(self):
output_shape = model.StructureShape(
'OutputShape',
{
'type': 'structure',
'members': {
'Str': {
'shape': 'StringType',
}
}
},
model.ShapeResolver({'StringType': {'type': 'string'}})
)
return output_shape
def test_response_headers_mapped_into_metadata(self):
parser = self.create_parser()
headers = {
'x-amzn-requestid': 'request-id',
'Header1': 'foo',
'Header2': 'bar',
}
output_shape = self.create_arbitrary_output_shape()
parsed = parser.parse(
{'body': b'{}', 'headers': headers,
'status_code': 200}, output_shape)
# Response headers should be mapped as HTTPHeaders.
self.assertEqual(
parsed['ResponseMetadata']['HTTPHeaders'], headers)
def test_can_always_json_serialize_headers(self):
parser = self.create_parser()
original_headers = {
'x-amzn-requestid': 'request-id',
'Header1': 'foo',
}
headers = CustomHeaderDict(original_headers)
output_shape = self.create_arbitrary_output_shape()
parsed = parser.parse(
{'body': b'{}', 'headers': headers,
'status_code': 200}, output_shape)
metadata = parsed['ResponseMetadata']
# We've had the contract that you can json serialize a
# response. So we want to ensure that despite using a CustomHeaderDict
# we can always JSON dumps the response metadata.
self.assertEqual(
json.loads(json.dumps(metadata))['HTTPHeaders']['Header1'], 'foo')
class TestResponseParsingDatetimes(unittest.TestCase):
def test_can_parse_float_timestamps(self):
# The type "timestamp" can come back as both an integer or as a float.
# We need to make sure we handle the case where the timestamp comes
# back as a float. It might make sense to move this to protocol tests.
output_shape = model.Shape(shape_name='datetime',
shape_model={'type': 'timestamp'})
parser = parsers.JSONParser()
timestamp_as_float = b'1407538750.49'
expected_parsed = datetime.datetime(
2014, 8, 8, 22, 59, 10, 490000, tzinfo=tzutc())
parsed = parser.parse(
{'body': timestamp_as_float,
'headers': [],
'status_code': 200}, output_shape)
self.assertEqual(parsed, expected_parsed)
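# Worked out standalone, the conversion this test pins down: a float
# epoch timestamp becomes an aware UTC datetime (a sketch, not the
# parser's internal code path; reuses this module's imports).
def _parse_float_timestamp(value):
    return datetime.datetime.fromtimestamp(float(value), tz=tzutc())
# _parse_float_timestamp(b'1407538750.49') ==
#     datetime.datetime(2014, 8, 8, 22, 59, 10, 490000, tzinfo=tzutc())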
class TestCanDecorateResponseParsing(unittest.TestCase):
def setUp(self):
self.factory = parsers.ResponseParserFactory()
def create_request_dict(self, with_body):
return {
'body': with_body, 'headers': [], 'status_code': 200
}
def test_normal_blob_parsing(self):
output_shape = model.Shape(shape_name='BlobType',
shape_model={'type': 'blob'})
parser = self.factory.create_parser('json')
hello_world_b64 = b'"aGVsbG8gd29ybGQ="'
expected_parsed = b'hello world'
parsed = parser.parse(
self.create_request_dict(with_body=hello_world_b64),
output_shape)
self.assertEqual(parsed, expected_parsed)
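# The decode step this test exercises, shown standalone: the JSON
# protocol transports blobs as base64 text, and parsing base64-decodes
# them back to raw bytes (a sketch; ``json`` is this module's import).
import base64

def _decode_blob(json_body):
    return base64.b64decode(json.loads(json_body))
# _decode_blob(b'"aGVsbG8gd29ybGQ="') == b'hello world'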
def test_can_decorate_scalar_parsing(self):
output_shape = model.Shape(shape_name='BlobType',
shape_model={'type': 'blob'})
# Here we're overriding the blob parser so that
# we can change it to a noop parser.
self.factory.set_parser_defaults(
blob_parser=lambda x: x)
parser = self.factory.create_parser('json')
hello_world_b64 = b'"aGVsbG8gd29ybGQ="'
expected_parsed = "aGVsbG8gd29ybGQ="
parsed = parser.parse(
self.create_request_dict(with_body=hello_world_b64),
output_shape)
self.assertEqual(parsed, expected_parsed)
def test_can_decorate_timestamp_parser(self):
output_shape = model.Shape(shape_name='datetime',
shape_model={'type': 'timestamp'})
# Here we're overriding the timestamp parser so that
# we can change it to just convert a string to an integer
# instead of converting to a datetime.
self.factory.set_parser_defaults(
timestamp_parser=lambda x: int(x))
parser = self.factory.create_parser('json')
timestamp_as_int = b'1407538750'
expected_parsed = int(timestamp_as_int)
parsed = parser.parse(
self.create_request_dict(with_body=timestamp_as_int),
output_shape)
self.assertEqual(parsed, expected_parsed)
class TestHandlesNoOutputShape(unittest.TestCase):
"""Verify that each protocol handles no output shape properly."""
def test_empty_rest_json_response(self):
headers = {'x-amzn-requestid': 'request-id'}
parser = parsers.RestJSONParser()
output_shape = None
parsed = parser.parse(
{'body': b'', 'headers': headers, 'status_code': 200},
output_shape)
self.assertEqual(
parsed,
{'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
def test_empty_rest_xml_response(self):
# This is the format used by cloudfront, route53.
headers = {'x-amzn-requestid': 'request-id'}
parser = parsers.RestXMLParser()
output_shape = None
parsed = parser.parse(
{'body': b'', 'headers': headers, 'status_code': 200},
output_shape)
self.assertEqual(
parsed,
{'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
def test_empty_query_response(self):
body = (
b'<DeleteTagsResponse xmlns="http://autoscaling.amazonaws.com/">'
b' <ResponseMetadata>'
b' <RequestId>request-id</RequestId>'
b' </ResponseMetadata>'
b'</DeleteTagsResponse>'
)
parser = parsers.QueryParser()
output_shape = None
parsed = parser.parse(
{'body': body, 'headers': {}, 'status_code': 200},
output_shape)
self.assertEqual(
parsed,
{'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': {}}})
def test_empty_json_response(self):
headers = {'x-amzn-requestid': 'request-id'}
# Output shape of None represents no output shape in the model.
output_shape = None
parser = parsers.JSONParser()
parsed = parser.parse(
{'body': b'', 'headers': headers, 'status_code': 200},
output_shape)
self.assertEqual(
parsed,
{'ResponseMetadata': {'RequestId': 'request-id',
'HTTPStatusCode': 200,
'HTTPHeaders': headers}})
class TestHandlesInvalidXMLResponses(unittest.TestCase):
def test_invalid_xml_shown_in_error_message(self):
# Missing the closing XML tags.
invalid_xml = (
b'<DeleteTagsResponse xmlns="http://autoscaling.amazonaws.com/">'
b' <ResponseMetadata>'
)
parser = parsers.QueryParser()
output_shape = None
# The XML body should be in the error message.
with self.assertRaisesRegexp(parsers.ResponseParserError,
'<DeleteTagsResponse'):
parser.parse(
{'body': invalid_xml, 'headers': {}, 'status_code': 200},
output_shape)
class TestRESTXMLResponses(unittest.TestCase):
def test_multiple_structures_list_returns_structure(self):
# This is to handle the scenario where something is modeled
# as a structure but a list of structures is returned instead.
# In this case, a single element from the list should be parsed;
# for botocore, this will be the first element.
# Currently, this scenario can occur in s3's GetBucketLifecycle
# operation.
headers = {}
parser = parsers.RestXMLParser()
body = (
'<?xml version="1.0" ?>'
'<OperationName xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
' <Foo><Bar>first_value</Bar></Foo>'
' <Foo><Bar>middle_value</Bar></Foo>'
' <Foo><Bar>last_value</Bar></Foo>'
'</OperationName>'
)
builder = model.DenormalizedStructureBuilder()
output_shape = builder.with_members({
'Foo': {
'type': 'structure',
'members': {
'Bar': {
'type': 'string',
}
}
}
}).build_model()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 200},
output_shape)
# Ensure the first element is used out of the list.
self.assertEqual(parsed['Foo'], {'Bar': 'first_value'})
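# A sketch of the coercion described above: when the model says
# "structure" but the XML produced a list of structures, keep only the
# first element (illustrative, not the parser's actual code).
def _coerce_modeled_structure(value):
    if isinstance(value, list) and value:
        return value[0]
    return value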
class TestParseErrorResponses(unittest.TestCase):
# This class consolidates all the error parsing tests
# across all the protocols. We may potentially pull
# this into the shared protocol tests in the future,
# so consolidating them into a single class will make
# this easier.
def test_response_metadata_errors_for_json_protocol(self):
parser = parsers.JSONParser()
response = {
"body": b"""
{"__type":"amazon.foo.validate#ValidationException",
"message":"this is a message"}
""",
"status_code": 400,
"headers": {
"x-amzn-requestid": "request-id"
}
}
parsed = parser.parse(response, None)
# Even (especially) on an error condition, the
# ResponseMetadata should be populated.
self.assertIn('ResponseMetadata', parsed)
self.assertEqual(parsed['ResponseMetadata']['RequestId'], 'request-id')
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error']['Message'], 'this is a message')
self.assertEqual(parsed['Error']['Code'], 'ValidationException')
def test_response_metadata_errors_alternate_form_json_protocol(self):
# Sometimes there is no '#' in the __type. We need to be
# able to parse this error message as well.
parser = parsers.JSONParser()
response = {
"body": b"""
{"__type":"ValidationException",
"message":"this is a message"}
""",
"status_code": 400,
"headers": {
"x-amzn-requestid": "request-id"
}
}
parsed = parser.parse(response, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error']['Message'], 'this is a message')
self.assertEqual(parsed['Error']['Code'], 'ValidationException')
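# A sketch of the code extraction both JSON-protocol error tests imply:
# the error code is whatever follows the last '#' in __type, or the
# whole __type value when no '#' is present.
def _error_code_from_type(type_value):
    return type_value.rsplit('#', 1)[-1]
# 'amazon.foo.validate#ValidationException' -> 'ValidationException'
# 'ValidationException'                     -> 'ValidationException'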
def test_parse_error_response_for_query_protocol(self):
body = (
'<ErrorResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">'
' <Error>'
' <Type>Sender</Type>'
' <Code>InvalidInput</Code>'
' <Message>ARN asdf is not valid.</Message>'
' </Error>'
' <RequestId>request-id</RequestId>'
'</ErrorResponse>'
).encode('utf-8')
parser = parsers.QueryParser()
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 400}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': 'InvalidInput',
'Message': 'ARN asdf is not valid.',
'Type': 'Sender',
})
def test_can_parse_sdb_error_response_query_protocol(self):
body = (
'<OperationNameResponse>'
' <Errors>'
' <Error>'
' <Code>1</Code>'
' <Message>msg</Message>'
' </Error>'
' </Errors>'
' <RequestId>abc-123</RequestId>'
'</OperationNameResponse>'
).encode('utf-8')
parser = parsers.QueryParser()
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 500}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': '1',
'Message': 'msg'
})
self.assertEqual(parsed['ResponseMetadata'], {
'RequestId': 'abc-123',
'HTTPStatusCode': 500,
'HTTPHeaders': {}
})
def test_can_parse_ec2_errors(self):
body = (
'<Response>'
' <Errors>'
' <Error>'
' <Code>InvalidInstanceID.NotFound</Code>'
' <Message>The instance ID i-12345 does not exist</Message>'
' </Error>'
' </Errors>'
' <RequestID>06f382b0-d521-4bb6-988c-ca49d5ae6070</RequestID>'
'</Response>'
).encode('utf-8')
parser = parsers.EC2QueryParser()
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 400}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': 'InvalidInstanceID.NotFound',
'Message': 'The instance ID i-12345 does not exist',
})
def test_can_parse_rest_xml_errors(self):
body = (
'<ErrorResponse xmlns="https://route53.amazonaws.com/doc/2013-04-01/">'
' <Error>'
' <Type>Sender</Type>'
' <Code>NoSuchHostedZone</Code>'
' <Message>No hosted zone found with ID: foobar</Message>'
' </Error>'
' <RequestId>bc269cf3-d44f-11e5-8779-2d21c30eb3f1</RequestId>'
'</ErrorResponse>'
).encode('utf-8')
parser = parsers.RestXMLParser()
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 400}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': 'NoSuchHostedZone',
'Message': 'No hosted zone found with ID: foobar',
'Type': 'Sender',
})
def test_can_parse_rest_json_errors(self):
body = (
'{"Message":"Function not found: foo","Type":"User"}'
).encode('utf-8')
headers = {
'x-amzn-requestid': 'request-id',
'x-amzn-errortype': 'ResourceNotFoundException:http://url/',
}
parser = parsers.RestJSONParser()
parsed = parser.parse({
'body': body, 'headers': headers, 'status_code': 400}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': 'ResourceNotFoundException',
'Message': 'Function not found: foo',
})
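# A sketch of how the asserted code can be derived from the
# x-amzn-errortype header: the header may carry a trailing ':<url>',
# and only the leading token is the error code.
def _code_from_errortype_header(header_value):
    return header_value.split(':', 1)[0]
# 'ResourceNotFoundException:http://url/' -> 'ResourceNotFoundException'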
def test_error_response_with_no_body_rest_json(self):
parser = parsers.RestJSONParser()
response = b''
headers = {'content-length': '0', 'connection': 'keep-alive'}
output_shape = None
parsed = parser.parse({'body': response, 'headers': headers,
'status_code': 504}, output_shape)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': '504',
'Message': 'Gateway Timeout'
})
self.assertEqual(parsed['ResponseMetadata'], {
'HTTPStatusCode': 504,
'HTTPHeaders': headers
})
def test_error_response_with_string_body_rest_json(self):
parser = parsers.RestJSONParser()
response = b'HTTP content length exceeded 1049600 bytes.'
headers = {'content-length': '0', 'connection': 'keep-alive'}
output_shape = None
parsed = parser.parse({'body': response, 'headers': headers,
'status_code': 413}, output_shape)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': '413',
'Message': response.decode('utf-8')
})
self.assertEqual(parsed['ResponseMetadata'], {
'HTTPStatusCode': 413,
'HTTPHeaders': headers
})
def test_error_response_with_xml_body_rest_json(self):
parser = parsers.RestJSONParser()
response = (
'<AccessDeniedException>'
' <Message>Unable to determine service/operation name to be authorized</Message>'
'</AccessDeniedException>'
).encode('utf-8')
headers = {'content-length': '0', 'connection': 'keep-alive'}
output_shape = None
parsed = parser.parse({'body': response, 'headers': headers,
'status_code': 403}, output_shape)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': '403',
'Message': response.decode('utf-8')
})
self.assertEqual(parsed['ResponseMetadata'], {
'HTTPStatusCode': 403,
'HTTPHeaders': headers
})
def test_s3_error_response(self):
body = (
'<Error>'
' <Code>NoSuchBucket</Code>'
' <Message>error message</Message>'
' <BucketName>asdf</BucketName>'
' <RequestId>EF1EF43A74415102</RequestId>'
' <HostId>hostid</HostId>'
'</Error>'
).encode('utf-8')
headers = {
'x-amz-id-2': 'second-id',
'x-amz-request-id': 'request-id'
}
parser = parsers.RestXMLParser()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 400}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': 'NoSuchBucket',
'Message': 'error message',
'BucketName': 'asdf',
# We don't want the RequestId/HostId because they're already
# present in the ResponseMetadata key.
})
self.assertEqual(parsed['ResponseMetadata'], {
'RequestId': 'request-id',
'HostId': 'second-id',
'HTTPStatusCode': 400,
'HTTPHeaders': headers
})
def test_s3_error_response_with_no_body(self):
# If you try to HeadObject a key that does not exist,
# you will get an empty body. When this happens,
# we expect the Code/Message to be derived from the
# HTTP status code.
body = ''
headers = {
'x-amz-id-2': 'second-id',
'x-amz-request-id': 'request-id'
}
parser = parsers.RestXMLParser()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 404}, None)
self.assertIn('Error', parsed)
self.assertEqual(parsed['Error'], {
'Code': '404',
'Message': 'Not Found',
})
self.assertEqual(parsed['ResponseMetadata'], {
'RequestId': 'request-id',
'HostId': 'second-id',
'HTTPStatusCode': 404,
'HTTPHeaders': headers
})
def test_can_parse_glacier_error_response(self):
body = (b'{"code":"AccessDeniedException","type":"Client","message":'
b'"Access denied"}')
headers = {
'x-amzn-requestid': 'request-id'
}
parser = parsers.RestJSONParser()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 400}, None)
self.assertEqual(parsed['Error'], {'Message': 'Access denied',
'Code': 'AccessDeniedException'})
def test_can_parse_restjson_error_code(self):
body = b'''{
"status": "error",
"errors": [{"message": "[*Deprecated*: blah"}],
"adds": 0,
"__type": "#WasUnableToParseThis",
"message": "blah",
"deletes": 0}'''
headers = {
'x-amzn-requestid': 'request-id'
}
parser = parsers.RestJSONParser()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 400}, None)
self.assertEqual(parsed['Error'], {'Message': 'blah',
'Code': 'WasUnableToParseThis'})
def test_can_parse_with_case_insensitive_keys(self):
body = (b'{"Code":"AccessDeniedException","type":"Client","Message":'
b'"Access denied"}')
headers = {
'x-amzn-requestid': 'request-id'
}
parser = parsers.RestJSONParser()
parsed = parser.parse(
{'body': body, 'headers': headers, 'status_code': 400}, None)
self.assertEqual(parsed['Error'], {'Message': 'Access denied',
'Code': 'AccessDeniedException'})
def test_can_parse_route53_with_missing_message(self):
# The message isn't always in the XML response (or even the headers).
# We should be able to handle this gracefully and still at least
# populate a "Message" key so that consumers don't have to
# conditionally check for this.
body = (
'<ErrorResponse>'
' <Error>'
' <Type>Sender</Type>'
' <Code>InvalidInput</Code>'
' </Error>'
' <RequestId>id</RequestId>'
'</ErrorResponse>'
).encode('utf-8')
parser = parsers.RestXMLParser()
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 400}, None)
error = parsed['Error']
self.assertEqual(error['Code'], 'InvalidInput')
# Even though there's no <Message /> we should
# still populate an empty string.
self.assertEqual(error['Message'], '')
def test_can_handle_generic_error_message():
# There are times when you can get a service to respond with a generic
# html error page. We should be able to handle this case.
for parser_cls in parsers.PROTOCOL_PARSERS.values():
generic_html_body = (
'<html><body><b>Http/1.1 Service Unavailable</b></body></html>'
).encode('utf-8')
empty_body = b''
yield _assert_parses_generic_error, parser_cls(), generic_html_body
yield _assert_parses_generic_error, parser_cls(), empty_body
def _assert_parses_generic_error(parser, body):
# There are times when you can get a service to respond with a generic
# html error page. We should be able to handle this case.
parsed = parser.parse({
'body': body, 'headers': {}, 'status_code': 503}, None)
assert_equal(
parsed['Error'],
{'Code': '503', 'Message': 'Service Unavailable'})
assert_equal(parsed['ResponseMetadata']['HTTPStatusCode'], 503)
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from botocore.exceptions import ClientError
from botocore.errorfactory import BaseClientExceptions
from botocore.errorfactory import ClientExceptionsFactory
from botocore.model import ServiceModel
class TestBaseClientExceptions(unittest.TestCase):
def setUp(self):
self.code_to_exception = {}
self.exceptions = BaseClientExceptions(self.code_to_exception)
def test_has_client_error(self):
self.assertIs(self.exceptions.ClientError, ClientError)
def test_from_code(self):
exception_cls = type('MyException', (ClientError,), {})
self.code_to_exception['MyExceptionCode'] = exception_cls
self.assertIs(
self.exceptions.from_code('MyExceptionCode'), exception_cls)
def test_from_code_nonmatch_defaults_to_client_error(self):
self.assertIs(
self.exceptions.from_code('SomeUnknownErrorCode'), ClientError)
def test_getattr_message(self):
exception_cls = type('MyException', (ClientError,), {})
self.code_to_exception['MyExceptionCode'] = exception_cls
with self.assertRaisesRegexp(
AttributeError, 'Valid exceptions are: MyException'):
self.exceptions.SomeUnmodeledError
class TestClientExceptionsFactory(unittest.TestCase):
def setUp(self):
self.model = {
"metadata": {
'endpointPrefix': 'myservice',
'serviceFullName': 'MyService',
},
'operations': {
'OperationName': {
'name': 'OperationName',
'errors': [
{'shape': 'ExceptionMissingCode'},
{'shape': 'ExceptionWithModeledCode'},
],
},
'AnotherOperationName': {
'name': 'AnotherOperationName',
'errors': [
{'shape': 'ExceptionForAnotherOperation'},
{'shape': 'ExceptionWithModeledCode'},
],
}
},
'shapes': {
'ExceptionWithModeledCode': {
'type': 'structure',
'members': {},
'error': {
'code': 'ModeledCode'
},
'exception': True,
},
'ExceptionMissingCode': {
'type': 'structure',
'members': {},
'exception': True,
},
'ExceptionForAnotherOperation': {
'type': 'structure',
'members': {},
'exception': True,
}
}
}
self.service_model = ServiceModel(self.model)
self.exceptions_factory = ClientExceptionsFactory()
def test_class_name(self):
exceptions = self.exceptions_factory.create_client_exceptions(
self.service_model)
self.assertEqual(exceptions.__class__.__name__, 'MyServiceExceptions')
def test_creates_modeled_exception(self):
exceptions = self.exceptions_factory.create_client_exceptions(
self.service_model)
self.assertTrue(hasattr(exceptions, 'ExceptionWithModeledCode'))
modeled_exception = exceptions.ExceptionWithModeledCode
self.assertEqual(
modeled_exception.__name__, 'ExceptionWithModeledCode')
self.assertTrue(issubclass(modeled_exception, ClientError))
def test_collects_modeled_exceptions_for_all_operations(self):
exceptions = self.exceptions_factory.create_client_exceptions(
self.service_model)
# Make sure exceptions were added for all operations by checking
# an exception found only on a different operation.
self.assertTrue(hasattr(exceptions, 'ExceptionForAnotherOperation'))
modeled_exception = exceptions.ExceptionForAnotherOperation
self.assertEqual(
modeled_exception.__name__, 'ExceptionForAnotherOperation')
self.assertTrue(issubclass(modeled_exception, ClientError))
def test_creates_modeled_exception_mapping_that_has_code(self):
exceptions = self.exceptions_factory.create_client_exceptions(
self.service_model)
exception = exceptions.from_code('ModeledCode')
self.assertEqual(exception.__name__, 'ExceptionWithModeledCode')
self.assertTrue(issubclass(exception, ClientError))
def test_creates_modeled_exception_mapping_that_has_no_code(self):
exceptions = self.exceptions_factory.create_client_exceptions(
self.service_model)
# For exceptions that do not have an explicit code associated to them,
# the code is the name of the exception.
exception = exceptions.from_code('ExceptionMissingCode')
self.assertEqual(exception.__name__, 'ExceptionMissingCode')
self.assertTrue(issubclass(exception, ClientError))
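# A sketch of the mapping rule the last two tests pin down, assuming the
# shape's 'error' metadata is consulted first: a modeled 'code' wins,
# and the shape name is the fallback code (illustrative only).
def _error_code_for_shape(shape_name, shape_definition):
    return shape_definition.get('error', {}).get('code', shape_name)
# _error_code_for_shape('ExceptionWithModeledCode',
#     {'error': {'code': 'ModeledCode'}}) -> 'ModeledCode'
# _error_code_for_shape('ExceptionMissingCode', {}) -> 'ExceptionMissingCode'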
|
#!/usr/bin/env python
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest, BaseEnvVar
import os
import mock
import tempfile
import shutil
import botocore.exceptions
from botocore.configloader import raw_config_parse, load_config, \
multi_file_load_config
from botocore.compat import six
def path(filename):
directory = os.path.join(os.path.dirname(__file__), 'cfg')
if isinstance(filename, six.binary_type):
directory = six.b(directory)
return os.path.join(directory, filename)
class TestConfigLoader(BaseEnvVar):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
def tearDown(self):
shutil.rmtree(self.tempdir)
def create_config_file(self, filename):
contents = (
'[default]\n'
'aws_access_key_id = foo\n'
'aws_secret_access_key = bar\n\n'
'[profile "personal"]\n'
'aws_access_key_id = fie\n'
'aws_secret_access_key = baz\n'
'aws_security_token = fiebaz\n'
)
directory = self.tempdir
if isinstance(filename, six.binary_type):
directory = six.b(directory)
full_path = os.path.join(directory, filename)
with open(full_path, 'w') as f:
f.write(contents)
return full_path
def test_config_not_found(self):
with self.assertRaises(botocore.exceptions.ConfigNotFound):
raw_config_parse(path('aws_config_notfound'))
def test_config_parse_error(self):
filename = path('aws_config_bad')
with self.assertRaises(botocore.exceptions.ConfigParseError):
raw_config_parse(filename)
def test_config(self):
loaded_config = raw_config_parse(path('aws_config'))
self.assertIn('default', loaded_config)
self.assertIn('profile "personal"', loaded_config)
def test_profile_map_conversion(self):
loaded_config = load_config(path('aws_config'))
self.assertIn('profiles', loaded_config)
self.assertEqual(sorted(loaded_config['profiles'].keys()),
['default', 'personal'])
def test_bad_profiles_are_ignored(self):
filename = path('aws_bad_profile')
loaded_config = load_config(filename)
self.assertEqual(len(loaded_config['profiles']), 3)
profiles = loaded_config['profiles']
self.assertIn('my profile', profiles)
self.assertIn('personal1', profiles)
self.assertIn('default', profiles)
def test_nested_hierarchy_parsing(self):
filename = path('aws_config_nested')
loaded_config = load_config(filename)
config = loaded_config['profiles']['default']
self.assertEqual(config['aws_access_key_id'], 'foo')
self.assertEqual(config['region'], 'us-west-2')
self.assertEqual(config['s3']['signature_version'], 's3v4')
self.assertEqual(config['cloudwatch']['signature_version'], 'v4')
def test_nested_hierarchy_with_no_subsection_parsing(self):
filename = path('aws_config_nested')
raw_config = raw_config_parse(filename, False)['default']
self.assertEqual(raw_config['aws_access_key_id'], 'foo')
self.assertEqual(raw_config['region'], 'us-west-2')
# Specifying False for parse_subsections in raw_config_parse
# ensures that indented sections such as signature_version
# are not treated as another subsection but are instead kept
# as their literal string value.
self.assertEqual(
raw_config['cloudwatch'], '\nsignature_version = v4')
self.assertEqual(
raw_config['s3'],
'\nsignature_version = s3v4'
'\naddressing_style = path'
)
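# For reference, a config file shaped like the one these nested tests
# assume, reconstructed from the assertions above (the real
# cfg/aws_config_nested fixture may differ):
_EXAMPLE_NESTED_CONFIG = (
    '[default]\n'
    'aws_access_key_id = foo\n'
    'region = us-west-2\n'
    's3 =\n'
    '    signature_version = s3v4\n'
    '    addressing_style = path\n'
    'cloudwatch =\n'
    '    signature_version = v4\n'
)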
def test_nested_bad_config(self):
filename = path('aws_config_nested_bad')
with self.assertRaises(botocore.exceptions.ConfigParseError):
load_config(filename)
def test_multi_file_load(self):
filenames = [path('aws_config_other'),
path('aws_config'),
path('aws_third_config'),
path('aws_config_notfound')]
loaded_config = multi_file_load_config(*filenames)
config = loaded_config['profiles']['default']
self.assertEqual(config['aws_access_key_id'], 'other_foo')
self.assertEqual(config['aws_secret_access_key'], 'other_bar')
second_config = loaded_config['profiles']['personal']
self.assertEqual(second_config['aws_access_key_id'], 'fie')
self.assertEqual(second_config['aws_secret_access_key'], 'baz')
self.assertEqual(second_config['aws_security_token'], 'fiebaz')
third_config = loaded_config['profiles']['third']
self.assertEqual(third_config['aws_access_key_id'], 'third_fie')
self.assertEqual(third_config['aws_secret_access_key'], 'third_baz')
self.assertEqual(third_config['aws_security_token'], 'third_fiebaz')
def test_unicode_bytes_path_not_found(self):
with self.assertRaises(botocore.exceptions.ConfigNotFound):
with mock.patch('sys.getfilesystemencoding') as encoding:
encoding.return_value = 'utf-8'
load_config(path(b'\xe2\x9c\x93'))
def test_unicode_bytes_path(self):
filename = self.create_config_file(b'aws_config_unicode\xe2\x9c\x93')
with mock.patch('sys.getfilesystemencoding') as encoding:
encoding.return_value = 'utf-8'
loaded_config = load_config(filename)
self.assertIn('default', loaded_config['profiles'])
self.assertIn('personal', loaded_config['profiles'])
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from nose.tools import assert_equal
from botocore import exceptions
def test_client_error_can_handle_missing_code_or_message():
response = {'Error': {}}
expect = 'An error occurred (Unknown) when calling the blackhole operation: Unknown'
assert_equal(str(exceptions.ClientError(response, 'blackhole')), expect)
def test_client_error_has_operation_name_set():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert hasattr(exception, 'operation_name')
def test_client_error_set_correct_operation_name():
response = {'Error': {}}
exception = exceptions.ClientError(response, 'blackhole')
assert_equal(exception.operation_name, 'blackhole')
def test_retry_info_added_when_present():
response = {
'Error': {},
'ResponseMetadata': {
'MaxAttemptsReached': True,
'RetryAttempts': 3,
}
}
error_msg = str(exceptions.ClientError(response, 'operation'))
if '(reached max retries: 3)' not in error_msg:
raise AssertionError("retry information not inject into error "
"message: %s" % error_msg)
def test_retry_info_not_added_if_retry_attempts_not_present():
response = {
'Error': {},
'ResponseMetadata': {
'MaxAttemptsReached': True,
}
}
# Because RetryAttempts is missing, retry info is not
# in the error message.
error_msg = str(exceptions.ClientError(response, 'operation'))
if 'max retries' in error_msg:
raise AssertionError("Retry information should not be in exception "
"message when retry attempts not in response "
"metadata: %s" % error_msg)
def test_can_handle_when_response_missing_error_key():
response = {
'ResponseMetadata': {
'HTTPHeaders': {},
'HTTPStatusCode': 503,
'MaxAttemptsReached': True,
'RetryAttempts': 4
}
}
e = exceptions.ClientError(response, 'SomeOperation')
if 'An error occurred (Unknown)' not in str(e):
raise AssertionError(
"Error code should default to 'Unknown' "
"when missing error response, instead got: %s" % str(e))
|
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
from tests import unittest
class BaseResponseTest(unittest.TestCase):
def assert_response_with_subset_metadata(self, actual_response,
expected_response):
"""
Compares two parsed service responses. For ResponseMetadata, it only
asserts that the expected metadata is a subset of the actual, so
tests don't break when new keys are added to the metadata.
"""
actual = copy.copy(actual_response)
expected = copy.copy(expected_response)
actual_metadata = actual.pop('ResponseMetadata', {})
expected_metadata = expected.pop('ResponseMetadata', {})
self.assertEqual(actual, expected)
self.assert_dict_is_proper_subset(actual_metadata, expected_metadata)
def assert_dict_is_proper_subset(self, superset, subset):
"""
Asserts that every key/value pair in ``subset`` is present in
``superset``.
"""
self.assertTrue(all((k in superset and superset[k] == v)
for k, v in subset.items()))
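# Illustrative usage (a minimal sketch, not part of the original suite):
# subclassing BaseResponseTest keeps parser tests passing when the
# service adds new ResponseMetadata keys to the actual response.
#
# class ExampleParsingTest(BaseResponseTest):
#     def test_metadata_is_subset(self):
#         actual = {'Value': 1,
#                   'ResponseMetadata': {'RequestId': 'abc',
#                                        'RetryAttempts': 0}}
#         expected = {'Value': 1,
#                     'ResponseMetadata': {'RequestId': 'abc'}}
#         self.assert_response_with_subset_metadata(actual, expected)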
|
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from botocore import regions
from botocore.exceptions import NoRegionError
class TestEndpointResolver(unittest.TestCase):
def _template(self):
return {
'partitions': [
{
'partition': 'aws',
'dnsSuffix': 'amazonaws.com',
'regionRegex': r'^(us|eu)\-\w+$',
'defaults': {
'hostname': '{service}.{region}.{dnsSuffix}'
},
'regions': {
'us-foo': {'regionName': 'a'},
'us-bar': {'regionName': 'b'},
'eu-baz': {'regionName': 'd'}
},
'services': {
'ec2': {
'endpoints': {
'us-foo': {},
'us-bar': {},
'eu-baz': {},
'd': {}
}
},
's3': {
'defaults': {
'sslCommonName': '{service}.{region}.{dnsSuffix}'
},
'endpoints': {
'us-foo': {
'sslCommonName': '{region}.{service}.{dnsSuffix}'
},
'us-bar': {},
'eu-baz': {'hostname': 'foo'}
}
},
'not-regionalized': {
'isRegionalized': False,
'partitionEndpoint': 'aws',
'endpoints': {
'aws': {'hostname': 'not-regionalized'},
'us-foo': {},
'eu-baz': {}
}
},
'non-partition': {
'partitionEndpoint': 'aws',
'endpoints': {
'aws': {'hostname': 'host'},
'us-foo': {}
}
},
'merge': {
'defaults': {
'signatureVersions': ['v2'],
'protocols': ['http']
},
'endpoints': {
'us-foo': {'signatureVersions': ['v4']},
'us-bar': {'protocols': ['https']}
}
}
}
},
{
'partition': 'foo',
'dnsSuffix': 'foo.com',
'regionRegex': r'^(foo)\-\w+$',
'defaults': {
'hostname': '{service}.{region}.{dnsSuffix}',
'protocols': ['http'],
'foo': 'bar'
},
'regions': {
'foo-1': {'regionName': '1'},
'foo-2': {'regionName': '2'},
'foo-3': {'regionName': '3'}
},
'services': {
'ec2': {
'endpoints': {
'foo-1': {
'foo': 'baz'
},
'foo-2': {},
'foo-3': {}
}
}
}
}
]
}
def test_ensures_region_is_not_none(self):
with self.assertRaises(NoRegionError):
resolver = regions.EndpointResolver(self._template())
resolver.construct_endpoint('foo', None)
def test_ensures_required_keys_present(self):
with self.assertRaises(ValueError):
regions.EndpointResolver({})
def test_returns_empty_list_when_listing_for_different_partition(self):
resolver = regions.EndpointResolver(self._template())
self.assertEqual([], resolver.get_available_endpoints('ec2', 'bar'))
def test_returns_empty_list_when_no_service_found(self):
resolver = regions.EndpointResolver(self._template())
self.assertEqual([], resolver.get_available_endpoints('what?'))
def test_gets_endpoint_names(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.get_available_endpoints(
'ec2', allow_non_regional=True)
self.assertEqual(['d', 'eu-baz', 'us-bar', 'us-foo'], sorted(result))
def test_gets_endpoint_names_for_partition(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.get_available_endpoints(
'ec2', allow_non_regional=True, partition_name='foo')
self.assertEqual(['foo-1', 'foo-2', 'foo-3'], sorted(result))
def test_list_regional_endpoints_only(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.get_available_endpoints(
'ec2', allow_non_regional=False)
self.assertEqual(['eu-baz', 'us-bar', 'us-foo'], sorted(result))
def test_returns_none_when_no_match(self):
resolver = regions.EndpointResolver(self._template())
self.assertIsNone(resolver.construct_endpoint('foo', 'baz'))
def test_constructs_regionalized_endpoints_for_exact_matches(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('not-regionalized', 'eu-baz')
self.assertEqual('not-regionalized.eu-baz.amazonaws.com',
result['hostname'])
self.assertEqual('aws', result['partition'])
self.assertEqual('eu-baz', result['endpointName'])
def test_constructs_partition_endpoints_for_real_partition_region(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('not-regionalized', 'us-bar')
self.assertEqual('not-regionalized', result['hostname'])
self.assertEqual('aws', result['partition'])
self.assertEqual('aws', result['endpointName'])
def test_constructs_partition_endpoints_for_regex_match(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('not-regionalized', 'us-abc')
self.assertEqual('not-regionalized', result['hostname'])
def test_constructs_endpoints_for_regionalized_regex_match(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('s3', 'us-abc')
self.assertEqual('s3.us-abc.amazonaws.com', result['hostname'])
def test_constructs_endpoints_for_unknown_service_but_known_region(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('unknown', 'us-foo')
self.assertEqual('unknown.us-foo.amazonaws.com', result['hostname'])
def test_merges_service_keys(self):
resolver = regions.EndpointResolver(self._template())
us_foo = resolver.construct_endpoint('merge', 'us-foo')
us_bar = resolver.construct_endpoint('merge', 'us-bar')
self.assertEqual(['http'], us_foo['protocols'])
self.assertEqual(['v4'], us_foo['signatureVersions'])
self.assertEqual(['https'], us_bar['protocols'])
self.assertEqual(['v2'], us_bar['signatureVersions'])
def test_merges_partition_default_keys_with_no_overwrite(self):
resolver = regions.EndpointResolver(self._template())
resolved = resolver.construct_endpoint('ec2', 'foo-1')
self.assertEqual('baz', resolved['foo'])
self.assertEqual(['http'], resolved['protocols'])
def test_merges_partition_default_keys_with_overwrite(self):
resolver = regions.EndpointResolver(self._template())
resolved = resolver.construct_endpoint('ec2', 'foo-2')
self.assertEqual('bar', resolved['foo'])
self.assertEqual(['http'], resolved['protocols'])
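# The merge tests above pin the resolver's precedence; effectively
# (an illustrative reduction, not the resolver's actual code):
#
#   resolved = dict(partition_defaults)
#   resolved.update(service_defaults)
#   resolved.update(endpoint_config)   # most specific key wins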
def test_gives_hostname_and_common_name_unaltered(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('s3', 'eu-baz')
self.assertEqual('s3.eu-baz.amazonaws.com', result['sslCommonName'])
self.assertEqual('foo', result['hostname'])
def test_uses_partition_endpoint_when_no_region_provided(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('not-regionalized')
self.assertEqual('not-regionalized', result['hostname'])
self.assertEqual('aws', result['endpointName'])
def test_returns_dns_suffix_if_available(self):
resolver = regions.EndpointResolver(self._template())
result = resolver.construct_endpoint('not-regionalized')
self.assertEqual(result['dnsSuffix'], 'amazonaws.com')
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import mock
import datetime
import json
from dateutil.tz import tzutc
import botocore
import botocore.session
import botocore.auth
from botocore.config import Config
from botocore.credentials import Credentials
from botocore.credentials import ReadOnlyCredentials
from botocore.hooks import HierarchicalEmitter
from botocore.exceptions import NoRegionError, UnknownSignatureVersionError
from botocore.exceptions import UnknownClientMethodError, ParamValidationError
from botocore.exceptions import UnsupportedSignatureVersionError
from botocore.signers import RequestSigner, S3PostPresigner, CloudFrontSigner
from botocore.signers import generate_db_auth_token
from tests import unittest
from tests import assert_url_equal
class BaseSignerTest(unittest.TestCase):
def setUp(self):
self.credentials = Credentials('key', 'secret')
self.emitter = mock.Mock()
self.emitter.emit_until_response.return_value = (None, None)
self.signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
'v4', self.credentials, self.emitter)
self.fixed_credentials = self.credentials.get_frozen_credentials()
class TestSigner(BaseSignerTest):
def test_region_name(self):
self.assertEqual(self.signer.region_name, 'region_name')
def test_signature_version(self):
self.assertEqual(self.signer.signature_version, 'v4')
def test_signing_name(self):
self.assertEqual(self.signer.signing_name, 'signing_name')
def test_region_required_for_sigv4(self):
self.signer = RequestSigner(
'service_name', None, 'signing_name', 'v4', self.credentials,
self.emitter)
with self.assertRaises(NoRegionError):
self.signer.sign('operation_name', mock.Mock())
def test_get_auth(self):
auth_cls = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': auth_cls}):
auth = self.signer.get_auth('service_name', 'region_name')
self.assertEqual(auth, auth_cls.return_value)
auth_cls.assert_called_with(
credentials=self.fixed_credentials,
service_name='service_name',
region_name='region_name')
def test_get_auth_signature_override(self):
auth_cls = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4-custom': auth_cls}):
auth = self.signer.get_auth(
'service_name', 'region_name', signature_version='v4-custom')
self.assertEqual(auth, auth_cls.return_value)
auth_cls.assert_called_with(
credentials=self.fixed_credentials,
service_name='service_name',
region_name='region_name')
def test_get_auth_bad_override(self):
with self.assertRaises(UnknownSignatureVersionError):
self.signer.get_auth('service_name', 'region_name',
signature_version='bad')
def test_emits_choose_signer(self):
request = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': mock.Mock()}):
self.signer.sign('operation_name', request)
self.emitter.emit_until_response.assert_called_with(
'choose-signer.service_name.operation_name',
signing_name='signing_name', region_name='region_name',
signature_version='v4', context=mock.ANY)
def test_choose_signer_override(self):
request = mock.Mock()
auth = mock.Mock()
auth.REQUIRES_REGION = False
self.emitter.emit_until_response.return_value = (None, 'custom')
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'custom': auth}):
self.signer.sign('operation_name', request)
auth.assert_called_with(credentials=self.fixed_credentials)
auth.return_value.add_auth.assert_called_with(request)
def test_emits_before_sign(self):
request = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': mock.Mock()}):
self.signer.sign('operation_name', request)
self.emitter.emit.assert_called_with(
'before-sign.service_name.operation_name',
request=mock.ANY, signing_name='signing_name',
region_name='region_name', signature_version='v4',
request_signer=self.signer, operation_name='operation_name')
def test_disable_signing(self):
# Returning botocore.UNSIGNED from choose-signer disables signing!
request = mock.Mock()
auth = mock.Mock()
self.emitter.emit_until_response.return_value = (None,
botocore.UNSIGNED)
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': auth}):
self.signer.sign('operation_name', request)
auth.assert_not_called()
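# In application code the same effect is achieved by returning
# botocore.UNSIGNED from a registered handler, e.g. (a sketch using
# the public event API):
#
#   session.register('choose-signer.s3.GetObject',
#                    lambda **kwargs: botocore.UNSIGNED)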
def test_generate_url_emits_choose_signer(self):
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': mock.Mock()}):
self.signer.generate_presigned_url(request_dict, 'operation_name')
self.emitter.emit_until_response.assert_called_with(
'choose-signer.service_name.operation_name',
signing_name='signing_name', region_name='region_name',
signature_version='v4-query', context=mock.ANY)
def test_choose_signer_passes_context(self):
request = mock.Mock()
request.context = {'foo': 'bar'}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': mock.Mock()}):
self.signer.sign('operation_name', request)
self.emitter.emit_until_response.assert_called_with(
'choose-signer.service_name.operation_name',
signing_name='signing_name', region_name='region_name',
signature_version='v4', context={'foo': 'bar'})
def test_generate_url_choose_signer_override(self):
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
auth = mock.Mock()
auth.REQUIRES_REGION = False
self.emitter.emit_until_response.return_value = (None, 'custom')
auth_types_map = {'custom': mock.Mock(), 'custom-query': auth}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types_map):
self.signer.generate_presigned_url(request_dict, 'operation_name')
auth.assert_called_with(credentials=self.fixed_credentials,
expires=3600)
def test_generate_url_unsigned(self):
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
self.emitter.emit_until_response.return_value = (
None, botocore.UNSIGNED)
url = self.signer.generate_presigned_url(
request_dict, 'operation_name')
self.assertEqual(url, 'https://foo.com')
def test_generate_presigned_url(self):
auth = mock.Mock()
auth.REQUIRES_REGION = True
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4-query': auth}):
presigned_url = self.signer.generate_presigned_url(
request_dict, operation_name='operation_name')
auth.assert_called_with(
credentials=self.fixed_credentials, region_name='region_name',
service_name='signing_name', expires=3600)
self.assertEqual(presigned_url, 'https://foo.com')
def test_generate_presigned_url_with_region_override(self):
auth = mock.Mock()
auth.REQUIRES_REGION = True
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4-query': auth}):
presigned_url = self.signer.generate_presigned_url(
request_dict, operation_name='operation_name',
region_name='us-west-2')
auth.assert_called_with(
credentials=self.fixed_credentials, region_name='us-west-2',
service_name='signing_name', expires=3600)
self.assertEqual(presigned_url, 'https://foo.com')
def test_generate_presigned_url_with_expires_in(self):
auth = mock.Mock()
auth.REQUIRES_REGION = True
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4-query': auth}):
presigned_url = self.signer.generate_presigned_url(
request_dict, operation_name='operation_name', expires_in=900)
auth.assert_called_with(
credentials=self.fixed_credentials,
region_name='region_name',
expires=900, service_name='signing_name')
self.assertEqual(presigned_url, 'https://foo.com')
def test_presigned_url_throws_unsupported_signature_error(self):
request_dict = {
'headers': {},
'url': 'https://s3.amazonaws.com/mybucket/myobject',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
self.signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
'foo', self.credentials, self.emitter)
with self.assertRaises(UnsupportedSignatureVersionError):
self.signer.generate_presigned_url(
request_dict, operation_name='foo')
def test_signer_with_refreshable_credentials_gets_credential_set(self):
class FakeCredentials(Credentials):
def get_frozen_credentials(self):
return ReadOnlyCredentials('foo', 'bar', 'baz')
self.credentials = FakeCredentials('a', 'b', 'c')
self.signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
'v4', self.credentials, self.emitter)
auth_cls = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': auth_cls}):
auth = self.signer.get_auth('service_name', 'region_name')
self.assertEqual(auth, auth_cls.return_value)
# Note we're called with 'foo', 'bar', 'baz', and *not*
# 'a', 'b', 'c'.
auth_cls.assert_called_with(
credentials=ReadOnlyCredentials('foo', 'bar', 'baz'),
service_name='service_name',
region_name='region_name')
def test_no_credentials_case_is_forwarded_to_signer(self):
# If no credentials are given to the RequestSigner, we should
# forward that fact on to the auth class and let it handle
# the error (which it already does).
self.credentials = None
self.signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
'v4', self.credentials, self.emitter)
auth_cls = mock.Mock()
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4': auth_cls}):
auth = self.signer.get_auth_instance(
'service_name', 'region_name', 'v4')
auth_cls.assert_called_with(
service_name='service_name',
region_name='region_name',
credentials=None,
)
def test_sign_with_signing_type_standard(self):
auth = mock.Mock()
post_auth = mock.Mock()
query_auth = mock.Mock()
request = mock.Mock()
auth_types = {
'v4-presign-post': post_auth,
'v4-query': query_auth,
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request,
signing_type='standard')
self.assertFalse(post_auth.called)
self.assertFalse(query_auth.called)
auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='signing_name',
region_name='region_name'
)
def test_sign_with_signing_type_presign_url(self):
auth = mock.Mock()
post_auth = mock.Mock()
query_auth = mock.Mock()
request = mock.Mock()
auth_types = {
'v4-presign-post': post_auth,
'v4-query': query_auth,
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request,
signing_type='presign-url')
self.assertFalse(post_auth.called)
self.assertFalse(auth.called)
query_auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='signing_name',
region_name='region_name'
)
def test_sign_with_signing_type_presign_post(self):
auth = mock.Mock()
post_auth = mock.Mock()
query_auth = mock.Mock()
request = mock.Mock()
auth_types = {
'v4-presign-post': post_auth,
'v4-query': query_auth,
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request,
signing_type='presign-post')
self.assertFalse(auth.called)
self.assertFalse(query_auth.called)
post_auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='signing_name',
region_name='region_name'
)
def test_sign_with_region_name(self):
request = mock.Mock()
auth = mock.Mock()
auth_types = {
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request, region_name='foo')
auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='signing_name',
region_name='foo'
)
def test_sign_with_expires_in(self):
request = mock.Mock()
auth = mock.Mock()
auth_types = {
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request, expires_in=2)
auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='signing_name',
region_name='region_name',
expires=2
)
def test_sign_with_custom_signing_name(self):
request = mock.Mock()
auth = mock.Mock()
auth_types = {
'v4': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.sign('operation_name', request, signing_name='foo')
auth.assert_called_with(
credentials=ReadOnlyCredentials('key', 'secret', None),
service_name='foo',
region_name='region_name'
)
def test_presign_with_custom_signing_name(self):
auth = mock.Mock()
auth.REQUIRES_REGION = True
request_dict = {
'headers': {},
'url': 'https://foo.com',
'body': b'',
'url_path': '/',
'method': 'GET',
'context': {}
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'v4-query': auth}):
presigned_url = self.signer.generate_presigned_url(
request_dict, operation_name='operation_name',
signing_name='foo')
auth.assert_called_with(
credentials=self.fixed_credentials,
region_name='region_name',
expires=3600, service_name='foo')
self.assertEqual(presigned_url, 'https://foo.com')
def test_unknown_signer_raises_unknown_on_standard(self):
request = mock.Mock()
auth = mock.Mock()
auth_types = {
'v4': auth
}
self.emitter.emit_until_response.return_value = (None, 'custom')
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
with self.assertRaises(UnknownSignatureVersionError):
self.signer.sign('operation_name', request,
signing_type='standard')
def test_unknown_signer_raises_unsupported_when_not_standard(self):
request = mock.Mock()
auth = mock.Mock()
auth_types = {
'v4': auth
}
self.emitter.emit_until_response.return_value = (None, 'custom')
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
with self.assertRaises(UnsupportedSignatureVersionError):
self.signer.sign('operation_name', request,
signing_type='presign-url')
with self.assertRaises(UnsupportedSignatureVersionError):
self.signer.sign('operation_name', request,
signing_type='presign-post')
class TestCloudfrontSigner(BaseSignerTest):
def setUp(self):
super(TestCloudfrontSigner, self).setUp()
self.signer = CloudFrontSigner("MY_KEY_ID", lambda message: b'signed')
# maxDiff helps, but long string diffs still render slightly
# differently on Python 2.6/2.7/3.x. We don't rely solely on the
# diff output anyway, so it's fine.
self.maxDiff = None
def test_build_canned_policy(self):
policy = self.signer.build_policy('foo', datetime.datetime(2016, 1, 1))
expected = (
'{"Statement":[{"Resource":"foo",'
'"Condition":{"DateLessThan":{"AWS:EpochTime":1451606400}}}]}')
self.assertEqual(json.loads(policy), json.loads(expected))
self.assertEqual(policy, expected) # This is to ensure the right order
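# For context: 1451606400 is 2016-01-01T00:00:00Z as a POSIX
# timestamp (16800 days * 86400 seconds), the form AWS:EpochTime
# expects.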
def test_build_custom_policy(self):
policy = self.signer.build_policy(
'foo', datetime.datetime(2016, 1, 1),
date_greater_than=datetime.datetime(2015, 12, 1),
ip_address='12.34.56.78/9')
expected = {
"Statement": [{
"Resource": "foo",
"Condition": {
"DateGreaterThan": {"AWS:EpochTime": 1448928000},
"DateLessThan": {"AWS:EpochTime": 1451606400},
"IpAddress": {"AWS:SourceIp": "12.34.56.78/9"}
},
}]
}
self.assertEqual(json.loads(policy), expected)
def test_generate_presign_url_with_expire_time(self):
signed_url = self.signer.generate_presigned_url(
'http://test.com/foo.txt',
date_less_than=datetime.datetime(2016, 1, 1))
expected = (
'http://test.com/foo.txt?Expires=1451606400&Signature=c2lnbmVk'
'&Key-Pair-Id=MY_KEY_ID')
assert_url_equal(signed_url, expected)
def test_generate_presign_url_with_custom_policy(self):
policy = self.signer.build_policy(
'foo', datetime.datetime(2016, 1, 1),
date_greater_than=datetime.datetime(2015, 12, 1),
ip_address='12.34.56.78/9')
signed_url = self.signer.generate_presigned_url(
'http://test.com/index.html?foo=bar', policy=policy)
expected = (
'http://test.com/index.html?foo=bar'
'&Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiZm9vIiwiQ29uZ'
'Gl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIj'
'oxNDUxNjA2NDAwfSwiSXBBZGRyZXNzIjp7IkFXUzpTb3VyY2VJcCI'
'6IjEyLjM0LjU2Ljc4LzkifSwiRGF0ZUdyZWF0ZXJUaGFuIjp7IkFX'
'UzpFcG9jaFRpbWUiOjE0NDg5MjgwMDB9fX1dfQ__'
'&Signature=c2lnbmVk&Key-Pair-Id=MY_KEY_ID')
assert_url_equal(signed_url, expected)
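# The Policy value above is the policy JSON in CloudFront's URL-safe
# base64 variant: standard base64 with '+' -> '-', '=' -> '_' and
# '/' -> '~' (hence the trailing '__' in place of '==' padding).
# Noted here for context; the encoding is a CloudFront convention.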
class TestS3PostPresigner(BaseSignerTest):
def setUp(self):
super(TestS3PostPresigner, self).setUp()
self.request_signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
's3v4', self.credentials, self.emitter)
self.signer = S3PostPresigner(self.request_signer)
self.request_dict = {
'headers': {},
'url': 'https://s3.amazonaws.com/mybucket',
'body': b'',
'url_path': '/',
'method': 'POST',
'context': {}
}
self.auth = mock.Mock()
self.auth.REQUIRES_REGION = True
self.add_auth = mock.Mock()
self.auth.return_value.add_auth = self.add_auth
self.fixed_credentials = self.credentials.get_frozen_credentials()
self.datetime_patch = mock.patch('botocore.signers.datetime')
self.datetime_mock = self.datetime_patch.start()
self.fixed_date = datetime.datetime(2014, 3, 10, 17, 2, 55, 0)
self.fixed_delta = datetime.timedelta(seconds=3600)
self.datetime_mock.datetime.utcnow.return_value = self.fixed_date
self.datetime_mock.timedelta.return_value = self.fixed_delta
def tearDown(self):
super(TestS3PostPresigner, self).tearDown()
self.datetime_patch.stop()
def test_generate_presigned_post(self):
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'s3v4-presign-post': self.auth}):
post_form_args = self.signer.generate_presigned_post(
self.request_dict)
self.auth.assert_called_with(
credentials=self.fixed_credentials, region_name='region_name',
service_name='signing_name')
self.assertEqual(self.add_auth.call_count, 1)
ref_request = self.add_auth.call_args[0][0]
ref_policy = ref_request.context['s3-presign-post-policy']
self.assertEqual(ref_policy['expiration'], '2014-03-10T18:02:55Z')
self.assertEqual(ref_policy['conditions'], [])
self.assertEqual(post_form_args['url'],
'https://s3.amazonaws.com/mybucket')
self.assertEqual(post_form_args['fields'], {})
def test_generate_presigned_post_emits_choose_signer(self):
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'s3v4-presign-post': self.auth}):
self.signer.generate_presigned_post(self.request_dict)
self.emitter.emit_until_response.assert_called_with(
'choose-signer.service_name.PutObject',
signing_name='signing_name', region_name='region_name',
signature_version='s3v4-presign-post', context=mock.ANY)
def test_generate_presigned_post_choose_signer_override(self):
auth = mock.Mock()
self.emitter.emit_until_response.return_value = (None, 'custom')
auth_types = {
's3v4-presign-post': self.auth,
'custom-presign-post': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.generate_presigned_post(self.request_dict)
auth.assert_called_with(
credentials=self.fixed_credentials, region_name='region_name',
service_name='signing_name')
def test_generate_presigned_post_choose_signer_override_known(self):
auth = mock.Mock()
self.emitter.emit_until_response.return_value = (
None, 's3v4-presign-post')
auth_types = {
's3v4-presign-post': self.auth,
'custom-presign-post': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
self.signer.generate_presigned_post(self.request_dict)
self.auth.assert_called_with(
credentials=self.fixed_credentials, region_name='region_name',
service_name='signing_name')
def test_generate_presigned_post_bad_signer_raises_error(self):
auth = mock.Mock()
self.emitter.emit_until_response.return_value = (None, 's3-query')
auth_types = {
's3v4-presign-post': self.auth,
's3-query': auth
}
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS, auth_types):
with self.assertRaises(UnsupportedSignatureVersionError):
self.signer.generate_presigned_post(self.request_dict)
def test_generate_unsigned_post(self):
self.emitter.emit_until_response.return_value = (
None, botocore.UNSIGNED)
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'s3v4-presign-post': self.auth}):
post_form_args = self.signer.generate_presigned_post(
self.request_dict)
expected = {'fields': {}, 'url': 'https://s3.amazonaws.com/mybucket'}
self.assertEqual(post_form_args, expected)
def test_generate_presigned_post_with_conditions(self):
conditions = [
{'bucket': 'mybucket'},
['starts-with', '$key', 'bar']
]
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'s3v4-presign-post': self.auth}):
self.signer.generate_presigned_post(
self.request_dict, conditions=conditions)
self.auth.assert_called_with(
credentials=self.fixed_credentials, region_name='region_name',
service_name='signing_name')
self.assertEqual(self.add_auth.call_count, 1)
ref_request = self.add_auth.call_args[0][0]
ref_policy = ref_request.context['s3-presign-post-policy']
self.assertEqual(ref_policy['conditions'], conditions)
def test_generate_presigned_post_with_region_override(self):
with mock.patch.dict(botocore.auth.AUTH_TYPE_MAPS,
{'s3v4-presign-post': self.auth}):
self.signer.generate_presigned_post(
self.request_dict, region_name='foo')
self.auth.assert_called_with(
credentials=self.fixed_credentials, region_name='foo',
service_name='signing_name')
def test_presigned_post_throws_unsupported_signature_error(self):
request_dict = {
'headers': {},
'url': 'https://s3.amazonaws.com/mybucket/myobject',
'body': b'',
'url_path': '/',
'method': 'POST',
'context': {}
}
self.request_signer = RequestSigner(
'service_name', 'region_name', 'signing_name',
'foo', self.credentials, self.emitter)
self.signer = S3PostPresigner(self.request_signer)
with self.assertRaises(UnsupportedSignatureVersionError):
self.signer.generate_presigned_post(request_dict)
class TestGenerateUrl(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('s3', region_name='us-east-1')
self.bucket = 'mybucket'
self.key = 'mykey'
self.client_kwargs = {'Bucket': self.bucket, 'Key': self.key}
self.generate_url_patch = mock.patch(
'botocore.signers.RequestSigner.generate_presigned_url')
self.generate_url_mock = self.generate_url_patch.start()
def tearDown(self):
self.generate_url_patch.stop()
def test_generate_presigned_url(self):
self.client.generate_presigned_url(
'get_object', Params={'Bucket': self.bucket, 'Key': self.key})
ref_request_dict = {
'body': b'',
'url': u'https://s3.amazonaws.com/mybucket/mykey',
'headers': {},
'query_string': {},
'url_path': u'/mybucket/mykey',
'method': u'GET',
# mock.ANY is used because client-parameter-related events
# inject values into the context, so asserting the context's
# exact value would become a maintenance burden if any more
# customizations that inject into the context are added.
'context': mock.ANY}
self.generate_url_mock.assert_called_with(
request_dict=ref_request_dict, expires_in=3600,
operation_name='GetObject')
def test_generate_presigned_url_with_query_string(self):
disposition = 'attachment; filename="download.jpg"'
self.client.generate_presigned_url(
'get_object', Params={
'Bucket': self.bucket,
'Key': self.key,
'ResponseContentDisposition': disposition})
ref_request_dict = {
'body': b'',
'url': (u'https://s3.amazonaws.com/mybucket/mykey'
'?response-content-disposition='
'attachment%3B%20filename%3D%22download.jpg%22'),
'headers': {},
'query_string': {u'response-content-disposition': disposition},
'url_path': u'/mybucket/mykey',
'method': u'GET',
'context': mock.ANY}
self.generate_url_mock.assert_called_with(
request_dict=ref_request_dict, expires_in=3600,
operation_name='GetObject')
def test_generate_presigned_url_unknown_method_name(self):
with self.assertRaises(UnknownClientMethodError):
self.client.generate_presigned_url('getobject')
def test_generate_presigned_url_missing_required_params(self):
with self.assertRaises(ParamValidationError):
self.client.generate_presigned_url('get_object')
def test_generate_presigned_url_expires(self):
self.client.generate_presigned_url(
'get_object', Params={'Bucket': self.bucket, 'Key': self.key},
ExpiresIn=20)
ref_request_dict = {
'body': b'',
'url': u'https://s3.amazonaws.com/mybucket/mykey',
'headers': {},
'query_string': {},
'url_path': u'/mybucket/mykey',
'method': u'GET',
'context': mock.ANY}
self.generate_url_mock.assert_called_with(
request_dict=ref_request_dict, expires_in=20,
operation_name='GetObject')
def test_generate_presigned_url_override_http_method(self):
self.client.generate_presigned_url(
'get_object', Params={'Bucket': self.bucket, 'Key': self.key},
HttpMethod='PUT')
ref_request_dict = {
'body': b'',
'url': u'https://s3.amazonaws.com/mybucket/mykey',
'headers': {},
'query_string': {},
'url_path': u'/mybucket/mykey',
'method': u'PUT',
'context': mock.ANY}
self.generate_url_mock.assert_called_with(
request_dict=ref_request_dict, expires_in=3600,
operation_name='GetObject')
def test_generate_presigned_url_emits_param_events(self):
emitter = mock.Mock(HierarchicalEmitter)
emitter.emit.return_value = []
self.client.meta.events = emitter
self.client.generate_presigned_url(
'get_object', Params={'Bucket': self.bucket, 'Key': self.key})
events_emitted = [
emit_call[0][0] for emit_call in emitter.emit.call_args_list
]
self.assertEqual(
events_emitted,
[
'provide-client-params.s3.GetObject',
'before-parameter-build.s3.GetObject'
]
)
def test_generate_presign_url_emits_is_presign_in_context(self):
emitter = mock.Mock(HierarchicalEmitter)
emitter.emit.return_value = []
self.client.meta.events = emitter
self.client.generate_presigned_url(
'get_object', Params={'Bucket': self.bucket, 'Key': self.key})
kwargs_emitted = [
emit_call[1] for emit_call in emitter.emit.call_args_list
]
for kwargs in kwargs_emitted:
self.assertTrue(
kwargs.get('context', {}).get('is_presign_request'),
'The context did not have is_presign_request set to True for '
'the following kwargs emitted: %s' % kwargs
)
class TestGeneratePresignedPost(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client('s3', region_name='us-east-1')
self.bucket = 'mybucket'
self.key = 'mykey'
self.presign_post_patch = mock.patch(
'botocore.signers.S3PostPresigner.generate_presigned_post')
self.presign_post_mock = self.presign_post_patch.start()
def tearDown(self):
self.presign_post_patch.stop()
def test_generate_presigned_post(self):
self.client.generate_presigned_post(self.bucket, self.key)
_, post_kwargs = self.presign_post_mock.call_args
request_dict = post_kwargs['request_dict']
fields = post_kwargs['fields']
conditions = post_kwargs['conditions']
self.assertEqual(
request_dict['url'], 'https://s3.amazonaws.com/mybucket')
self.assertEqual(post_kwargs['expires_in'], 3600)
self.assertEqual(
conditions,
[{'bucket': 'mybucket'}, {'key': 'mykey'}])
self.assertEqual(
fields,
{'key': 'mykey'})
def test_generate_presigned_post_with_filename(self):
self.key = 'myprefix/${filename}'
self.client.generate_presigned_post(self.bucket, self.key)
_, post_kwargs = self.presign_post_mock.call_args
request_dict = post_kwargs['request_dict']
fields = post_kwargs['fields']
conditions = post_kwargs['conditions']
self.assertEqual(
request_dict['url'], 'https://s3.amazonaws.com/mybucket')
self.assertEqual(post_kwargs['expires_in'], 3600)
self.assertEqual(
conditions,
[{'bucket': 'mybucket'}, ['starts-with', '$key', 'myprefix/']])
self.assertEqual(
fields,
{'key': 'myprefix/${filename}'})
def test_generate_presigned_post_expires(self):
self.client.generate_presigned_post(
self.bucket, self.key, ExpiresIn=50)
_, post_kwargs = self.presign_post_mock.call_args
request_dict = post_kwargs['request_dict']
fields = post_kwargs['fields']
conditions = post_kwargs['conditions']
self.assertEqual(
request_dict['url'], 'https://s3.amazonaws.com/mybucket')
self.assertEqual(post_kwargs['expires_in'], 50)
self.assertEqual(
conditions,
[{'bucket': 'mybucket'}, {'key': 'mykey'}])
self.assertEqual(
fields,
{'key': 'mykey'})
def test_generate_presigned_post_with_prefilled(self):
conditions = [{'acl': 'public-read'}]
fields = {'acl': 'public-read'}
self.client.generate_presigned_post(
self.bucket, self.key, Fields=fields, Conditions=conditions)
_, post_kwargs = self.presign_post_mock.call_args
request_dict = post_kwargs['request_dict']
fields = post_kwargs['fields']
conditions = post_kwargs['conditions']
self.assertEqual(
request_dict['url'], 'https://s3.amazonaws.com/mybucket')
self.assertEqual(
conditions,
[{'acl': 'public-read'}, {'bucket': 'mybucket'}, {'key': 'mykey'}])
self.assertEqual(fields['acl'], 'public-read')
self.assertEqual(
fields, {'key': 'mykey', 'acl': 'public-read'})
def test_generate_presigned_post_non_s3_client(self):
self.client = self.session.create_client('ec2', 'us-west-2')
with self.assertRaises(AttributeError):
self.client.generate_presigned_post()
class TestGenerateDBAuthToken(BaseSignerTest):
maxDiff = None
def setUp(self):
self.session = botocore.session.get_session()
self.client = self.session.create_client(
'rds', region_name='us-east-1', aws_access_key_id='akid',
aws_secret_access_key='skid', config=Config(signature_version='v4')
)
def test_generate_db_auth_token(self):
hostname = 'prod-instance.us-east-1.rds.amazonaws.com'
port = 3306
username = 'someusername'
clock = datetime.datetime(2016, 11, 7, 17, 39, 33, tzinfo=tzutc())
with mock.patch('datetime.datetime') as dt:
dt.utcnow.return_value = clock
result = generate_db_auth_token(
self.client, hostname, port, username)
expected_result = (
'prod-instance.us-east-1.rds.amazonaws.com:3306/?Action=connect'
'&DBUser=someusername&X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Date=20161107T173933Z&X-Amz-SignedHeaders=host'
'&X-Amz-Expires=900&X-Amz-Credential=akid%2F20161107%2F'
'us-east-1%2Frds-db%2Faws4_request&X-Amz-Signature'
'=d1138cdbc0ca63eec012ec0fc6c2267e03642168f5884a7795320d4c18374c61'
)
# A scheme needs to be appended to the beginning or urlsplit may fail
# on certain systems.
assert_url_equal(
'https://' + result, 'https://' + expected_result)
def test_custom_region(self):
hostname = 'host.us-east-1.rds.amazonaws.com'
port = 3306
username = 'mySQLUser'
region = 'us-west-2'
result = generate_db_auth_token(
self.client, hostname, port, username, Region=region)
self.assertIn(region, result)
# The hostname won't be changed even if a different region is specified
self.assertIn(hostname, result)
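# For context, the equivalent public entry point on an RDS client is
# generate_db_auth_token (a sketch; the argument names mirror the
# tests above):
#
#   token = rds_client.generate_db_auth_token(
#       DBHostname='prod-instance.us-east-1.rds.amazonaws.com',
#       Port=3306, DBUsername='someusername')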
|
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from datetime import datetime, timedelta
import subprocess
import mock
import os
import tempfile
import shutil
import json
import copy
from dateutil.tz import tzlocal, tzutc
from botocore import credentials
from botocore.utils import ContainerMetadataFetcher
from botocore.compat import json
from botocore.credentials import EnvProvider, create_assume_role_refresher
from botocore.credentials import CredentialProvider, AssumeRoleProvider
from botocore.credentials import ConfigProvider, SharedCredentialProvider
from botocore.credentials import Credentials
import botocore.exceptions
import botocore.session
from tests import unittest, BaseEnvVar, IntegerRefresher, skip_if_windows
# Passed to session to keep it from finding default config file
TESTENVVARS = {'config_file': (None, 'AWS_CONFIG_FILE', None)}
raw_metadata = {
'foobar': {
'Code': 'Success',
'LastUpdated': '2012-12-03T14:38:21Z',
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'Token': 'foobar',
'Expiration': '2012-12-03T20:48:03Z',
'Type': 'AWS-HMAC'
}
}
post_processed_metadata = {
'role_name': 'foobar',
'access_key': raw_metadata['foobar']['AccessKeyId'],
'secret_key': raw_metadata['foobar']['SecretAccessKey'],
'token': raw_metadata['foobar']['Token'],
'expiry_time': raw_metadata['foobar']['Expiration'],
}
def path(filename):
return os.path.join(os.path.dirname(__file__), 'cfg', filename)
class TestCredentials(BaseEnvVar):
def _ensure_credential_is_normalized_as_unicode(self, access, secret):
c = credentials.Credentials(access, secret)
self.assertTrue(isinstance(c.access_key, type(u'u')))
self.assertTrue(isinstance(c.secret_key, type(u'u')))
def test_detect_nonascii_character(self):
self._ensure_credential_is_normalized_as_unicode(
'foo\xe2\x80\x99', 'bar\xe2\x80\x99')
def test_unicode_input(self):
self._ensure_credential_is_normalized_as_unicode(
u'foo', u'bar')
class TestRefreshableCredentials(TestCredentials):
def setUp(self):
super(TestRefreshableCredentials, self).setUp()
self.refresher = mock.Mock()
self.future_time = datetime.now(tzlocal()) + timedelta(hours=24)
self.expiry_time = \
datetime.now(tzlocal()) - timedelta(minutes=30)
self.metadata = {
'access_key': 'NEW-ACCESS',
'secret_key': 'NEW-SECRET',
'token': 'NEW-TOKEN',
'expiry_time': self.future_time.isoformat(),
'role_name': 'rolename',
}
self.refresher.return_value = self.metadata
self.mock_time = mock.Mock()
self.creds = credentials.RefreshableCredentials(
'ORIGINAL-ACCESS', 'ORIGINAL-SECRET', 'ORIGINAL-TOKEN',
self.expiry_time, self.refresher, 'iam-role',
time_fetcher=self.mock_time
)
def test_refresh_needed(self):
# The expiry time was set for 30 minutes ago, so if we
# say the current time is utcnow(), then we should need
# a refresh.
self.mock_time.return_value = datetime.now(tzlocal())
self.assertTrue(self.creds.refresh_needed())
# Accessing "access_key" or any of the other credential
# properties should trigger the refresh.
self.assertEqual(self.creds.access_key, 'NEW-ACCESS')
self.assertEqual(self.creds.secret_key, 'NEW-SECRET')
self.assertEqual(self.creds.token, 'NEW-TOKEN')
def test_no_expiration(self):
creds = credentials.RefreshableCredentials(
'ORIGINAL-ACCESS', 'ORIGINAL-SECRET', 'ORIGINAL-TOKEN',
None, self.refresher, 'iam-role', time_fetcher=self.mock_time
)
self.assertFalse(creds.refresh_needed())
def test_no_refresh_needed(self):
# The expiry time was 30 minutes ago; if the current time is an
# hour ago, the credentials aren't expired yet, so no refresh
# is needed.
self.mock_time.return_value = (
datetime.now(tzlocal()) - timedelta(minutes=60))
self.assertTrue(not self.creds.refresh_needed())
self.assertEqual(self.creds.access_key, 'ORIGINAL-ACCESS')
self.assertEqual(self.creds.secret_key, 'ORIGINAL-SECRET')
self.assertEqual(self.creds.token, 'ORIGINAL-TOKEN')
def test_get_credentials_set(self):
# We need to return a consistent set of credentials to use during the
# signing process.
self.mock_time.return_value = (
datetime.now(tzlocal()) - timedelta(minutes=60))
self.assertTrue(not self.creds.refresh_needed())
credential_set = self.creds.get_frozen_credentials()
self.assertEqual(credential_set.access_key, 'ORIGINAL-ACCESS')
self.assertEqual(credential_set.secret_key, 'ORIGINAL-SECRET')
self.assertEqual(credential_set.token, 'ORIGINAL-TOKEN')
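# The frozen copy matters because each of the access_key/secret_key/
# token properties can trigger a refresh on read; signing code grabs
# one immutable ReadOnlyCredentials tuple up front so the key, secret
# and token are guaranteed to come from the same credential set.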
class TestDeferredRefreshableCredentials(unittest.TestCase):
def setUp(self):
self.refresher = mock.Mock()
self.future_time = datetime.now(tzlocal()) + timedelta(hours=24)
self.metadata = {
'access_key': 'NEW-ACCESS',
'secret_key': 'NEW-SECRET',
'token': 'NEW-TOKEN',
'expiry_time': self.future_time.isoformat(),
'role_name': 'rolename',
}
self.refresher.return_value = self.metadata
self.mock_time = mock.Mock()
self.mock_time.return_value = datetime.now(tzlocal())
def test_refresh_using_called_on_first_access(self):
creds = credentials.DeferredRefreshableCredentials(
self.refresher, 'iam-role', self.mock_time
)
# The credentials haven't been accessed, so there should be no calls.
self.refresher.assert_not_called()
# Accessing the credentials should invoke the refresher
# exactly once.
creds.get_frozen_credentials()
self.assertEqual(self.refresher.call_count, 1)
def test_refresh_only_called_once(self):
creds = credentials.DeferredRefreshableCredentials(
self.refresher, 'iam-role', self.mock_time
)
for _ in range(5):
creds.get_frozen_credentials()
# The credentials were accessed several times in a row, but the
# refresher should only be called once.
self.assertEqual(self.refresher.call_count, 1)
class TestAssumeRoleCredentialFetcher(BaseEnvVar):
def setUp(self):
super(TestAssumeRoleCredentialFetcher, self).setUp()
self.source_creds = credentials.Credentials('a', 'b', 'c')
self.role_arn = 'myrole'
def create_client_creator(self, with_response):
# Create a mock sts client that returns a specific response
# for assume_role.
client = mock.Mock()
if isinstance(with_response, list):
client.assume_role.side_effect = with_response
else:
client.assume_role.return_value = with_response
return mock.Mock(return_value=client)
def get_expected_creds_from_response(self, response):
expiration = response['Credentials']['Expiration']
if isinstance(expiration, datetime):
expiration = expiration.isoformat()
return {
'access_key': response['Credentials']['AccessKeyId'],
'secret_key': response['Credentials']['SecretAccessKey'],
'token': response['Credentials']['SessionToken'],
'expiry_time': expiration
}
def some_future_time(self):
timeobj = datetime.now(tzlocal())
return timeobj + timedelta(hours=24)
def test_no_cache(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
client_creator = self.create_client_creator(with_response=response)
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn
)
expected_response = self.get_expected_creds_from_response(response)
response = refresher.fetch_credentials()
self.assertEqual(response, expected_response)
def test_expiration_in_datetime_format(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
# Note the lack of isoformat(): we're passing a
# datetime.datetime object directly. This exercises
# serializing a datetime on the way out, in addition
# to the string-parsing path covered elsewhere.
'Expiration': self.some_future_time()
},
}
client_creator = self.create_client_creator(with_response=response)
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn
)
expected_response = self.get_expected_creds_from_response(response)
response = refresher.fetch_credentials()
self.assertEqual(response, expected_response)
def test_retrieves_from_cache(self):
date_in_future = datetime.utcnow() + timedelta(seconds=1000)
utc_timestamp = date_in_future.isoformat() + 'Z'
cache_key = (
'793d6e2f27667ab2da104824407e486bfec24a47'
)
cache = {
cache_key: {
'Credentials': {
'AccessKeyId': 'foo-cached',
'SecretAccessKey': 'bar-cached',
'SessionToken': 'baz-cached',
'Expiration': utc_timestamp,
}
}
}
client_creator = mock.Mock()
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn, cache=cache
)
expected_response = self.get_expected_creds_from_response(
cache[cache_key]
)
response = refresher.fetch_credentials()
self.assertEqual(response, expected_response)
client_creator.assert_not_called()
def test_cache_key_is_windows_safe(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
cache = {}
client_creator = self.create_client_creator(with_response=response)
role_arn = 'arn:aws:iam::role/foo-role'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, role_arn, cache=cache
)
refresher.fetch_credentials()
# On Windows, you cannot use a ':' in a filename, so we need
# to make sure it doesn't make it into the cache key.
cache_key = (
'75c539f0711ba78c5b9e488d0add95f178a54d74'
)
self.assertIn(cache_key, cache)
self.assertEqual(cache[cache_key], response)
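# The pinned digest above suggests the cache key is a hex digest of
# the canonicalized assume-role arguments (an inference from these
# tests; the hashing scheme itself lives in botocore.credentials),
# so characters that are illegal in filenames, like ':', never
# appear in it.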
def test_cache_key_with_role_session_name(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
cache = {}
client_creator = self.create_client_creator(with_response=response)
role_session_name = 'my_session_name'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn, cache=cache,
extra_args={'RoleSessionName': role_session_name}
)
refresher.fetch_credentials()
# This is the sha1 hex digest of the expected assume role args.
cache_key = (
'2964201f5648c8be5b9460a9cf842d73a266daf2'
)
self.assertIn(cache_key, cache)
self.assertEqual(cache[cache_key], response)
def test_cache_key_with_policy(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
cache = {}
client_creator = self.create_client_creator(with_response=response)
policy = json.dumps({
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": "*",
"Resource": "*"
}
]
})
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn, cache=cache,
extra_args={'Policy': policy}
)
refresher.fetch_credentials()
# This is the sha1 hex digest of the expected assume role args.
cache_key = (
'176f223d915e82456c253545e192aa21d68f5ab8'
)
self.assertIn(cache_key, cache)
self.assertEqual(cache[cache_key], response)
def test_assume_role_in_cache_but_expired(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
cache = {
'development--myrole': {
'Credentials': {
'AccessKeyId': 'foo-cached',
'SecretAccessKey': 'bar-cached',
'SessionToken': 'baz-cached',
'Expiration': datetime.now(tzlocal()),
}
}
}
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn, cache=cache
)
expected = self.get_expected_creds_from_response(response)
response = refresher.fetch_credentials()
self.assertEqual(response, expected)
def test_role_session_name_can_be_provided(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
role_session_name = 'myname'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'RoleSessionName': role_session_name}
)
refresher.fetch_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn=self.role_arn, RoleSessionName=role_session_name)
def test_external_id_can_be_provided(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
external_id = 'my_external_id'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'ExternalId': external_id}
)
refresher.fetch_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn=self.role_arn, ExternalId=external_id,
RoleSessionName=mock.ANY)
def test_policy_can_be_provided(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
policy = json.dumps({
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": "*",
"Resource": "*"
}
]
})
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'Policy': policy}
)
refresher.fetch_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn=self.role_arn, Policy=policy,
RoleSessionName=mock.ANY)
def test_duration_seconds_can_be_provided(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
duration = 1234
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'DurationSeconds': duration}
)
refresher.fetch_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn=self.role_arn, DurationSeconds=duration,
RoleSessionName=mock.ANY)
def test_mfa(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
prompter = mock.Mock(return_value='token-code')
mfa_serial = 'mfa'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'SerialNumber': mfa_serial}, mfa_prompter=prompter
)
refresher.fetch_credentials()
client = client_creator.return_value
# In addition to the normal assume role args, we should also
# inject the serial number from the config as well as the
# token code that comes from prompting the user (the prompter
# object).
client.assume_role.assert_called_with(
RoleArn='myrole', RoleSessionName=mock.ANY, SerialNumber='mfa',
TokenCode='token-code')
def test_refreshes(self):
responses = [{
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
# We're creating an expiry time in the past so as
# soon as we try to access the credentials, the
# refresh behavior will be triggered.
'Expiration': (
datetime.now(tzlocal()) -
timedelta(seconds=100)).isoformat(),
},
}, {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
}
}]
client_creator = self.create_client_creator(with_response=responses)
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn
)
# The first call will simply use whatever credentials it is given.
# The second will check the cache, and only make a call if the
# cached credentials are expired.
refresher.fetch_credentials()
refresher.fetch_credentials()
client = client_creator.return_value
assume_role_calls = client.assume_role.call_args_list
self.assertEqual(len(assume_role_calls), 2, assume_role_calls)
def test_mfa_refresh_enabled(self):
responses = [{
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
# We're creating an expiry time in the past so as
# soon as we try to access the credentials, the
# refresh behavior will be triggered.
'Expiration': (
datetime.now(tzlocal()) -
timedelta(seconds=100)).isoformat(),
},
}, {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
}
}]
client_creator = self.create_client_creator(with_response=responses)
token_code = 'token-code-1'
prompter = mock.Mock(side_effect=[token_code])
mfa_serial = 'mfa'
refresher = credentials.AssumeRoleCredentialFetcher(
client_creator, self.source_creds, self.role_arn,
extra_args={'SerialNumber': mfa_serial}, mfa_prompter=prompter
)
        # This will refresh credentials if they're expired. Because
# we set the expiry time to something in the past, this will
# trigger the refresh behavior.
refresher.fetch_credentials()
assume_role = client_creator.return_value.assume_role
calls = [c[1] for c in assume_role.call_args_list]
expected_calls = [
{
'RoleArn': self.role_arn,
'RoleSessionName': mock.ANY,
'SerialNumber': mfa_serial,
'TokenCode': token_code
}
]
self.assertEqual(calls, expected_calls)
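    def test_multiple_extra_args_forwarded_together(self):
        # Illustrative sketch, not part of the original suite: the
        # extra_args dict is forwarded to assume_role as-is, so
        # several optional parameters can be combined in one call.
        response = {
            'Credentials': {
                'AccessKeyId': 'foo',
                'SecretAccessKey': 'bar',
                'SessionToken': 'baz',
                'Expiration': self.some_future_time().isoformat(),
            },
        }
        client_creator = self.create_client_creator(with_response=response)
        refresher = credentials.AssumeRoleCredentialFetcher(
            client_creator, self.source_creds, self.role_arn,
            extra_args={'ExternalId': 'myid', 'DurationSeconds': 1234}
        )
        refresher.fetch_credentials()
        client = client_creator.return_value
        client.assume_role.assert_called_with(
            RoleArn=self.role_arn, ExternalId='myid',
            DurationSeconds=1234, RoleSessionName=mock.ANY)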
class TestEnvVar(BaseEnvVar):
def test_envvars_are_found_no_token(self):
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.method, 'env')
def test_envvars_found_with_security_token(self):
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SECURITY_TOKEN': 'baz',
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
self.assertEqual(creds.method, 'env')
def test_envvars_found_with_session_token(self):
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
self.assertEqual(creds.method, 'env')
def test_envvars_not_found(self):
provider = credentials.EnvProvider(environ={})
creds = provider.load()
self.assertIsNone(creds)
def test_can_override_env_var_mapping(self):
# We can change the env var provider to
# use our specified env var names.
environ = {
'FOO_ACCESS_KEY': 'foo',
'FOO_SECRET_KEY': 'bar',
'FOO_SESSION_TOKEN': 'baz',
}
mapping = {
'access_key': 'FOO_ACCESS_KEY',
'secret_key': 'FOO_SECRET_KEY',
'token': 'FOO_SESSION_TOKEN',
}
provider = credentials.EnvProvider(
environ, mapping
)
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_can_override_partial_env_var_mapping(self):
        # Only the access key mapping is overridden here.
        # The other two fall back to the default env var names,
        # AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
environ = {
'FOO_ACCESS_KEY': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
}
provider = credentials.EnvProvider(
environ, {'access_key': 'FOO_ACCESS_KEY'}
)
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_can_override_expiry_env_var_mapping(self):
expiry_time = datetime.now(tzlocal()) - timedelta(hours=1)
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
'FOO_EXPIRY': expiry_time.isoformat(),
}
provider = credentials.EnvProvider(
environ, {'expiry_time': 'FOO_EXPIRY'}
)
creds = provider.load()
# Since the credentials are expired, we'll trigger a refresh whenever
# we try to access them. Since the environment credentials are still
# expired, this will raise an error.
error_message = (
"Credentials were refreshed, but the refreshed credentials are "
"still expired."
)
with self.assertRaisesRegexp(RuntimeError, error_message):
creds.get_frozen_credentials()
def test_partial_creds_is_an_error(self):
# If the user provides an access key, they must also
# provide a secret key. Not doing so will generate an
# error.
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
# Missing the AWS_SECRET_ACCESS_KEY
}
provider = credentials.EnvProvider(environ)
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
def test_missing_access_key_id_raises_error(self):
expiry_time = datetime.now(tzlocal()) - timedelta(hours=1)
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
del environ['AWS_ACCESS_KEY_ID']
# Since the credentials are expired, we'll trigger a refresh
# whenever we try to access them. At that refresh time, the relevant
# environment variables are incomplete, so an error will be raised.
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
creds.get_frozen_credentials()
def test_credentials_refresh(self):
# First initialize the credentials with an expired credential set.
expiry_time = datetime.now(tzlocal()) - timedelta(hours=1)
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
self.assertIsInstance(creds, credentials.RefreshableCredentials)
# Since the credentials are expired, we'll trigger a refresh whenever
# we try to access them. But at this point the environment hasn't been
# updated, so when it refreshes it will trigger an exception because
# the new creds are still expired.
error_message = (
"Credentials were refreshed, but the refreshed credentials are "
"still expired."
)
with self.assertRaisesRegexp(RuntimeError, error_message):
creds.get_frozen_credentials()
# Now we update the environment with non-expired credentials,
# so when we access the creds it will refresh and grab the new ones.
expiry_time = datetime.now(tzlocal()) + timedelta(hours=1)
environ.update({
'AWS_ACCESS_KEY_ID': 'bin',
'AWS_SECRET_ACCESS_KEY': 'bam',
'AWS_SESSION_TOKEN': 'biz',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
})
frozen = creds.get_frozen_credentials()
self.assertEqual(frozen.access_key, 'bin')
self.assertEqual(frozen.secret_key, 'bam')
self.assertEqual(frozen.token, 'biz')
def test_credentials_only_refresh_when_needed(self):
expiry_time = datetime.now(tzlocal()) + timedelta(hours=2)
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
}
provider = credentials.EnvProvider(environ)
# Perform the initial credential load
creds = provider.load()
# Now that the initial load has been performed, we go ahead and
# change the environment. If the credentials were expired,
# they would immediately refresh upon access and we'd get the new
# ones. Since they've got plenty of time, they shouldn't refresh.
expiry_time = datetime.now(tzlocal()) + timedelta(hours=3)
environ.update({
'AWS_ACCESS_KEY_ID': 'bin',
'AWS_SECRET_ACCESS_KEY': 'bam',
'AWS_SESSION_TOKEN': 'biz',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
})
frozen = creds.get_frozen_credentials()
self.assertEqual(frozen.access_key, 'foo')
self.assertEqual(frozen.secret_key, 'bar')
self.assertEqual(frozen.token, 'baz')
def test_credentials_not_refreshable_if_no_expiry_present(self):
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
self.assertNotIsInstance(creds, credentials.RefreshableCredentials)
self.assertIsInstance(creds, credentials.Credentials)
def test_credentials_do_not_become_refreshable(self):
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_SESSION_TOKEN': 'baz',
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
frozen = creds.get_frozen_credentials()
self.assertEqual(frozen.access_key, 'foo')
self.assertEqual(frozen.secret_key, 'bar')
self.assertEqual(frozen.token, 'baz')
expiry_time = datetime.now(tzlocal()) - timedelta(hours=1)
environ.update({
'AWS_ACCESS_KEY_ID': 'bin',
'AWS_SECRET_ACCESS_KEY': 'bam',
'AWS_SESSION_TOKEN': 'biz',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
})
frozen = creds.get_frozen_credentials()
self.assertEqual(frozen.access_key, 'foo')
self.assertEqual(frozen.secret_key, 'bar')
self.assertEqual(frozen.token, 'baz')
self.assertNotIsInstance(creds, credentials.RefreshableCredentials)
def test_credentials_throw_error_if_expiry_goes_away(self):
expiry_time = datetime.now(tzlocal()) - timedelta(hours=1)
environ = {
'AWS_ACCESS_KEY_ID': 'foo',
'AWS_SECRET_ACCESS_KEY': 'bar',
'AWS_CREDENTIAL_EXPIRATION': expiry_time.isoformat(),
}
provider = credentials.EnvProvider(environ)
creds = provider.load()
del environ['AWS_CREDENTIAL_EXPIRATION']
with self.assertRaises(credentials.PartialCredentialsError):
creds.get_frozen_credentials()
class TestSharedCredentialsProvider(BaseEnvVar):
def setUp(self):
super(TestSharedCredentialsProvider, self).setUp()
self.ini_parser = mock.Mock()
def test_credential_file_exists_default_profile(self):
self.ini_parser.return_value = {
'default': {
'aws_access_key_id': 'foo',
'aws_secret_access_key': 'bar',
}
}
provider = credentials.SharedCredentialProvider(
creds_filename='~/.aws/creds', profile_name='default',
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'shared-credentials-file')
def test_partial_creds_raise_error(self):
self.ini_parser.return_value = {
'default': {
'aws_access_key_id': 'foo',
# Missing 'aws_secret_access_key'.
}
}
provider = credentials.SharedCredentialProvider(
creds_filename='~/.aws/creds', profile_name='default',
ini_parser=self.ini_parser)
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
def test_credentials_file_exists_with_session_token(self):
self.ini_parser.return_value = {
'default': {
'aws_access_key_id': 'foo',
'aws_secret_access_key': 'bar',
'aws_session_token': 'baz',
}
}
provider = credentials.SharedCredentialProvider(
creds_filename='~/.aws/creds', profile_name='default',
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
self.assertEqual(creds.method, 'shared-credentials-file')
def test_credentials_file_with_multiple_profiles(self):
self.ini_parser.return_value = {
# Here the user has a 'default' and a 'dev' profile.
'default': {
'aws_access_key_id': 'a',
'aws_secret_access_key': 'b',
'aws_session_token': 'c',
},
'dev': {
'aws_access_key_id': 'd',
'aws_secret_access_key': 'e',
'aws_session_token': 'f',
},
}
# And we specify a profile_name of 'dev'.
provider = credentials.SharedCredentialProvider(
creds_filename='~/.aws/creds', profile_name='dev',
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'd')
self.assertEqual(creds.secret_key, 'e')
self.assertEqual(creds.token, 'f')
self.assertEqual(creds.method, 'shared-credentials-file')
def test_credentials_file_does_not_exist_returns_none(self):
        # It's ok if the credentials file does not exist; we should
        # just catch the appropriate errors and return None.
self.ini_parser.side_effect = botocore.exceptions.ConfigNotFound(
path='foo')
provider = credentials.SharedCredentialProvider(
creds_filename='~/.aws/creds', profile_name='dev',
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNone(creds)
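    def test_ini_parser_called_with_creds_filename(self):
        # Illustrative sketch, not part of the original suite: the
        # provider hands its creds_filename straight to the ini parser,
        # so we can assert on the parser call itself.
        self.ini_parser.return_value = {
            'default': {
                'aws_access_key_id': 'foo',
                'aws_secret_access_key': 'bar',
            }
        }
        provider = credentials.SharedCredentialProvider(
            creds_filename='~/.aws/creds', profile_name='default',
            ini_parser=self.ini_parser)
        provider.load()
        self.ini_parser.assert_called_with('~/.aws/creds')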
class TestConfigFileProvider(BaseEnvVar):
def setUp(self):
super(TestConfigFileProvider, self).setUp()
profile_config = {
'aws_access_key_id': 'a',
'aws_secret_access_key': 'b',
'aws_session_token': 'c',
            # Non-credential configs can also be in a session's config.
'region': 'us-west-2',
'output': 'json',
}
parsed = {'profiles': {'default': profile_config}}
parser = mock.Mock()
parser.return_value = parsed
self.parser = parser
def test_config_file_exists(self):
provider = credentials.ConfigProvider('cli.cfg', 'default',
self.parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
self.assertEqual(creds.method, 'config-file')
def test_config_file_missing_profile_config(self):
# Referring to a profile that's not in the config file
# will result in session.config returning an empty dict.
profile_name = 'NOT-default'
provider = credentials.ConfigProvider('cli.cfg', profile_name,
self.parser)
creds = provider.load()
self.assertIsNone(creds)
def test_config_file_errors_ignored(self):
# We should move on to the next provider if the config file
# can't be found.
self.parser.side_effect = botocore.exceptions.ConfigNotFound(
path='cli.cfg')
provider = credentials.ConfigProvider('cli.cfg', 'default',
self.parser)
creds = provider.load()
self.assertIsNone(creds)
def test_partial_creds_is_error(self):
profile_config = {
'aws_access_key_id': 'a',
# Missing aws_secret_access_key
}
parsed = {'profiles': {'default': profile_config}}
parser = mock.Mock()
parser.return_value = parsed
provider = credentials.ConfigProvider('cli.cfg', 'default', parser)
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
class TestBotoProvider(BaseEnvVar):
def setUp(self):
super(TestBotoProvider, self).setUp()
self.ini_parser = mock.Mock()
def test_boto_config_file_exists_in_home_dir(self):
environ = {}
self.ini_parser.return_value = {
'Credentials': {
# boto's config file does not support a session token
# so we only test for access_key/secret_key.
'aws_access_key_id': 'a',
'aws_secret_access_key': 'b',
}
}
provider = credentials.BotoProvider(environ=environ,
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'boto-config')
def test_env_var_set_for_boto_location(self):
environ = {
'BOTO_CONFIG': 'alternate-config.cfg'
}
self.ini_parser.return_value = {
'Credentials': {
# boto's config file does not support a session token
# so we only test for access_key/secret_key.
'aws_access_key_id': 'a',
'aws_secret_access_key': 'b',
}
}
provider = credentials.BotoProvider(environ=environ,
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'boto-config')
# Assert that the parser was called with the filename specified
# in the env var.
self.ini_parser.assert_called_with('alternate-config.cfg')
def test_no_boto_config_file_exists(self):
self.ini_parser.side_effect = botocore.exceptions.ConfigNotFound(
path='foo')
provider = credentials.BotoProvider(environ={},
ini_parser=self.ini_parser)
creds = provider.load()
self.assertIsNone(creds)
def test_partial_creds_is_error(self):
ini_parser = mock.Mock()
ini_parser.return_value = {
'Credentials': {
'aws_access_key_id': 'a',
# Missing aws_secret_access_key.
}
}
provider = credentials.BotoProvider(environ={},
ini_parser=ini_parser)
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
class TestOriginalEC2Provider(BaseEnvVar):
def test_load_ec2_credentials_file_not_exist(self):
provider = credentials.OriginalEC2Provider(environ={})
creds = provider.load()
self.assertIsNone(creds)
def test_load_ec2_credentials_file_exists(self):
environ = {
'AWS_CREDENTIAL_FILE': 'foo.cfg',
}
parser = mock.Mock()
parser.return_value = {
'AWSAccessKeyId': 'a',
'AWSSecretKey': 'b',
}
provider = credentials.OriginalEC2Provider(environ=environ,
parser=parser)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'ec2-credentials-file')
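    def test_parser_called_with_credential_file(self):
        # Illustrative sketch, not part of the original suite: the
        # provider reads the file named by AWS_CREDENTIAL_FILE, so the
        # parser should be invoked with that filename (a bare relative
        # name like 'foo.cfg' passes through user expansion unchanged).
        environ = {
            'AWS_CREDENTIAL_FILE': 'foo.cfg',
        }
        parser = mock.Mock()
        parser.return_value = {
            'AWSAccessKeyId': 'a',
            'AWSSecretKey': 'b',
        }
        provider = credentials.OriginalEC2Provider(environ=environ,
                                                   parser=parser)
        provider.load()
        parser.assert_called_with('foo.cfg')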
class TestInstanceMetadataProvider(BaseEnvVar):
def test_load_from_instance_metadata(self):
timeobj = datetime.now(tzlocal())
timestamp = (timeobj + timedelta(hours=24)).isoformat()
fetcher = mock.Mock()
fetcher.retrieve_iam_role_credentials.return_value = {
'access_key': 'a',
'secret_key': 'b',
'token': 'c',
'expiry_time': timestamp,
'role_name': 'myrole',
}
provider = credentials.InstanceMetadataProvider(
iam_role_fetcher=fetcher)
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
self.assertEqual(creds.method, 'iam-role')
def test_no_role_creds_exist(self):
fetcher = mock.Mock()
fetcher.retrieve_iam_role_credentials.return_value = {}
provider = credentials.InstanceMetadataProvider(
iam_role_fetcher=fetcher)
creds = provider.load()
self.assertIsNone(creds)
fetcher.retrieve_iam_role_credentials.assert_called_with()
class CredentialResolverTest(BaseEnvVar):
def setUp(self):
super(CredentialResolverTest, self).setUp()
self.provider1 = mock.Mock()
self.provider1.METHOD = 'provider1'
self.provider1.CANONICAL_NAME = 'CustomProvider1'
self.provider2 = mock.Mock()
self.provider2.METHOD = 'provider2'
self.provider2.CANONICAL_NAME = 'CustomProvider2'
self.fake_creds = credentials.Credentials('a', 'b', 'c')
def test_load_credentials_single_provider(self):
self.provider1.load.return_value = self.fake_creds
resolver = credentials.CredentialResolver(providers=[self.provider1])
creds = resolver.load_credentials()
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
def test_get_provider_by_name(self):
resolver = credentials.CredentialResolver(providers=[self.provider1])
result = resolver.get_provider('provider1')
self.assertIs(result, self.provider1)
def test_get_unknown_provider_raises_error(self):
resolver = credentials.CredentialResolver(providers=[self.provider1])
with self.assertRaises(botocore.exceptions.UnknownCredentialError):
resolver.get_provider('unknown-foo')
def test_first_credential_non_none_wins(self):
self.provider1.load.return_value = None
self.provider2.load.return_value = self.fake_creds
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
creds = resolver.load_credentials()
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
self.provider1.load.assert_called_with()
self.provider2.load.assert_called_with()
def test_no_creds_loaded(self):
self.provider1.load.return_value = None
self.provider2.load.return_value = None
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
creds = resolver.load_credentials()
self.assertIsNone(creds)
def test_inject_additional_providers_after_existing(self):
self.provider1.load.return_value = None
self.provider2.load.return_value = self.fake_creds
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
        # If we were to call resolver.load_credentials() now, provider2
        # would win because it's the first to return a non-None
        # response. However, we can inject a new provider before
        # provider2 to override this process. Providers are addressed
        # by their METHOD name.
new_provider = mock.Mock()
new_provider.METHOD = 'new_provider'
new_provider.load.return_value = credentials.Credentials('d', 'e', 'f')
resolver.insert_after('provider1', new_provider)
creds = resolver.load_credentials()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'd')
self.assertEqual(creds.secret_key, 'e')
self.assertEqual(creds.token, 'f')
# Provider 1 should have been called, but provider2 should
# *not* have been called because new_provider already returned
# a non-None response.
self.provider1.load.assert_called_with()
        self.assertTrue(not self.provider2.load.called)
def test_inject_provider_before_existing(self):
new_provider = mock.Mock()
new_provider.METHOD = 'override'
new_provider.load.return_value = credentials.Credentials('x', 'y', 'z')
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
resolver.insert_before(self.provider1.METHOD, new_provider)
creds = resolver.load_credentials()
self.assertEqual(creds.access_key, 'x')
self.assertEqual(creds.secret_key, 'y')
self.assertEqual(creds.token, 'z')
def test_can_remove_providers(self):
self.provider1.load.return_value = credentials.Credentials(
'a', 'b', 'c')
self.provider2.load.return_value = credentials.Credentials(
'd', 'e', 'f')
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
resolver.remove('provider1')
creds = resolver.load_credentials()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'd')
self.assertEqual(creds.secret_key, 'e')
self.assertEqual(creds.token, 'f')
self.assertTrue(not self.provider1.load.called)
self.provider2.load.assert_called_with()
def test_provider_unknown(self):
resolver = credentials.CredentialResolver(providers=[self.provider1,
self.provider2])
# No error is raised if you try to remove an unknown provider.
resolver.remove('providerFOO')
# But an error IS raised if you try to insert after an unknown
# provider.
with self.assertRaises(botocore.exceptions.UnknownCredentialError):
resolver.insert_after('providerFoo', None)
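    def test_insert_before_unknown_provider_raises_error(self):
        # Illustrative sketch, not part of the original suite:
        # insert_before() resolves provider names the same way
        # insert_after() does, so an unknown METHOD name raises the
        # same UnknownCredentialError.
        resolver = credentials.CredentialResolver(providers=[self.provider1])
        with self.assertRaises(botocore.exceptions.UnknownCredentialError):
            resolver.insert_before('providerFOO', None)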
class TestCreateCredentialResolver(BaseEnvVar):
def setUp(self):
super(TestCreateCredentialResolver, self).setUp()
self.session = mock.Mock()
self.session_instance_vars = {
'credentials_file': 'a',
'legacy_config_file': 'b',
'config_file': 'c',
'metadata_service_timeout': 'd',
'metadata_service_num_attempts': 'e',
}
self.fake_env_vars = {}
self.session.get_config_variable = self.fake_get_config_variable
def fake_get_config_variable(self, name, methods=None):
if methods == ('instance',):
return self.session_instance_vars.get(name)
elif methods is not None and 'env' in methods:
return self.fake_env_vars.get(name)
def test_create_credential_resolver(self):
resolver = credentials.create_credential_resolver(self.session)
self.assertIsInstance(resolver, credentials.CredentialResolver)
def test_explicit_profile_ignores_env_provider(self):
self.session_instance_vars['profile'] = 'dev'
resolver = credentials.create_credential_resolver(self.session)
self.assertTrue(
all(not isinstance(p, EnvProvider) for p in resolver.providers))
def test_no_profile_checks_env_provider(self):
# If no profile is provided,
self.session_instance_vars.pop('profile', None)
resolver = credentials.create_credential_resolver(self.session)
# Then an EnvProvider should be part of our credential lookup chain.
self.assertTrue(
any(isinstance(p, EnvProvider) for p in resolver.providers))
def test_env_provider_added_if_profile_from_env_set(self):
self.fake_env_vars['profile'] = 'profile-from-env'
resolver = credentials.create_credential_resolver(self.session)
self.assertTrue(
any(isinstance(p, EnvProvider) for p in resolver.providers))
def test_default_cache(self):
resolver = credentials.create_credential_resolver(self.session)
cache = resolver.get_provider('assume-role').cache
self.assertIsInstance(cache, dict)
self.assertEqual(cache, {})
def test_custom_cache(self):
custom_cache = credentials.JSONFileCache()
resolver = credentials.create_credential_resolver(
self.session, custom_cache
)
cache = resolver.get_provider('assume-role').cache
self.assertIs(cache, custom_cache)
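    def test_env_provider_lookup_by_method_name(self):
        # Illustrative sketch, not part of the original suite:
        # providers in the default chain can be looked up by their
        # METHOD name; the EnvProvider registers under 'env'.
        resolver = credentials.create_credential_resolver(self.session)
        self.assertIsInstance(resolver.get_provider('env'), EnvProvider)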
class TestCanonicalNameSourceProvider(BaseEnvVar):
def setUp(self):
super(TestCanonicalNameSourceProvider, self).setUp()
self.custom_provider1 = mock.Mock(spec=CredentialProvider)
self.custom_provider1.METHOD = 'provider1'
self.custom_provider1.CANONICAL_NAME = 'CustomProvider1'
self.custom_provider2 = mock.Mock(spec=CredentialProvider)
self.custom_provider2.METHOD = 'provider2'
self.custom_provider2.CANONICAL_NAME = 'CustomProvider2'
self.fake_creds = credentials.Credentials('a', 'b', 'c')
def test_load_source_credentials(self):
provider = credentials.CanonicalNameCredentialSourcer(providers=[
self.custom_provider1, self.custom_provider2
])
self.custom_provider1.load.return_value = self.fake_creds
result = provider.source_credentials('CustomProvider1')
self.assertIs(result, self.fake_creds)
def test_load_source_credentials_case_insensitive(self):
provider = credentials.CanonicalNameCredentialSourcer(providers=[
self.custom_provider1, self.custom_provider2
])
self.custom_provider1.load.return_value = self.fake_creds
result = provider.source_credentials('cUsToMpRoViDeR1')
self.assertIs(result, self.fake_creds)
def test_load_unknown_canonical_name_raises_error(self):
provider = credentials.CanonicalNameCredentialSourcer(providers=[
self.custom_provider1])
with self.assertRaises(botocore.exceptions.UnknownCredentialError):
provider.source_credentials('CustomUnknown')
def _assert_assume_role_creds_returned_with_shared_file(self, provider):
assume_role_provider = mock.Mock(spec=AssumeRoleProvider)
assume_role_provider.METHOD = 'assume-role'
assume_role_provider.CANONICAL_NAME = None
source = credentials.CanonicalNameCredentialSourcer(providers=[
assume_role_provider, provider
])
# If the assume role provider returns credentials, those should be
# what is returned.
assume_role_provider.load.return_value = self.fake_creds
provider.load.return_value = credentials.Credentials(
'd', 'e', 'f'
)
creds = source.source_credentials(provider.CANONICAL_NAME)
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
self.assertFalse(provider.load.called)
def _assert_returns_creds_if_assume_role_not_used(self, provider):
assume_role_provider = mock.Mock(spec=AssumeRoleProvider)
assume_role_provider.METHOD = 'assume-role'
assume_role_provider.CANONICAL_NAME = None
source = credentials.CanonicalNameCredentialSourcer(providers=[
assume_role_provider, provider
])
# If the assume role provider returns nothing, then whatever is in
# the config provider should be returned.
assume_role_provider.load.return_value = None
provider.load.return_value = credentials.Credentials(
'd', 'e', 'f'
)
creds = source.source_credentials(provider.CANONICAL_NAME)
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'd')
self.assertEqual(creds.secret_key, 'e')
self.assertEqual(creds.token, 'f')
self.assertTrue(assume_role_provider.load.called)
def test_assume_role_creds_returned_with_config_file(self):
provider = mock.Mock(spec=ConfigProvider)
provider.METHOD = 'config-file'
provider.CANONICAL_NAME = 'SharedConfig'
self._assert_assume_role_creds_returned_with_shared_file(provider)
def test_config_file_returns_creds_if_assume_role_not_used(self):
provider = mock.Mock(spec=ConfigProvider)
provider.METHOD = 'config-file'
provider.CANONICAL_NAME = 'SharedConfig'
self._assert_returns_creds_if_assume_role_not_used(provider)
def test_assume_role_creds_returned_with_cred_file(self):
provider = mock.Mock(spec=SharedCredentialProvider)
provider.METHOD = 'credentials-file'
provider.CANONICAL_NAME = 'SharedCredentials'
self._assert_assume_role_creds_returned_with_shared_file(provider)
def test_creds_file_returns_creds_if_assume_role_not_used(self):
provider = mock.Mock(spec=SharedCredentialProvider)
provider.METHOD = 'credentials-file'
provider.CANONICAL_NAME = 'SharedCredentials'
self._assert_returns_creds_if_assume_role_not_used(provider)
def test_get_canonical_assume_role_without_shared_files(self):
assume_role_provider = mock.Mock(spec=AssumeRoleProvider)
assume_role_provider.METHOD = 'assume-role'
assume_role_provider.CANONICAL_NAME = None
assume_role_provider.load.return_value = self.fake_creds
provider = credentials.CanonicalNameCredentialSourcer(providers=[
assume_role_provider
])
creds = provider.source_credentials('SharedConfig')
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
creds = provider.source_credentials('SharedCredentials')
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'a')
self.assertEqual(creds.secret_key, 'b')
self.assertEqual(creds.token, 'c')
def test_get_canonical_shared_files_without_assume_role(self):
provider = credentials.CanonicalNameCredentialSourcer(
providers=[self.custom_provider1])
with self.assertRaises(botocore.exceptions.UnknownCredentialError):
provider.source_credentials('SharedConfig')
with self.assertRaises(botocore.exceptions.UnknownCredentialError):
provider.source_credentials('SharedCredentials')
class TestAssumeRoleCredentialProvider(unittest.TestCase):
maxDiff = None
def setUp(self):
self.fake_config = {
'profiles': {
'development': {
'role_arn': 'myrole',
'source_profile': 'longterm',
},
'longterm': {
'aws_access_key_id': 'akid',
'aws_secret_access_key': 'skid',
},
'non-static': {
'role_arn': 'myrole',
'credential_source': 'Environment'
},
'chained': {
'role_arn': 'chained-role',
'source_profile': 'development'
}
}
}
def create_config_loader(self, with_config=None):
if with_config is None:
with_config = self.fake_config
load_config = mock.Mock()
load_config.return_value = with_config
return load_config
def create_client_creator(self, with_response):
# Create a mock sts client that returns a specific response
# for assume_role.
client = mock.Mock()
if isinstance(with_response, list):
client.assume_role.side_effect = with_response
else:
client.assume_role.return_value = with_response
return mock.Mock(return_value=client)
def some_future_time(self):
timeobj = datetime.now(tzlocal())
return timeobj + timedelta(hours=24)
def test_assume_role_with_no_cache(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache={}, profile_name='development')
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_assume_role_with_datetime(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
# Note the lack of isoformat(), we're using
# a datetime.datetime type. This will ensure
# we test both parsing as well as serializing
# from a given datetime because the credentials
# are immediately expired.
'Expiration': datetime.now(tzlocal()) + timedelta(hours=20)
},
}
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache={}, profile_name='development')
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_assume_role_refresher_serializes_datetime(self):
client = mock.Mock()
time_zone = tzutc()
expiration = datetime(
year=2016, month=11, day=6, hour=1, minute=30, tzinfo=time_zone)
client.assume_role.return_value = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': expiration,
}
}
refresh = create_assume_role_refresher(client, {})
expiry_time = refresh()['expiry_time']
self.assertEqual(expiry_time, '2016-11-06T01:30:00UTC')
def test_assume_role_retrieves_from_cache(self):
date_in_future = datetime.utcnow() + timedelta(seconds=1000)
utc_timestamp = date_in_future.isoformat() + 'Z'
self.fake_config['profiles']['development']['role_arn'] = 'myrole'
cache_key = (
'793d6e2f27667ab2da104824407e486bfec24a47'
)
cache = {
cache_key: {
'Credentials': {
'AccessKeyId': 'foo-cached',
'SecretAccessKey': 'bar-cached',
'SessionToken': 'baz-cached',
'Expiration': utc_timestamp,
}
}
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), mock.Mock(),
cache=cache, profile_name='development')
creds = provider.load()
self.assertEqual(creds.access_key, 'foo-cached')
self.assertEqual(creds.secret_key, 'bar-cached')
self.assertEqual(creds.token, 'baz-cached')
def test_chain_prefers_cache(self):
date_in_future = datetime.utcnow() + timedelta(seconds=1000)
utc_timestamp = date_in_future.isoformat() + 'Z'
# The profile we will be using has a cache entry, but the profile it
# is sourcing from does not. This should result in the cached
# credentials being used, and the source profile not being called.
cache_key = (
'3d440bf424caf7a5ee664fbf89139a84409f95c2'
)
cache = {
cache_key: {
'Credentials': {
'AccessKeyId': 'foo-cached',
'SecretAccessKey': 'bar-cached',
'SessionToken': 'baz-cached',
'Expiration': utc_timestamp,
}
}
}
client_creator = self.create_client_creator([
Exception("Attempted to call assume role when not needed.")
])
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), client_creator,
cache=cache, profile_name='chained')
creds = provider.load()
self.assertEqual(creds.access_key, 'foo-cached')
self.assertEqual(creds.secret_key, 'bar-cached')
self.assertEqual(creds.token, 'baz-cached')
def test_cache_key_is_windows_safe(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
cache = {}
self.fake_config['profiles']['development']['role_arn'] = (
'arn:aws:iam::foo-role')
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache=cache, profile_name='development')
provider.load().get_frozen_credentials()
        # On Windows, you cannot use a ':' in the filename, so
        # we need to make sure it doesn't come up in the cache key.
cache_key = (
'3f8e35c8dca6211d496e830a2de723b2387921e3'
)
self.assertIn(cache_key, cache)
self.assertEqual(cache[cache_key], response)
def test_cache_key_with_role_session_name(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
cache = {}
self.fake_config['profiles']['development']['role_arn'] = (
'arn:aws:iam::foo-role')
self.fake_config['profiles']['development']['role_session_name'] = (
'foo_role_session_name')
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache=cache, profile_name='development')
# The credentials won't actually be assumed until they're requested.
provider.load().get_frozen_credentials()
cache_key = (
'5e75ce21b6a64ab183b29c4a159b6f0248121d51'
)
self.assertIn(cache_key, cache)
self.assertEqual(cache[cache_key], response)
def test_assume_role_in_cache_but_expired(self):
expired_creds = datetime.now(tzlocal())
valid_creds = expired_creds + timedelta(hours=1)
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': valid_creds,
},
}
client_creator = self.create_client_creator(with_response=response)
cache = {
'development--myrole': {
'Credentials': {
'AccessKeyId': 'foo-cached',
'SecretAccessKey': 'bar-cached',
'SessionToken': 'baz-cached',
'Expiration': expired_creds,
}
}
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), client_creator,
cache=cache, profile_name='development')
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_role_session_name_provided(self):
dev_profile = self.fake_config['profiles']['development']
dev_profile['role_session_name'] = 'myname'
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache={}, profile_name='development')
# The credentials won't actually be assumed until they're requested.
provider.load().get_frozen_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn='myrole', RoleSessionName='myname')
def test_external_id_provided(self):
self.fake_config['profiles']['development']['external_id'] = 'myid'
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
client_creator, cache={}, profile_name='development')
# The credentials won't actually be assumed until they're requested.
provider.load().get_frozen_credentials()
client = client_creator.return_value
client.assume_role.assert_called_with(
RoleArn='myrole', ExternalId='myid', RoleSessionName=mock.ANY)
def test_assume_role_with_mfa(self):
self.fake_config['profiles']['development']['mfa_serial'] = 'mfa'
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
prompter = mock.Mock(return_value='token-code')
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), client_creator,
cache={}, profile_name='development', prompter=prompter)
# The credentials won't actually be assumed until they're requested.
provider.load().get_frozen_credentials()
client = client_creator.return_value
# In addition to the normal assume role args, we should also
# inject the serial number from the config as well as the
# token code that comes from prompting the user (the prompter
# object).
client.assume_role.assert_called_with(
RoleArn='myrole', RoleSessionName=mock.ANY, SerialNumber='mfa',
TokenCode='token-code')
def test_assume_role_populates_session_name_on_refresh(self):
expiration_time = self.some_future_time()
next_expiration_time = expiration_time + timedelta(hours=4)
responses = [{
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
                # This expiry time starts out in the future; the test
                # advances the mocked clock to it below, which is what
                # triggers the refresh behavior.
'Expiration': expiration_time.isoformat(),
},
}, {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': next_expiration_time.isoformat(),
}
}]
client_creator = self.create_client_creator(with_response=responses)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), client_creator,
cache={}, profile_name='development',
prompter=mock.Mock(return_value='token-code'))
local_now = mock.Mock(return_value=datetime.now(tzlocal()))
with mock.patch('botocore.credentials._local_now', local_now):
# This will trigger the first assume_role() call. It returns
# credentials that are expired and will trigger a refresh.
creds = provider.load()
creds.get_frozen_credentials()
# This will trigger the second assume_role() call because
# a refresh is needed.
local_now.return_value = expiration_time
creds.get_frozen_credentials()
client = client_creator.return_value
assume_role_calls = client.assume_role.call_args_list
self.assertEqual(len(assume_role_calls), 2, assume_role_calls)
# The args should be identical. That is, the second
# assume_role call should have the exact same args as the
# initial assume_role call.
self.assertEqual(assume_role_calls[0], assume_role_calls[1])
def test_assume_role_mfa_cannot_refresh_credentials(self):
# Note: we should look into supporting optional behavior
# in the future that allows for reprompting for credentials.
        # But for now, if we get temp creds with MFA, then when those
        # creds expire we can't refresh the credentials.
self.fake_config['profiles']['development']['mfa_serial'] = 'mfa'
expiration_time = self.some_future_time()
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
                # This expiry time starts out in the future; the test
                # advances the mocked clock to it below to trigger the
                # refresh behavior.
'Expiration': expiration_time.isoformat(),
},
}
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
self.create_config_loader(), client_creator,
cache={}, profile_name='development',
prompter=mock.Mock(return_value='token-code'))
local_now = mock.Mock(return_value=datetime.now(tzlocal()))
with mock.patch('botocore.credentials._local_now', local_now):
# Loads the credentials, resulting in the first assume role call.
creds = provider.load()
creds.get_frozen_credentials()
local_now.return_value = expiration_time
with self.assertRaises(credentials.RefreshWithMFAUnsupportedError):
                # access_key is a property that will refresh credentials
                # if they're expired. Because the mocked clock has now
                # reached the expiry time, this triggers the refresh
                # behavior, which with MFA currently raises an exception.
creds.access_key
def test_no_config_is_noop(self):
self.fake_config['profiles']['development'] = {
'aws_access_key_id': 'foo',
'aws_secret_access_key': 'bar',
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='development')
# Because a role_arn was not specified, the AssumeRoleProvider
# is a noop and will not return credentials (which means we
# move on to the next provider).
creds = provider.load()
self.assertIsNone(creds)
def test_source_profile_not_provided(self):
del self.fake_config['profiles']['development']['source_profile']
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='development')
        # source_profile is required, so we should get an error.
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
def test_source_profile_does_not_exist(self):
dev_profile = self.fake_config['profiles']['development']
dev_profile['source_profile'] = 'does-not-exist'
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='development')
        # source_profile is required, so we should get an error.
with self.assertRaises(botocore.exceptions.InvalidConfigError):
provider.load()
def test_incomplete_source_credentials_raises_error(self):
del self.fake_config['profiles']['longterm']['aws_access_key_id']
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='development')
with self.assertRaises(botocore.exceptions.PartialCredentialsError):
provider.load()
def test_source_profile_and_credential_source_provided(self):
profile = self.fake_config['profiles']['development']
profile['credential_source'] = 'SomeCredentialProvider'
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='development')
with self.assertRaises(botocore.exceptions.InvalidConfigError):
provider.load()
def test_credential_source_with_no_resolver_configured(self):
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='non-static')
with self.assertRaises(botocore.exceptions.InvalidConfigError):
provider.load()
def test_credential_source_with_no_providers_configured(self):
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='non-static',
credential_sourcer=credentials.CanonicalNameCredentialSourcer([])
)
with self.assertRaises(botocore.exceptions.InvalidConfigError):
provider.load()
def test_credential_source_not_among_providers(self):
fake_provider = mock.Mock()
fake_provider.CANONICAL_NAME = 'CustomFakeProvider'
provider = credentials.AssumeRoleProvider(
self.create_config_loader(),
mock.Mock(), cache={}, profile_name='non-static',
credential_sourcer=credentials.CanonicalNameCredentialSourcer(
[fake_provider])
)
# We configured the assume role provider with a single fake source
# provider, CustomFakeProvider. The profile we are attempting to use
# calls for the Environment credential provider as the credentials
# source. Since that isn't one of the configured source providers,
# an error is thrown.
with self.assertRaises(botocore.exceptions.InvalidConfigError):
provider.load()
def test_assume_role_with_credential_source(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
client_creator = self.create_client_creator(with_response=response)
config = {
'profiles': {
'sourced': {
'role_arn': 'myrole',
'credential_source': 'CustomMockProvider'
}
}
}
config_loader = self.create_config_loader(with_config=config)
fake_provider = mock.Mock()
fake_provider.CANONICAL_NAME = 'CustomMockProvider'
fake_creds = credentials.Credentials(
'akid', 'skid', 'token'
)
fake_provider.load.return_value = fake_creds
provider = credentials.AssumeRoleProvider(
config_loader, client_creator, cache={}, profile_name='sourced',
credential_sourcer=credentials.CanonicalNameCredentialSourcer(
[fake_provider])
)
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
client_creator.assert_called_with(
'sts', aws_access_key_id=fake_creds.access_key,
aws_secret_access_key=fake_creds.secret_key,
aws_session_token=fake_creds.token
)
def test_credential_source_returns_none(self):
config = {
'profiles': {
'sourced': {
'role_arn': 'myrole',
'credential_source': 'CustomMockProvider'
}
}
}
config_loader = self.create_config_loader(with_config=config)
fake_provider = mock.Mock()
fake_provider.CANONICAL_NAME = 'CustomMockProvider'
fake_provider.load.return_value = None
provider = credentials.AssumeRoleProvider(
config_loader, mock.Mock(), cache={}, profile_name='sourced',
credential_sourcer=credentials.CanonicalNameCredentialSourcer(
[fake_provider])
)
with self.assertRaises(botocore.exceptions.CredentialRetrievalError):
provider.load()
def test_source_profile_can_reference_self(self):
response = {
'Credentials': {
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': self.some_future_time().isoformat()
},
}
client_creator = self.create_client_creator(with_response=response)
config = {
'profiles': {
                'self-referential': {
'aws_access_key_id': 'akid',
'aws_secret_access_key': 'skid',
'role_arn': 'myrole',
                    'source_profile': 'self-referential'
}
}
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(config),
            client_creator, cache={}, profile_name='self-referential'
)
creds = provider.load()
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
def test_infinite_looping_profiles_raises_error(self):
config = {
'profiles': {
'first': {
'role_arn': 'first',
'source_profile': 'second'
},
'second': {
'role_arn': 'second',
'source_profile': 'first'
}
}
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(config),
mock.Mock(), cache={}, profile_name='first'
)
with self.assertRaises(botocore.credentials.InfiniteLoopConfigError):
provider.load()
def test_recursive_assume_role(self):
assume_responses = [
Credentials('foo', 'bar', 'baz'),
            Credentials('spam', 'eggs', 'spamandeggs'),
]
responses = []
for credential_set in assume_responses:
responses.append({
'Credentials': {
'AccessKeyId': credential_set.access_key,
'SecretAccessKey': credential_set.secret_key,
'SessionToken': credential_set.token,
'Expiration': self.some_future_time().isoformat()
}
})
client_creator = self.create_client_creator(with_response=responses)
static_credentials = Credentials('akid', 'skid')
config = {
'profiles': {
'first': {
'role_arn': 'first',
'source_profile': 'second'
},
'second': {
'role_arn': 'second',
'source_profile': 'third'
},
'third': {
'aws_access_key_id': static_credentials.access_key,
'aws_secret_access_key': static_credentials.secret_key,
}
}
}
provider = credentials.AssumeRoleProvider(
self.create_config_loader(config),
client_creator, cache={}, profile_name='first'
)
creds = provider.load()
expected_creds = assume_responses[-1]
self.assertEqual(creds.access_key, expected_creds.access_key)
self.assertEqual(creds.secret_key, expected_creds.secret_key)
self.assertEqual(creds.token, expected_creds.token)
client_creator.assert_has_calls([
mock.call(
'sts', aws_access_key_id=static_credentials.access_key,
aws_secret_access_key=static_credentials.secret_key,
aws_session_token=static_credentials.token
),
mock.call(
'sts', aws_access_key_id=assume_responses[0].access_key,
aws_secret_access_key=assume_responses[0].secret_key,
aws_session_token=assume_responses[0].token
),
])
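    def test_unknown_profile_is_noop(self):
        # Illustrative sketch, not part of the original suite: a
        # profile name that isn't in the config at all behaves like a
        # profile without a role_arn, so the provider declines and
        # returns None.
        provider = credentials.AssumeRoleProvider(
            self.create_config_loader(),
            mock.Mock(), cache={}, profile_name='no-such-profile')
        self.assertIsNone(provider.load())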
class TestJSONCache(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.cache = credentials.JSONFileCache(self.tempdir)
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_supports_contains_check(self):
# By default the cache is empty because we're
        # using a new temp dir every time.
self.assertTrue('mykey' not in self.cache)
def test_add_key_and_contains_check(self):
self.cache['mykey'] = {'foo': 'bar'}
self.assertTrue('mykey' in self.cache)
def test_added_key_can_be_retrieved(self):
self.cache['mykey'] = {'foo': 'bar'}
self.assertEqual(self.cache['mykey'], {'foo': 'bar'})
def test_only_accepts_json_serializable_data(self):
with self.assertRaises(ValueError):
# set()'s cannot be serialized to a JSON string.
self.cache['mykey'] = set()
def test_can_override_existing_values(self):
self.cache['mykey'] = {'foo': 'bar'}
self.cache['mykey'] = {'baz': 'newvalue'}
self.assertEqual(self.cache['mykey'], {'baz': 'newvalue'})
def test_can_add_multiple_keys(self):
self.cache['mykey'] = {'foo': 'bar'}
self.cache['mykey2'] = {'baz': 'qux'}
self.assertEqual(self.cache['mykey'], {'foo': 'bar'})
self.assertEqual(self.cache['mykey2'], {'baz': 'qux'})
def test_working_dir_does_not_exist(self):
working_dir = os.path.join(self.tempdir, 'foo')
cache = credentials.JSONFileCache(working_dir)
cache['foo'] = {'bar': 'baz'}
self.assertEqual(cache['foo'], {'bar': 'baz'})
def test_key_error_raised_when_cache_key_does_not_exist(self):
with self.assertRaises(KeyError):
self.cache['foo']
def test_file_is_truncated_before_writing(self):
self.cache['mykey'] = {
'really long key in the cache': 'really long value in cache'}
# Now overwrite it with a smaller value.
self.cache['mykey'] = {'a': 'b'}
self.assertEqual(self.cache['mykey'], {'a': 'b'})
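    def test_cache_entries_are_stored_as_json_files(self):
        # Illustrative sketch, not part of the original suite: each
        # cache key is persisted as '<key>.json' under the working
        # directory, which is what the permissions test below relies
        # on when it stats the file.
        self.cache['mykey'] = {'foo': 'bar'}
        filename = os.path.join(self.tempdir, 'mykey.json')
        self.assertTrue(os.path.isfile(filename))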
@skip_if_windows('File permissions tests not supported on Windows.')
def test_permissions_for_file_restricted(self):
self.cache['mykey'] = {'foo': 'bar'}
filename = os.path.join(self.tempdir, 'mykey.json')
self.assertEqual(os.stat(filename).st_mode & 0xFFF, 0o600)
class TestRefreshLogic(unittest.TestCase):
def test_mandatory_refresh_needed(self):
creds = IntegerRefresher(
            # The creds expire in 2 seconds, but a refresh becomes
            # mandatory when fewer than 3 seconds remain, so a
            # mandatory refresh is triggered immediately.
creds_last_for=2,
mandatory_refresh=3,
advisory_refresh=3)
temp = creds.get_frozen_credentials()
self.assertEqual(
temp, credentials.ReadOnlyCredentials('1', '1', '1'))
def test_advisory_refresh_needed(self):
creds = IntegerRefresher(
            # The creds last 4 seconds, inside the advisory window (5)
            # but outside the mandatory window (2), so an advisory
            # refresh is triggered immediately.
creds_last_for=4,
mandatory_refresh=2,
advisory_refresh=5)
temp = creds.get_frozen_credentials()
self.assertEqual(
temp, credentials.ReadOnlyCredentials('1', '1', '1'))
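    def test_no_refresh_needed(self):
        # Illustrative sketch, not part of the original suite: when the
        # credentials last well beyond both refresh windows, accessing
        # them does not trigger a refresh and we still see the initial
        # generation ('0').
        creds = IntegerRefresher(
            creds_last_for=60,
            mandatory_refresh=3,
            advisory_refresh=5)
        temp = creds.get_frozen_credentials()
        self.assertEqual(
            temp, credentials.ReadOnlyCredentials('0', '0', '0'))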
def test_refresh_fails_is_not_an_error_during_advisory_period(self):
fail_refresh = mock.Mock(side_effect=Exception("refresh failed"))
creds = IntegerRefresher(
creds_last_for=5,
advisory_refresh=7,
mandatory_refresh=3,
refresh_function=fail_refresh
)
temp = creds.get_frozen_credentials()
# We should have called the refresh function.
self.assertTrue(fail_refresh.called)
        # The fail_refresh function will raise an exception.
        # Because we're in the advisory period we won't propagate
        # the exception; we return the current set of credentials
        # (generation '0').
self.assertEqual(
temp, credentials.ReadOnlyCredentials('0', '0', '0'))
    def test_exception_propagated_on_error_during_mandatory_period(self):
fail_refresh = mock.Mock(side_effect=Exception("refresh failed"))
creds = IntegerRefresher(
creds_last_for=5,
advisory_refresh=10,
            # Note we're in the mandatory period now (5 < 7 < 10).
mandatory_refresh=7,
refresh_function=fail_refresh
)
with self.assertRaisesRegexp(Exception, 'refresh failed'):
creds.get_frozen_credentials()
    def test_exception_propagated_on_expired_credentials(self):
fail_refresh = mock.Mock(side_effect=Exception("refresh failed"))
creds = IntegerRefresher(
            # Setting this to 0 means the credentials are immediately
            # expired.
creds_last_for=0,
advisory_refresh=10,
mandatory_refresh=7,
refresh_function=fail_refresh
)
with self.assertRaisesRegexp(Exception, 'refresh failed'):
# Because credentials are actually expired, any
# failure to refresh should be propagated.
creds.get_frozen_credentials()
def test_refresh_giving_expired_credentials_raises_exception(self):
# This verifies an edge cases where refreshed credentials
# still give expired credentials:
# 1. We see credentials are expired.
# 2. We try to refresh the credentials.
# 3. The "refreshed" credentials are still expired.
#
# In this case, we hard fail and let the user know what
# happened.
creds = IntegerRefresher(
# Negative number indicates that the credentials
# have already been expired for 2 seconds, even
# on refresh.
creds_last_for=-2,
)
err_msg = 'refreshed credentials are still expired'
with self.assertRaisesRegexp(RuntimeError, err_msg):
# Because credentials are actually expired, any
# failure to refresh should be propagated.
creds.get_frozen_credentials()
class TestContainerProvider(BaseEnvVar):
def test_noop_if_env_var_is_not_set(self):
# The 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI' env var
# is not present as an env var.
environ = {}
provider = credentials.ContainerProvider(environ)
creds = provider.load()
self.assertIsNone(creds)
def full_url(self, url):
return 'http://%s%s' % (ContainerMetadataFetcher.IP_ADDRESS, url)
def create_fetcher(self):
fetcher = mock.Mock(spec=ContainerMetadataFetcher)
fetcher.full_url = self.full_url
return fetcher
def test_retrieve_from_provider_if_env_var_present(self):
environ = {
'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI': '/latest/credentials?id=foo'
}
fetcher = self.create_fetcher()
timeobj = datetime.now(tzlocal())
timestamp = (timeobj + timedelta(hours=24)).isoformat()
fetcher.retrieve_full_uri.return_value = {
"AccessKeyId" : "access_key",
"SecretAccessKey" : "secret_key",
"Token" : "token",
"Expiration" : timestamp,
}
provider = credentials.ContainerProvider(environ, fetcher)
creds = provider.load()
fetcher.retrieve_full_uri.assert_called_with(
self.full_url('/latest/credentials?id=foo'), headers=None)
self.assertEqual(creds.access_key, 'access_key')
self.assertEqual(creds.secret_key, 'secret_key')
self.assertEqual(creds.token, 'token')
self.assertEqual(creds.method, 'container-role')
def test_creds_refresh_when_needed(self):
environ = {
'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI': '/latest/credentials?id=foo'
}
fetcher = mock.Mock(spec=credentials.ContainerMetadataFetcher)
timeobj = datetime.now(tzlocal())
expired_timestamp = (timeobj - timedelta(hours=23)).isoformat()
future_timestamp = (timeobj + timedelta(hours=1)).isoformat()
fetcher.retrieve_full_uri.side_effect = [
{
"AccessKeyId" : "access_key_old",
"SecretAccessKey" : "secret_key_old",
"Token" : "token_old",
"Expiration" : expired_timestamp,
},
{
"AccessKeyId" : "access_key_new",
"SecretAccessKey" : "secret_key_new",
"Token" : "token_new",
"Expiration" : future_timestamp,
}
]
provider = credentials.ContainerProvider(environ, fetcher)
creds = provider.load()
frozen_creds = creds.get_frozen_credentials()
self.assertEqual(frozen_creds.access_key, 'access_key_new')
self.assertEqual(frozen_creds.secret_key, 'secret_key_new')
self.assertEqual(frozen_creds.token, 'token_new')
def test_http_error_propagated(self):
environ = {
'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI': '/latest/credentials?id=foo'
}
fetcher = mock.Mock(spec=credentials.ContainerMetadataFetcher)
timeobj = datetime.now(tzlocal())
expired_timestamp = (timeobj - timedelta(hours=23)).isoformat()
future_timestamp = (timeobj + timedelta(hours=1)).isoformat()
exception = botocore.exceptions.CredentialRetrievalError
fetcher.retrieve_full_uri.side_effect = exception(provider='ecs-role',
error_msg='fake http error')
with self.assertRaises(exception):
provider = credentials.ContainerProvider(environ, fetcher)
creds = provider.load()
def test_http_error_propagated_on_refresh(self):
# We should ensure errors are still propagated even in the
# case of a failed refresh.
environ = {
'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI': '/latest/credentials?id=foo'
}
fetcher = mock.Mock(spec=credentials.ContainerMetadataFetcher)
timeobj = datetime.now(tzlocal())
expired_timestamp = (timeobj - timedelta(hours=23)).isoformat()
http_exception = botocore.exceptions.MetadataRetrievalError
raised_exception = botocore.exceptions.CredentialRetrievalError
        fetcher.retrieve_full_uri.side_effect = [
            {
                "AccessKeyId": "access_key_old",
                "SecretAccessKey": "secret_key_old",
                "Token": "token_old",
                "Expiration": expired_timestamp,
            },
            http_exception(error_msg='HTTP connection timeout')
        ]
provider = credentials.ContainerProvider(environ, fetcher)
# First time works with no issues.
creds = provider.load()
# Second time with a refresh should propagate an error.
with self.assertRaises(raised_exception):
frozen_creds = creds.get_frozen_credentials()
def test_can_use_full_url(self):
environ = {
'AWS_CONTAINER_CREDENTIALS_FULL_URI': 'http://localhost/foo'
}
fetcher = self.create_fetcher()
timeobj = datetime.now(tzlocal())
timestamp = (timeobj + timedelta(hours=24)).isoformat()
        fetcher.retrieve_full_uri.return_value = {
            "AccessKeyId": "access_key",
            "SecretAccessKey": "secret_key",
            "Token": "token",
            "Expiration": timestamp,
        }
provider = credentials.ContainerProvider(environ, fetcher)
creds = provider.load()
fetcher.retrieve_full_uri.assert_called_with('http://localhost/foo',
headers=None)
self.assertEqual(creds.access_key, 'access_key')
self.assertEqual(creds.secret_key, 'secret_key')
self.assertEqual(creds.token, 'token')
self.assertEqual(creds.method, 'container-role')
def test_can_pass_basic_auth_token(self):
environ = {
'AWS_CONTAINER_CREDENTIALS_FULL_URI': 'http://localhost/foo',
'AWS_CONTAINER_AUTHORIZATION_TOKEN': 'Basic auth-token',
}
fetcher = self.create_fetcher()
timeobj = datetime.now(tzlocal())
timestamp = (timeobj + timedelta(hours=24)).isoformat()
        fetcher.retrieve_full_uri.return_value = {
            "AccessKeyId": "access_key",
            "SecretAccessKey": "secret_key",
            "Token": "token",
            "Expiration": timestamp,
        }
provider = credentials.ContainerProvider(environ, fetcher)
creds = provider.load()
fetcher.retrieve_full_uri.assert_called_with(
'http://localhost/foo', headers={'Authorization': 'Basic auth-token'})
self.assertEqual(creds.access_key, 'access_key')
self.assertEqual(creds.secret_key, 'secret_key')
self.assertEqual(creds.token, 'token')
self.assertEqual(creds.method, 'container-role')
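# Summary of the two configuration modes exercised above (sketch): the
# provider either reads AWS_CONTAINER_CREDENTIALS_RELATIVE_URI and resolves
# it against the fixed container metadata IP address, or reads
# AWS_CONTAINER_CREDENTIALS_FULL_URI directly, optionally sending the value
# of AWS_CONTAINER_AUTHORIZATION_TOKEN as the Authorization header.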
class TestProcessProvider(BaseEnvVar):
def setUp(self):
super(TestProcessProvider, self).setUp()
self.loaded_config = {}
self.load_config = mock.Mock(return_value=self.loaded_config)
self.invoked_process = mock.Mock()
self.popen_mock = mock.Mock(return_value=self.invoked_process,
spec=subprocess.Popen)
def create_process_provider(self, profile_name='default'):
provider = credentials.ProcessProvider(profile_name, self.load_config,
popen=self.popen_mock)
return provider
def _get_output(self, stdout, stderr=''):
return json.dumps(stdout).encode('utf-8'), stderr.encode('utf-8')
def _set_process_return_value(self, stdout, stderr='', rc=0):
output = self._get_output(stdout, stderr)
self.invoked_process.communicate.return_value = output
self.invoked_process.returncode = rc
def test_process_not_invoked_if_profile_does_not_exist(self):
# self.loaded_config is an empty dictionary with no profile
# information.
provider = self.create_process_provider()
self.assertIsNone(provider.load())
def test_process_not_invoked_if_not_configured_for_empty_config(self):
# No credential_process configured so we skip this provider.
self.loaded_config['profiles'] = {'default': {}}
provider = self.create_process_provider()
self.assertIsNone(provider.load())
def test_can_retrieve_via_process(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
self.assertEqual(creds.method, 'custom-process')
self.popen_mock.assert_called_with(
['my-process'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
def test_can_pass_arguments_through(self):
self.loaded_config['profiles'] = {
'default': {
'credential_process': 'my-process --foo --bar "one two"'
}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.popen_mock.assert_called_with(
['my-process', '--foo', '--bar', 'one two'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
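    # As the assertion above shows, the credential_process string is split
    # shell-style before being handed to Popen, so quoted arguments such as
    # "one two" stay grouped as a single argv entry.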
def test_can_refresh_credentials(self):
        # We give a time that's already expired so accessing .access_key
        # will trigger the refresh workflow. We just need to verify
        # that the refresh re-invokes the process and hands back the
        # newly returned credentials.
expired_date = '2016-01-01T00:00:00Z'
future_date = str(datetime.now(tzlocal()) + timedelta(hours=24))
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
old_creds = self._get_output({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': expired_date,
})
new_creds = self._get_output({
'Version': 1,
'AccessKeyId': 'foo2',
'SecretAccessKey': 'bar2',
'SessionToken': 'baz2',
'Expiration': future_date,
})
self.invoked_process.communicate.side_effect = [old_creds, new_creds]
self.invoked_process.returncode = 0
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo2')
self.assertEqual(creds.secret_key, 'bar2')
self.assertEqual(creds.token, 'baz2')
self.assertEqual(creds.method, 'custom-process')
def test_non_zero_rc_raises_exception(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value('', 'Error Message', 1)
provider = self.create_process_provider()
exception = botocore.exceptions.CredentialRetrievalError
with self.assertRaisesRegexp(exception, 'Error Message'):
provider.load()
def test_unsupported_version_raises_mismatch(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
bad_version = 100
self._set_process_return_value({
'Version': bad_version,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
exception = botocore.exceptions.CredentialRetrievalError
with self.assertRaisesRegexp(exception, 'Unsupported version'):
provider.load()
def test_missing_version_in_payload_returned_raises_exception(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
# Let's say they forget a 'Version' key.
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
exception = botocore.exceptions.CredentialRetrievalError
with self.assertRaisesRegexp(exception, 'Unsupported version'):
provider.load()
def test_missing_access_key_raises_exception(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
# Missing access key.
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
exception = botocore.exceptions.CredentialRetrievalError
with self.assertRaisesRegexp(exception, 'Missing required key'):
provider.load()
def test_missing_secret_key_raises_exception(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
# Missing secret key.
'SessionToken': 'baz',
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
exception = botocore.exceptions.CredentialRetrievalError
with self.assertRaisesRegexp(exception, 'Missing required key'):
provider.load()
def test_missing_session_token(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
# Missing session token.
'Expiration': '2020-01-01T00:00:00Z',
})
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'custom-process')
def test_missing_expiration(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
'SessionToken': 'baz',
# Missing expiration.
})
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertEqual(creds.token, 'baz')
self.assertEqual(creds.method, 'custom-process')
def test_missing_expiration_and_session_token(self):
self.loaded_config['profiles'] = {
'default': {'credential_process': 'my-process'}
}
self._set_process_return_value({
'Version': 1,
'AccessKeyId': 'foo',
'SecretAccessKey': 'bar',
# Missing session token and expiration
})
provider = self.create_process_provider()
creds = provider.load()
self.assertIsNotNone(creds)
self.assertEqual(creds.access_key, 'foo')
self.assertEqual(creds.secret_key, 'bar')
self.assertIsNone(creds.token)
self.assertEqual(creds.method, 'custom-process')
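# For reference, the payload contract implied by the tests above (sketch):
# a conforming credential_process writes JSON like the following to stdout:
#
#     {
#         "Version": 1,
#         "AccessKeyId": "...",
#         "SecretAccessKey": "...",
#         "SessionToken": "...",                # optional
#         "Expiration": "2020-01-01T00:00:00Z"  # optional; omitting it
#     }                                         # yields static credentials
#
# 'Version', 'AccessKeyId', and 'SecretAccessKey' are required; a missing or
# unsupported 'Version' and a non-zero return code both raise
# CredentialRetrievalError.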
|
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import os
import contextlib
import copy
import mock
from botocore.exceptions import DataNotFoundError, UnknownServiceError
from botocore.loaders import JSONFileLoader
from botocore.loaders import Loader, create_loader
from botocore.loaders import ExtrasProcessor
from tests import BaseEnvVar
class TestJSONFileLoader(BaseEnvVar):
def setUp(self):
super(TestJSONFileLoader, self).setUp()
self.data_path = os.path.join(os.path.dirname(__file__), 'data')
self.file_loader = JSONFileLoader()
self.valid_file_path = os.path.join(self.data_path, 'foo')
def test_load_file(self):
data = self.file_loader.load_file(self.valid_file_path)
self.assertEqual(len(data), 3)
self.assertTrue('test_key_1' in data)
def test_load_json_file_does_not_exist_returns_none(self):
# None is used to indicate that the loader could not find a
# file to load.
self.assertIsNone(self.file_loader.load_file('fooasdfasdfasdf'))
def test_file_exists_check(self):
self.assertTrue(self.file_loader.exists(self.valid_file_path))
def test_file_does_not_exist_returns_false(self):
self.assertFalse(self.file_loader.exists(
os.path.join(self.data_path, 'does', 'not', 'exist')))
def test_file_with_non_ascii(self):
try:
filename = os.path.join(self.data_path, 'non_ascii')
self.assertTrue(self.file_loader.load_file(filename) is not None)
except UnicodeDecodeError:
            self.fail('Failed to handle data file with non-ascii characters')
class TestLoader(BaseEnvVar):
def test_default_search_paths(self):
loader = Loader()
self.assertEqual(len(loader.search_paths), 2)
# We should also have ~/.aws/models added to
        # the search path. To deal with cross-platform
# issues we'll just check for a path that ends
# with .aws/models.
home_dir_path = os.path.join('.aws', 'models')
self.assertTrue(
any(p.endswith(home_dir_path) for p in
loader.search_paths))
def test_can_add_to_search_path(self):
loader = Loader()
loader.search_paths.append('mypath')
self.assertIn('mypath', loader.search_paths)
def test_can_initialize_with_search_paths(self):
loader = Loader(extra_search_paths=['foo', 'bar'])
# Note that the extra search paths are before
# the customer/builtin data paths.
self.assertEqual(
loader.search_paths,
['foo', 'bar', loader.CUSTOMER_DATA_PATH,
loader.BUILTIN_DATA_PATH])
# The file loader isn't consulted unless the current
# search path exists, so we're patching isdir to always
# say that a directory exists.
@mock.patch('os.path.isdir', mock.Mock(return_value=True))
def test_load_data_uses_loader(self):
search_paths = ['foo', 'bar', 'baz']
class FakeLoader(object):
def load_file(self, name):
expected_ending = os.path.join('bar', 'baz')
if name.endswith(expected_ending):
return ['loaded data']
loader = Loader(extra_search_paths=search_paths,
file_loader=FakeLoader())
loaded = loader.load_data('baz')
self.assertEqual(loaded, ['loaded data'])
def test_data_not_found_raises_exception(self):
class FakeLoader(object):
def load_file(self, name):
# Returning None indicates that the
# loader couldn't find anything.
return None
loader = Loader(file_loader=FakeLoader())
with self.assertRaises(DataNotFoundError):
loader.load_data('baz')
@mock.patch('os.path.isdir', mock.Mock(return_value=True))
def test_error_raised_if_service_does_not_exist(self):
loader = Loader(extra_search_paths=[],
include_default_search_paths=False)
with self.assertRaises(DataNotFoundError):
loader.determine_latest_version('unknownservice', 'service-2')
@mock.patch('os.path.isdir', mock.Mock(return_value=True))
def test_load_service_model(self):
class FakeLoader(object):
def load_file(self, name):
return ['loaded data']
loader = Loader(extra_search_paths=['foo'],
file_loader=FakeLoader(),
include_default_search_paths=False,
include_default_extras=False)
loader.determine_latest_version = mock.Mock(return_value='2015-03-01')
loader.list_available_services = mock.Mock(return_value=['baz'])
loaded = loader.load_service_model('baz', type_name='service-2')
self.assertEqual(loaded, ['loaded data'])
@mock.patch('os.path.isdir', mock.Mock(return_value=True))
def test_load_service_model_enforces_case(self):
class FakeLoader(object):
def load_file(self, name):
return ['loaded data']
loader = Loader(extra_search_paths=['foo'],
file_loader=FakeLoader(),
include_default_search_paths=False)
loader.determine_latest_version = mock.Mock(return_value='2015-03-01')
loader.list_available_services = mock.Mock(return_value=['baz'])
# Should have a) the unknown service name and b) list of valid
# service names.
with self.assertRaisesRegexp(UnknownServiceError,
'Unknown service.*BAZ.*baz'):
loader.load_service_model('BAZ', type_name='service-2')
def test_load_service_model_uses_provided_type_name(self):
loader = Loader(extra_search_paths=['foo'],
file_loader=mock.Mock(),
include_default_search_paths=False)
loader.list_available_services = mock.Mock(return_value=['baz'])
# Should have a) the unknown service name and b) list of valid
# service names.
provided_type_name = 'not-service-2'
with self.assertRaisesRegexp(UnknownServiceError,
'Unknown service.*BAZ.*baz'):
loader.load_service_model(
'BAZ', type_name=provided_type_name)
loader.list_available_services.assert_called_with(provided_type_name)
def test_create_loader_parses_data_path(self):
search_path = os.pathsep.join(['foo', 'bar', 'baz'])
loader = create_loader(search_path)
self.assertIn('foo', loader.search_paths)
self.assertIn('bar', loader.search_paths)
self.assertIn('baz', loader.search_paths)
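    # Usage sketch (assuming the usual data-path convention shown above):
    # create_loader() takes a single os.pathsep separated string, e.g.
    #
    #     create_loader(os.pathsep.join(['/custom/models', '/more/models']))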
class TestMergeExtras(BaseEnvVar):
def setUp(self):
super(TestMergeExtras, self).setUp()
self.file_loader = mock.Mock()
self.data_loader = Loader(
extra_search_paths=['datapath'], file_loader=self.file_loader,
include_default_search_paths=False)
self.data_loader.determine_latest_version = mock.Mock(
return_value='2015-03-01')
self.data_loader.list_available_services = mock.Mock(
return_value=['myservice'])
isdir_mock = mock.Mock(return_value=True)
self.isdir_patch = mock.patch('os.path.isdir', isdir_mock)
self.isdir_patch.start()
def tearDown(self):
super(TestMergeExtras, self).tearDown()
self.isdir_patch.stop()
def test_merge_extras(self):
service_data = {'foo': 'service', 'bar': 'service'}
sdk_extras = {'merge': {'foo': 'sdk'}}
self.file_loader.load_file.side_effect = [service_data, sdk_extras]
loaded = self.data_loader.load_service_model('myservice', 'service-2')
expected = {'foo': 'sdk', 'bar': 'service'}
self.assertEqual(loaded, expected)
call_args = self.file_loader.load_file.call_args_list
call_args = [c[0][0] for c in call_args]
base_path = os.path.join('datapath', 'myservice', '2015-03-01')
expected_call_args = [
os.path.join(base_path, 'service-2'),
os.path.join(base_path, 'service-2.sdk-extras')
]
self.assertEqual(call_args, expected_call_args)
def test_extras_not_found(self):
service_data = {'foo': 'service', 'bar': 'service'}
service_data_copy = copy.copy(service_data)
self.file_loader.load_file.side_effect = [service_data, None]
loaded = self.data_loader.load_service_model('myservice', 'service-2')
self.assertEqual(loaded, service_data_copy)
def test_no_merge_in_extras(self):
service_data = {'foo': 'service', 'bar': 'service'}
service_data_copy = copy.copy(service_data)
self.file_loader.load_file.side_effect = [service_data, {}]
loaded = self.data_loader.load_service_model('myservice', 'service-2')
self.assertEqual(loaded, service_data_copy)
def test_include_default_extras(self):
self.data_loader = Loader(
extra_search_paths=['datapath'], file_loader=self.file_loader,
include_default_search_paths=False,
include_default_extras=False)
self.data_loader.determine_latest_version = mock.Mock(
return_value='2015-03-01')
self.data_loader.list_available_services = mock.Mock(
return_value=['myservice'])
service_data = {'foo': 'service', 'bar': 'service'}
service_data_copy = copy.copy(service_data)
sdk_extras = {'merge': {'foo': 'sdk'}}
self.file_loader.load_file.side_effect = [service_data, sdk_extras]
loaded = self.data_loader.load_service_model('myservice', 'service-2')
self.assertEqual(loaded, service_data_copy)
def test_append_extra_type(self):
service_data = {'foo': 'service', 'bar': 'service'}
sdk_extras = {'merge': {'foo': 'sdk'}}
cli_extras = {'merge': {'cli': True}}
self.file_loader.load_file.side_effect = [
service_data, sdk_extras, cli_extras]
self.data_loader.extras_types.append('cli')
loaded = self.data_loader.load_service_model('myservice', 'service-2')
expected = {'foo': 'sdk', 'bar': 'service', 'cli': True}
self.assertEqual(loaded, expected)
call_args = self.file_loader.load_file.call_args_list
call_args = [c[0][0] for c in call_args]
base_path = os.path.join('datapath', 'myservice', '2015-03-01')
expected_call_args = [
os.path.join(base_path, 'service-2'),
os.path.join(base_path, 'service-2.sdk-extras'),
os.path.join(base_path, 'service-2.cli-extras')
]
self.assertEqual(call_args, expected_call_args)
def test_sdk_empty_extras_skipped(self):
service_data = {'foo': 'service', 'bar': 'service'}
cli_extras = {'merge': {'foo': 'cli'}}
self.file_loader.load_file.side_effect = [
service_data, None, cli_extras]
self.data_loader.extras_types.append('cli')
loaded = self.data_loader.load_service_model('myservice', 'service-2')
expected = {'foo': 'cli', 'bar': 'service'}
self.assertEqual(loaded, expected)
class TestExtrasProcessor(BaseEnvVar):
def setUp(self):
super(TestExtrasProcessor, self).setUp()
self.processor = ExtrasProcessor()
self.service_data = {
'shapes': {
'StringShape': {'type': 'string'},
}
}
self.service_data_copy = copy.deepcopy(self.service_data)
def test_process_empty_list(self):
self.processor.process(self.service_data, [])
self.assertEqual(self.service_data, self.service_data_copy)
def test_process_empty_extras(self):
self.processor.process(self.service_data, [{}])
self.assertEqual(self.service_data, self.service_data_copy)
def test_process_merge_key(self):
extras = {'merge': {'shapes': {'BooleanShape': {'type': 'boolean'}}}}
self.processor.process(self.service_data, [extras])
self.assertNotEqual(self.service_data, self.service_data_copy)
boolean_shape = self.service_data['shapes'].get('BooleanShape')
self.assertEqual(boolean_shape, {'type': 'boolean'})
def test_process_in_order(self):
extras = [
{'merge': {'shapes': {'BooleanShape': {'type': 'boolean'}}}},
{'merge': {'shapes': {'BooleanShape': {'type': 'string'}}}}
]
self.processor.process(self.service_data, extras)
self.assertNotEqual(self.service_data, self.service_data_copy)
boolean_shape = self.service_data['shapes'].get('BooleanShape')
self.assertEqual(boolean_shape, {'type': 'string'})
class TestLoadersWithDirectorySearching(BaseEnvVar):
def setUp(self):
super(TestLoadersWithDirectorySearching, self).setUp()
self.fake_directories = {}
def tearDown(self):
super(TestLoadersWithDirectorySearching, self).tearDown()
@contextlib.contextmanager
def loader_with_fake_dirs(self):
mock_file_loader = mock.Mock()
mock_file_loader.exists = self.fake_exists
search_paths = list(self.fake_directories)
loader = Loader(extra_search_paths=search_paths,
include_default_search_paths=False,
file_loader=mock_file_loader)
with mock.patch('os.listdir', self.fake_listdir):
with mock.patch('os.path.isdir', mock.Mock(return_value=True)):
yield loader
def fake_listdir(self, dirname):
parts = dirname.split(os.path.sep)
result = self.fake_directories
while parts:
current = parts.pop(0)
result = result[current]
return list(result)
def fake_exists(self, path):
parts = path.split(os.sep)
result = self.fake_directories
while len(parts) > 1:
current = parts.pop(0)
result = result[current]
return parts[0] in result
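    # A note on the fake directory convention used below: nested dicts model
    # directories and leaf lists model the files inside them, so
    #
    #     {'foo': {'ec2': {'2010-01-01': ['service-2']}}}
    #
    # stands for a search path 'foo' containing foo/ec2/2010-01-01/service-2.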
def test_list_available_services(self):
self.fake_directories = {
'foo': {
'ec2': {
'2010-01-01': ['service-2'],
'2014-10-01': ['service-1'],
},
'dynamodb': {
'2010-01-01': ['service-2'],
},
},
'bar': {
'ec2': {
'2015-03-01': ['service-1'],
},
'rds': {
                    # This will not show up in
                    # list_available_services() for type
                    # service-2 because it does not contain
                    # a service-2 file.
'2012-01-01': ['resource-1'],
},
},
}
with self.loader_with_fake_dirs() as loader:
self.assertEqual(
loader.list_available_services(type_name='service-2'),
['dynamodb', 'ec2'])
self.assertEqual(
loader.list_available_services(type_name='resource-1'),
['rds'])
def test_determine_latest(self):
        # Fake mapping of directories to subdirectories.
        # In this example the latest EC2 API directory that actually
        # contains a service-2 model is 2014-10-01 (under 'foo');
        # the newer 2015-03-01 directory (under 'bar') only has a
        # service-1 model, so determine_latest_version() depends on
        # the type requested.
self.fake_directories = {
'foo': {
'ec2': {
'2010-01-01': ['service-2'],
# This directory contains the latest API version
                    # for EC2 because it's the highest API directory
# that contains a service-2.
'2014-10-01': ['service-2'],
},
},
'bar': {
'ec2': {
'2012-01-01': ['service-2'],
                    # 2015-03-01 is *not* the latest for service-2,
# because its directory only has service-1.json.
'2015-03-01': ['service-1'],
},
},
}
with self.loader_with_fake_dirs() as loader:
self.assertEqual(loader.determine_latest_version('ec2', 'service-2'),
'2014-10-01')
self.assertEqual(loader.determine_latest_version('ec2', 'service-1'),
'2015-03-01')
|
#!/usr/bin/env python
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import botocore.config
from tests import unittest
import mock
from botocore import args
from botocore.config import Config
class TestCreateClientArgs(unittest.TestCase):
def setUp(self):
self.args_create = args.ClientArgsCreator(None, None, None, None, None)
def test_compute_s3_configuration(self):
scoped_config = {}
client_config = None
self.assertIsNone(
self.args_create.compute_s3_config(
scoped_config, client_config))
def test_compute_s3_config_only_scoped_config(self):
scoped_config = {
's3': {'use_accelerate_endpoint': True},
}
client_config = None
self.assertEqual(
self.args_create.compute_s3_config(scoped_config, client_config),
{'use_accelerate_endpoint': True}
)
def test_client_s3_accelerate_from_varying_forms_of_true(self):
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_accelerate_endpoint': 'True'}},
client_config=None),
{'use_accelerate_endpoint': True}
)
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_accelerate_endpoint': 'true'}},
client_config=None),
{'use_accelerate_endpoint': True}
)
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_accelerate_endpoint': True}},
client_config=None),
{'use_accelerate_endpoint': True}
)
def test_client_s3_accelerate_from_client_config(self):
self.assertEqual(
self.args_create.compute_s3_config(
scoped_config=None,
client_config=Config(s3={'use_accelerate_endpoint': True})
),
{'use_accelerate_endpoint': True}
)
def test_client_s3_accelerate_client_config_overrides_scoped(self):
self.assertEqual(
self.args_create.compute_s3_config(
scoped_config={'s3': {'use_accelerate_endpoint': False}},
client_config=Config(s3={'use_accelerate_endpoint': True})
),
# client_config beats scoped_config
{'use_accelerate_endpoint': True}
)
def test_client_s3_dualstack_handles_varying_forms_of_true(self):
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_dualstack_endpoint': 'True'}},
client_config=None),
{'use_dualstack_endpoint': True}
)
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_dualstack_endpoint': 'true'}},
client_config=None),
{'use_dualstack_endpoint': True}
)
self.assertEqual(
self.args_create.compute_s3_config(
{'s3': {'use_dualstack_endpoint': True}},
client_config=None),
{'use_dualstack_endpoint': True}
)
def test_max_pool_from_client_config_forwarded_to_endpoint_creator(self):
args_create = args.ClientArgsCreator(
mock.Mock(), None, None, None, None)
config = botocore.config.Config(max_pool_connections=20)
service_model = mock.Mock()
service_model.metadata = {
'serviceFullName': 'MyService',
'protocol': 'query'
}
service_model.operation_names = []
bridge = mock.Mock()
bridge.resolve.return_value = {
'region_name': 'us-west-2', 'signature_version': 'v4',
'endpoint_url': 'https://ec2/',
'signing_name': 'ec2', 'signing_region': 'us-west-2',
'metadata': {}}
with mock.patch('botocore.args.EndpointCreator') as m:
args_create.get_client_args(
service_model, 'us-west-2', True, 'https://ec2/', True,
None, {}, config, bridge)
m.return_value.create_endpoint.assert_called_with(
mock.ANY, endpoint_url='https://ec2/', region_name='us-west-2',
response_parser_factory=None, timeout=(60, 60), verify=True,
max_pool_connections=20, proxies=None
)
def test_proxies_from_client_config_forwarded_to_endpoint_creator(self):
args_create = args.ClientArgsCreator(
mock.Mock(), None, None, None, None)
proxies = {'http': 'http://foo.bar:1234',
'https': 'https://foo.bar:4321'}
config = botocore.config.Config(proxies=proxies)
service_model = mock.Mock()
service_model.metadata = {
'serviceFullName': 'MyService',
'protocol': 'query'
}
service_model.operation_names = []
bridge = mock.Mock()
bridge.resolve.return_value = {
'region_name': 'us-west-2', 'signature_version': 'v4',
'endpoint_url': 'https://ec2/',
'signing_name': 'ec2', 'signing_region': 'us-west-2',
'metadata': {}}
with mock.patch('botocore.args.EndpointCreator') as m:
args_create.get_client_args(
service_model, 'us-west-2', True, 'https://ec2/', True,
None, {}, config, bridge)
m.return_value.create_endpoint.assert_called_with(
mock.ANY, endpoint_url='https://ec2/', region_name='us-west-2',
response_parser_factory=None, timeout=(60, 60), verify=True,
proxies=proxies, max_pool_connections=10
)
def test_s3_with_endpoint_url_still_resolves_region(self):
self.args_create = args.ClientArgsCreator(
mock.Mock(), None, None, None, None)
service_model = mock.Mock()
service_model.endpoint_prefix = 's3'
service_model.metadata = {'protocol': 'rest-xml'}
config = botocore.config.Config()
bridge = mock.Mock()
bridge.resolve.side_effect = [
{
'region_name': None, 'signature_version': 's3v4',
'endpoint_url': 'http://other.com/', 'signing_name': 's3',
'signing_region': None, 'metadata': {}
},
{
'region_name': 'us-west-2', 'signature_version': 's3v4',
                'endpoint_url': 'https://s3-us-west-2.amazonaws.com',
'signing_name': 's3', 'signing_region': 'us-west-2',
'metadata': {}
}
]
client_args = self.args_create.get_client_args(
service_model, 'us-west-2', True, 'http://other.com/', True, None,
{}, config, bridge)
self.assertEqual(
client_args['client_config'].region_name, 'us-west-2')
def test_region_does_not_resolve_if_not_s3_and_endpoint_url_provided(self):
self.args_create = args.ClientArgsCreator(
mock.Mock(), None, None, None, None)
service_model = mock.Mock()
service_model.endpoint_prefix = 'ec2'
service_model.metadata = {'protocol': 'query'}
config = botocore.config.Config()
bridge = mock.Mock()
bridge.resolve.side_effect = [{
'region_name': None, 'signature_version': 'v4',
'endpoint_url': 'http://other.com/', 'signing_name': 'ec2',
'signing_region': None, 'metadata': {}
}]
client_args = self.args_create.get_client_args(
service_model, 'us-west-2', True, 'http://other.com/', True, None,
{}, config, bridge)
self.assertEqual(client_args['client_config'].region_name, None)
def test_provide_retry_config(self):
self.args_create = args.ClientArgsCreator(
mock.Mock(), None, None, None, None)
service_model = mock.Mock()
service_model.endpoint_prefix = 'ec2'
service_model.metadata = {'protocol': 'query'}
config = botocore.config.Config(
retries={'max_attempts': 10}
)
bridge = mock.Mock()
bridge.resolve.side_effect = [{
'region_name': None, 'signature_version': 'v4',
'endpoint_url': 'http://other.com/', 'signing_name': 'ec2',
'signing_region': None, 'metadata': {}
}]
client_args = self.args_create.get_client_args(
service_model, 'us-west-2', True, 'https://ec2/', True, None,
{}, config, bridge)
self.assertEqual(
client_args['client_config'].retries, {'max_attempts': 10})
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import copy
import functools
from tests import unittest
from functools import partial
from botocore.hooks import HierarchicalEmitter, first_non_none_response
class TestHierarchicalEventEmitter(unittest.TestCase):
def setUp(self):
self.emitter = HierarchicalEmitter()
self.hook_calls = []
def hook(self, **kwargs):
self.hook_calls.append(kwargs)
def test_non_dot_behavior(self):
self.emitter.register('no-dot', self.hook)
self.emitter.emit('no-dot')
self.assertEqual(len(self.hook_calls), 1)
def test_with_dots(self):
self.emitter.register('foo.bar.baz', self.hook)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 1)
def test_catch_all_hook(self):
self.emitter.register('foo', self.hook)
self.emitter.register('foo.bar', self.hook)
self.emitter.register('foo.bar.baz', self.hook)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 3, self.hook_calls)
# The hook is called with the same event name three times.
self.assertEqual([e['event_name'] for e in self.hook_calls],
['foo.bar.baz', 'foo.bar.baz', 'foo.bar.baz'])
def test_hook_called_in_proper_order(self):
# We should call the hooks from most specific to least
# specific.
calls = []
self.emitter.register('foo', lambda **kwargs: calls.append('foo'))
self.emitter.register('foo.bar',
lambda **kwargs: calls.append('foo.bar'))
self.emitter.register('foo.bar.baz',
lambda **kwargs: calls.append('foo.bar.baz'))
self.emitter.emit('foo.bar.baz')
self.assertEqual(calls, ['foo.bar.baz', 'foo.bar', 'foo'])
class TestStopProcessing(unittest.TestCase):
def setUp(self):
self.emitter = HierarchicalEmitter()
self.hook_calls = []
def hook1(self, **kwargs):
self.hook_calls.append('hook1')
def hook2(self, **kwargs):
self.hook_calls.append('hook2')
return 'hook2-response'
def hook3(self, **kwargs):
self.hook_calls.append('hook3')
return 'hook3-response'
def test_all_hooks(self):
# Here we register three hooks and sanity check
# that all three would be called by a normal emit.
        # This ensures our hook calls are set up properly for
# later tests.
self.emitter.register('foo', self.hook1)
self.emitter.register('foo', self.hook2)
self.emitter.register('foo', self.hook3)
self.emitter.emit('foo')
self.assertEqual(self.hook_calls, ['hook1', 'hook2', 'hook3'])
def test_stop_processing_after_first_response(self):
# Here we register three hooks, but only the first
# two should ever execute.
self.emitter.register('foo', self.hook1)
self.emitter.register('foo', self.hook2)
self.emitter.register('foo', self.hook3)
handler, response = self.emitter.emit_until_response('foo')
self.assertEqual(response, 'hook2-response')
self.assertEqual(self.hook_calls, ['hook1', 'hook2'])
def test_no_responses(self):
# Here we register a handler that will not return a response
# and ensure we get back proper values.
self.emitter.register('foo', self.hook1)
responses = self.emitter.emit('foo')
self.assertEqual(self.hook_calls, ['hook1'])
self.assertEqual(responses, [(self.hook1, None)])
def test_no_handlers(self):
# Here we have no handlers, but still expect a tuple of return
# values.
handler, response = self.emitter.emit_until_response('foo')
self.assertIsNone(handler)
self.assertIsNone(response)
class TestFirstNonNoneResponse(unittest.TestCase):
def test_all_none(self):
self.assertIsNone(first_non_none_response([]))
def test_first_non_none(self):
correct_value = 'correct_value'
wrong_value = 'wrong_value'
# The responses are tuples of (handler, response),
# and we don't care about the handler so we just use a value of
# None.
responses = [(None, None), (None, correct_value), (None, wrong_value)]
self.assertEqual(first_non_none_response(responses), correct_value)
    def test_default_value_if_no_non_none_found(self):
responses = [(None, None), (None, None)]
# If no response is found and a default value is passed in, it will
# be returned.
self.assertEqual(
first_non_none_response(responses, default='notfound'), 'notfound')
class TestWildcardHandlers(unittest.TestCase):
def setUp(self):
self.emitter = HierarchicalEmitter()
self.hook_calls = []
def hook(self, **kwargs):
self.hook_calls.append(kwargs)
def register(self, event_name):
func = partial(self.hook, registered_with=event_name)
self.emitter.register(event_name, func)
return func
def assert_hook_is_called_given_event(self, event):
starting = len(self.hook_calls)
self.emitter.emit(event)
after = len(self.hook_calls)
        if after <= starting:
self.fail("Handler was not called for event: %s" % event)
self.assertEqual(self.hook_calls[-1]['event_name'], event)
def assert_hook_is_not_called_given_event(self, event):
starting = len(self.hook_calls)
self.emitter.emit(event)
after = len(self.hook_calls)
        if after != starting:
            self.fail("Handler was called for event but was not "
                      "supposed to be called: %s, last_event: %s" %
                      (event, self.hook_calls[-1]))
def test_one_level_wildcard_handler(self):
self.emitter.register('foo.*.baz', self.hook)
# Also register for a number of other events to check
# for false positives.
self.emitter.register('other.bar.baz', self.hook)
self.emitter.register('qqq.baz', self.hook)
self.emitter.register('dont.call.me', self.hook)
self.emitter.register('dont', self.hook)
# These calls should trigger our hook.
self.assert_hook_is_called_given_event('foo.bar.baz')
self.assert_hook_is_called_given_event('foo.qux.baz')
self.assert_hook_is_called_given_event('foo.anything.baz')
# These calls should not match our hook.
self.assert_hook_is_not_called_given_event('foo')
self.assert_hook_is_not_called_given_event('foo.bar')
self.assert_hook_is_not_called_given_event('bar.qux.baz')
self.assert_hook_is_not_called_given_event('foo-bar')
def test_hierarchical_wildcard_handler(self):
self.emitter.register('foo.*.baz', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.qux')
self.assert_hook_is_called_given_event('foo.bar.baz.qux.foo')
self.assert_hook_is_called_given_event('foo.qux.baz.qux')
self.assert_hook_is_called_given_event('foo.qux.baz.qux.foo')
self.assert_hook_is_not_called_given_event('bar.qux.baz.foo')
def test_multiple_wildcard_events(self):
self.emitter.register('foo.*.*.baz', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_called_given_event('foo.ANY.THING.baz')
self.assert_hook_is_called_given_event('foo.AT.ALL.baz')
# More specific than what we registered for.
self.assert_hook_is_called_given_event('foo.bar.baz.baz.extra')
self.assert_hook_is_called_given_event('foo.bar.baz.baz.extra.stuff')
# Too short:
self.assert_hook_is_not_called_given_event('foo')
self.assert_hook_is_not_called_given_event('foo.bar')
self.assert_hook_is_not_called_given_event('foo.bar.baz')
# Bad ending segment.
self.assert_hook_is_not_called_given_event('foo.ANY.THING.notbaz')
self.assert_hook_is_not_called_given_event('foo.ANY.THING.stillnotbaz')
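    # Taken together, the cases above pin down the matching rules: each '*'
    # matches exactly one dot-separated part, and an emitted event may carry
    # extra trailing parts beyond the registered pattern, but never fewer.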
def test_can_unregister_for_wildcard_events(self):
self.emitter.register('foo.*.*.baz', self.hook)
# Call multiple times to verify caching behavior.
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.emitter.unregister('foo.*.*.baz', self.hook)
self.assert_hook_is_not_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_not_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_not_called_given_event('foo.bar.baz.baz')
self.emitter.register('foo.*.*.baz', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
def test_unregister_does_not_exist(self):
self.emitter.register('foo.*.*.baz', self.hook)
self.emitter.unregister('foo.*.*.baz', self.hook)
self.emitter.unregister('foo.*.*.baz', self.hook)
self.assert_hook_is_not_called_given_event('foo.bar.baz.baz')
def test_cache_cleared_properly(self):
self.emitter.register('foo.*.*.baz', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.emitter.register('foo.*.*.bar', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.baz')
self.assert_hook_is_called_given_event('foo.bar.baz.bar')
self.emitter.unregister('foo.*.*.baz', self.hook)
self.assert_hook_is_called_given_event('foo.bar.baz.bar')
self.assert_hook_is_not_called_given_event('foo.bar.baz.baz')
def test_complicated_register_unregister(self):
r = self.emitter.register
u = partial(self.emitter.unregister, handler=self.hook)
r('foo.bar.baz.qux', self.hook)
r('foo.bar.baz', self.hook)
r('foo.bar', self.hook)
r('foo', self.hook)
u('foo.bar.baz')
u('foo')
u('foo.bar')
self.assert_hook_is_called_given_event('foo.bar.baz.qux')
self.assert_hook_is_not_called_given_event('foo.bar.baz')
self.assert_hook_is_not_called_given_event('foo.bar')
self.assert_hook_is_not_called_given_event('foo')
def test_register_multiple_handlers_for_same_event(self):
self.emitter.register('foo.bar.baz', self.hook)
self.emitter.register('foo.bar.baz', self.hook)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 2)
def test_register_with_unique_id(self):
self.emitter.register('foo.bar.baz', self.hook, unique_id='foo')
# Since we're using the same unique_id, this registration is ignored.
self.emitter.register('foo.bar.baz', self.hook, unique_id='foo')
# This also works across event names, so this registration is ignored
# as well.
self.emitter.register('foo.other', self.hook, unique_id='foo')
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 1)
self.hook_calls = []
self.emitter.emit('foo.other')
self.assertEqual(len(self.hook_calls), 0)
def test_remove_handler_with_unique_id(self):
hook2 = lambda **kwargs: self.hook_calls.append(kwargs)
self.emitter.register('foo.bar.baz', self.hook, unique_id='foo')
self.emitter.register('foo.bar.baz', hook2)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 2)
# Reset the hook calls.
self.hook_calls = []
self.emitter.unregister('foo.bar.baz', hook2)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 1)
self.hook_calls = []
# Can provide the unique_id to unregister.
self.emitter.unregister('foo.bar.baz', unique_id='foo')
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 0)
# Same as with not specifying a unique_id, you can call
# unregister multiple times and not get an exception.
self.emitter.unregister('foo.bar.baz', unique_id='foo')
def test_remove_handler_with_and_without_unique_id(self):
self.emitter.register('foo.bar.baz', self.hook, unique_id='foo')
self.emitter.register('foo.bar.baz', self.hook)
self.emitter.unregister('foo.bar.baz', self.hook)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 1)
self.hook_calls = []
self.emitter.unregister('foo.bar.baz', self.hook)
self.emitter.emit('foo.bar.baz')
self.assertEqual(len(self.hook_calls), 0)
def test_register_with_uses_count_initially(self):
self.emitter.register('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
# Subsequent calls must set ``unique_id_uses_count`` to True.
with self.assertRaises(ValueError):
self.emitter.register('foo', self.hook, unique_id='foo')
def test_register_with_uses_count_not_initially(self):
self.emitter.register('foo', self.hook, unique_id='foo')
# Subsequent calls must set ``unique_id_uses_count`` to False.
with self.assertRaises(ValueError):
self.emitter.register('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
def test_register_with_uses_count_unregister(self):
self.emitter.register('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
self.emitter.register('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
        # The event was registered with a use count, so unregistering
        # must also specify that a count is used.
with self.assertRaises(ValueError):
self.emitter.unregister('foo', self.hook, unique_id='foo')
# Event should not have been unregistered.
self.emitter.emit('foo')
self.assertEqual(len(self.hook_calls), 1)
self.emitter.unregister('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
        # The handler should still be registered; the use count has
        # only been decremented once.
self.hook_calls = []
self.emitter.emit('foo')
self.assertEqual(len(self.hook_calls), 1)
self.emitter.unregister('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
# Now the event should be unregistered.
self.hook_calls = []
self.emitter.emit('foo')
self.assertEqual(len(self.hook_calls), 0)
def test_register_with_no_uses_count_unregister(self):
self.emitter.register('foo', self.hook, unique_id='foo')
        # The event was not registered with a use count initially.
with self.assertRaises(ValueError):
self.emitter.unregister('foo', self.hook, unique_id='foo',
unique_id_uses_count=True)
def test_handlers_called_in_order(self):
def handler(call_number, **kwargs):
kwargs['call_number'] = call_number
self.hook_calls.append(kwargs)
self.emitter.register('foo', partial(handler, call_number=1))
self.emitter.register('foo', partial(handler, call_number=2))
self.emitter.emit('foo')
self.assertEqual([k['call_number'] for k in self.hook_calls],
[1, 2])
def test_handler_call_order_with_hierarchy(self):
def handler(call_number, **kwargs):
kwargs['call_number'] = call_number
self.hook_calls.append(kwargs)
        # We go from most specific to least specific, and within each
        # hierarchy level the handlers are called in the order they
        # were registered.
self.emitter.register('foo.bar.baz', partial(handler, call_number=1))
self.emitter.register('foo.bar', partial(handler, call_number=3))
self.emitter.register('foo', partial(handler, call_number=5))
self.emitter.register('foo.bar.baz', partial(handler, call_number=2))
self.emitter.register('foo.bar', partial(handler, call_number=4))
self.emitter.register('foo', partial(handler, call_number=6))
self.emitter.emit('foo.bar.baz')
self.assertEqual([k['call_number'] for k in self.hook_calls],
[1, 2, 3, 4, 5, 6])
def test_register_first_single_level(self):
def handler(call_number, **kwargs):
kwargs['call_number'] = call_number
self.hook_calls.append(kwargs)
# Handlers registered through register_first() are always called
# before handlers registered with register().
self.emitter.register('foo', partial(handler, call_number=3))
self.emitter.register('foo', partial(handler, call_number=4))
self.emitter.register_first('foo', partial(handler, call_number=1))
self.emitter.register_first('foo', partial(handler, call_number=2))
self.emitter.register('foo', partial(handler, call_number=5))
self.emitter.emit('foo')
self.assertEqual([k['call_number'] for k in self.hook_calls],
[1, 2, 3, 4, 5])
def test_register_first_hierarchy(self):
def handler(call_number, **kwargs):
kwargs['call_number'] = call_number
self.hook_calls.append(kwargs)
self.emitter.register('foo', partial(handler, call_number=5))
self.emitter.register('foo.bar', partial(handler, call_number=2))
self.emitter.register_first('foo', partial(handler, call_number=4))
self.emitter.register_first('foo.bar', partial(handler, call_number=1))
self.emitter.register('foo', partial(handler, call_number=6))
self.emitter.register('foo.bar', partial(handler, call_number=3))
self.emitter.emit('foo.bar')
self.assertEqual([k['call_number'] for k in self.hook_calls],
[1, 2, 3, 4, 5, 6])
def test_register_last_hierarchy(self):
def handler(call_number, **kwargs):
kwargs['call_number'] = call_number
self.hook_calls.append(kwargs)
self.emitter.register_last('foo', partial(handler, call_number=3))
self.emitter.register('foo', partial(handler, call_number=2))
self.emitter.register_first('foo', partial(handler, call_number=1))
self.emitter.emit('foo')
self.assertEqual([k['call_number'] for k in self.hook_calls],
[1, 2, 3])
def test_register_unregister_first_last(self):
self.emitter.register('foo', self.hook)
self.emitter.register_last('foo.bar', self.hook)
self.emitter.register_first('foo.bar.baz', self.hook)
self.emitter.unregister('foo.bar.baz', self.hook)
self.emitter.unregister('foo.bar', self.hook)
self.emitter.unregister('foo', self.hook)
self.emitter.emit('foo')
self.assertEqual(self.hook_calls, [])
def test_copy_emitter(self):
        # Here we're not testing copy directly; we're testing the
        # observable behavior of copying an event emitter.
first = []
def first_handler(id_name, **kwargs):
first.append(id_name)
second = []
def second_handler(id_name, **kwargs):
second.append(id_name)
self.emitter.register('foo.bar.baz', first_handler)
# First time we emit, only the first handler should be called.
self.emitter.emit('foo.bar.baz', id_name='first-time')
self.assertEqual(first, ['first-time'])
self.assertEqual(second, [])
copied_emitter = copy.copy(self.emitter)
# If we emit from the copied emitter, we should still
# only see the first handler called.
copied_emitter.emit('foo.bar.baz', id_name='second-time')
self.assertEqual(first, ['first-time', 'second-time'])
self.assertEqual(second, [])
# However, if we register an event handler with the copied
# emitter, the first emitter will not see this.
copied_emitter.register('foo.bar.baz', second_handler)
copied_emitter.emit('foo.bar.baz', id_name='third-time')
self.assertEqual(first, ['first-time', 'second-time', 'third-time'])
# And now the second handler is called.
self.assertEqual(second, ['third-time'])
# And vice-versa, emitting from the original emitter
# will not trigger the second_handler.
# We'll double check this by unregistering/re-registering
# the event handler.
self.emitter.unregister('foo.bar.baz', first_handler)
self.emitter.register('foo.bar.baz', first_handler)
self.emitter.emit('foo.bar.baz', id_name='last-time')
self.assertEqual(second, ['third-time'])
def test_copy_emitter_with_unique_id_event(self):
        # Here we're not testing copy directly; we're testing the
        # observable behavior of copying an event emitter.
first = []
def first_handler(id_name, **kwargs):
first.append(id_name)
second = []
def second_handler(id_name, **kwargs):
second.append(id_name)
self.emitter.register('foo', first_handler, 'bar')
self.emitter.emit('foo', id_name='first-time')
self.assertEqual(first, ['first-time'])
self.assertEqual(second, [])
copied_emitter = copy.copy(self.emitter)
# If we register an event handler with the copied
# emitter, the event should not get registered again
# because the unique id was already used.
copied_emitter.register('foo', second_handler, 'bar')
copied_emitter.emit('foo', id_name='second-time')
self.assertEqual(first, ['first-time', 'second-time'])
self.assertEqual(second, [])
        # If we unregister the first handler from the copied emitter,
        # we should be able to register the second handler.
copied_emitter.unregister('foo', first_handler, 'bar')
copied_emitter.register('foo', second_handler, 'bar')
copied_emitter.emit('foo', id_name='third-time')
self.assertEqual(first, ['first-time', 'second-time'])
self.assertEqual(second, ['third-time'])
        # The original event emitter should still have the unique-id
        # handler registered, though.
self.emitter.emit('foo', id_name='fourth-time')
self.assertEqual(first, ['first-time', 'second-time', 'fourth-time'])
self.assertEqual(second, ['third-time'])
def test_copy_events_with_partials(self):
# There's a bug in python2.6 where you can't deepcopy
# a partial object. We want to ensure that doesn't
# break when a partial is hooked up as an event handler.
def handler(a, b, **kwargs):
return b
f = functools.partial(handler, 1)
self.emitter.register('a.b', f)
copied = copy.copy(self.emitter)
self.assertEqual(copied.emit_until_response(
'a.b', b='return-val')[1], 'return-val')
if __name__ == '__main__':
unittest.main()
|
# Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from botocore import translate
class TestBuildRetryConfig(unittest.TestCase):
def setUp(self):
self.retry = {
"definitions": {
"def_name": {
"from": {"definition": "file"}
}
},
"retry": {
"__default__": {
"max_attempts": 5,
"delay": "global_delay",
"policies": {
"global_one": "global",
"override_me": "global",
}
},
"sts": {
"__default__": {
"delay": "service_specific_delay",
"policies": {
"service_one": "service",
"override_me": "service",
}
},
"AssumeRole": {
"policies": {
"name": "policy",
"other": {"$ref": "def_name"}
}
}
}
}
}
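        # Reading guide for this fixture (as the tests below verify): the
        # service-level '__default__' overlays the global '__default__',
        # per-operation policies are merged on top, and '$ref' entries are
        # resolved against the 'definitions' map.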
def test_inject_retry_config(self):
retry = translate.build_retry_config('sts', self.retry['retry'],
self.retry['definitions'])
self.assertIn('__default__', retry)
self.assertEqual(
retry['__default__'], {
"max_attempts": 5,
"delay": "service_specific_delay",
"policies": {
"global_one": "global",
"override_me": "service",
"service_one": "service",
}
}
)
# Policies should be merged.
operation_config = retry['AssumeRole']
self.assertEqual(operation_config['policies']['name'], 'policy')
def test_resolve_reference(self):
retry = translate.build_retry_config('sts', self.retry['retry'],
self.retry['definitions'])
operation_config = retry['AssumeRole']
# And we should resolve references.
self.assertEqual(operation_config['policies']['other'],
{"from": {"definition": "file"}})
def test_service_specific_defaults_no_mutate_default_retry(self):
retry = translate.build_retry_config('sts', self.retry['retry'],
self.retry['definitions'])
# sts has a specific policy
self.assertEqual(
retry['__default__'], {
"max_attempts": 5,
"delay": "service_specific_delay",
"policies": {
"global_one": "global",
"override_me": "service",
"service_one": "service",
}
}
)
        # The general defaults for the upstream model should not have been
        # mutated by building the retry config.
self.assertEqual(
self.retry['retry']['__default__'],
{
"max_attempts": 5,
"delay": "global_delay",
"policies": {
"global_one": "global",
"override_me": "global",
}
}
)
def test_client_override_max_attempts(self):
retry = translate.build_retry_config(
'sts', self.retry['retry'], self.retry['definitions'],
client_retry_config={'max_attempts': 9}
)
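        # The client-level setting counts retries, while the translated
        # model counts total attempts (retries plus the initial request),
        # so a client value of 9 surfaces as 10 here.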
self.assertEqual(retry['__default__']['max_attempts'], 10)
# But it should not mutate the original retry model
self.assertEqual(
self.retry['retry']['__default__']['max_attempts'], 5)
|
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
import mock
from nose.tools import assert_equal, assert_raises
from botocore.exceptions import MD5UnavailableError
from botocore.compat import (
total_seconds, unquote_str, six, ensure_bytes, get_md5, compat_shell_split
)
from tests import BaseEnvVar, unittest
class TotalSecondsTest(BaseEnvVar):
def test_total_seconds(self):
delta = datetime.timedelta(days=1, seconds=45)
remaining = total_seconds(delta)
self.assertEqual(remaining, 86445.0)
delta = datetime.timedelta(seconds=33, microseconds=772)
remaining = total_seconds(delta)
self.assertEqual(remaining, 33.000772)
class TestUnquoteStr(unittest.TestCase):
def test_unquote_str(self):
value = u'%E2%9C%93'
        # Note: the value is percent-decoded and then UTF-8 decoded to
        # unicode. This works in both python2 and python3.
self.assertEqual(unquote_str(value), u'\u2713')
def test_unquote_normal(self):
value = u'foo'
        # Note: the value is percent-decoded and then UTF-8 decoded to
        # unicode. This works in both python2 and python3.
self.assertEqual(unquote_str(value), u'foo')
def test_unquote_with_spaces(self):
value = u'foo+bar'
        # Note: the value is percent-decoded and then UTF-8 decoded to
        # unicode. This works in both python2 and python3.
self.assertEqual(unquote_str(value), 'foo bar')
class TestEnsureBytes(unittest.TestCase):
def test_string(self):
value = 'foo'
response = ensure_bytes(value)
self.assertIsInstance(response, six.binary_type)
self.assertEqual(response, b'foo')
def test_binary(self):
value = b'bar'
response = ensure_bytes(value)
self.assertIsInstance(response, six.binary_type)
self.assertEqual(response, b'bar')
def test_unicode(self):
value = u'baz'
response = ensure_bytes(value)
self.assertIsInstance(response, six.binary_type)
self.assertEqual(response, b'baz')
def test_non_ascii(self):
value = u'\u2713'
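        # U+2713 (CHECK MARK) encodes to the three UTF-8 bytes e2 9c 93.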
response = ensure_bytes(value)
self.assertIsInstance(response, six.binary_type)
self.assertEqual(response, b'\xe2\x9c\x93')
def test_non_string_or_bytes_raises_error(self):
value = 500
with self.assertRaises(ValueError):
ensure_bytes(value)
class TestGetMD5(unittest.TestCase):
def test_available(self):
md5 = mock.Mock()
with mock.patch('botocore.compat.MD5_AVAILABLE', True):
with mock.patch('hashlib.md5', mock.Mock(return_value=md5)):
self.assertEqual(get_md5(), md5)
def test_unavailable_raises_error(self):
with mock.patch('botocore.compat.MD5_AVAILABLE', False):
with self.assertRaises(MD5UnavailableError):
get_md5()
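# The shell-split checks below are nose-style test generators: each yielded
# (callable, *args) tuple is collected and run as an individual test case.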
def test_compat_shell_split_windows():
windows_cases = {
r'': [],
r'spam \\': [r'spam', '\\\\'],
r'spam ': [r'spam'],
r' spam': [r'spam'],
'spam eggs': [r'spam', r'eggs'],
'spam\teggs': [r'spam', r'eggs'],
'spam\neggs': ['spam\neggs'],
'""': [''],
'" "': [' '],
'"\t"': ['\t'],
'\\\\': ['\\\\'],
'\\\\ ': ['\\\\'],
'\\\\\t': ['\\\\'],
r'\"': ['"'],
# The following four test cases are official test cases given in
# Microsoft's documentation.
r'"abc" d e': [r'abc', r'd', r'e'],
r'a\\b d"e f"g h': [r'a\\b', r'de fg', r'h'],
r'a\\\"b c d': [r'a\"b', r'c', r'd'],
r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e']
}
runner = ShellSplitTestRunner()
for input_string, expected_output in windows_cases.items():
yield runner.assert_equal, input_string, expected_output, "win32"
yield runner.assert_raises, r'"', ValueError, "win32"
def test_compat_shell_split_unix():
unix_cases = {
r'': [],
r'spam \\': [r'spam', '\\'],
r'spam ': [r'spam'],
r' spam': [r'spam'],
'spam eggs': [r'spam', r'eggs'],
'spam\teggs': [r'spam', r'eggs'],
'spam\neggs': ['spam', 'eggs'],
'""': [''],
'" "': [' '],
'"\t"': ['\t'],
'\\\\': ['\\'],
'\\\\ ': ['\\'],
'\\\\\t': ['\\'],
r'\"': ['"'],
# The following four test cases are official test cases given in
# Microsoft's documentation, but adapted to unix shell splitting.
r'"abc" d e': [r'abc', r'd', r'e'],
r'a\\b d"e f"g h': [r'a\b', r'de fg', r'h'],
r'a\\\"b c d': [r'a\"b', r'c', r'd'],
r'a\\\\"b c" d e': [r'a\\b c', r'd', r'e']
}
runner = ShellSplitTestRunner()
for input_string, expected_output in unix_cases.items():
yield runner.assert_equal, input_string, expected_output, "linux2"
yield runner.assert_equal, input_string, expected_output, "darwin"
yield runner.assert_raises, r'"', ValueError, "linux2"
yield runner.assert_raises, r'"', ValueError, "darwin"
class ShellSplitTestRunner(object):
def assert_equal(self, s, expected, platform):
assert_equal(compat_shell_split(s, platform), expected)
def assert_raises(self, s, exception_cls, platform):
assert_raises(exception_cls, compat_shell_split, s, platform)
|
"""Additional tests for request serialization.
While there are compliance tests in tests/unit/protocols where
the majority of the request serialization/response parsing is tested,
this test module contains additional tests that go above and beyond the
spec. This can happen for a number of reasons:
* We are testing python specific behavior that doesn't make sense as a
compliance test.
* We are testing behavior that is not strictly part of the spec. These
  tests fill a coverage gap that would otherwise go untested.
"""
import base64
import json
import datetime
import dateutil.tz
from tests import unittest
from botocore.model import ServiceModel
from botocore import serialize
from botocore.compat import six
from botocore.exceptions import ParamValidationError
class BaseModelWithBlob(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'query', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'Blob': {'shape': 'BlobType'},
}
},
'BlobType': {
'type': 'blob',
}
}
}
def serialize_to_request(self, input_params):
service_model = ServiceModel(self.model)
request_serializer = serialize.create_serializer(
service_model.metadata['protocol'])
return request_serializer.serialize_to_request(
input_params, service_model.operation_model('TestOperation'))
def assert_serialized_blob_equals(self, request, blob_bytes):
# This method handles all the details of the base64 decoding.
encoded = base64.b64encode(blob_bytes)
# Now the serializers actually have the base64 encoded contents
# as str types so we need to decode back. We know that this is
# ascii so it's safe to use the ascii encoding.
expected = encoded.decode('ascii')
self.assertEqual(request['body']['Blob'], expected)
class TestBinaryTypes(BaseModelWithBlob):
def test_blob_accepts_bytes_type(self):
body = b'bytes body'
request = self.serialize_to_request(input_params={'Blob': body})
self.assert_serialized_blob_equals(
request, blob_bytes=body)
def test_blob_accepts_str_type(self):
body = u'ascii text'
request = self.serialize_to_request(input_params={'Blob': body})
self.assert_serialized_blob_equals(
request, blob_bytes=body.encode('ascii'))
def test_blob_handles_unicode_chars(self):
body = u'\u2713'
request = self.serialize_to_request(input_params={'Blob': body})
self.assert_serialized_blob_equals(
request, blob_bytes=body.encode('utf-8'))
class TestBinaryTypesJSON(BaseModelWithBlob):
def setUp(self):
super(TestBinaryTypesJSON, self).setUp()
self.model['metadata'] = {
'protocol': 'json',
'apiVersion': '2014-01-01',
'jsonVersion': '1.1',
'targetPrefix': 'foo',
}
def test_blob_accepts_bytes_type(self):
body = b'bytes body'
request = self.serialize_to_request(input_params={'Blob': body})
serialized_blob = json.loads(request['body'].decode('utf-8'))['Blob']
self.assertEqual(
base64.b64encode(body).decode('ascii'),
serialized_blob)
class TestBinaryTypesWithRestXML(BaseModelWithBlob):
def setUp(self):
super(TestBinaryTypesWithRestXML, self).setUp()
self.model['metadata'] = {
'protocol': 'rest-xml',
'apiVersion': '2014-01-01',
}
self.model['operations']['TestOperation']['input'] = {
'shape': 'InputShape',
'locationName': 'OperationRequest',
'payload': 'Blob',
}
def test_blob_serialization_with_file_like_object(self):
body = six.BytesIO(b'foobar')
request = self.serialize_to_request(input_params={'Blob': body})
self.assertEqual(request['body'], body)
def test_blob_serialization_when_payload_is_unicode(self):
# When the body is a text type, we should encode the
# text to bytes.
body = u'\u2713'
request = self.serialize_to_request(input_params={'Blob': body})
self.assertEqual(request['body'], body.encode('utf-8'))
def test_blob_serialization_when_payload_is_bytes(self):
body = b'bytes body'
request = self.serialize_to_request(input_params={'Blob': body})
self.assertEqual(request['body'], body)
class TestTimestampHeadersWithRestXML(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'rest-xml', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'TimestampHeader': {
'shape': 'TimestampType',
'location': 'header',
'locationName': 'x-timestamp'
},
}
},
'TimestampType': {
'type': 'timestamp',
}
}
}
self.service_model = ServiceModel(self.model)
def serialize_to_request(self, input_params):
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'])
return request_serializer.serialize_to_request(
input_params, self.service_model.operation_model('TestOperation'))
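    # Header-bound timestamps serialize to RFC 822 regardless of which
    # input representation is given, as each test below shows.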
def test_accepts_datetime_object(self):
request = self.serialize_to_request(
{'TimestampHeader': datetime.datetime(2014, 1, 1, 12, 12, 12,
tzinfo=dateutil.tz.tzutc())})
self.assertEqual(request['headers']['x-timestamp'],
'Wed, 01 Jan 2014 12:12:12 GMT')
def test_accepts_iso_8601_format(self):
request = self.serialize_to_request(
{'TimestampHeader': '2014-01-01T12:12:12+00:00'})
self.assertEqual(request['headers']['x-timestamp'],
'Wed, 01 Jan 2014 12:12:12 GMT')
def test_accepts_iso_8601_format_non_utc(self):
request = self.serialize_to_request(
{'TimestampHeader': '2014-01-01T07:12:12-05:00'})
self.assertEqual(request['headers']['x-timestamp'],
'Wed, 01 Jan 2014 12:12:12 GMT')
def test_accepts_rfc_822_format(self):
request = self.serialize_to_request(
{'TimestampHeader': 'Wed, 01 Jan 2014 12:12:12 GMT'})
self.assertEqual(request['headers']['x-timestamp'],
'Wed, 01 Jan 2014 12:12:12 GMT')
def test_accepts_unix_timestamp_integer(self):
request = self.serialize_to_request(
{'TimestampHeader': 1388578332})
self.assertEqual(request['headers']['x-timestamp'],
'Wed, 01 Jan 2014 12:12:12 GMT')
class TestTimestamps(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'query', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'Timestamp': {'shape': 'TimestampType'},
}
},
'TimestampType': {
'type': 'timestamp',
}
}
}
self.service_model = ServiceModel(self.model)
def serialize_to_request(self, input_params):
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'])
return request_serializer.serialize_to_request(
input_params, self.service_model.operation_model('TestOperation'))
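    # The query protocol serializes timestamps as ISO 8601 strings.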
def test_accepts_datetime_object(self):
request = self.serialize_to_request(
{'Timestamp': datetime.datetime(2014, 1, 1, 12, 12, 12,
tzinfo=dateutil.tz.tzutc())})
self.assertEqual(request['body']['Timestamp'], '2014-01-01T12:12:12Z')
def test_accepts_naive_datetime_object(self):
request = self.serialize_to_request(
{'Timestamp': datetime.datetime(2014, 1, 1, 12, 12, 12)})
self.assertEqual(request['body']['Timestamp'], '2014-01-01T12:12:12Z')
def test_accepts_iso_8601_format(self):
request = self.serialize_to_request(
{'Timestamp': '2014-01-01T12:12:12Z'})
self.assertEqual(request['body']['Timestamp'], '2014-01-01T12:12:12Z')
def test_accepts_timestamp_without_tz_info(self):
        # If a timezone/utc is not specified, assume they meant
        # UTC. This matches the behavior of older versions of
        # botocore, so we want to make sure we preserve it.
request = self.serialize_to_request(
{'Timestamp': '2014-01-01T12:12:12'})
self.assertEqual(request['body']['Timestamp'], '2014-01-01T12:12:12Z')
def test_microsecond_timestamp_without_tz_info(self):
request = self.serialize_to_request(
{'Timestamp': '2014-01-01T12:12:12.123456'})
self.assertEqual(request['body']['Timestamp'],
'2014-01-01T12:12:12.123456Z')
class TestJSONTimestampSerialization(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'json', 'apiVersion': '2014-01-01',
'jsonVersion': '1.1', 'targetPrefix': 'foo'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'Timestamp': {'shape': 'TimestampType'},
}
},
'TimestampType': {
'type': 'timestamp',
}
}
}
self.service_model = ServiceModel(self.model)
def serialize_to_request(self, input_params):
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'])
return request_serializer.serialize_to_request(
input_params, self.service_model.operation_model('TestOperation'))
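    # The json protocol serializes timestamps as integer epoch seconds.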
def test_accepts_iso_8601_format(self):
body = json.loads(self.serialize_to_request(
{'Timestamp': '1970-01-01T00:00:00'})['body'].decode('utf-8'))
self.assertEqual(body['Timestamp'], 0)
def test_accepts_epoch(self):
body = json.loads(self.serialize_to_request(
{'Timestamp': '0'})['body'].decode('utf-8'))
self.assertEqual(body['Timestamp'], 0)
# Can also be an integer 0.
body = json.loads(self.serialize_to_request(
{'Timestamp': 0})['body'].decode('utf-8'))
self.assertEqual(body['Timestamp'], 0)
def test_accepts_partial_iso_format(self):
body = json.loads(self.serialize_to_request(
{'Timestamp': '1970-01-01'})['body'].decode('utf-8'))
self.assertEqual(body['Timestamp'], 0)
class TestInstanceCreation(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'query', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'Timestamp': {'shape': 'StringTestType'},
}
},
'StringTestType': {
'type': 'string',
'min': 15
}
}
}
self.service_model = ServiceModel(self.model)
def assert_serialize_valid_parameter(self, request_serializer):
valid_string = 'valid_string_with_min_15_chars'
request = request_serializer.serialize_to_request(
{'Timestamp': valid_string},
self.service_model.operation_model('TestOperation'))
self.assertEqual(request['body']['Timestamp'], valid_string)
def assert_serialize_invalid_parameter(self, request_serializer):
invalid_string = 'short string'
request = request_serializer.serialize_to_request(
{'Timestamp': invalid_string},
self.service_model.operation_model('TestOperation'))
self.assertEqual(request['body']['Timestamp'], invalid_string)
def test_instantiate_without_validation(self):
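        # The second argument to create_serializer toggles parameter
        # validation on the returned serializer.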
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'], False)
try:
self.assert_serialize_valid_parameter(request_serializer)
except ParamValidationError as e:
            self.fail(
                "Shouldn't fail serializing valid parameter without "
                "validation: {}".format(e))
try:
self.assert_serialize_invalid_parameter(request_serializer)
except ParamValidationError as e:
            self.fail(
                "Shouldn't fail serializing invalid parameter without "
                "validation: {}".format(e))
def test_instantiate_with_validation(self):
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'], True)
try:
self.assert_serialize_valid_parameter(request_serializer)
except ParamValidationError as e:
            self.fail(
                "Shouldn't fail serializing valid parameter with "
                "validation: {}".format(e))
with self.assertRaises(ParamValidationError):
self.assert_serialize_invalid_parameter(request_serializer)
class TestHeaderSerialization(BaseModelWithBlob):
def setUp(self):
self.model = {
'metadata': {'protocol': 'rest-xml', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'ContentLength': {
'shape': 'Integer',
'location': 'header',
'locationName': 'Content-Length'
},
}
},
'Integer': {
'type': 'integer'
},
}
}
self.service_model = ServiceModel(self.model)
def test_always_serialized_as_str(self):
request = self.serialize_to_request({'ContentLength': 100})
self.assertEqual(request['headers']['Content-Length'], '100')
class TestRestXMLUnicodeSerialization(unittest.TestCase):
def setUp(self):
self.model = {
'metadata': {'protocol': 'rest-xml', 'apiVersion': '2014-01-01'},
'documentation': '',
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'InputShape'},
}
},
'shapes': {
'InputShape': {
'type': 'structure',
'members': {
'Foo': {
'shape': 'FooShape',
'locationName': 'Foo'
},
},
'payload': 'Foo'
},
'FooShape': {
'type': 'list',
'member': {'shape': 'StringShape'}
},
'StringShape': {
'type': 'string',
}
}
}
self.service_model = ServiceModel(self.model)
def serialize_to_request(self, input_params):
request_serializer = serialize.create_serializer(
self.service_model.metadata['protocol'])
return request_serializer.serialize_to_request(
input_params, self.service_model.operation_model('TestOperation'))
def test_restxml_serializes_unicode(self):
params = {
'Foo': [u'\u65e5\u672c\u8a9e\u3067\u304a\uff4b']
}
try:
self.serialize_to_request(params)
except UnicodeEncodeError:
self.fail("RestXML serializer failed to serialize unicode text.")
|
from tests import unittest
from datetime import datetime
import decimal
from botocore.compat import six
from botocore.model import ShapeResolver
from botocore.validate import ParamValidator
BOILER_PLATE_SHAPES = {
'StringType': {
'type': 'string'
}
}
class BaseTestValidate(unittest.TestCase):
def assert_has_validation_errors(self, given_shapes, input_params, errors):
        # Given the shape definitions ``given_shapes`` and the user input
        # parameters ``input_params``, verify that validation produces
        # errors whose report contains each message in ``errors``.
        # This assumes the input shape is named "Input".
errors_found = self.get_validation_error_message(
given_shapes, input_params)
self.assertTrue(errors_found.has_errors())
error_message = errors_found.generate_report()
for error in errors:
self.assertIn(error, error_message)
def get_validation_error_message(self, given_shapes, input_params):
s = ShapeResolver(given_shapes)
input_shape = s.get_shape_by_name('Input')
validator = ParamValidator()
errors_found = validator.validate(input_params, input_shape)
return errors_found
class TestValidateRequiredParams(BaseTestValidate):
def test_validate_required_params(self):
self.assert_has_validation_errors(
given_shapes={
'Input': {
'type': 'structure',
'required': ['A', 'B'],
'members': {
'A': {'shape': 'StringType'},
'B': {'shape': 'StringType'}
}
},
'StringType': {'type': 'string'}
},
input_params={'A': 'foo'},
errors=['Missing required parameter'])
def test_validate_nested_required_param(self):
self.assert_has_validation_errors(
given_shapes={
'Input': {
'type': 'structure',
'members': {
'A': {'shape': 'SubStruct'}
}
},
'SubStruct': {
'type': 'structure',
'required': ['B', 'C'],
'members': {
'B': {'shape': 'StringType'},
'C': {'shape': 'StringType'},
}
},
'StringType': {
'type': 'string',
}
},
input_params={'A': {'B': 'foo'}},
errors=['Missing required parameter'])
def test_validate_unknown_param(self):
self.assert_has_validation_errors(
given_shapes={
'Input': {
'type': 'structure',
'required': ['A'],
'members': {
'A': {'shape': 'StringType'},
}
},
'StringType': {'type': 'string'}
},
input_params={'A': 'foo', 'B': 'bar'},
errors=['Unknown parameter'])
class TestValidateJSONValueTrait(BaseTestValidate):
def test_accepts_jsonvalue_string(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'json': {
'shape': 'StrType',
'jsonvalue': True,
'location': 'header',
'locationName': 'header-name'
}
}
},
'StrType': {'type': 'string'}
}
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={
'json': {'data': [1, 2.3, '3'], 'unicode': u'\u2713'}
})
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_validate_jsonvalue_string(self):
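        # A datetime is not JSON serializable, so validation should fail.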
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'json': {
'shape': 'StrType',
'jsonvalue': True,
'location': 'header',
'locationName': 'header-name'
}
}
},
'StrType': {'type': 'string'}
}
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'json': {'date': datetime(2017, 4, 27, 0, 0)}
},
errors=[
('Invalid parameter json must be json serializable: ')
])
class TestValidateTypes(BaseTestValidate):
def setUp(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'Str': {'shape': 'StrType'},
'Int': {'shape': 'IntType'},
'Bool': {'shape': 'BoolType'},
'List': {'shape': 'ListType'},
'Struct': {'shape': 'StructType'},
'Double': {'shape': 'DoubleType'},
'Long': {'shape': 'LongType'},
'Map': {'shape': 'MapType'},
'Timestamp': {'shape': 'TimeType'},
}
},
'StrType': {'type': 'string'},
'IntType': {'type': 'integer'},
'BoolType': {'type': 'boolean'},
'ListType': {'type': 'list'},
'StructType': {'type': 'structure'},
'DoubleType': {'type': 'double'},
'LongType': {'type': 'long'},
'MapType': {'type': 'map'},
'TimeType': {'type': 'timestamp'},
}
def test_validate_string(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Str': 24,
'Int': 'notInt',
'Bool': 'notBool',
'List': 'notList',
'Struct': 'notDict',
'Double': 'notDouble',
'Long': 'notLong',
'Map': 'notDict',
'Timestamp': 'notTimestamp',
},
errors=[
'Invalid type for parameter Str',
'Invalid type for parameter Int',
'Invalid type for parameter Bool',
'Invalid type for parameter List',
'Invalid type for parameter Struct',
'Invalid type for parameter Double',
'Invalid type for parameter Long',
'Invalid type for parameter Map',
'Invalid type for parameter Timestamp',
]
)
def test_datetime_type_accepts_datetime_obj(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Timestamp': datetime.now(),})
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_datetime_accepts_string_timestamp(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Timestamp': '2014-01-01 12:00:00'})
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_can_handle_none_datetimes(self):
        # This specifically tests a workaround for a bug in dateutil
        # where low-level exceptions can propagate back up to us.
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Timestamp': None})
error_msg = errors.generate_report()
self.assertIn('Invalid type for parameter Timestamp', error_msg)
class TestValidateRanges(BaseTestValidate):
def setUp(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'Int': {'shape': 'IntType'},
                    'Long': {'shape': 'LongType'},
'String': {'shape': 'StringType'},
'List': {'shape': 'ListType'},
'OnlyMin': {'shape': 'MinStrOnly'},
'OnlyMax': {'shape': 'MaxStrOnly'},
}
},
'IntType': {
'type': 'integer',
'min': 0,
'max': 1000,
},
'LongType': {
'type': 'long',
'min': 0,
'max': 1000,
},
'StringType': {
'type': 'string',
'min': 1,
'max': 10,
},
'MinStrOnly': {
'type': 'string',
'min': 1
},
'MaxStrOnly': {
'type': 'string',
'max': 10
},
'ListType': {
'type': 'list',
'min': 1,
'max': 5,
'member': {
'shape': 'StringType'
}
},
}
def test_less_than_range(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Int': -10,
'Long': -10,
},
errors=[
'Invalid range for parameter Int',
'Invalid range for parameter Long',
]
)
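    # Only minimum constraints are enforced client-side; values above the
    # modeled max are passed through for the service to reject, as the
    # "does_not_validate" tests below confirm.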
def test_does_not_validate_greater_than_range(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={
'Int': 100000000,
'Long': 100000000,
},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_within_range(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Int': 10})
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
    def test_string_min_length_constraint(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'String': '',
},
errors=[
'Invalid length for parameter String',
]
)
    def test_does_not_validate_string_max_length_constraint(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={
'String': 'more than ten characters',
},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_list_min_length_constraint(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'List': [],
},
errors=[
'Invalid length for parameter List',
]
)
def test_does_not_validate_list_max_length_constraint(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={
'List': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'],
},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_only_min_value_specified(self):
        # min and max don't both have to be provided.
        # It's valid to have just a min with no max, and
        # vice versa.
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'OnlyMin': '',
},
errors=[
'Invalid length for parameter OnlyMin',
]
)
def test_does_not_validate_max_when_only_max_value_specified(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={
'OnlyMax': 'more than ten characters',
},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
class TestValidateMapType(BaseTestValidate):
def setUp(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'Map': {'shape': 'MapType'},
}
},
'MapType': {
'type': 'map',
'key': {'shape': 'StringType'},
'value': {'shape': 'StringType'},
},
'StringType': {
'type': 'string',
'min': 2,
},
}
def test_validate_keys_and_values(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Map': {'foo': '', 'a': 'foobar'}
},
errors=[
'Invalid length for parameter Map',
]
)
class TestValidationFloatType(BaseTestValidate):
def setUp(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'Float': {'shape': 'FloatType'},
}
},
'FloatType': {
'type': 'float',
'min': 2,
'max': 5,
},
}
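    # decimal.Decimal values are accepted for float shapes and are still
    # range-checked, as the two decimal tests below show.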
def test_range_float(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Float': 1,
},
errors=[
'Invalid range for parameter Float',
]
)
def test_decimal_allowed(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Float': decimal.Decimal('2.12345')})
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_decimal_still_validates_range(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Float': decimal.Decimal('1'),
},
errors=[
'Invalid range for parameter Float',
]
)
class TestValidateTypeBlob(BaseTestValidate):
def setUp(self):
self.shapes = {
'Input': {
'type': 'structure',
'members': {
'Blob': {'shape': 'BlobType'},
}
},
'BlobType': {
'type': 'blob',
'min': 2,
'max': 5,
},
}
def test_validates_bytes(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Blob': b'12345'}
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_validates_bytearray(self):
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Blob': bytearray(b'12345')},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_validates_file_like_object(self):
value = six.BytesIO(b'foo')
errors = self.get_validation_error_message(
given_shapes=self.shapes,
input_params={'Blob': value},
)
error_msg = errors.generate_report()
self.assertEqual(error_msg, '')
def test_validate_type(self):
self.assert_has_validation_errors(
given_shapes=self.shapes,
input_params={
'Blob': 24,
},
errors=[
'Invalid type for parameter Blob',
]
)
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from mock import Mock, patch, sentinel
from botocore.vendored.requests import ConnectionError
from botocore.compat import six
from botocore.awsrequest import AWSRequest
from botocore.endpoint import Endpoint, DEFAULT_TIMEOUT
from botocore.endpoint import EndpointCreator
from botocore.endpoint import BotocoreHTTPSession
from botocore.exceptions import EndpointConnectionError
from botocore.exceptions import ConnectionClosedError
def request_dict():
return {
'headers': {},
'body': '',
'url_path': '/',
'query_string': '',
'method': 'POST',
'url': 'https://example.com',
'context': {}
}
class RecordStreamResets(six.StringIO):
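    """A StringIO that counts seek() calls so tests can assert how many
    times a request body was rewound across retries."""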
def __init__(self, value):
six.StringIO.__init__(self, value)
self.total_resets = 0
def seek(self, where):
self.total_resets += 1
six.StringIO.seek(self, where)
class TestEndpointBase(unittest.TestCase):
def setUp(self):
self.op = Mock()
self.op.has_streaming_output = False
self.op.metadata = {'protocol': 'json'}
self.event_emitter = Mock()
self.event_emitter.emit.return_value = []
self.factory_patch = patch(
'botocore.parsers.ResponseParserFactory')
self.factory = self.factory_patch.start()
self.endpoint = Endpoint(
'https://ec2.us-west-2.amazonaws.com/',
endpoint_prefix='ec2',
event_emitter=self.event_emitter)
self.http_session = Mock()
self.http_session.send.return_value = Mock(
status_code=200, headers={}, content=b'{"Foo": "bar"}',
)
self.endpoint.http_session = self.http_session
def tearDown(self):
self.factory_patch.stop()
class TestEndpointFeatures(TestEndpointBase):
def test_timeout_can_be_specified(self):
timeout_override = 120
self.endpoint.timeout = timeout_override
self.endpoint.make_request(self.op, request_dict())
kwargs = self.http_session.send.call_args[1]
self.assertEqual(kwargs['timeout'], timeout_override)
def test_make_request_with_proxies(self):
proxies = {'http': 'http://localhost:8888'}
self.endpoint.proxies = proxies
self.endpoint.make_request(self.op, request_dict())
prepared_request = self.http_session.send.call_args[0][0]
self.http_session.send.assert_called_with(
prepared_request, verify=True, stream=False,
proxies=proxies, timeout=DEFAULT_TIMEOUT)
def test_make_request_with_no_auth(self):
self.endpoint.auth = None
self.endpoint.make_request(self.op, request_dict())
# http_session should be used to send the request.
self.assertTrue(self.http_session.send.called)
prepared_request = self.http_session.send.call_args[0][0]
self.assertNotIn('Authorization', prepared_request.headers)
def test_make_request_no_signature_version(self):
self.endpoint.make_request(self.op, request_dict())
# http_session should be used to send the request.
self.assertTrue(self.http_session.send.called)
prepared_request = self.http_session.send.call_args[0][0]
self.assertNotIn('Authorization', prepared_request.headers)
def test_make_request_injects_better_dns_error_msg(self):
fake_request = Mock(url='https://ec2.us-west-2.amazonaws.com')
self.http_session.send.side_effect = ConnectionError(
"Fake gaierror(8, node or host not known)", request=fake_request)
with self.assertRaisesRegexp(EndpointConnectionError,
'Could not connect'):
self.endpoint.make_request(self.op, request_dict())
def test_make_request_injects_better_bad_status_line_error_msg(self):
fake_request = Mock(url='https://ec2.us-west-2.amazonaws.com')
self.http_session.send.side_effect = ConnectionError(
"""'Connection aborted.', BadStatusLine("''",)""",
request=fake_request)
with self.assertRaisesRegexp(ConnectionClosedError,
'Connection was closed'):
self.endpoint.make_request(self.op, request_dict())
def test_make_request_with_context(self):
r = request_dict()
r['context'] = {'signing': {'region': 'us-west-2'}}
with patch('botocore.endpoint.Endpoint.prepare_request') as prepare:
self.endpoint.make_request(self.op, r)
request = prepare.call_args[0][0]
self.assertEqual(request.context['signing']['region'], 'us-west-2')
def test_can_specify_max_pool_connections(self):
endpoint = Endpoint('https://ec2.us-west-2.amazonaws.com', 'ec2',
self.event_emitter, max_pool_connections=50)
        # We could look at
        # endpoint.http_session.adapters['https://']._pool_maxsize,
        # but that feels like testing too much implementation detail.
self.assertEqual(endpoint.max_pool_connections, 50)
def test_can_specify_proxies(self):
proxies = {'http': 'http://foo.bar:1234'}
endpoint = Endpoint('https://ec2.us-west-2.amazonaws.com', 'ec2',
self.event_emitter, proxies=proxies)
self.assertEqual(endpoint.proxies, proxies)
class TestRetryInterface(TestEndpointBase):
def setUp(self):
super(TestRetryInterface, self).setUp()
self.retried_on_exception = None
def test_retry_events_are_emitted(self):
op = Mock()
op.name = 'DescribeInstances'
op.metadata = {'protocol': 'query'}
op.has_streaming_output = False
self.endpoint.make_request(op, request_dict())
call_args = self.event_emitter.emit.call_args
self.assertEqual(call_args[0][0],
'needs-retry.ec2.DescribeInstances')
def test_retry_events_can_alter_behavior(self):
op = Mock()
op.name = 'DescribeInstances'
op.metadata = {'protocol': 'json'}
self.event_emitter.emit.side_effect = [
[(None, None)], # Request created.
[(None, 0)], # Check if retry needed. Retry needed.
[(None, None)], # Request created.
[(None, None)] # Check if retry needed. Retry not needed.
]
self.endpoint.make_request(op, request_dict())
call_args = self.event_emitter.emit.call_args_list
self.assertEqual(self.event_emitter.emit.call_count, 4)
# Check that all of the events are as expected.
self.assertEqual(call_args[0][0][0],
'request-created.ec2.DescribeInstances')
self.assertEqual(call_args[1][0][0],
'needs-retry.ec2.DescribeInstances')
self.assertEqual(call_args[2][0][0],
'request-created.ec2.DescribeInstances')
self.assertEqual(call_args[3][0][0],
'needs-retry.ec2.DescribeInstances')
def test_retry_on_socket_errors(self):
op = Mock()
op.name = 'DescribeInstances'
self.event_emitter.emit.side_effect = [
[(None, None)], # Request created.
[(None, 0)], # Check if retry needed. Retry needed.
[(None, None)], # Request created
[(None, None)] # Check if retry needed. Retry not needed.
]
self.http_session.send.side_effect = ConnectionError()
with self.assertRaises(ConnectionError):
self.endpoint.make_request(op, request_dict())
call_args = self.event_emitter.emit.call_args_list
self.assertEqual(self.event_emitter.emit.call_count, 4)
# Check that all of the events are as expected.
self.assertEqual(call_args[0][0][0],
'request-created.ec2.DescribeInstances')
self.assertEqual(call_args[1][0][0],
'needs-retry.ec2.DescribeInstances')
self.assertEqual(call_args[2][0][0],
'request-created.ec2.DescribeInstances')
self.assertEqual(call_args[3][0][0],
'needs-retry.ec2.DescribeInstances')
def test_retry_attempts_added_to_response_metadata(self):
        # Note: Mock's ``name`` kwarg doesn't set the .name attribute.
        op = Mock()
        op.name = 'DescribeInstances'
op.metadata = {'protocol': 'query'}
self.event_emitter.emit.side_effect = [
[(None, None)], # Request created.
[(None, 0)], # Check if retry needed. Retry needed.
[(None, None)], # Request created.
[(None, None)] # Check if retry needed. Retry not needed.
]
parser = Mock()
parser.parse.return_value = {'ResponseMetadata': {}}
self.factory.return_value.create_parser.return_value = parser
response = self.endpoint.make_request(op, request_dict())
self.assertEqual(response[1]['ResponseMetadata']['RetryAttempts'], 1)
def test_retry_attempts_is_zero_when_not_retried(self):
        op = Mock(metadata={'protocol': 'query'})
        op.name = 'DescribeInstances'
self.event_emitter.emit.side_effect = [
[(None, None)], # Request created.
            [(None, None)], # Check if retry needed. Retry not needed.
]
parser = Mock()
parser.parse.return_value = {'ResponseMetadata': {}}
self.factory.return_value.create_parser.return_value = parser
response = self.endpoint.make_request(op, request_dict())
self.assertEqual(response[1]['ResponseMetadata']['RetryAttempts'], 0)
class TestS3ResetStreamOnRetry(TestEndpointBase):
def setUp(self):
        super(TestS3ResetStreamOnRetry, self).setUp()
        self.total_calls = 0
def max_attempts_retry_handler(self, attempts, **kwargs):
        # Simulate a maximum of 3 attempts.
self.total_calls += 1
if attempts == 3:
return None
else:
# Returning anything non-None will trigger a retry,
# but 0 here is so that time.sleep(0) happens.
return 0
def test_reset_stream_on_retry(self):
op = Mock()
body = RecordStreamResets('foobar')
op.name = 'PutObject'
op.has_streaming_output = True
op.metadata = {'protocol': 'rest-xml'}
request = request_dict()
request['body'] = body
self.event_emitter.emit.side_effect = [
[(None, None)], # Request created.
[(None, 0)], # Check if retry needed. Needs Retry.
[(None, None)], # Request created.
[(None, 0)], # Check if retry needed again. Needs Retry.
[(None, None)], # Request created.
            [(None, None)], # Finally emit that no retry is needed.
]
self.endpoint.make_request(op, request)
self.assertEqual(body.total_resets, 2)
class TestEndpointCreator(unittest.TestCase):
def setUp(self):
self.service_model = Mock(
endpoint_prefix='ec2', signature_version='v2',
signing_name='ec2')
self.environ = {}
self.environ_patch = patch('os.environ', self.environ)
self.environ_patch.start()
self.creator = EndpointCreator(Mock())
def tearDown(self):
self.environ_patch.stop()
def test_creates_endpoint_with_configured_url(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-east-1',
endpoint_url='https://endpoint.url')
self.assertEqual(endpoint.host, 'https://endpoint.url')
def test_create_endpoint_with_default_timeout(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com')
self.assertEqual(endpoint.timeout, DEFAULT_TIMEOUT)
def test_create_endpoint_with_customized_timeout(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com', timeout=123)
self.assertEqual(endpoint.timeout, 123)
def test_get_endpoint_default_verify_ssl(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com')
self.assertTrue(endpoint.verify)
def test_verify_ssl_can_be_disabled(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com', verify=False)
self.assertFalse(endpoint.verify)
def test_verify_ssl_can_specify_cert_bundle(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com', verify='/path/cacerts.pem')
self.assertEqual(endpoint.verify, '/path/cacerts.pem')
def test_honor_cert_bundle_env_var(self):
self.environ['REQUESTS_CA_BUNDLE'] = '/env/cacerts.pem'
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com')
self.assertEqual(endpoint.verify, '/env/cacerts.pem')
def test_env_ignored_if_explicitly_passed(self):
self.environ['REQUESTS_CA_BUNDLE'] = '/env/cacerts.pem'
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com', verify='/path/cacerts.pem')
# /path/cacerts.pem wins over the value from the env var.
self.assertEqual(endpoint.verify, '/path/cacerts.pem')
def test_can_specify_max_pool_conns(self):
endpoint = self.creator.create_endpoint(
self.service_model, region_name='us-west-2',
endpoint_url='https://example.com',
max_pool_connections=100
)
self.assertEqual(endpoint.max_pool_connections, 100)
class TestAWSSession(unittest.TestCase):
def test_auth_header_preserved_from_s3_redirects(self):
request = AWSRequest()
request.url = 'https://bucket.s3.amazonaws.com/'
request.method = 'GET'
request.headers['Authorization'] = 'original auth header'
prepared_request = request.prepare()
fake_response = Mock()
fake_response.headers = {
'location': 'https://bucket.s3-us-west-2.amazonaws.com'}
fake_response.url = request.url
fake_response.status_code = 307
fake_response.is_permanent_redirect = False
# This line is needed to disable the cookie handling
# code in requests.
fake_response.raw._original_response = None
success_response = Mock()
success_response.raw._original_response = None
success_response.is_redirect = False
success_response.status_code = 200
session = BotocoreHTTPSession()
session.send = Mock(return_value=success_response)
list(session.resolve_redirects(
fake_response, prepared_request, stream=False))
redirected_request = session.send.call_args[0][0]
# The Authorization header for the newly sent request should
# still have our original Authorization header.
self.assertEqual(
redirected_request.headers['Authorization'],
'original auth header')
def test_max_pool_conns_injects_custom_adapter(self):
http_adapter_cls = Mock(return_value=sentinel.HTTP_ADAPTER)
session = BotocoreHTTPSession(max_pool_connections=20,
http_adapter_cls=http_adapter_cls)
http_adapter_cls.assert_called_with(pool_maxsize=20)
self.assertEqual(session.adapters['https://'], sentinel.HTTP_ADAPTER)
self.assertEqual(session.adapters['http://'], sentinel.HTTP_ADAPTER)
|
#!/usr/bin/env python
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import botocore.config
from tests import unittest
import mock
import botocore
from botocore import utils
from botocore import client
from botocore.endpoint import DEFAULT_TIMEOUT
from botocore import hooks
from botocore.client import ClientEndpointBridge
from botocore.credentials import Credentials
from botocore.exceptions import ParamValidationError
from botocore.exceptions import InvalidS3AddressingStyleError
from botocore.exceptions import UnknownSignatureVersionError
from botocore.exceptions import InvalidRetryConfigurationError
from botocore.exceptions import InvalidMaxRetryAttemptsError
from botocore.errorfactory import ClientExceptionsFactory
from botocore.stub import Stubber
from botocore import exceptions
from botocore.compat import six
class TestAutoGeneratedClient(unittest.TestCase):
def setUp(self):
self.service_description = {
'metadata': {
'serviceFullName': 'AWS MyService',
'apiVersion': '2014-01-01',
'endpointPrefix': 'myservice',
'signatureVersion': 'v4',
'protocol': 'query'
},
'operations': {
'TestOperation': {
'name': 'TestOperation',
'http': {
'method': 'POST',
'requestUri': '/',
},
'input': {'shape': 'TestOperationRequest'},
'errors': [{'shape': 'TestOperationException'}],
'documentation': 'Documents TestOperation'
}
},
'shapes': {
'TestOperationRequest': {
'type': 'structure',
'required': ['Foo'],
'members': {
'Foo': {'shape': 'StringType',
'documentation': 'Documents Foo'},
'Bar': {'shape': 'StringType',
'documentation': 'Documents Bar'},
}
},
"TestOperationException": {
'type': 'structure',
'exception': True,
'error': {
'code': 'TestOperationErrorCode'
}
},
'StringType': {'type': 'string'}
}
}
self.retry_config = {
"retry": {
"__default__": {
"max_attempts": 5,
"delay": {
"type": "exponential",
"base": "rand",
"growth_factor": 2
},
"policies": {}
}
}
}
self.loader = mock.Mock()
self.loader.load_service_model.return_value = self.service_description
self.loader.load_data.return_value = self.retry_config
self.credentials = Credentials('access-key', 'secret-key')
self.endpoint_creator_patch = mock.patch(
'botocore.args.EndpointCreator')
self.endpoint_creator_cls = self.endpoint_creator_patch.start()
self.endpoint_creator = self.endpoint_creator_cls.return_value
self.endpoint = mock.Mock()
self.endpoint.host = 'https://myservice.amazonaws.com'
self.endpoint.make_request.return_value = (
mock.Mock(status_code=200), {})
self.endpoint_creator.create_endpoint.return_value = self.endpoint
self.resolver = mock.Mock()
self.endpoint_data = {
'partition': 'aws',
'hostname': 'foo',
'endpointName': 'us-west-2',
'signatureVersions': ['v4'],
}
self.resolver.construct_endpoint.return_value = self.endpoint_data
self.resolver.get_available_endpoints.return_value = ['us-west-2']
def tearDown(self):
self.endpoint_creator_patch.stop()
def create_mock_emitter(self, responses=None):
if responses is None:
responses = []
emitter = mock.Mock()
emitter.emit.return_value = responses
return emitter
def create_client_creator(self, endpoint_creator=None, event_emitter=None,
retry_handler_factory=None,
retry_config_translator=None,
response_parser_factory=None,
endpoint_prefix=None,
exceptions_factory=None):
if event_emitter is None:
event_emitter = hooks.HierarchicalEmitter()
if retry_handler_factory is None:
retry_handler_factory = botocore.retryhandler
if retry_config_translator is None:
retry_config_translator = botocore.translate
if endpoint_prefix is not None:
            self.service_description['metadata']['endpointPrefix'] = (
                endpoint_prefix)
if endpoint_creator is not None:
self.endpoint_creator_cls.return_value = endpoint_creator
if exceptions_factory is None:
exceptions_factory = ClientExceptionsFactory()
creator = client.ClientCreator(
self.loader, self.resolver, 'user-agent', event_emitter,
retry_handler_factory, retry_config_translator,
response_parser_factory, exceptions_factory)
return creator
def assert_no_param_error_raised(self, client):
try:
self.make_api_call_with_missing_param(client)
except ParamValidationError:
self.fail("ParamValidationError shouldn't be raised "
"with validation disabled")
def make_api_call_with_missing_param(self, service_client):
# Missing required 'Foo' param.
service_client.test_operation(Bar='two')
def test_client_name(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
        self.assertEqual(service_client.__class__.__name__, 'MyService')
def test_client_name_with_amazon(self):
self.service_description['metadata']['serviceFullName'] = (
'Amazon MyService')
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
        self.assertEqual(service_client.__class__.__name__, 'MyService')
    def test_client_name_using_abbreviation(self):
self.service_description['metadata']['serviceAbbreviation'] = (
'Abbreviation')
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
        self.assertEqual(service_client.__class__.__name__, 'Abbreviation')
def test_client_name_with_non_alphabet_characters(self):
self.service_description['metadata']['serviceFullName'] = (
'Amazon My-Service')
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
        self.assertEqual(service_client.__class__.__name__, 'MyService')
def test_client_name_with_no_full_name_or_abbreviation(self):
del self.service_description['metadata']['serviceFullName']
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
        self.assertEqual(service_client.__class__.__name__, 'myservice')
def test_client_generated_from_model(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertTrue(hasattr(service_client, 'test_operation'))
def test_client_method_docstring(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
method_docstring = str(service_client.test_operation.__doc__)
ref_docstring_lines = [
'Documents TestOperation',
'**Request Syntax**',
' response = client.test_operation(',
' Bar=\'string\'',
' Foo=\'string\'',
' )',
':type Bar: string',
':param Bar: Documents Bar',
':type Foo: string',
':param Foo: **[REQUIRED]** Documents Foo'
]
for line in ref_docstring_lines:
self.assertIn(line, method_docstring)
def test_client_method_help(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
help(service_client.test_operation)
method_docstring = mock_stdout.getvalue()
ref_docstring_lines = [
'Documents TestOperation',
'**Request Syntax**',
' response = client.test_operation(',
' Bar=\'string\'',
' Foo=\'string\'',
' )',
':type Bar: string',
':param Bar: Documents Bar',
':type Foo: string',
':param Foo: **[REQUIRED]** Documents Foo'
]
for line in ref_docstring_lines:
self.assertIn(line, method_docstring)
def test_client_create_unicode(self):
creator = self.create_client_creator()
service_client = creator.create_client(
u'myservice', 'us-west-2', credentials=self.credentials)
self.assertTrue(hasattr(service_client, 'test_operation'))
def test_client_has_region_name_on_meta(self):
creator = self.create_client_creator()
region_name = 'us-west-2'
self.endpoint.region_name = region_name
service_client = creator.create_client(
'myservice', region_name, credentials=self.credentials)
self.assertEqual(service_client.meta.region_name, region_name)
def test_client_has_endpoint_url_on_meta(self):
creator = self.create_client_creator()
self.endpoint.host = 'https://foo.bar'
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertEqual(service_client.meta.endpoint_url,
'https://foo.bar')
def test_client_has_standard_partition_on_meta(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertEqual(service_client.meta.partition,
'aws')
def test_client_has_non_standard_partition_on_meta(self):
creator = self.create_client_creator()
self.resolver.construct_endpoint.return_value = {
'partition': 'aws-cn',
'hostname': 'foo',
'endpointName': 'cn-north-1',
'signatureVersions': ['v4'],
}
service_client = creator.create_client(
'myservice', 'cn-north-1', credentials=self.credentials)
self.assertEqual(service_client.meta.partition,
'aws-cn')
def test_client_has_exceptions_attribute(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertTrue(hasattr(service_client, 'exceptions'))
def test_client_has_modeled_exceptions(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertTrue(
issubclass(service_client.exceptions.TestOperationException,
client.ClientError)
)
def test_api_version_is_passed_to_loader_if_provided(self):
creator = self.create_client_creator()
self.endpoint.host = 'https://foo.bar'
specific_api_version = '2014-03-01'
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
api_version=specific_api_version)
self.loader.load_service_model.assert_called_with(
'myservice', 'service-2', api_version=specific_api_version)
def test_create_client_class_creates_class(self):
creator = self.create_client_creator()
client_class = creator.create_client_class('myservice')
self.assertTrue(hasattr(client_class, 'test_operation'))
def test_create_client_class_forwards_api_version(self):
creator = self.create_client_creator()
specific_api_version = '2014-03-01'
creator.create_client_class('myservice',
api_version=specific_api_version)
self.loader.load_service_model.assert_called_with(
'myservice', 'service-2', api_version=specific_api_version)
def test_signing_region_does_not_change_client_region(self):
with mock.patch('botocore.args.RequestSigner') as mock_signer:
credential_scope_region = 'us-east-1'
self.resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'endpoint.url',
'endpointName': 'us-west-2',
'signatureVersions': ['v4'],
'credentialScope': {'region': credential_scope_region,}
}
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertEqual(service_client.meta.region_name, 'us-west-2')
call_args = mock_signer.call_args
self.assertEqual(credential_scope_region, call_args[0][1])
def test_client_uses_signing_region_from_credential_scope(self):
with mock.patch('botocore.args.RequestSigner') as mock_signer:
credential_scope_region = 'us-east-1'
self.resolver.construct_endpoint.return_value = {
'partition': 'aws',
'endpointName': 'us-west-2',
'hostname': 'endpoint.url',
'signatureVersions': ['v4'],
'credentialScope': {'region': credential_scope_region}
}
creator = self.create_client_creator()
service_client = creator.create_client(
service_name='myservice', region_name='us-west-2',
credentials=self.credentials)
# Use the resolved region as the region value.
self.assertEqual(service_client.meta.region_name, 'us-west-2')
# Ensure that we use the credential scope region for signing,
# and not the resolved region name.
call_args = mock_signer.call_args
self.assertEqual(credential_scope_region, call_args[0][1])
def test_client_uses_signing_name_from_credential_scope(self):
with mock.patch('botocore.args.RequestSigner') as mock_signer:
self.resolver.construct_endpoint.return_value = {
'partition': 'aws',
'endpointName': 'us-west-2',
'hostname': 'endpoint.url',
'signatureVersions': ['v4'],
'credentialScope': {'service': 'override'}
}
creator = self.create_client_creator()
service_client = creator.create_client(
service_name='myservice', region_name='us-west-2',
credentials=self.credentials)
call_args = mock_signer.call_args
self.assertEqual('myservice', call_args[0][0])
self.assertEqual('override', call_args[0][2])
def test_client_uses_given_region_name_and_endpoint_url_when_present(self):
with mock.patch('botocore.args.RequestSigner') as mock_signer:
credential_scope_region = 'us-east-1'
self.resolver.construct_endpoint.return_value = {
'partition': 'aws',
'endpointName': 'us-west-2',
'hostname': 'endpoint.url',
'signatureVersions': ['v4'],
'credentialScope': {'region': credential_scope_region}
}
creator = self.create_client_creator()
service_client = creator.create_client(
service_name='myservice', region_name='us-west-2',
credentials=self.credentials, endpoint_url='https://foo')
self.assertEqual(service_client.meta.region_name, 'us-west-2')
call_args = mock_signer.call_args
self.assertEqual('us-west-2', call_args[0][1])
def test_client_uses_signing_name_from_model_if_present_if_resolved(self):
self.service_description['metadata']['signingName'] = 'otherName'
with mock.patch('botocore.args.RequestSigner') as mock_signer:
self.resolver.construct_endpoint.return_value = {
'partition': 'aws',
'endpointName': 'us-west-2',
'hostname': 'endpoint.url',
'signatureVersions': ['v4'],
}
creator = self.create_client_creator()
service_client = creator.create_client(
service_name='myservice', region_name='us-west-2',
credentials=self.credentials, endpoint_url='https://foo')
self.assertEqual(service_client.meta.region_name, 'us-west-2')
call_args = mock_signer.call_args[0]
self.assertEqual('otherName', call_args[2])
def test_client_uses_signing_name_even_with_no_resolve(self):
self.service_description['metadata']['signingName'] = 'otherName'
with mock.patch('botocore.args.RequestSigner') as mock_signer:
self.resolver.construct_endpoint.return_value = {}
creator = self.create_client_creator()
service_client = creator.create_client(
service_name='myservice', region_name='us-west-2',
credentials=self.credentials, endpoint_url='https://foo')
self.assertEqual(service_client.meta.region_name, 'us-west-2')
call_args = mock_signer.call_args[0]
self.assertEqual('otherName', call_args[2])
@mock.patch('botocore.args.RequestSigner')
def test_client_signature_no_override(self, request_signer):
creator = self.create_client_creator()
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
scoped_config={})
request_signer.assert_called_with(
mock.ANY, mock.ANY, mock.ANY, 'v4', mock.ANY, mock.ANY)
@mock.patch('botocore.args.RequestSigner')
def test_client_signature_override_config_file(self, request_signer):
creator = self.create_client_creator()
config = {
'myservice': {'signature_version': 'foo'}
}
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
scoped_config=config)
request_signer.assert_called_with(
mock.ANY, mock.ANY, mock.ANY, 'foo', mock.ANY, mock.ANY)
@mock.patch('botocore.args.RequestSigner')
def test_client_signature_override_arg(self, request_signer):
creator = self.create_client_creator()
config = botocore.config.Config(signature_version='foo')
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
client_config=config)
request_signer.assert_called_with(
mock.ANY, mock.ANY, mock.ANY, 'foo', mock.ANY, mock.ANY)
def test_client_method_to_api_mapping(self):
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertEqual(service_client.meta.method_to_api_mapping,
{'test_operation': 'TestOperation'})
def test_anonymous_client_request(self):
creator = self.create_client_creator()
config = botocore.config.Config(signature_version=botocore.UNSIGNED)
service_client = creator.create_client(
'myservice', 'us-west-2', client_config=config)
service_client.test_operation(Foo='one')
# Make sure a request has been attempted
self.assertTrue(self.endpoint.make_request.called)
# Make sure the request parameters do NOT include auth
# information. The service defined above for these tests
# uses sigv4 by default (which we disable).
params = dict((k.lower(), v) for k, v in
self.endpoint.make_request.call_args[0][1].items())
self.assertNotIn('authorization', params)
self.assertNotIn('x-amz-signature', params)
def test_client_user_agent_in_request(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2')
service_client.test_operation(Foo='one')
self.assertTrue(self.endpoint.make_request.called)
params = dict((k.lower(), v) for k, v in
self.endpoint.make_request.call_args[0][1].items())
self.assertEqual(params['headers']['User-Agent'], 'user-agent')
def test_client_custom_user_agent_in_request(self):
creator = self.create_client_creator()
config = botocore.config.Config(user_agent='baz')
service_client = creator.create_client(
'myservice', 'us-west-2', client_config=config)
service_client.test_operation(Foo='one')
self.assertTrue(self.endpoint.make_request.called)
params = dict((k.lower(), v) for k, v in
self.endpoint.make_request.call_args[0][1].items())
self.assertEqual(params['headers']['User-Agent'], 'baz')
def test_client_custom_user_agent_extra_in_request(self):
creator = self.create_client_creator()
config = botocore.config.Config(user_agent_extra='extrastuff')
service_client = creator.create_client(
'myservice', 'us-west-2', client_config=config)
service_client.test_operation(Foo='one')
headers = self.endpoint.make_request.call_args[0][1]['headers']
self.assertEqual(headers['User-Agent'], 'user-agent extrastuff')
def test_client_registers_request_created_handler(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertIn(
mock.call('request-created.myservice', mock.ANY),
event_emitter.register.call_args_list)
def test_client_makes_call(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertTrue(self.endpoint_creator.create_endpoint.called)
response = service_client.test_operation(Foo='one', Bar='two')
self.assertEqual(response, {})
def test_client_error_message_for_positional_args(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with self.assertRaisesRegexp(
TypeError, 'only accepts keyword arguments'):
service_client.test_operation('foo')
@mock.patch('botocore.args.RequestSigner.sign')
def test_client_signs_call(self, signer_mock):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
request = mock.Mock()
        # Emit the request-created event to verify the request gets signed.
        # We verified above that this event is registered when a client is
        # created, so emitting it directly avoids exercising the entire
        # client call logic.
service_client.meta.events.emit(
'request-created.myservice.test_operation', request=request,
operation_name='test_operation')
signer_mock.assert_called_with(
'test_operation', request)
def test_client_validates_params_by_default(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with self.assertRaises(ParamValidationError):
self.make_api_call_with_missing_param(service_client)
def test_client_doesnt_validate_params_when_validation_disabled(self):
creator = self.create_client_creator()
client_config = botocore.config.Config()
client_config.parameter_validation = False
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
client_config=client_config)
self.assert_no_param_error_raised(service_client)
def test_can_disable_param_validation_from_scoped_config(self):
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
scoped_config={'parameter_validation': False})
self.assert_no_param_error_raised(service_client)
def test_client_config_trumps_scoped_config(self):
creator = self.create_client_creator()
scoped_config = {'parameter_validation': True}
client_config = botocore.config.Config(parameter_validation=False)
# Client config should win and param validation is disabled.
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials,
scoped_config=scoped_config, client_config=client_config)
self.assert_no_param_error_raised(service_client)
def test_client_with_custom_both_timeout(self):
self.create_client_creator().create_client(
'myservice', 'us-west-2',
            client_config=botocore.config.Config(
                connect_timeout=123, read_timeout=234))
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['timeout'], (123, 234))
def test_client_with_custom_connect_timeout(self):
self.create_client_creator().create_client(
'myservice', 'us-west-2',
client_config=botocore.config.Config(connect_timeout=123))
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['timeout'], (123, DEFAULT_TIMEOUT))
def test_client_with_custom_read_timeout(self):
self.create_client_creator().create_client(
'myservice', 'us-west-2',
client_config=botocore.config.Config(read_timeout=234))
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['timeout'], (DEFAULT_TIMEOUT, 234))
def test_client_with_custom_neither_timeout(self):
self.create_client_creator().create_client('myservice', 'us-west-2')
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['timeout'],
(DEFAULT_TIMEOUT, DEFAULT_TIMEOUT))
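    # Taken together, the four timeout tests above show the endpoint
    # receives its timeout as a (connect_timeout, read_timeout) tuple.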
def test_client_with_custom_params(self):
creator = self.create_client_creator()
creator.create_client('myservice', 'us-west-2',
is_secure=False, verify=False)
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertFalse(call_kwargs['verify'])
self.assertNotIn('is_secure', call_kwargs)
def test_client_with_endpoint_url(self):
creator = self.create_client_creator()
creator.create_client('myservice', 'us-west-2',
endpoint_url='http://custom.foo')
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['endpoint_url'], 'http://custom.foo')
def test_client_can_use_guessed_endpoints(self):
# Ensure the resolver returns None (meaning a guess is made)
self.resolver.construct_endpoint.return_value = None
creator = self.create_client_creator()
client = creator.create_client('myservice', region_name='invalid')
self.assertEqual('invalid', client.meta.region_name)
def test_client_with_response_parser_factory(self):
factory = mock.Mock()
creator = self.create_client_creator(response_parser_factory=factory)
creator.create_client('myservice', 'us-west-2')
call_kwargs = self.endpoint_creator.create_endpoint.call_args[1]
self.assertEqual(call_kwargs['response_parser_factory'], factory)
def test_operation_cannot_paginate(self):
pagination_config = {
'pagination': {
# Note that there's no pagination config for
# 'TestOperation', indicating that TestOperation
# is not pageable.
'SomeOtherOperation': {
"input_token": "Marker",
"output_token": "Marker",
"more_results": "IsTruncated",
"limit_key": "MaxItems",
"result_key": "Users"
}
}
}
self.loader.load_service_model.side_effect = [
self.service_description, pagination_config]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertFalse(service_client.can_paginate('test_operation'))
def test_operation_can_paginate(self):
pagination_config = {
'pagination': {
'TestOperation': {
"input_token": "Marker",
"output_token": "Marker",
"more_results": "IsTruncated",
"limit_key": "MaxItems",
"result_key": "Users"
}
}
}
self.loader.load_service_model.side_effect = [
self.service_description, pagination_config]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertTrue(service_client.can_paginate('test_operation'))
        # The config is cached; make sure we get the same answer when we
        # ask again.
self.assertTrue(service_client.can_paginate('test_operation'))
def test_service_has_no_pagination_configs(self):
        # This is the case where there is no *.paginators.json file at all
        # for the service. The loader communicates this by raising a
        # DataNotFoundError when it cannot load pagination configs.
self.loader.load_service_model.side_effect = [
self.service_description,
exceptions.DataNotFoundError(data_path='/foo')]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertFalse(service_client.can_paginate('test_operation'))
def test_waiter_config_uses_service_name_not_endpoint_prefix(self):
waiter_config = {
'version': 2,
'waiters': {}
}
self.loader.load_service_model.side_effect = [
self.service_description,
waiter_config
]
creator = self.create_client_creator()
# We're going to verify that the loader loads a service called
# 'other-service-name', and even though the endpointPrefix is
# 'myservice', we use 'other-service-name' for waiters/paginators, etc.
service_client = creator.create_client('other-service-name',
'us-west-2')
self.assertEqual(service_client.waiter_names, [])
# Note we're using other-service-name, not
# 'myservice', which is the endpointPrefix.
self.loader.load_service_model.assert_called_with(
'other-service-name', 'waiters-2', '2014-01-01')
def test_service_has_waiter_configs(self):
waiter_config = {
'version': 2,
'waiters': {
"Waiter1": {
'operation': 'TestOperation',
'delay': 5,
'maxAttempts': 20,
'acceptors': [],
},
"Waiter2": {
'operation': 'TestOperation',
'delay': 5,
'maxAttempts': 20,
'acceptors': [],
},
}
}
self.loader.load_service_model.side_effect = [
self.service_description,
waiter_config
]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertEqual(sorted(service_client.waiter_names),
sorted(['waiter_1', 'waiter_2']))
self.assertTrue(hasattr(service_client.get_waiter('waiter_1'), 'wait'))
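    def _example_waiter_usage(self, service_client):
        # Illustrative sketch (not an original test): waiter names are
        # snake_cased versions of the config keys ('Waiter1' -> 'waiter_1'),
        # and each waiter is driven through its wait() method, which polls
        # the underlying operation until the acceptors match.
        waiter = service_client.get_waiter('waiter_1')
        waiter.wait(Foo='one', Bar='two')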
def test_service_has_no_waiter_configs(self):
self.loader.load_service_model.side_effect = [
self.service_description,
exceptions.DataNotFoundError(data_path='/foo')]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertEqual(service_client.waiter_names, [])
with self.assertRaises(ValueError):
service_client.get_waiter("unknown_waiter")
def test_service_has_retry_event(self):
# A retry event should be registered for the service.
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
creator.create_client('myservice', 'us-west-2')
event_emitter.register.assert_any_call(
'needs-retry.myservice', mock.ANY,
unique_id='retry-config-myservice')
def test_service_creates_retryhandler(self):
# A retry handler with the expected configuration should be
# created when instantiating a client.
retry_handler_factory = mock.Mock()
creator = self.create_client_creator(
retry_handler_factory=retry_handler_factory)
creator.create_client('myservice', 'us-west-2')
retry_handler_factory.create_retry_handler.assert_called_with({
'__default__': {
'delay': {
'growth_factor': 2,
'base': 'rand',
'type': 'exponential'
},
'policies': {},
'max_attempts': 5
}
}, 'myservice')
def test_service_registers_retry_handler(self):
# The retry handler returned from ``create_retry_handler``
# that was tested above needs to be set as the handler for
# the event emitter.
retry_handler_factory = mock.Mock()
handler = mock.Mock()
event_emitter = self.create_mock_emitter()
retry_handler_factory.create_retry_handler.return_value = handler
creator = self.create_client_creator(
event_emitter=event_emitter,
retry_handler_factory=retry_handler_factory)
creator.create_client('myservice', 'us-west-2')
event_emitter.register.assert_any_call(
mock.ANY, handler, unique_id=mock.ANY)
def test_service_retry_missing_config(self):
# No config means we should never see any retry events registered.
self.loader.load_data.return_value = {}
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
creator.create_client('myservice', 'us-west-2')
for call in event_emitter.register.call_args_list:
self.assertNotIn('needs-retry', call[0][0])
def test_can_override_max_attempts(self):
retry_handler_factory = mock.Mock(botocore.retryhandler)
creator = self.create_client_creator(
retry_handler_factory=retry_handler_factory)
creator.create_client(
'myservice', 'us-west-2',
client_config=botocore.config.Config(retries={'max_attempts': 9}))
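        # Clarifying note (not in the original test): the handler's
        # max_attempts appears to count the initial request as well, which
        # is why a configured value of 9 is asserted as 10 below.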
retry_handler_factory.create_retry_handler.assert_called_with({
'__default__': {
'delay': {
'growth_factor': 2,
'base': 'rand',
'type': 'exponential'
},
'policies': {},
'max_attempts': 10
}
}, 'myservice')
def test_try_to_paginate_non_paginated(self):
self.loader.load_service_model.side_effect = [
self.service_description,
exceptions.DataNotFoundError(data_path='/foo')
]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
with self.assertRaises(exceptions.OperationNotPageableError):
service_client.get_paginator('test_operation')
def test_successful_pagination_object_created(self):
pagination_config = {
'pagination': {
'TestOperation': {
"input_token": "Marker",
"output_token": "Marker",
"more_results": "IsTruncated",
"limit_key": "MaxItems",
"result_key": "Users"
}
}
}
self.loader.load_service_model.side_effect = [
self.service_description,
pagination_config
]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
paginator = service_client.get_paginator('test_operation')
# The pagination logic itself is tested elsewhere (test_paginate.py),
# but we can at least make sure it looks like a paginator.
self.assertTrue(hasattr(paginator, 'paginate'))
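    def _example_paginator_usage(self, service_client):
        # Illustrative sketch (not an original test): typical use of the
        # paginator verified above; paginate() returns an iterator of
        # response pages.
        paginator = service_client.get_paginator('test_operation')
        for page in paginator.paginate(Foo='one', Bar='two'):
            pass  # each page is a parsed TestOperation response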
def test_paginator_class_name_from_client(self):
pagination_config = {
'pagination': {
'TestOperation': {
"input_token": "Marker",
"output_token": "Marker",
"more_results": "IsTruncated",
"limit_key": "MaxItems",
"result_key": "Users"
}
}
}
self.loader.load_service_model.side_effect = [
self.service_description,
pagination_config
]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
paginator = service_client.get_paginator('test_operation')
self.assertEqual(
paginator.__class__.__name__,
'MyService.Paginator.TestOperation')
def test_paginator_help_from_client(self):
pagination_config = {
'pagination': {
'TestOperation': {
"input_token": "Marker",
"output_token": "Marker",
"more_results": "IsTruncated",
"limit_key": "MaxItems",
"result_key": "Users"
}
}
}
self.loader.load_service_model.side_effect = [
self.service_description,
pagination_config
]
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
paginator = service_client.get_paginator('test_operation')
with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
help(paginator.paginate)
contents = mock_stdout.getvalue()
lines = [
(' Creates an iterator that will paginate through responses '
'from :py:meth:`MyService.Client.test_operation`.'),
' **Request Syntax** ',
' ::',
' response_iterator = paginator.paginate(',
" Foo='string',",
" Bar='string',",
' PaginationConfig={',
" 'MaxItems': 123,",
" 'PageSize': 123,",
" 'StartingToken': 'string'",
' }',
' )',
' :type Foo: string',
' :param Foo: **[REQUIRED]** Documents Foo',
' :type Bar: string',
' :param Bar: Documents Bar',
' :type PaginationConfig: dict',
' :param PaginationConfig: ',
(' A dictionary that provides parameters to control '
'pagination.'),
' - **MaxItems** *(integer) --* ',
(' The total number of items to return. If the total '
'number of items available is more than the value specified '
'in max-items then a ``NextToken`` will be provided in the '
'output that you can use to resume pagination.'),
' - **PageSize** *(integer) --* ',
' The size of each page.',
' - **StartingToken** *(string) --* ',
(' A token to specify where to start paginating. This is '
'the ``NextToken`` from a previous response.'),
' :returns: None',
]
for line in lines:
self.assertIn(line, contents)
def test_can_set_credentials_in_client_init(self):
creator = self.create_client_creator()
credentials = Credentials(
access_key='access_key', secret_key='secret_key',
token='session_token')
client = creator.create_client(
'myservice', 'us-west-2', credentials=credentials)
# Verify that we create an endpoint with a credentials object
# matching our creds arguments.
self.assertEqual(client._request_signer._credentials, credentials)
def test_event_emitted_when_invoked(self):
event_emitter = hooks.HierarchicalEmitter()
creator = self.create_client_creator(event_emitter=event_emitter)
calls = []
handler = lambda **kwargs: calls.append(kwargs)
event_emitter.register('before-call', handler)
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
service_client.test_operation(Foo='one', Bar='two')
self.assertEqual(len(calls), 1)
def test_events_are_per_client(self):
event_emitter = hooks.HierarchicalEmitter()
creator = self.create_client_creator(event_emitter=event_emitter)
first_calls = []
first_handler = lambda **kwargs: first_calls.append(kwargs)
second_calls = []
second_handler = lambda **kwargs: second_calls.append(kwargs)
first_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
second_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
first_client.meta.events.register('before-call', first_handler)
second_client.meta.events.register('before-call', second_handler)
        # Now, if we invoke an operation from either client, only the
        # handlers registered with that specific client will be invoked.
        # Start by invoking the first client:
first_client.test_operation(Foo='one', Bar='two')
# Only first_calls is populated, not second_calls.
self.assertEqual(len(first_calls), 1)
self.assertEqual(len(second_calls), 0)
# If we invoke an operation from the second client,
# only second_calls will be populated, not first_calls.
second_client.test_operation(Foo='one', Bar='two')
# first_calls == 1 from the previous first_client.test_operation()
# call.
self.assertEqual(len(first_calls), 1)
self.assertEqual(len(second_calls), 1)
def test_clients_inherit_handlers_from_session(self):
# Even though clients get their own event emitters, they still
# inherit any handlers that were registered on the event emitter
# at the time the client was created.
event_emitter = hooks.HierarchicalEmitter()
creator = self.create_client_creator(event_emitter=event_emitter)
# So if an event handler is registered before any clients are created:
base_calls = []
base_handler = lambda **kwargs: base_calls.append(kwargs)
event_emitter.register('before-call', base_handler)
# Then any client created from this point forward from the
# event_emitter passed into the ClientCreator will have this
# handler.
first_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
first_client.test_operation(Foo='one', Bar='two')
self.assertEqual(len(base_calls), 1)
# Same thing if we create another client.
second_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
second_client.test_operation(Foo='one', Bar='two')
self.assertEqual(len(base_calls), 2)
def test_clients_inherit_only_at_create_time(self):
        # If event handlers are added to the original event emitter
        # _after_ a client is created, the client will not pick those up.
event_emitter = hooks.HierarchicalEmitter()
creator = self.create_client_creator(event_emitter=event_emitter)
# 1. Create a client.
first_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
# 2. Now register an event handler from the originating event emitter.
base_calls = []
base_handler = lambda **kwargs: base_calls.append(kwargs)
event_emitter.register('before-call', base_handler)
# 3. The client will _not_ see this because it already has its
# own copy of the event handlers.
first_client.test_operation(Foo='one', Bar='two')
self.assertEqual(len(base_calls), 0)
def test_clients_have_meta_object(self):
creator = self.create_client_creator()
service_client = creator.create_client('myservice', 'us-west-2')
self.assertTrue(hasattr(service_client, 'meta'))
self.assertTrue(hasattr(service_client.meta, 'events'))
# Sanity check the event emitter has an .emit() method.
self.assertTrue(hasattr(service_client.meta.events, 'emit'))
    def test_client_register_separate_unique_id_event(self):
event_emitter = hooks.HierarchicalEmitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client1 = creator.create_client('myservice', 'us-west-2')
client2 = creator.create_client('myservice', 'us-west-2')
def ping(**kwargs):
return 'foo'
client1.meta.events.register('some-event', ping, 'my-unique-id')
client2.meta.events.register('some-event', ping, 'my-unique-id')
        # Ensure both clients can register a function with a unique id
client1_responses = client1.meta.events.emit('some-event')
self.assertEqual(len(client1_responses), 1)
self.assertEqual(client1_responses[0][1], 'foo')
client2_responses = client2.meta.events.emit('some-event')
self.assertEqual(len(client2_responses), 1)
self.assertEqual(client2_responses[0][1], 'foo')
        # Ensure that when one client unregisters the handler, the other
        # client still has the unique-id event registered.
client1.meta.events.unregister('some-event', ping, 'my-unique-id')
client1_responses = client1.meta.events.emit('some-event')
self.assertEqual(len(client1_responses), 0)
client2_responses = client2.meta.events.emit('some-event')
self.assertEqual(len(client2_responses), 1)
self.assertEqual(client2_responses[0][1], 'foo')
# Ensure that the other client can unregister the event
client2.meta.events.unregister('some-event', ping, 'my-unique-id')
client2_responses = client2.meta.events.emit('some-event')
self.assertEqual(len(client2_responses), 0)
def test_client_created_emits_events(self):
called = []
def on_client_create(class_attributes, **kwargs):
called.append(class_attributes)
event_emitter = hooks.HierarchicalEmitter()
event_emitter.register('creating-client-class', on_client_create)
creator = self.create_client_creator(event_emitter=event_emitter)
creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
self.assertEqual(len(called), 1)
self.assertIn('test_operation', called[0])
def test_client_method_called_event(self):
event_emitter = hooks.HierarchicalEmitter()
def inject_params(params, **kwargs):
new_params = params.copy()
new_params['Foo'] = 'zero'
return new_params
event_emitter.register(
'provide-client-params.myservice.TestOperation', inject_params)
wrapped_emitter = mock.Mock(wraps=event_emitter)
creator = self.create_client_creator(event_emitter=wrapped_emitter)
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
params = {'Foo': 'one', 'Bar': 'two'}
service_client.test_operation(**params)
# Ensure that the initial params were not modified in the handler
self.assertEqual(params, {'Foo': 'one', 'Bar': 'two'})
# Ensure the handler passed on the correct param values.
body = self.endpoint.make_request.call_args[0][1]['body']
self.assertEqual(body['Foo'], 'zero')
def test_client_default_for_s3_addressing_style(self):
creator = self.create_client_creator()
client = creator.create_client('myservice', 'us-west-2')
self.assertEqual(client.meta.config.s3, None)
def test_client_s3_addressing_style_with_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
client_config=botocore.config.Config(s3={'addressing_style': 'auto'})
)
self.assertEqual(
my_client.meta.config.s3['addressing_style'], 'auto')
def test_client_s3_addressing_style_with_bad_value(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': ''},
)
self.assertIsNone(client.meta.config.s3)
def test_client_s3_addressing_style_with_scoped_config(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'addressing_style': 'virtual'}}
)
self.assertEqual(
client.meta.config.s3['addressing_style'], 'virtual')
def test_client_s3_addressing_style_with_incorrect_style(self):
with self.assertRaises(InvalidS3AddressingStyleError):
botocore.config.Config(s3={'addressing_style': 'foo'})
def test_client_s3_addressing_style_config_overrides_scoped_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'addressing_style': 'virtual'}},
client_config=botocore.config.Config(s3={'addressing_style': 'auto'})
)
self.assertEqual(
my_client.meta.config.s3['addressing_style'], 'auto')
def test_client_s3_addressing_style_default_registers_correctly(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client = creator.create_client('s3', 'us-west-2')
self.assertIn(
mock.call('before-sign.s3', utils.fix_s3_host),
client.meta.events.register.call_args_list
)
def test_client_s3_addressing_style_auto_registers_correctly(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client = creator.create_client(
's3', 'us-west-2',
scoped_config={'s3': {'addressing_style': 'auto'}}
)
self.assertIn(
mock.call('before-sign.s3', utils.fix_s3_host),
client.meta.events.register.call_args_list
)
def test_client_s3_addressing_style_virtual_registers_correctly(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client = creator.create_client(
's3', 'us-west-2',
scoped_config={'s3': {'addressing_style': 'virtual'}}
)
self.assertNotIn(
mock.call('before-sign.s3', utils.fix_s3_host),
client.meta.events.unregister.call_args_list
)
self.assertIn(
mock.call('before-sign.s3', utils.switch_to_virtual_host_style),
client.meta.events.register.call_args_list
)
def test_client_s3_addressing_style_path_registers_correctly(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client = creator.create_client(
's3', 'us-west-2',
scoped_config={'s3': {'addressing_style': 'path'}}
)
self.assertNotIn(
mock.call('before-sign.s3', utils.fix_s3_host),
client.meta.events.register.call_args_list
)
self.assertNotIn(
mock.call('before-sign.s3', utils.switch_to_virtual_host_style),
client.meta.events.register.call_args_list
)
def test_custom_endpoint_uses_path_style(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
# fix_s3_host should be registered if we don't provide a url
client = creator.create_client('s3', 'us-west-2')
self.assertIn(
mock.call('before-sign.s3', utils.fix_s3_host),
client.meta.events.register.call_args_list
)
# If we do provide a url, fix_s3_host should not be registered
event_emitter.reset_mock()
client = creator.create_client(
's3', 'us-west-2',
endpoint_url="foo.com"
)
self.assertNotIn(
mock.call('before-sign.s3', mock.ANY),
client.meta.events.register.call_args_list
)
def test_custom_accelerate_url_forces_virtual_host(self):
event_emitter = self.create_mock_emitter()
creator = self.create_client_creator(event_emitter=event_emitter)
client = creator.create_client(
's3', 'us-west-2',
endpoint_url='https://s3-accelerate.amazonaws.com'
)
self.assertIn(
mock.call('before-sign.s3', utils.switch_to_virtual_host_style),
client.meta.events.register.call_args_list
)
def test_client_payload_signing_from_scoped_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'payload_signing_enabled': True}}
)
self.assertEqual(
my_client.meta.config.s3['payload_signing_enabled'], True)
def test_client_payload_signing_from_varying_forms_of_true(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'payload_signing_enabled': 'True'}}
)
self.assertEqual(
my_client.meta.config.s3['payload_signing_enabled'], True)
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'payload_signing_enabled': 'true'}}
)
self.assertEqual(
my_client.meta.config.s3['payload_signing_enabled'], True)
def test_client_payload_signing_from_client_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
client_config=client.Config(s3={'payload_signing_enabled': True})
)
self.assertEqual(
my_client.meta.config.s3['payload_signing_enabled'], True)
def test_client_payload_signing_client_config_overrides_scoped(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'payload_signing_enabled': False}},
client_config=client.Config(s3={'payload_signing_enabled': True})
)
self.assertEqual(
my_client.meta.config.s3['payload_signing_enabled'], True)
def test_client_s3_accelerate_from_scoped_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'use_accelerate_endpoint': True}}
)
self.assertEqual(
my_client.meta.config.s3['use_accelerate_endpoint'], True)
def test_client_s3_accelerate_from_varying_forms_of_true(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'use_accelerate_endpoint': 'True'}}
)
self.assertEqual(
my_client.meta.config.s3['use_accelerate_endpoint'], True)
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'use_accelerate_endpoint': 'true'}}
)
self.assertEqual(
my_client.meta.config.s3['use_accelerate_endpoint'], True)
def test_client_s3_accelerate_from_client_config(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
client_config=client.Config(s3={'use_accelerate_endpoint': True})
)
self.assertEqual(
my_client.meta.config.s3['use_accelerate_endpoint'], True)
def test_client_s3_accelerate_client_config_overrides_scoped(self):
creator = self.create_client_creator()
my_client = creator.create_client(
'myservice', 'us-west-2',
scoped_config={'s3': {'use_accelerate_endpoint': False}},
client_config=client.Config(s3={'use_accelerate_endpoint': True})
)
self.assertEqual(
my_client.meta.config.s3['use_accelerate_endpoint'], True)
def test_before_call_short_circuits_request(self):
def return_mock_tuple(**kwargs):
http_mock = mock.Mock()
http_mock.status_code = 200
return http_mock, mock.Mock()
emitter = hooks.HierarchicalEmitter()
emitter.register_last('before-call.*.*', return_mock_tuple)
creator = self.create_client_creator(event_emitter=emitter)
service_client = creator.create_client(
'myservice', 'us-west-2')
service_client.test_operation(Foo='one')
self.assertFalse(self.endpoint.make_request.called)
def test_getattr_emits_event(self):
emitter = self.create_mock_emitter()
emitter.emit_until_response.return_value = (None, None)
creator = self.create_client_creator(event_emitter=emitter)
service_client = creator.create_client('myservice', 'us-west-2')
# Assert that the event hasn't fired yet
emitter.emit_until_response.assert_not_called()
with self.assertRaises(AttributeError):
service_client.attribute_that_does_not_exist
emitter.emit_until_response.assert_called_once_with(
'getattr.myservice.attribute_that_does_not_exist',
client=service_client
)
def test_getattr_event_returns_response(self):
emitter = self.create_mock_emitter()
emitter.emit_until_response.return_value = (None, 'success')
creator = self.create_client_creator(event_emitter=emitter)
service_client = creator.create_client('myservice', 'us-west-2')
value = service_client.attribute_that_does_not_exist
self.assertEqual(value, 'success')
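# Illustrative sketch (an assumption, not part of the suite above): the
# getattr event uses emit_until_response(), which stops at the first
# handler returning a non-None value, so a handler like this could inject
# a dynamic attribute onto the client.
def _example_getattr_handler(client, **kwargs):
    # Returning a non-None value ends the emit and becomes the attribute.
    return 'injected-attribute-value'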
class TestClientErrors(TestAutoGeneratedClient):
def add_error_response(self, error_response):
self.endpoint.make_request.return_value = (
mock.Mock(status_code=400), error_response)
def test_client_makes_call_with_error(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error(
'test_operation', 'TestOperationErrorCode', 'error occurred')
with self.assertRaises(client.exceptions.TestOperationException):
client.test_operation(Foo='one', Bar='two')
def test_error_with_no_wire_code(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error('test_operation', '404', 'Not Found')
try:
client.test_operation(Foo='one', Bar='two')
except client.exceptions.ClientError as e:
                # This is needed because the error could be a subclass of
                # ClientError. We explicitly want it to be a generic
                # ClientError, though.
self.assertEqual(e.__class__, exceptions.ClientError)
def test_error_with_dot_separated_code(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error(
'test_operation', 'InvalidAddress.NotFound', 'Not Found')
try:
client.test_operation(Foo='one', Bar='two')
except client.exceptions.ClientError as e:
                # This is needed because the error could be a subclass of
                # ClientError. We explicitly want it to be a generic
                # ClientError, though.
self.assertEqual(e.__class__, exceptions.ClientError)
def test_error_with_empty_message(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error(
'test_operation', 'TestOperationErrorCode')
with self.assertRaises(client.exceptions.TestOperationException):
client.test_operation(Foo='one', Bar='two')
def test_error_with_empty_code(self):
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error('test_operation')
try:
client.test_operation(Foo='one', Bar='two')
except client.exceptions.ClientError as e:
                # This is needed because the error could be a subclass of
                # ClientError. We explicitly want it to be a generic
                # ClientError, though.
self.assertEqual(e.__class__, exceptions.ClientError)
def test_error_with_missing_code(self):
error_response = {'Error': {'Message': 'error occurred'}}
        # The stubber is not being used because it will always populate
        # the message and code.
self.endpoint.make_request.return_value = (
mock.Mock(status_code=400), error_response)
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
try:
client.test_operation(Foo='one', Bar='two')
except client.exceptions.ClientError as e:
            # This is needed because the error could be a subclass of
            # ClientError. We explicitly want it to be a generic
            # ClientError, though.
self.assertEqual(e.__class__, exceptions.ClientError)
def test_error_with_empty_contents(self):
error_response = {'Error': {}}
        # The stubber is not being used because it will always populate
        # the message and code.
self.endpoint.make_request.return_value = (
mock.Mock(status_code=400), error_response)
creator = self.create_client_creator()
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
try:
client.test_operation(Foo='one', Bar='two')
except client.exceptions.ClientError as e:
            # This is needed because the error could be a subclass of
            # ClientError. We explicitly want it to be a generic
            # ClientError, though.
self.assertEqual(e.__class__, exceptions.ClientError)
def test_exception_classes_across_clients_are_the_same(self):
creator = self.create_client_creator(
exceptions_factory=ClientExceptionsFactory())
client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
client2 = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials)
with Stubber(client) as stub:
stub.add_client_error(
'test_operation', 'TestOperationErrorCode', 'error occurred')
try:
client.test_operation(Foo='one', Bar='two')
except client2.exceptions.TestOperationException as e:
                # The caught exception should also be an instance of the
                # other client's TestOperationException.
self.assertIsInstance(
e, client.exceptions.TestOperationException)
class TestConfig(unittest.TestCase):
def test_can_use_args_to_construct(self):
        option_defaults = botocore.config.Config.OPTION_DEFAULTS
        config = botocore.config.Config(*option_defaults.values())
        for option, default_value in option_defaults.items():
self.assertTrue(hasattr(config, option))
self.assertEqual(getattr(config, option), default_value)
def test_can_use_kwargs_to_construct(self):
        option_defaults = botocore.config.Config.OPTION_DEFAULTS
        config = botocore.config.Config(**option_defaults)
        for option, default_value in option_defaults.items():
self.assertTrue(hasattr(config, option))
self.assertEqual(getattr(config, option), default_value)
def test_can_use_mix_of_args_and_kwargs(self):
config = botocore.config.Config('us-east-1', read_timeout=50)
self.assertEqual(config.region_name, 'us-east-1')
self.assertEqual(config.read_timeout, 50)
def test_invalid_kwargs(self):
with self.assertRaisesRegexp(TypeError, 'Got unexpected keyword'):
botocore.config.Config(foo='foo')
def test_pass_invalid_length_of_args(self):
with self.assertRaisesRegexp(TypeError, 'Takes at most'):
            botocore.config.Config(
                'foo', *botocore.config.Config.OPTION_DEFAULTS.values())
def test_create_with_multiple_kwargs(self):
with self.assertRaisesRegexp(TypeError, 'Got multiple values'):
botocore.config.Config('us-east-1', region_name='us-east-1')
def test_merge_returns_new_config_object(self):
config = botocore.config.Config()
other_config = botocore.config.Config()
new_config = config.merge(other_config)
# Check the type is correct
self.assertIsInstance(new_config, botocore.config.Config)
# Make sure the new config is a brand new config object
self.assertIsNot(new_config, config)
self.assertIsNot(new_config, other_config)
def test_general_merge_keeps_default_values(self):
config = botocore.config.Config()
other_config = botocore.config.Config()
config_properties = vars(config)
new_config = config.merge(other_config)
# Ensure that the values all stayed the same in the new config
self.assertEqual(config_properties, vars(new_config))
def test_merge_overrides_values(self):
config = botocore.config.Config(region_name='us-east-1')
other_config = botocore.config.Config(region_name='us-west-2')
new_config = config.merge(other_config)
self.assertEqual(new_config.region_name, 'us-west-2')
def test_merge_overrides_values_even_when_using_default(self):
config = botocore.config.Config(region_name='us-east-1')
other_config = botocore.config.Config(region_name=None)
new_config = config.merge(other_config)
self.assertEqual(new_config.region_name, None)
def test_merge_overrides_values_even_when_using_default_timeout(self):
config = botocore.config.Config(read_timeout=30)
other_config = botocore.config.Config(read_timeout=DEFAULT_TIMEOUT)
new_config = config.merge(other_config)
self.assertEqual(new_config.read_timeout, DEFAULT_TIMEOUT)
def test_merge_overrides_only_when_user_provided_values(self):
config = botocore.config.Config(
region_name='us-east-1', signature_version='s3v4')
other_config = botocore.config.Config(region_name='us-west-2')
new_config = config.merge(other_config)
self.assertEqual(new_config.region_name, 'us-west-2')
self.assertEqual(new_config.signature_version, 's3v4')
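    def _example_merge_usage(self):
        # Illustrative sketch (not an original test): the merge() semantics
        # verified above allow layering a shared base Config under
        # per-client overrides; only user-provided override values win.
        base = botocore.config.Config(region_name='us-east-1',
                                      read_timeout=30)
        override = botocore.config.Config(read_timeout=60)
        merged = base.merge(override)
        assert merged.region_name == 'us-east-1'
        assert merged.read_timeout == 60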
def test_can_set_retry_max_attempts(self):
config = botocore.config.Config(retries={'max_attempts': 15})
self.assertEqual(config.retries['max_attempts'], 15)
def test_validates_retry_config(self):
with self.assertRaisesRegexp(
InvalidRetryConfigurationError,
'Cannot provide retry configuration for "not-allowed"'):
botocore.config.Config(retries={'not-allowed': True})
def test_validates_max_retry_attempts(self):
with self.assertRaises(InvalidMaxRetryAttemptsError):
botocore.config.Config(retries={'max_attempts': -1})
class TestClientEndpointBridge(unittest.TestCase):
def setUp(self):
self.resolver = mock.Mock()
self.boilerplate_response = {
'endpointName': 'us-east-1',
'hostname': 's3.amazonaws.com',
'partition': 'aws',
'protocols': ['http', 'https'],
'dnsSuffix': 'amazonaws.com',
'signatureVersions': ['s3', 's3v4']
}
self.resolver.construct_endpoint.return_value = \
self.boilerplate_response
def test_guesses_endpoint_as_last_resort(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = None
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', region_name='guess')
self.assertEqual('guess', resolved['region_name'])
self.assertEqual('guess', resolved['signing_region'])
self.assertEqual('myservice', resolved['signing_name'])
self.assertEqual('myservice', resolved['service_name'])
self.assertEqual('v4', resolved['signature_version'])
self.assertEqual('https://myservice.guess.amazonaws.com',
resolved['endpoint_url'])
def test_uses_us_east_1_by_default_for_s3(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 's3.amazonaws.com',
'endpointName': 'us-east-1', 'signatureVersions': ['s3', 's3v4'],
'protocols': ['https']}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('s3')
self.assertEqual('us-east-1', resolved['region_name'])
self.assertEqual('us-east-1', resolved['signing_region'])
self.assertEqual('https://s3.amazonaws.com',
resolved['endpoint_url'])
def test_uses_region_from_client_config_if_available(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = None
client_config = mock.Mock()
client_config.region_name = 'us-foo-bar'
bridge = ClientEndpointBridge(resolver, client_config=client_config)
resolved = bridge.resolve('test')
self.assertEqual('us-foo-bar', resolved['region_name'])
self.assertEqual('us-foo-bar', resolved['signing_region'])
self.assertEqual('https://test.us-foo-bar.amazonaws.com',
resolved['endpoint_url'])
def test_can_guess_endpoint_and_use_given_endpoint_url(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = None
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve(
'test', 'guess', endpoint_url='http://test.com')
self.assertEqual('guess', resolved['region_name'])
self.assertEqual('guess', resolved['signing_region'])
self.assertEqual('http://test.com', resolved['endpoint_url'])
def test_can_use_endpoint_url_with_resolved_endpoint(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'do-not-use-this',
'endpointName': 'us-west-2', 'signatureVersions': ['v2']}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve(
'ec2', 'us-west-2', endpoint_url='https://foo')
self.assertEqual('us-west-2', resolved['region_name'])
self.assertEqual('us-west-2', resolved['signing_region'])
self.assertEqual('https://foo', resolved['endpoint_url'])
self.assertEqual('v2', resolved['signature_version'])
def test_uses_ssl_common_name_over_hostname_if_present(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'do-not-use-this',
'signatureVersions': ['v4'], 'sslCommonName': 'common-name.com',
'endpointName': 'us-west-2', 'protocols': ['https']}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', 'us-west-2')
self.assertEqual('us-west-2', resolved['region_name'])
self.assertEqual('us-west-2', resolved['signing_region'])
self.assertEqual('https://common-name.com', resolved['endpoint_url'])
def test_can_create_http_urls(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'host.com',
'signatureVersions': ['v4'],
'endpointName': 'us-foo-baz'}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', 'us-foo-baz', is_secure=False)
self.assertEqual('http://host.com', resolved['endpoint_url'])
def test_credential_scope_overrides_signing_region(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'host.com',
'endpointName': 'us-foo-baz',
'signatureVersions': ['v4'],
'credentialScope': {'region': 'override'}
}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', 'us-foo-baz')
self.assertEqual('us-foo-baz', resolved['region_name'])
self.assertEqual('override', resolved['signing_region'])
def test_cred_scope_does_not_override_signing_region_if_endpoint_url(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'will-not-use.com',
'endpointName': 'us-foo-baz',
'signatureVersions': ['v4'],
'credentialScope': {'region': 'override'}
}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', 'us-foo-baz',
endpoint_url='https://override.com')
self.assertEqual('us-foo-baz', resolved['region_name'])
self.assertEqual('us-foo-baz', resolved['signing_region'])
self.assertEqual('https://override.com', resolved['endpoint_url'])
def test_resolved_region_overrides_region_when_no_endpoint_url(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'host.com',
'signatureVersions': ['v4'],
'endpointName': 'override',
'protocols': ['https'],
}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice', 'will-not-be-there')
self.assertEqual('override', resolved['region_name'])
self.assertEqual('override', resolved['signing_region'])
self.assertEqual('https://host.com', resolved['endpoint_url'])
def test_does_not_use_https_if_not_available(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'host.com',
'signatureVersions': ['v4'],
'endpointName': 'foo',
# Note: http, not https
'protocols': ['http'],
}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('myservice')
# We should resolve to http://, not https://
self.assertEqual('http://host.com', resolved['endpoint_url'])
def test_uses_signature_version_from_client_config(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test.com',
'endpointName': 'us-west-2', 'signatureVersions': ['v2']}
client_config = mock.Mock()
client_config.signature_version = 's3'
bridge = ClientEndpointBridge(resolver, client_config=client_config)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('s3', resolved['signature_version'])
def test_uses_signature_version_from_client_config_when_guessing(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = None
client_config = mock.Mock()
client_config.signature_version = 's3v4'
bridge = ClientEndpointBridge(resolver, client_config=client_config)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('s3v4', resolved['signature_version'])
def test_uses_signature_version_from_scoped_config(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test.com',
'endpointName': 'us-west-2', 'signatureVersions': ['v2']}
scoped_config = mock.Mock()
scoped_config.get.return_value = {'signature_version': 's3'}
bridge = ClientEndpointBridge(resolver, scoped_config)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('s3', resolved['signature_version'])
def test_uses_s3v4_over_s3_for_s3(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test.com',
'endpointName': 'us-west-2', 'signatureVersions': ['s3v4', 's3']}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('s3', 'us-west-2')
self.assertEqual('s3v4', resolved['signature_version'])
def test_uses_s3v4_over_others_for_s3(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test.com',
'endpointName': 'us-west-2', 'signatureVersions': ['s3v4', 'v4']}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('s3', 'us-west-2')
self.assertEqual('s3v4', resolved['signature_version'])
def test_uses_v4_over_other_signers(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'signatureVersions': ['v2', 'v4'], 'endpointName': 'us-west-2'}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('v4', resolved['signature_version'])
def test_uses_known_signers_from_list_of_signature_versions(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'signatureVersions': ['foo', 'baz', 'v3https'],
'endpointName': 'us-west-2'}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('v3https', resolved['signature_version'])
def test_raises_when_signature_version_is_unknown(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'endpointName': 'us-west-2', 'signatureVersions': ['foo']}
bridge = ClientEndpointBridge(resolver)
with self.assertRaises(UnknownSignatureVersionError):
bridge.resolve('test', 'us-west-2')
def test_raises_when_signature_version_is_not_found(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'endpointName': 'us-west-2'}
bridge = ClientEndpointBridge(resolver)
with self.assertRaises(UnknownSignatureVersionError):
bridge.resolve('test', 'us-west-2')
def test_uses_service_name_as_signing_name(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'signatureVersions': ['v4'],
'endpointName': 'us-west-2'}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('test', resolved['signing_name'])
def test_uses_credential_scope_signing_name(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws',
'hostname': 'test',
'endpointName': 'us-west-2',
'signatureVersions': ['v4'],
'credentialScope': {'service': 'override'}
}
bridge = ClientEndpointBridge(resolver)
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('override', resolved['signing_name'])
def test_uses_service_signing_name_when_present_and_no_cred_scope(self):
resolver = mock.Mock()
resolver.construct_endpoint.return_value = {
'partition': 'aws', 'hostname': 'test',
'signatureVersions': ['v4'],
'endpointName': 'us-west-2'}
bridge = ClientEndpointBridge(resolver, service_signing_name='foo')
resolved = bridge.resolve('test', 'us-west-2')
self.assertEqual('foo', resolved['signing_name'])
def test_can_construct_dualstack_endpoint_when_enabled(self):
scoped_config = {'s3': {'use_dualstack_endpoint': True}}
bridge = ClientEndpointBridge(self.resolver, scoped_config)
resolved = bridge.resolve('s3', 'us-east-1')
self.assertEqual(
resolved['endpoint_url'],
'https://s3.dualstack.us-east-1.amazonaws.com')
def test_dualstack_can_use_client_config(self):
config = botocore.config.Config(s3={'use_dualstack_endpoint': True})
bridge = ClientEndpointBridge(self.resolver, client_config=config)
resolved = bridge.resolve('s3', 'us-east-1')
self.assertEqual(
resolved['endpoint_url'],
'https://s3.dualstack.us-east-1.amazonaws.com')
def test_dualstack_client_config_beats_scoped_config(self):
scoped_config = {'s3': {'use_dualstack_endpoint': False}}
config = botocore.config.Config(s3={'use_dualstack_endpoint': True})
bridge = ClientEndpointBridge(self.resolver, scoped_config,
client_config=config)
resolved = bridge.resolve('s3', 'us-east-1')
self.assertEqual(
resolved['endpoint_url'],
'https://s3.dualstack.us-east-1.amazonaws.com')
def test_disable_dualstack_explicitly(self):
scoped_config = {'s3': {'use_dualstack_endpoint': True}}
config = botocore.config.Config(s3={'use_dualstack_endpoint': False})
bridge = ClientEndpointBridge(self.resolver, scoped_config,
client_config=config)
resolved = bridge.resolve('s3', 'us-east-1')
self.assertEqual(
resolved['endpoint_url'],
'https://s3.amazonaws.com')
def test_dualstack_honors_dns_suffix(self):
scoped_config = {'s3': {'use_dualstack_endpoint': True}}
self.boilerplate_response['dnsSuffix'] = 'amazonaws.com.cn'
self.boilerplate_response['endpointName'] = 'cn-north-1'
bridge = ClientEndpointBridge(self.resolver, scoped_config)
resolved = bridge.resolve('s3', 'cn-north-1')
self.assertEqual(
resolved['endpoint_url'],
'https://s3.dualstack.cn-north-1.amazonaws.com.cn'
)
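    def _example_dualstack_hostname(self, service, region,
                                    dns_suffix='amazonaws.com'):
        # Illustrative helper (not an original test): the dualstack URLs
        # asserted above all follow this pattern, including the partition
        # dnsSuffix (e.g. 'amazonaws.com.cn' for cn-north-1).
        return 'https://%s.dualstack.%s.%s' % (service, region, dns_suffix)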
|
# Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import mock
from botocore.stub import Stubber
from botocore.exceptions import (
    ParamValidationError, StubResponseError, UnStubbedResponseError)
from botocore.model import ServiceModel
from botocore import hooks
class TestStubber(unittest.TestCase):
def setUp(self):
self.event_emitter = hooks.HierarchicalEmitter()
self.client = mock.Mock()
self.client.meta.events = self.event_emitter
self.client.meta.method_to_api_mapping.get.return_value = 'foo'
self.stubber = Stubber(self.client)
self.validate_parameters_mock = mock.Mock()
self.validate_parameters_patch = mock.patch(
'botocore.stub.validate_parameters', self.validate_parameters_mock)
self.validate_parameters_patch.start()
def tearDown(self):
self.validate_parameters_patch.stop()
def emit_get_response_event(self, model=None, request_dict=None,
signer=None, context=None):
if model is None:
model = mock.Mock()
model.name = 'foo'
handler, response = self.event_emitter.emit_until_response(
event_name='before-call.myservice.foo', model=model,
params=request_dict, request_signer=signer, context=context)
return response
def test_stubber_registers_events(self):
self.event_emitter = mock.Mock()
self.client.meta.events = self.event_emitter
self.stubber.activate()
# This just ensures that we register at the correct event
# and nothing more
self.event_emitter.register_first.assert_called_with(
'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
self.event_emitter.register.assert_called_with(
'before-call.*.*', mock.ANY, unique_id=mock.ANY)
def test_stubber_unregisters_events(self):
self.event_emitter = mock.Mock()
self.client.meta.events = self.event_emitter
self.stubber.activate()
self.stubber.deactivate()
self.event_emitter.unregister.assert_any_call(
'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
self.event_emitter.unregister.assert_any_call(
'before-call.*.*', mock.ANY, unique_id=mock.ANY)
def test_context_manager(self):
self.event_emitter = mock.Mock()
self.client.meta.events = self.event_emitter
with self.stubber:
# Ensure events are registered in context
self.event_emitter.register_first.assert_called_with(
'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
self.event_emitter.register.assert_called_with(
'before-call.*.*', mock.ANY, unique_id=mock.ANY)
# Ensure events are no longer registered once we leave the context
self.event_emitter.unregister.assert_any_call(
'before-parameter-build.*.*', mock.ANY, unique_id=mock.ANY)
self.event_emitter.unregister.assert_any_call(
'before-call.*.*', mock.ANY, unique_id=mock.ANY)
def test_add_response(self):
response = {'foo': 'bar'}
self.stubber.add_response('foo', response)
with self.assertRaises(AssertionError):
self.stubber.assert_no_pending_responses()
def test_add_response_fails_when_missing_client_method(self):
del self.client.foo
with self.assertRaises(ValueError):
self.stubber.add_response('foo', {})
def test_validates_service_response(self):
self.stubber.add_response('foo', {})
self.assertTrue(self.validate_parameters_mock.called)
def test_validate_ignores_response_metadata(self):
service_response = {'ResponseMetadata': {'foo': 'bar'}}
service_model = ServiceModel({
'documentation': '',
'operations': {
'foo': {
'name': 'foo',
'input': {'shape': 'StringShape'},
'output': {'shape': 'StringShape'}
}
},
'shapes': {
'StringShape': {'type': 'string'}
}
})
op_name = service_model.operation_names[0]
output_shape = service_model.operation_model(op_name).output_shape
self.client.meta.service_model = service_model
self.stubber.add_response('TestOperation', service_response)
self.validate_parameters_mock.assert_called_with(
{}, output_shape)
# Make sure service response hasn't been mutated
self.assertEqual(
service_response, {'ResponseMetadata': {'foo': 'bar'}})
def test_validates_on_empty_output_shape(self):
service_model = ServiceModel({
'documentation': '',
'operations': {
'foo': {
'name': 'foo'
}
}
})
self.client.meta.service_model = service_model
with self.assertRaises(ParamValidationError):
self.stubber.add_response('TestOperation', {'foo': 'bar'})
def test_get_response(self):
service_response = {'bar': 'baz'}
self.stubber.add_response('foo', service_response)
self.stubber.activate()
response = self.emit_get_response_event()
self.assertEqual(response[1], service_response)
self.assertEqual(response[0].status_code, 200)
def test_get_client_error_response(self):
error_code = "foo"
service_message = "bar"
self.stubber.add_client_error('foo', error_code, service_message)
self.stubber.activate()
response = self.emit_get_response_event()
self.assertEqual(response[1]['Error']['Message'], service_message)
self.assertEqual(response[1]['Error']['Code'], error_code)
def test_get_client_error_with_extra_keys(self):
error_code = "foo"
error_message = "bar"
error_meta = {
"Endpoint": "https://foo.bar.baz",
}
self.stubber.add_client_error(
'foo', error_code, error_message,
http_status_code=301,
service_error_meta=error_meta)
with self.stubber:
response = self.emit_get_response_event()
error = response[1]['Error']
self.assertIn('Endpoint', error)
self.assertEqual(error['Endpoint'], "https://foo.bar.baz")
def test_get_response_errors_with_no_stubs(self):
self.stubber.activate()
with self.assertRaises(UnStubbedResponseError):
self.emit_get_response_event()
def test_assert_no_responses_remaining(self):
self.stubber.add_response('foo', {})
with self.assertRaises(AssertionError):
self.stubber.assert_no_pending_responses()
|
from tests import unittest
import mock
from botocore.history import HistoryRecorder
from botocore.history import BaseHistoryHandler
from botocore.history import get_global_history_recorder
class TerribleError(Exception):
pass
class ExceptionThrowingHandler(BaseHistoryHandler):
def emit(self, event_type, payload, source):
raise TerribleError('Bad behaving handler')
class TestHistoryRecorder(unittest.TestCase):
def test_can_attach_and_call_handler_emit(self):
mock_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.enable()
recorder.add_handler(mock_handler)
recorder.record('foo', 'bar', source='source')
mock_handler.emit.assert_called_with('foo', 'bar', 'source')
def test_can_call_multiple_handlers(self):
first_handler = mock.Mock(spec=BaseHistoryHandler)
second_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.enable()
recorder.add_handler(first_handler)
recorder.add_handler(second_handler)
recorder.record('foo', 'bar', source='source')
first_handler.emit.assert_called_with('foo', 'bar', 'source')
second_handler.emit.assert_called_with('foo', 'bar', 'source')
def test_does_use_botocore_source_by_default(self):
mock_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.enable()
recorder.add_handler(mock_handler)
recorder.record('foo', 'bar')
mock_handler.emit.assert_called_with('foo', 'bar', 'BOTOCORE')
def test_does_not_call_handlers_when_never_enabled(self):
mock_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.add_handler(mock_handler)
recorder.record('foo', 'bar')
mock_handler.emit.assert_not_called()
def test_does_not_call_handlers_when_disabled(self):
mock_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.enable()
recorder.disable()
recorder.add_handler(mock_handler)
recorder.record('foo', 'bar')
mock_handler.emit.assert_not_called()
def test_can_ignore_handler_exceptions(self):
mock_handler = mock.Mock(spec=BaseHistoryHandler)
recorder = HistoryRecorder()
recorder.enable()
bad_handler = ExceptionThrowingHandler()
recorder.add_handler(bad_handler)
recorder.add_handler(mock_handler)
try:
recorder.record('foo', 'bar')
except TerribleError:
self.fail('Should not have raised a TerribleError')
mock_handler.emit.assert_called_with('foo', 'bar', 'BOTOCORE')
class TestGetHistoryRecorder(unittest.TestCase):
def test_can_get_history_recorder(self):
recorder = get_global_history_recorder()
self.assertTrue(isinstance(recorder, HistoryRecorder))
def test_does_reuse_history_recorder(self):
recorder_1 = get_global_history_recorder()
recorder_2 = get_global_history_recorder()
self.assertIs(recorder_1, recorder_2)
|
#!/usr/bin/env python
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import mock
from botocore.vendored.requests import ConnectionError, Timeout
from botocore.vendored.requests.packages.urllib3.exceptions import ClosedPoolError
from botocore import retryhandler
from botocore.exceptions import ChecksumError
HTTP_500_RESPONSE = mock.Mock()
HTTP_500_RESPONSE.status_code = 500
HTTP_400_RESPONSE = mock.Mock()
HTTP_400_RESPONSE.status_code = 400
HTTP_200_RESPONSE = mock.Mock()
HTTP_200_RESPONSE.status_code = 200
class TestRetryCheckers(unittest.TestCase):
def assert_should_be_retried(self, response, attempt_number=1,
caught_exception=None):
self.assertTrue(self.checker(
response=response, attempt_number=attempt_number,
caught_exception=caught_exception))
def assert_should_not_be_retried(self, response, attempt_number=1,
caught_exception=None):
self.assertFalse(self.checker(
response=response, attempt_number=attempt_number,
caught_exception=caught_exception))
def test_status_code_checker(self):
self.checker = retryhandler.HTTPStatusCodeChecker(500)
self.assert_should_be_retried(response=(HTTP_500_RESPONSE, {}))
def test_max_attempts(self):
self.checker = retryhandler.MaxAttemptsDecorator(
retryhandler.HTTPStatusCodeChecker(500), max_attempts=3)
response = {'ResponseMetadata': {}}
# Retry up to three times.
self.assert_should_be_retried(
(HTTP_500_RESPONSE, response), attempt_number=1)
self.assert_should_be_retried(
(HTTP_500_RESPONSE, {}), attempt_number=2)
# On the third failed response, we've reached the
# max attempts so we should return False.
self.assert_should_not_be_retried(
(HTTP_500_RESPONSE, response), attempt_number=3)
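        # Once the max attempts are exhausted, the decorator annotates the
        # parsed response's ResponseMetadata so callers can tell.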
self.assertTrue(response['ResponseMetadata']['MaxAttemptsReached'])
def test_max_attempts_successful(self):
self.checker = retryhandler.MaxAttemptsDecorator(
retryhandler.HTTPStatusCodeChecker(500), max_attempts=3)
self.assert_should_be_retried(
(HTTP_500_RESPONSE, {}), attempt_number=1)
# The second retry is successful.
self.assert_should_not_be_retried(
(HTTP_200_RESPONSE, {}), attempt_number=2)
# But now we can reuse this object.
self.assert_should_be_retried(
(HTTP_500_RESPONSE, {}), attempt_number=1)
self.assert_should_be_retried(
(HTTP_500_RESPONSE, {}), attempt_number=2)
self.assert_should_not_be_retried(
(HTTP_500_RESPONSE, {}), attempt_number=3)
def test_error_code_checker(self):
self.checker = retryhandler.ServiceErrorCodeChecker(
status_code=400, error_code='Throttled')
response = (HTTP_400_RESPONSE,
{'Error': {'Code': 'Throttled'}})
self.assert_should_be_retried(response)
def test_error_code_checker_does_not_match(self):
self.checker = retryhandler.ServiceErrorCodeChecker(
status_code=400, error_code='Throttled')
response = (HTTP_400_RESPONSE,
{'Error': {'Code': 'NotThrottled'}})
self.assert_should_not_be_retried(response)
def test_error_code_checker_ignore_caught_exception(self):
self.checker = retryhandler.ServiceErrorCodeChecker(
status_code=400, error_code='Throttled')
self.assert_should_not_be_retried(response=None,
caught_exception=RuntimeError())
def test_multi_checker(self):
checker = retryhandler.ServiceErrorCodeChecker(
status_code=400, error_code='Throttled')
checker2 = retryhandler.HTTPStatusCodeChecker(500)
self.checker = retryhandler.MultiChecker([checker, checker2])
self.assert_should_be_retried((HTTP_500_RESPONSE, {}))
self.assert_should_be_retried(
response=(HTTP_400_RESPONSE, {'Error': {'Code': 'Throttled'}}))
self.assert_should_not_be_retried(
response=(HTTP_200_RESPONSE, {}))
def test_exception_checker_ignores_response(self):
self.checker = retryhandler.ExceptionRaiser()
self.assert_should_not_be_retried(
response=(HTTP_200_RESPONSE, {}), caught_exception=None)
def test_value_error_raised_when_missing_response_and_exception(self):
self.checker = retryhandler.ExceptionRaiser()
with self.assertRaises(ValueError):
self.checker(1, response=None, caught_exception=None)
class TestCreateRetryConfiguration(unittest.TestCase):
def setUp(self):
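        # Retry configs have a service-wide '__default__' section; the
        # per-operation sections below layer extra policies on top of it.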
self.retry_config = {
'__default__': {
'max_attempts': 5,
'delay': {
'type': 'exponential',
'base': 1,
'growth_factor': 2,
},
'policies': {
'throttling': {
'applies_when': {
'response': {
'service_error_code': 'Throttling',
'http_status_code': 400,
}
}
}
}
},
'OperationFoo': {
'policies': {
'crc32check': {
'applies_when': {
'response': {
'crc32body': 'x-amz-crc32',
}
}
}
}
},
'OperationBar': {
'policies': {
'socket_errors': {
'applies_when': {
'socket_errors': ["GENERAL_CONNECTION_ERROR"],
}
}
}
},
}
def test_create_retry_single_checker_service_level(self):
checker = retryhandler.create_checker_from_retry_config(
self.retry_config, operation_name=None)
self.assertIsInstance(checker, retryhandler.MaxAttemptsDecorator)
# We're reaching into internal fields here, but only to check
# that the object is created properly.
self.assertEqual(checker._max_attempts, 5)
self.assertIsInstance(checker._checker,
retryhandler.ServiceErrorCodeChecker)
self.assertEqual(checker._checker._error_code, 'Throttling')
self.assertEqual(checker._checker._status_code, 400)
def test_create_retry_for_operation(self):
checker = retryhandler.create_checker_from_retry_config(
self.retry_config, operation_name='OperationFoo')
self.assertIsInstance(checker, retryhandler.MaxAttemptsDecorator)
self.assertEqual(checker._max_attempts, 5)
self.assertIsInstance(checker._checker,
retryhandler.MultiChecker)
def test_retry_with_socket_errors(self):
checker = retryhandler.create_checker_from_retry_config(
self.retry_config, operation_name='OperationBar')
self.assertIsInstance(checker, retryhandler.BaseChecker)
all_checkers = checker._checker._checkers
self.assertIsInstance(all_checkers[0],
retryhandler.ServiceErrorCodeChecker)
self.assertIsInstance(all_checkers[1],
retryhandler.ExceptionRaiser)
def test_create_retry_handler_with_socket_errors(self):
handler = retryhandler.create_retry_handler(
self.retry_config, operation_name='OperationBar')
with self.assertRaises(ConnectionError):
handler(response=None, attempts=10,
caught_exception=ConnectionError())
# No connection error raised because attempts < max_attempts.
sleep_time = handler(response=None, attempts=1,
caught_exception=ConnectionError())
self.assertEqual(sleep_time, 1)
# But any other exception should be raised even if
# attempts < max_attempts.
with self.assertRaises(ValueError):
sleep_time = handler(response=None, attempts=1,
caught_exception=ValueError())
def test_connection_timeouts_are_retried(self):
        # If a connection times out, we get a Timeout exception
# from requests. We should be retrying those.
handler = retryhandler.create_retry_handler(
self.retry_config, operation_name='OperationBar')
sleep_time = handler(response=None, attempts=1,
caught_exception=Timeout())
self.assertEqual(sleep_time, 1)
def test_retry_pool_closed_errors(self):
# A ClosedPoolError is retried (this is a workaround for a urllib3
# bug). Can be removed once we upgrade to requests 2.0.0.
handler = retryhandler.create_retry_handler(
self.retry_config, operation_name='OperationBar')
# 4th attempt is retried.
sleep_time = handler(
response=None, attempts=4,
caught_exception=ClosedPoolError('FakePool', 'Message'))
self.assertEqual(sleep_time, 8)
        # But once attempts reach max_attempts (5), the error propagates.
with self.assertRaises(ClosedPoolError):
handler(response=None, attempts=10,
caught_exception=ClosedPoolError('FakePool', 'Message'))
def test_create_retry_handler_with_no_operation(self):
handler = retryhandler.create_retry_handler(
self.retry_config, operation_name=None)
self.assertIsInstance(handler, retryhandler.RetryHandler)
        # There's no good way to test the delay function used as the
        # action other than to just invoke it.
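        # delay == base * growth_factor ** (attempts - 1), so with base=1
        # and growth_factor=2: 1 * 2 ** 1 == 2 and 1 * 2 ** 2 == 4.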
self.assertEqual(handler._action(attempts=2), 2)
self.assertEqual(handler._action(attempts=3), 4)
    def test_crc32_check_propagates_error(self):
handler = retryhandler.create_retry_handler(
self.retry_config, operation_name='OperationFoo')
http_response = mock.Mock()
http_response.status_code = 200
# This is not the crc32 of b'foo', so this should
# fail the crc32 check.
http_response.headers = {'x-amz-crc32': 2356372768}
http_response.content = b'foo'
        # While attempts < max_attempts, the checksum failure results
        # in a retry.
self.assertEqual(handler(response=(http_response, {}), attempts=1,
caught_exception=None), 1)
with self.assertRaises(ChecksumError):
handler(response=(http_response, {}), attempts=10,
caught_exception=None)
class TestRetryHandler(unittest.TestCase):
def test_action_tied_to_policy(self):
# When a retry rule matches we should return the
# amount of time to sleep, otherwise we should return None.
        delay_function = retryhandler.create_exponential_delay_function(1, 2)
checker = retryhandler.HTTPStatusCodeChecker(500)
handler = retryhandler.RetryHandler(checker, delay_function)
response = (HTTP_500_RESPONSE, {})
self.assertEqual(
handler(response=response, attempts=1, caught_exception=None), 1)
self.assertEqual(
handler(response=response, attempts=2, caught_exception=None), 2)
self.assertEqual(
handler(response=response, attempts=3, caught_exception=None), 4)
self.assertEqual(
handler(response=response, attempts=4, caught_exception=None), 8)
def test_none_response_when_no_matches(self):
        delay_function = retryhandler.create_exponential_delay_function(1, 2)
checker = retryhandler.HTTPStatusCodeChecker(500)
handler = retryhandler.RetryHandler(checker, delay_function)
response = (HTTP_200_RESPONSE, {})
self.assertIsNone(handler(response=response, attempts=1,
caught_exception=None))
class TestCRC32Checker(unittest.TestCase):
def setUp(self):
self.checker = retryhandler.CRC32Checker('x-amz-crc32')
def test_crc32_matches(self):
http_response = mock.Mock()
http_response.status_code = 200
# This is the crc32 of b'foo', so this should
# pass the crc32 check.
http_response.headers = {'x-amz-crc32': 2356372769}
http_response.content = b'foo'
self.assertIsNone(self.checker(
response=(http_response, {}), attempt_number=1,
caught_exception=None))
def test_crc32_missing(self):
        # It's not an error if the crc32 header is missing.
http_response = mock.Mock()
http_response.status_code = 200
http_response.headers = {}
self.assertIsNone(self.checker(
response=(http_response, {}), attempt_number=1,
caught_exception=None))
def test_crc32_check_fails(self):
http_response = mock.Mock()
http_response.status_code = 200
# This is not the crc32 of b'foo', so this should
# fail the crc32 check.
http_response.headers = {'x-amz-crc32': 2356372768}
http_response.content = b'foo'
with self.assertRaises(ChecksumError):
self.checker(response=(http_response, {}), attempt_number=1,
caught_exception=None)
class TestDelayExponential(unittest.TestCase):
def test_delay_with_numeric_base(self):
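        # delay_exponential computes base * growth_factor ** (attempts - 1),
        # so 3 * 2 ** (3 - 1) == 12.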
self.assertEqual(retryhandler.delay_exponential(base=3,
growth_factor=2,
attempts=3), 12)
def test_delay_with_rand_string(self):
delay = retryhandler.delay_exponential(base='rand',
growth_factor=2,
attempts=3)
        # 2 ** (3 - 1) == 4, so the delay is between 0 and 4.
self.assertTrue(0 <= delay <= 4)
def test_value_error_raised_with_non_positive_number(self):
with self.assertRaises(ValueError):
retryhandler.delay_exponential(
base=-1, growth_factor=2, attempts=3)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Test runner for the JSON models compliance tests
This is a test runner for all the JSON tests defined in
``tests/unit/protocols/``, including both the input/output tests.
You can use the normal ``nosetests tests/unit/test_protocols.py`` to run
this test. In addition, there are several env vars you can use during
development.
Tests are broken down by filename, test suite, test case. When a test fails
you'll see the protocol (filename), test suite, and test case number of the
failed test.
::
Description : Scalar members (0:0) <--- (suite_id:test_id)
Protocol: : ec2 <--- test file (ec2.json)
Given : ...
Response : ...
Expected serialization: ...
Actual serialization : ...
Assertion message : ...
To run tests from only a single file, you can set the
BOTOCORE_TEST env var::
    BOTOCORE_TEST=tests/unit/protocols/input/json.json nosetests tests/unit/test_protocols.py
To run a single test suite you can set the BOTOCORE_TEST_ID env var:
    BOTOCORE_TEST=tests/unit/protocols/input/json.json BOTOCORE_TEST_ID=5 \
nosetests tests/unit/test_protocols.py
To run a single test case in a suite (useful when debugging a single test), you
can set the BOTOCORE_TEST_ID env var with the ``suite_id:test_id`` syntax.
    BOTOCORE_TEST_ID=5:1 nosetests tests/unit/test_protocols.py
"""
import os
import copy
from dateutil.tz import tzutc
from botocore.compat import json, OrderedDict
from botocore.model import ServiceModel, OperationModel
from botocore.serialize import EC2Serializer, QuerySerializer, \
JSONSerializer, RestJSONSerializer, RestXMLSerializer
from botocore.parsers import QueryParser, JSONParser, \
RestJSONParser, RestXMLParser
from botocore.utils import parse_timestamp, percent_encode_sequence
from calendar import timegm
from nose.tools import assert_equal as _assert_equal
TEST_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'protocols')
NOT_SPECIFIED = object()
PROTOCOL_SERIALIZERS = {
'ec2': EC2Serializer,
'query': QuerySerializer,
'json': JSONSerializer,
'rest-json': RestJSONSerializer,
'rest-xml': RestXMLSerializer,
}
PROTOCOL_PARSERS = {
# ec2/query have the same response parsing logic.
'ec2': QueryParser,
'query': QueryParser,
'json': JSONParser,
'rest-json': RestJSONParser,
'rest-xml': RestXMLParser,
}
PROTOCOL_TEST_BLACKLIST = [
'Idempotency token auto fill'
]
def test_compliance():
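    # Nose-style generator test: each yielded (func, *args) tuple is run
    # as an individual test case.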
for full_path in _walk_files():
if full_path.endswith('.json'):
for model, case, basename in _load_cases(full_path):
if model.get('description') in PROTOCOL_TEST_BLACKLIST:
continue
if 'params' in case:
yield _test_input, model, case, basename
elif 'response' in case:
yield _test_output, model, case, basename
def _test_input(json_description, case, basename):
service_description = copy.deepcopy(json_description)
service_description['operations'] = {
case.get('name', 'OperationName'): case,
}
model = ServiceModel(service_description)
protocol_type = model.metadata['protocol']
try:
protocol_serializer = PROTOCOL_SERIALIZERS[protocol_type]
except KeyError:
raise RuntimeError("Unknown protocol: %s" % protocol_type)
serializer = protocol_serializer()
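    # Use a deterministic map type so the serialized request has a stable
    # ordering that matches the ordered JSON fixtures.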
serializer.MAP_TYPE = OrderedDict
operation_model = OperationModel(case['given'], model)
request = serializer.serialize_to_request(case['params'], operation_model)
_serialize_request_description(request)
try:
_assert_request_body_is_bytes(request['body'])
_assert_requests_equal(request, case['serialized'])
except AssertionError as e:
_input_failure_message(protocol_type, case, request, e)
def _assert_request_body_is_bytes(body):
if not isinstance(body, bytes):
raise AssertionError("Expected body to be serialized as type "
"bytes(), instead got: %s" % type(body))
def _test_output(json_description, case, basename):
service_description = copy.deepcopy(json_description)
service_description['operations'] = {
case.get('name', 'OperationName'): case,
}
try:
model = ServiceModel(service_description)
operation_model = OperationModel(case['given'], model)
parser = PROTOCOL_PARSERS[model.metadata['protocol']](
timestamp_parser=_compliance_timestamp_parser)
# We load the json as utf-8, but the response parser is at the
# botocore boundary, so it expects to work with bytes.
body = case['response']['body']
case['response']['body'] = body.encode('utf-8')
parsed = parser.parse(case['response'], operation_model.output_shape)
parsed = _fixup_parsed_result(parsed)
except Exception as e:
msg = (
"\nFailed to run test : %s\n"
"Protocol : %s\n"
"Description : %s (%s:%s)\n" % (
e, model.metadata['protocol'],
case['description'], case['suite_id'], case['test_id']))
raise AssertionError(msg)
try:
assert_equal(parsed, case['result'], "Body")
except Exception as e:
_output_failure_message(model.metadata['protocol'],
case, parsed, e)
def _fixup_parsed_result(parsed):
    # This function contains all the transformations we need
    # to apply to the responses _our_ parsers produce so they
    # match the expected responses in the protocol tests.
    # These are implementation-specific changes, not any
    # "we're not following the spec"-type changes.
    # 1. ResponseMetadata. We parse this onto the returned dict, but compliance
    # tests don't have any specs for how to deal with response metadata.
if 'ResponseMetadata' in parsed:
del parsed['ResponseMetadata']
# 2. Binary blob types. In the protocol test, blob types, when base64
# decoded, always decode to something that can be expressed via utf-8.
# This is not always the case. In python3, the blob type is designed to
    # return a bytes (not str) object. However, for these tests we'll accept
    # any bytes type and decode it as utf-8 because we know that's safe for
# the compliance tests.
parsed = _convert_bytes_to_str(parsed)
return parsed
def _convert_bytes_to_str(parsed):
if isinstance(parsed, dict):
new_dict = {}
for key, value in parsed.items():
new_dict[key] = _convert_bytes_to_str(value)
return new_dict
elif isinstance(parsed, bytes):
return parsed.decode('utf-8')
elif isinstance(parsed, list):
new_list = []
for item in parsed:
new_list.append(_convert_bytes_to_str(item))
return new_list
else:
return parsed
def _compliance_timestamp_parser(value):
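    # Normalize whatever timestamp format the fixture uses to an integer
    # epoch, e.g. '1970-01-01T00:00:01Z' -> 1.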
datetime = parse_timestamp(value)
# Convert from our time zone to UTC
datetime = datetime.astimezone(tzutc())
# Convert to epoch.
return int(timegm(datetime.timetuple()))
def _output_failure_message(protocol_type, case, actual_parsed, error):
j = _try_json_dump
error_message = (
"\nDescription : %s (%s:%s)\n"
"Protocol: : %s\n"
"Given : %s\n"
"Response : %s\n"
"Expected serialization: %s\n"
"Actual serialization : %s\n"
"Assertion message : %s\n" % (
case['description'], case['suite_id'],
case['test_id'], protocol_type,
j(case['given']), j(case['response']),
j(case['result']), j(actual_parsed), error))
raise AssertionError(error_message)
def _input_failure_message(protocol_type, case, actual_request, error):
j = _try_json_dump
error_message = (
"\nDescription : %s (%s:%s)\n"
"Protocol: : %s\n"
"Given : %s\n"
"Params : %s\n"
"Expected serialization: %s\n"
"Actual serialization : %s\n"
"Assertion message : %s\n" % (
case['description'], case['suite_id'],
case['test_id'], protocol_type,
j(case['given']), j(case['params']),
j(case['serialized']), j(actual_request), error))
raise AssertionError(error_message)
def _try_json_dump(obj):
try:
return json.dumps(obj)
except (ValueError, TypeError):
return str(obj)
def assert_equal(first, second, prefix):
    # A better assert_equal. It allows you to just provide a
    # prefix instead of the entire message.
try:
_assert_equal(first, second)
except Exception:
try:
better = "%s (actual != expected)\n%s !=\n%s" % (
prefix,
json.dumps(first, indent=2),
json.dumps(second, indent=2))
except (ValueError, TypeError):
better = "%s (actual != expected)\n%s !=\n%s" % (
prefix, first, second)
raise AssertionError(better)
def _serialize_request_description(request_dict):
if isinstance(request_dict.get('body'), dict):
# urlencode the request body.
encoded = percent_encode_sequence(request_dict['body']).encode('utf-8')
request_dict['body'] = encoded
if isinstance(request_dict.get('query_string'), dict):
encoded = percent_encode_sequence(request_dict.pop('query_string'))
if encoded:
            # 'requests' automatically handles this, but in the
            # test runner we need to handle the case where the url_path
            # already has query params.
if '?' not in request_dict['url_path']:
request_dict['url_path'] += '?%s' % encoded
else:
request_dict['url_path'] += '&%s' % encoded
def _assert_requests_equal(actual, expected):
assert_equal(actual['body'], expected['body'].encode('utf-8'),
'Body value')
actual_headers = dict(actual['headers'])
expected_headers = expected.get('headers', {})
assert_equal(actual_headers, expected_headers, "Header values")
assert_equal(actual['url_path'], expected.get('uri', ''), "URI")
if 'method' in expected:
assert_equal(actual['method'], expected['method'], "Method")
def _walk_files():
# Check for a shortcut when running the tests interactively.
# If a BOTOCORE_TEST env var is defined, that file is used as the
# only test to run. Useful when doing feature development.
single_file = os.environ.get('BOTOCORE_TEST')
if single_file is not None:
yield os.path.abspath(single_file)
else:
for root, _, filenames in os.walk(TEST_DIR):
for filename in filenames:
yield os.path.join(root, filename)
def _load_cases(full_path):
    # During development, you can set the BOTOCORE_TEST_ID
# to run a specific test suite or even a specific test case.
# The format is BOTOCORE_TEST_ID=suite_id:test_id or
# BOTOCORE_TEST_ID=suite_id
suite_id, test_id = _get_suite_test_id()
all_test_data = json.load(open(full_path), object_pairs_hook=OrderedDict)
basename = os.path.basename(full_path)
for i, test_data in enumerate(all_test_data):
if suite_id is not None and i != suite_id:
continue
cases = test_data.pop('cases')
description = test_data['description']
for j, case in enumerate(cases):
if test_id is not None and j != test_id:
continue
case['description'] = description
case['suite_id'] = i
case['test_id'] = j
yield (test_data, case, basename)
def _get_suite_test_id():
if 'BOTOCORE_TEST_ID' not in os.environ:
return None, None
test_id = None
suite_id = None
split = os.environ['BOTOCORE_TEST_ID'].split(':')
try:
if len(split) == 2:
suite_id, test_id = int(split[0]), int(split[1])
else:
            suite_id = int(split[0])
    except ValueError:
        # Same exception, just give a better error message.
        raise ValueError("Invalid format for BOTOCORE_TEST_ID, should be "
"suite_id[:test_id], and both values should be "
"integers.")
return suite_id, test_id
|
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import os
from tests import unittest, BaseEnvVar
import mock
import botocore
from botocore.compat import six
from botocore.exceptions import ClientError, WaiterConfigError, WaiterError
from botocore.waiter import Waiter, WaiterModel, SingleWaiterConfig
from botocore.waiter import create_waiter_with_client
from botocore.waiter import NormalizedOperationMethod
from botocore.loaders import Loader
from botocore.model import ServiceModel
class TestWaiterModel(unittest.TestCase):
def setUp(self):
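        # The common fields every waiter config requires; individual tests
        # supply the 'acceptors' list.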
self.boiler_plate_config = {
'description': 'Waiter description',
'operation': 'HeadBucket',
'delay': 5,
'maxAttempts': 20,
}
def create_acceptor_function(self, for_config):
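        # Build a full waiter config around a single acceptor and return
        # its compiled matcher function.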
single_waiter = {
'acceptors': [for_config]
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
return config.acceptors[0].matcher_func
def test_waiter_version(self):
self.assertEqual(WaiterModel({'version': 2, 'waiters': {}}).version, 2)
def test_wont_load_missing_version_in_config(self):
# We only load waiter configs if we know for sure that we're
# loading version 2 of the format.
waiters = {
# Missing the 'version' key.
'waiters': {}
}
with self.assertRaises(WaiterConfigError):
WaiterModel(waiters)
def test_unsupported_waiter_version(self):
waiters = {
'version': 1,
'waiters': {}
}
with self.assertRaises(WaiterConfigError):
WaiterModel(waiters)
def test_waiter_names(self):
waiters = {
'version': 2,
'waiters': {
'BarWaiter': {},
'FooWaiter': {},
}
}
self.assertEqual(WaiterModel(waiters).waiter_names, ['BarWaiter',
'FooWaiter'])
def test_get_single_waiter_config(self):
single_waiter = {
'description': 'Waiter description',
'operation': 'HeadBucket',
'delay': 5,
'maxAttempts': 20,
'acceptors': [
{'state': 'success', 'matcher': 'status', 'expected': 200},
{'state': 'retry', 'matcher': 'status', 'expected': 404},
],
}
waiters = {
'version': 2,
'waiters': {
'BucketExists': single_waiter,
}
}
model = WaiterModel(waiters)
config = model.get_waiter('BucketExists')
self.assertEqual(config.operation, 'HeadBucket')
def test_get_waiter_does_not_exist(self):
waiters = {
'version': 2,
'waiters': {}
}
model = WaiterModel(waiters)
with self.assertRaises(ValueError):
model.get_waiter('UnknownWaiter')
def test_single_waiter_config_attributes(self):
single_waiter = {
'description': 'Waiter description',
'operation': 'HeadBucket',
'delay': 5,
'maxAttempts': 20,
'acceptors': [
],
}
config = SingleWaiterConfig(single_waiter)
self.assertEqual(config.description, 'Waiter description')
self.assertEqual(config.operation, 'HeadBucket')
self.assertEqual(config.delay, 5)
self.assertEqual(config.max_attempts, 20)
def test_single_waiter_acceptors_built_with_matcher_func(self):
# When the list of acceptors are requested, we actually will transform
# them into values that are easier to use.
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': 'status', 'expected': 200},
],
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
success_acceptor = config.acceptors[0]
self.assertEqual(success_acceptor.state, 'success')
self.assertEqual(success_acceptor.matcher, 'status')
self.assertEqual(success_acceptor.expected, 200)
self.assertTrue(callable(success_acceptor.matcher_func))
def test_single_waiter_acceptor_matches_jmespath(self):
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': 'path',
'argument': 'Table.TableStatus', 'expected': 'ACCEPTED'},
],
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
success_acceptor = config.acceptors[0].matcher_func
# success_acceptor is a callable that takes a response dict and returns
# True or False.
self.assertTrue(
success_acceptor({'Table': {'TableStatus': 'ACCEPTED'}}))
self.assertFalse(
success_acceptor({'Table': {'TableStatus': 'CREATING'}}))
def test_single_waiter_supports_status_code(self):
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': 'status',
'expected': 200}
],
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
success_acceptor = config.acceptors[0].matcher_func
self.assertTrue(
success_acceptor({'ResponseMetadata': {'HTTPStatusCode': 200}}))
self.assertFalse(
success_acceptor({'ResponseMetadata': {'HTTPStatusCode': 404}}))
def test_single_waiter_supports_error(self):
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': 'error',
'expected': 'DoesNotExistError'}
],
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
success_acceptor = config.acceptors[0].matcher_func
self.assertTrue(
success_acceptor({'Error': {'Code': 'DoesNotExistError'}}))
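        # A deliberately misspelled error code must not match.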
self.assertFalse(
success_acceptor({'Error': {'Code': 'DoesNotExistErorr'}}))
def test_unknown_matcher(self):
unknown_type = 'arbitrary_type'
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': unknown_type,
'expected': 'foo'}
]
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
with self.assertRaises(WaiterConfigError):
config.acceptors
def test_single_waiter_supports_path_all(self):
matches = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAll',
'argument': 'Tables[].State', 'expected': 'GOOD'})
self.assertTrue(
matches({'Tables': [{"State": "GOOD"}]}))
self.assertTrue(
matches({'Tables': [{"State": "GOOD"}, {"State": "GOOD"}]}))
def test_single_waiter_supports_path_any(self):
matches = self.create_acceptor_function(
for_config={'state': 'failure', 'matcher': 'pathAny',
'argument': 'Tables[].State', 'expected': 'FAIL'})
self.assertTrue(
matches({'Tables': [{"State": "FAIL"}]}))
self.assertTrue(
matches({'Tables': [{"State": "GOOD"}, {"State": "FAIL"}]}))
def test_waiter_handles_error_responses_with_path_matchers(self):
path_any = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAny',
'argument': 'length(Tables) > `0`',
'expected': True})
path_all = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAll',
'argument': 'length(Tables) > `0`',
'expected': True})
path = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'path',
'argument': 'length(Tables) > `0`',
'expected': True})
self.assertFalse(path_any({'Error': {'Code': 'DoesNotExist'}}))
self.assertFalse(path_all({'Error': {'Code': 'DoesNotExist'}}))
self.assertFalse(path({'Error': {'Code': 'DoesNotExist'}}))
def test_single_waiter_does_not_match_path_all(self):
matches = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAll',
'argument': 'Tables[].State', 'expected': 'GOOD'})
self.assertFalse(
matches({'Tables': [{"State": "GOOD"}, {"State": "BAD"}]}))
self.assertFalse(
matches({'Tables': [{"State": "BAD"}, {"State": "GOOD"}]}))
self.assertFalse(
matches({'Tables': [{"State": "BAD"}, {"State": "BAD"}]}))
self.assertFalse(
matches({'Tables': []}))
self.assertFalse(
matches({'Tables': [{"State": "BAD"},
{"State": "BAD"},
{"State": "BAD"},
{"State": "BAD"}]}))
def test_path_all_missing_field(self):
matches = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAll',
'argument': 'Tables[].State', 'expected': 'GOOD'})
self.assertFalse(
matches({'Tables': [{"NotState": "GOOD"}, {"NotState": "BAD"}]}))
def test_path_all_matcher_does_not_receive_list(self):
matches = self.create_acceptor_function(
for_config={'state': 'success', 'matcher': 'pathAll',
'argument': 'Tables[].State', 'expected': 'GOOD'})
self.assertFalse(
matches({"NotTables": []}))
def test_single_waiter_supports_all_three_states(self):
single_waiter = {
'acceptors': [
{'state': 'success', 'matcher': 'error',
'expected': 'DoesNotExistError'},
{'state': 'success', 'matcher': 'status',
'expected': 200},
{'state': 'success', 'matcher': 'path',
'argument': 'Foo.Bar', 'expected': 'baz'},
],
}
single_waiter.update(self.boiler_plate_config)
config = SingleWaiterConfig(single_waiter)
acceptors = config.acceptors
        # Each acceptor should be able to handle not matching
# any type of response.
matches_nothing = {}
self.assertFalse(acceptors[0].matcher_func(matches_nothing))
self.assertFalse(acceptors[1].matcher_func(matches_nothing))
self.assertFalse(acceptors[2].matcher_func(matches_nothing))
class TestWaitersObjects(unittest.TestCase):
def setUp(self):
pass
def client_responses_are(self, *responses, **kwargs):
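        # Queue successive parsed responses on the mocked operation method;
        # mock's side_effect returns them one per call.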
operation_method = kwargs['for_operation']
operation_method.side_effect = responses
return operation_method
def create_waiter_config(self, operation='MyOperation',
delay=0, max_attempts=3,
acceptors=None):
if acceptors is None:
# Create some arbitrary acceptor that will never match.
acceptors = [{'state': 'success', 'matcher': 'status',
'expected': 1000}]
waiter_config = {
'operation': operation,
'delay': delay,
'maxAttempts': max_attempts,
'acceptors': acceptors
}
config = SingleWaiterConfig(waiter_config)
return config
def test_waiter_waits_until_acceptor_matches(self):
config = self.create_waiter_config(
max_attempts=3,
acceptors=[{'state': 'success', 'matcher': 'path',
'argument': 'Foo', 'expected': 'SUCCESS'}])
# Simulate the client having two calls that don't
# match followed by a third call that matches the
# acceptor.
operation_method = mock.Mock()
waiter = Waiter('MyWaiter', config, operation_method)
self.client_responses_are(
{'Foo': 'FAILURE'},
{'Foo': 'FAILURE'},
{'Foo': 'SUCCESS'},
for_operation=operation_method
)
waiter.wait()
self.assertEqual(operation_method.call_count, 3)
def test_waiter_never_matches(self):
# Verify that a matcher will fail after max_attempts
# is exceeded.
config = self.create_waiter_config(max_attempts=3)
operation_method = mock.Mock()
self.client_responses_are(
{'Foo': 'FAILURE'},
{'Foo': 'FAILURE'},
{'Foo': 'FAILURE'},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError):
waiter.wait()
def test_unspecified_errors_stops_waiter(self):
# If a waiter receives an error response, then the
# waiter immediately stops.
config = self.create_waiter_config()
operation_method = mock.Mock()
self.client_responses_are(
# This is an unknown error that's not called out
# in any of the waiter config, so when the
# waiter encounters this response it will transition
# to the failure state.
{'Error': {'Code': 'UnknownError', 'Message': 'bad error'}},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError):
waiter.wait()
def test_last_response_available_on_waiter_error(self):
last_response = {'Error': {'Code': 'UnknownError', 'Message': 'bad error'}}
config = self.create_waiter_config()
operation_method = mock.Mock()
self.client_responses_are(last_response,
for_operation=operation_method)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError) as e:
waiter.wait()
self.assertEqual(e.exception.last_response, last_response)
def test_unspecified_errors_propagate_error_code(self):
# If a waiter receives an error response, then the
# waiter should pass along the error code
config = self.create_waiter_config()
operation_method = mock.Mock()
        error_code = 'error_code'
error_message = 'error_message'
self.client_responses_are(
# This is an unknown error that's not called out
# in any of the waiter config, so when the
# waiter encounters this response it will transition
# to the failure state.
{'Error': {'Code': error_code, 'Message': error_message}},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaisesRegexp(WaiterError, error_message):
waiter.wait()
def test_waiter_transitions_to_failure_state(self):
acceptors = [
# A success state that will never be hit.
{'state': 'success', 'matcher': 'status', 'expected': 1000},
{'state': 'failure', 'matcher': 'error', 'expected': 'FailError'},
]
config = self.create_waiter_config(acceptors=acceptors)
operation_method = mock.Mock()
self.client_responses_are(
{'Nothing': 'foo'},
# And on the second attempt, a FailError is seen, which
# causes the waiter to fail fast.
{'Error': {'Code': 'FailError', 'Message': 'foo'}},
{'WillNeverGetCalled': True},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError):
waiter.wait()
# Not only should we raise an exception, but we should have
# only called the operation_method twice because the second
# response triggered a fast fail.
self.assertEqual(operation_method.call_count, 2)
def test_waiter_handles_retry_state(self):
acceptor_with_retry_state = [
{'state': 'success', 'matcher': 'status', 'expected': 200},
{'state': 'retry', 'matcher': 'error', 'expected': 'RetryMe'},
]
config = self.create_waiter_config(
acceptors=acceptor_with_retry_state)
operation_method = mock.Mock()
self.client_responses_are(
{'Nothing': 'foo'},
{'Error': {'Code': 'RetryMe', 'Message': 'foo'}},
{'Success': True,
'ResponseMetadata': {'HTTPStatusCode': 200}},
{'NeverCalled': True},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
waiter.wait()
self.assertEqual(operation_method.call_count, 3)
def test_waiter_transitions_to_retry_but_max_attempts_exceeded(self):
acceptors = [
{'state': 'success', 'matcher': 'status', 'expected': 200},
{'state': 'retry', 'matcher': 'error', 'expected': 'RetryMe'},
]
config = self.create_waiter_config(acceptors=acceptors)
operation_method = mock.Mock()
self.client_responses_are(
{'Success': False},
{'Error': {'Code': 'RetryMe', 'Message': 'foo'}},
{'Success': False},
{'Success': False},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError):
waiter.wait()
def test_kwargs_are_passed_through(self):
acceptors = [
{'state': 'success', 'matcher': 'error', 'expected': 'MyError'},
]
config = self.create_waiter_config(acceptors=acceptors)
operation_method = mock.Mock()
self.client_responses_are(
{'Error': {'Code': 'MyError'}},
for_operation=operation_method)
waiter = Waiter('MyWaiter', config, operation_method)
waiter.wait(Foo='foo', Bar='bar', Baz='baz')
operation_method.assert_called_with(Foo='foo', Bar='bar',
Baz='baz')
@mock.patch('time.sleep')
def test_waiter_honors_delay_time_between_retries(self, sleep_mock):
delay_time = 5
config = self.create_waiter_config(delay=delay_time)
operation_method = mock.Mock()
self.client_responses_are(
            # None of these responses match the acceptor, so the
            # waiter exhausts its attempts and raises a WaiterError.
{'Success': False},
{'Success': False},
{'Success': False},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
with self.assertRaises(WaiterError):
waiter.wait()
# We attempt three times, which means we need to sleep
# twice, once before each subsequent request.
self.assertEqual(sleep_mock.call_count, 2)
sleep_mock.assert_called_with(delay_time)
@mock.patch('time.sleep')
def test_waiter_invocation_config_honors_delay(self, sleep_mock):
config = self.create_waiter_config()
operation_method = mock.Mock()
self.client_responses_are(
{'Success': False},
{'Success': False},
{'Success': False},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
custom_delay = 3
with self.assertRaises(WaiterError):
waiter.wait(WaiterConfig={'Delay': custom_delay})
# We attempt three times, which means we need to sleep
# twice, once before each subsequent request.
self.assertEqual(sleep_mock.call_count, 2)
sleep_mock.assert_called_with(custom_delay)
def test_waiter_invocation_config_honors_max_attempts(self):
config = self.create_waiter_config()
operation_method = mock.Mock()
self.client_responses_are(
{'Success': False},
{'Success': False},
for_operation=operation_method
)
waiter = Waiter('MyWaiter', config, operation_method)
custom_max = 2
with self.assertRaises(WaiterError):
waiter.wait(WaiterConfig={'MaxAttempts': custom_max})
self.assertEqual(operation_method.call_count, 2)
class TestCreateWaiter(unittest.TestCase):
def setUp(self):
self.waiter_config = {
'version': 2,
'waiters': {
'WaiterName': {
'operation': 'Foo',
'delay': 1,
'maxAttempts': 1,
'acceptors': [],
},
},
}
self.waiter_model = WaiterModel(self.waiter_config)
self.service_json_model = {
'metadata': {
'serviceFullName': 'Amazon MyService'
},
'operations': {
'Foo': {
'name': 'Foo',
'input': {'shape': 'FooInputOutput'},
'output': {'shape': 'FooInputOutput'}
}
},
'shapes': {
'FooInputOutput': {
'type': 'structure',
'members': {
'bar': {
'shape': 'String',
'documentation': 'Documents bar'
}
}
},
'String': {
'type': 'string'
}
}
}
self.service_model = ServiceModel(self.service_json_model, 'myservice')
self.client = mock.Mock()
self.client.meta.service_model = self.service_model
def test_can_create_waiter_from_client(self):
waiter_name = 'WaiterName'
waiter = create_waiter_with_client(
waiter_name, self.waiter_model, self.client)
self.assertIsInstance(waiter, Waiter)
def test_waiter_class_name(self):
waiter_name = 'WaiterName'
waiter = create_waiter_with_client(
waiter_name, self.waiter_model, self.client)
self.assertEqual(
waiter.__class__.__name__,
'MyService.Waiter.WaiterName'
)
def test_waiter_help_documentation(self):
waiter_name = 'WaiterName'
waiter = create_waiter_with_client(
waiter_name, self.waiter_model, self.client)
with mock.patch('sys.stdout', six.StringIO()) as mock_stdout:
help(waiter.wait)
content = mock_stdout.getvalue()
lines = [
(' Polls :py:meth:`MyService.Client.foo` every 1 '
'seconds until a successful state is reached. An error '
'is returned after 1 failed checks.'),
' **Request Syntax** ',
' ::',
' waiter.wait(',
" bar='string'",
' )',
' :type bar: string',
' :param bar: Documents bar',
' :returns: None',
]
for line in lines:
self.assertIn(line, content)
class TestOperationMethods(unittest.TestCase):
def test_normalized_op_method_makes_call(self):
client_method = mock.Mock()
op = NormalizedOperationMethod(client_method)
op(Foo='a', Bar='b')
client_method.assert_called_with(Foo='a', Bar='b')
def test_normalized_op_returns_error_response(self):
# Client objects normally throw exceptions when an error
# occurs, but we need to return the parsed error response.
client_method = mock.Mock()
op = NormalizedOperationMethod(client_method)
parsed_response = {
'Error': {'Code': 'Foo', 'Message': 'bar'}
}
exception = ClientError(parsed_response, 'OperationName')
client_method.side_effect = exception
actual_response = op(Foo='a', Bar='b')
self.assertEqual(actual_response, parsed_response)
class ServiceWaiterFunctionalTest(BaseEnvVar):
"""
This class is used as a base class if you want to functionally test the
waiters for a specific service.
"""
def setUp(self):
super(ServiceWaiterFunctionalTest, self).setUp()
self.data_path = os.path.join(
os.path.dirname(botocore.__file__), 'data')
self.environ['AWS_DATA_PATH'] = self.data_path
self.loader = Loader([self.data_path])
def get_waiter_model(self, service, api_version=None):
"""Get the waiter model for the service."""
with mock.patch('botocore.loaders.Loader.list_available_services',
return_value=[service]):
return WaiterModel(self.loader.load_service_model(
service, type_name='waiters-2', api_version=api_version))
def get_service_model(self, service, api_version=None):
"""Get the service model for the service."""
with mock.patch('botocore.loaders.Loader.list_available_services',
return_value=[service]):
return ServiceModel(
self.loader.load_service_model(
service, type_name='service-2', api_version=api_version),
service_name=service
)
class CloudFrontWaitersTest(ServiceWaiterFunctionalTest):
def setUp(self):
super(CloudFrontWaitersTest, self).setUp()
self.client = mock.Mock()
self.service = 'cloudfront'
self.old_api_versions = ['2014-05-31']
def assert_distribution_deployed_call_count(self, api_version=None):
waiter_name = 'DistributionDeployed'
waiter_model = self.get_waiter_model(self.service, api_version)
self.client.meta.service_model = self.get_service_model(
self.service, api_version)
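        # A single 'Deployed' response means the waiter succeeds on the
        # first call.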
self.client.get_distribution.side_effect = [
{'Distribution': {'Status': 'Deployed'}}
]
waiter = create_waiter_with_client(waiter_name, waiter_model,
self.client)
waiter.wait()
self.assertEqual(self.client.get_distribution.call_count, 1)
def assert_invalidation_completed_call_count(self, api_version=None):
waiter_name = 'InvalidationCompleted'
waiter_model = self.get_waiter_model(self.service, api_version)
self.client.meta.service_model = self.get_service_model(
self.service, api_version)
self.client.get_invalidation.side_effect = [
{'Invalidation': {'Status': 'Completed'}}
]
waiter = create_waiter_with_client(waiter_name, waiter_model,
self.client)
waiter.wait()
self.assertEqual(self.client.get_invalidation.call_count, 1)
def assert_streaming_distribution_deployed_call_count(
self, api_version=None):
waiter_name = 'StreamingDistributionDeployed'
waiter_model = self.get_waiter_model(self.service, api_version)
self.client.meta.service_model = self.get_service_model(
self.service, api_version)
self.client.get_streaming_distribution.side_effect = [
{'StreamingDistribution': {'Status': 'Deployed'}}
]
waiter = create_waiter_with_client(waiter_name, waiter_model,
self.client)
waiter.wait()
self.assertEqual(self.client.get_streaming_distribution.call_count, 1)
def test_distribution_deployed(self):
# Test the latest version.
self.assert_distribution_deployed_call_count()
self.client.reset_mock()
# Test previous api versions.
for api_version in self.old_api_versions:
self.assert_distribution_deployed_call_count(api_version)
self.client.reset_mock()
def test_invalidation_completed(self):
# Test the latest version.
self.assert_invalidation_completed_call_count()
self.client.reset_mock()
# Test previous api versions.
for api_version in self.old_api_versions:
self.assert_invalidation_completed_call_count(api_version)
self.client.reset_mock()
def test_streaming_distribution_deployed(self):
# Test the latest version.
self.assert_streaming_distribution_deployed_call_count()
self.client.reset_mock()
# Test previous api versions.
for api_version in self.old_api_versions:
self.assert_streaming_distribution_deployed_call_count(api_version)
self.client.reset_mock()
|
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
|
#!/usr/bin/env python
# Copyright (c) 2012-2013 Mitch Garnaat http://garnaat.org/
# Copyright 2012-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import datetime
import time
import base64
import json
import mock
import botocore.auth
import botocore.credentials
from botocore.compat import HTTPHeaders, urlsplit, parse_qs, six
from botocore.awsrequest import AWSRequest
from botocore.vendored.requests.models import Request
class BaseTestWithFixedDate(unittest.TestCase):
def setUp(self):
self.datetime_patch = mock.patch('botocore.auth.datetime')
self.datetime_mock = self.datetime_patch.start()
self.fixed_date = datetime.datetime(2014, 3, 10, 17, 2, 55, 0)
self.datetime_mock.datetime.utcnow.return_value = self.fixed_date
self.datetime_mock.datetime.strptime.return_value = self.fixed_date
def tearDown(self):
self.datetime_patch.stop()
class TestHMACV1(unittest.TestCase):
maxDiff = None
def setUp(self):
access_key = '44CF9590006BF252F707'
secret_key = 'OtxrzxIsfpFjA7SwPzILwy8Bw21TLhquhboDYROV'
self.credentials = botocore.credentials.Credentials(access_key,
secret_key)
self.hmacv1 = botocore.auth.HmacV1Auth(self.credentials, None, None)
self.date_mock = mock.patch('botocore.auth.formatdate')
self.formatdate = self.date_mock.start()
self.formatdate.return_value = 'Thu, 17 Nov 2005 18:49:58 GMT'
def tearDown(self):
self.date_mock.stop()
def test_put(self):
headers = {'Date': 'Thu, 17 Nov 2005 18:49:58 GMT',
'Content-Md5': 'c8fdb181845a4ca6b8fec737b3581d76',
'Content-Type': 'text/html',
'X-Amz-Meta-Author': '[email protected]',
'X-Amz-Magic': 'abracadabra'}
http_headers = HTTPHeaders.from_dict(headers)
split = urlsplit('/quotes/nelson')
cs = self.hmacv1.canonical_string('PUT', split, http_headers)
expected_canonical = (
"PUT\nc8fdb181845a4ca6b8fec737b3581d76\ntext/html\n"
"Thu, 17 Nov 2005 18:49:58 GMT\nx-amz-magic:abracadabra\n"
"x-amz-meta-author:[email protected]\n/quotes/nelson")
expected_signature = 'jZNOcbfWmD/A/f3hSvVzXZjM2HU='
self.assertEqual(cs, expected_canonical)
sig = self.hmacv1.get_signature('PUT', split, http_headers)
self.assertEqual(sig, expected_signature)
def test_duplicate_headers(self):
pairs = [('Date', 'Thu, 17 Nov 2005 18:49:58 GMT'),
('Content-Md5', 'c8fdb181845a4ca6b8fec737b3581d76'),
('Content-Type', 'text/html'),
('X-Amz-Meta-Author', '[email protected]'),
('X-Amz-Meta-Author', '[email protected]'),
('X-Amz-Magic', 'abracadabra')]
http_headers = HTTPHeaders.from_pairs(pairs)
split = urlsplit('/quotes/nelson')
sig = self.hmacv1.get_signature('PUT', split, http_headers)
self.assertEqual(sig, 'kIdMxyiYB+F+83zYGR6sSb3ICcE=')
def test_query_string(self):
split = urlsplit('/quotes/nelson?uploads')
pairs = [('Date', 'Thu, 17 Nov 2005 18:49:58 GMT')]
sig = self.hmacv1.get_signature('PUT', split,
HTTPHeaders.from_pairs(pairs))
self.assertEqual(sig, 'P7pBz3Z4p3GxysRSJ/gR8nk7D4o=')
def test_bucket_operations(self):
# Check that the standard operations on buckets that are
# specified as query strings end up in the canonical resource.
operations = ('acl', 'cors', 'lifecycle', 'policy',
'notification', 'logging', 'tagging',
'requestPayment', 'versioning', 'website')
for operation in operations:
url = '/quotes?%s' % operation
split = urlsplit(url)
cr = self.hmacv1.canonical_resource(split)
self.assertEqual(cr, '/quotes?%s' % operation)
def test_sign_with_token(self):
credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar', token='baz')
auth = botocore.auth.HmacV1Auth(credentials)
request = AWSRequest()
request.headers['Date'] = 'Thu, 17 Nov 2005 18:49:58 GMT'
request.headers['Content-Type'] = 'text/html'
request.method = 'PUT'
request.url = 'https://s3.amazonaws.com/bucket/key'
auth.add_auth(request)
self.assertIn('Authorization', request.headers)
# We're not actually checking the signature here, we're
# just making sure the auth header has the right format.
self.assertTrue(request.headers['Authorization'].startswith('AWS '))
def test_resign_with_token(self):
credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar', token='baz')
auth = botocore.auth.HmacV1Auth(credentials)
request = AWSRequest()
request.headers['Date'] = 'Thu, 17 Nov 2005 18:49:58 GMT'
request.headers['Content-Type'] = 'text/html'
request.method = 'PUT'
request.url = 'https://s3.amazonaws.com/bucket/key'
auth.add_auth(request)
original_auth = request.headers['Authorization']
# Resigning the request shouldn't change the authorization
# header. We are also ensuring that the date stays the same
# because we're mocking out the formatdate() call. There's
# another unit test that verifies we use the latest time
# when we sign the request.
auth.add_auth(request)
self.assertEqual(request.headers.get_all('Authorization'),
[original_auth])
def test_resign_uses_most_recent_date(self):
dates = [
'Thu, 17 Nov 2005 18:49:58 GMT',
'Thu, 17 Nov 2014 20:00:00 GMT',
]
self.formatdate.side_effect = dates
request = AWSRequest()
request.headers['Content-Type'] = 'text/html'
request.method = 'PUT'
request.url = 'https://s3.amazonaws.com/bucket/key'
self.hmacv1.add_auth(request)
original_date = request.headers['Date']
self.hmacv1.add_auth(request)
modified_date = request.headers['Date']
        # Each time we sign a request, we make another call to
        # formatdate(), so we should get a different Date header each time.
self.assertEqual(original_date, dates[0])
self.assertEqual(modified_date, dates[1])
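# For readers of the HMAC-V1 tests above: the S3 signature is an
# HMAC-SHA1 of the canonical string, base64-encoded. A minimal sketch
# of that derivation (illustrative helper, not botocore's
# implementation; the parameter names are assumptions):
def _sketch_hmacv1_signature(secret_key, method, content_md5,
                             content_type, date, amz_headers, resource):
    import base64
    import hmac
    from hashlib import sha1
    # Canonicalize x-amz-* headers: lowercase names, sorted, one
    # 'name:value' line each.
    canon_amz = ''.join(
        '%s:%s\n' % (name.lower(), value)
        for name, value in sorted(amz_headers.items(),
                                  key=lambda kv: kv[0].lower()))
    string_to_sign = '\n'.join(
        [method, content_md5, content_type, date]) + '\n' + \
        canon_amz + resource
    digest = hmac.new(secret_key.encode('utf-8'),
                      string_to_sign.encode('utf-8'), sha1).digest()
    return base64.b64encode(digest).decode('utf-8')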
class TestSigV2(unittest.TestCase):
maxDiff = None
def setUp(self):
access_key = 'foo'
secret_key = 'bar'
self.credentials = botocore.credentials.Credentials(access_key,
secret_key)
self.signer = botocore.auth.SigV2Auth(self.credentials)
self.time_patcher = mock.patch.object(
botocore.auth.time, 'gmtime',
mock.Mock(wraps=time.gmtime)
)
mocked_time = self.time_patcher.start()
mocked_time.return_value = time.struct_time(
[2014, 6, 20, 8, 40, 23, 4, 171, 0])
def tearDown(self):
self.time_patcher.stop()
    def test_post(self):
request = mock.Mock()
request.url = '/'
request.method = 'POST'
params = {'Foo': u'\u2713'}
result = self.signer.calc_signature(request, params)
self.assertEqual(
result, ('Foo=%E2%9C%93',
u'VCtWuwaOL0yMffAT8W4y0AFW3W4KUykBqah9S40rB+Q='))
def test_fields(self):
request = Request()
request.url = '/'
request.method = 'POST'
request.data = {'Foo': u'\u2713'}
self.signer.add_auth(request)
self.assertEqual(request.data['AWSAccessKeyId'], 'foo')
self.assertEqual(request.data['Foo'], u'\u2713')
self.assertEqual(request.data['Timestamp'], '2014-06-20T08:40:23Z')
self.assertEqual(request.data['Signature'],
u'Tiecw+t51tok4dTT8B4bg47zxHEM/KcD55f2/x6K22o=')
self.assertEqual(request.data['SignatureMethod'], 'HmacSHA256')
self.assertEqual(request.data['SignatureVersion'], '2')
def test_resign(self):
# Make sure that resigning after e.g. retries works
request = Request()
request.url = '/'
request.method = 'POST'
params = {
'Foo': u'\u2713',
'Signature': u'VCtWuwaOL0yMffAT8W4y0AFW3W4KUykBqah9S40rB+Q='
}
result = self.signer.calc_signature(request, params)
self.assertEqual(
result, ('Foo=%E2%9C%93',
u'VCtWuwaOL0yMffAT8W4y0AFW3W4KUykBqah9S40rB+Q='))
def test_get(self):
request = Request()
request.url = '/'
request.method = 'GET'
request.params = {'Foo': u'\u2713'}
self.signer.add_auth(request)
self.assertEqual(request.params['AWSAccessKeyId'], 'foo')
self.assertEqual(request.params['Foo'], u'\u2713')
self.assertEqual(request.params['Timestamp'], '2014-06-20T08:40:23Z')
self.assertEqual(request.params['Signature'],
u'Un97klqZCONP65bA1+Iv4H3AcB2I40I4DBvw5ZERFPw=')
self.assertEqual(request.params['SignatureMethod'], 'HmacSHA256')
self.assertEqual(request.params['SignatureVersion'], '2')
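# A rough sketch of the SigV2 computation exercised above: sort the
# parameters, percent-encode them RFC-3986 style (spaces as %20, not
# '+'), and HMAC-SHA256 the 'method\nhost\npath\nquery' string.
# Illustrative only, not botocore's code; the explicit host argument
# is an assumption (the tests derive it from the request).
def _sketch_sigv2_signature(secret_key, method, host, path, params):
    import base64
    import hmac
    from hashlib import sha256
    from six.moves.urllib.parse import quote
    pairs = ['%s=%s' % (quote(key, safe=''),
                        quote(str(value).encode('utf-8'), safe='-_~'))
             for key, value in sorted(params.items())]
    qs = '&'.join(pairs)
    string_to_sign = '\n'.join([method, host, path, qs])
    digest = hmac.new(secret_key.encode('utf-8'),
                      string_to_sign.encode('utf-8'), sha256).digest()
    return qs, base64.b64encode(digest).decode('utf-8')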
class TestSigV3(unittest.TestCase):
maxDiff = None
def setUp(self):
self.access_key = 'access_key'
self.secret_key = 'secret_key'
self.credentials = botocore.credentials.Credentials(self.access_key,
self.secret_key)
self.auth = botocore.auth.SigV3Auth(self.credentials)
self.date_mock = mock.patch('botocore.auth.formatdate')
self.formatdate = self.date_mock.start()
self.formatdate.return_value = 'Thu, 17 Nov 2005 18:49:58 GMT'
def tearDown(self):
self.date_mock.stop()
def test_signature_with_date_headers(self):
request = AWSRequest()
request.headers = {'Date': 'Thu, 17 Nov 2005 18:49:58 GMT'}
request.url = 'https://route53.amazonaws.com'
self.auth.add_auth(request)
self.assertEqual(
request.headers['X-Amzn-Authorization'],
('AWS3-HTTPS AWSAccessKeyId=access_key,Algorithm=HmacSHA256,'
'Signature=M245fo86nVKI8rLpH4HgWs841sBTUKuwciiTpjMDgPs='))
def test_resign_with_token(self):
credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar', token='baz')
auth = botocore.auth.SigV3Auth(credentials)
request = AWSRequest()
request.headers['Date'] = 'Thu, 17 Nov 2005 18:49:58 GMT'
request.method = 'PUT'
request.url = 'https://route53.amazonaws.com/'
auth.add_auth(request)
original_auth = request.headers['X-Amzn-Authorization']
# Resigning the request shouldn't change the authorization
# header.
auth.add_auth(request)
self.assertEqual(request.headers.get_all('X-Amzn-Authorization'),
[original_auth])
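# AWS3-HTTPS (SigV3) is much simpler than SigV4: it signs only the
# Date (or X-Amz-Date) header value with HMAC-SHA256 and
# base64-encodes the result. A hedged sketch of the header
# construction (illustrative, not botocore's implementation):
def _sketch_sigv3_auth_header(access_key, secret_key, date_header):
    import base64
    import hmac
    from hashlib import sha256
    digest = hmac.new(secret_key.encode('utf-8'),
                      date_header.encode('utf-8'), sha256).digest()
    signature = base64.b64encode(digest).decode('utf-8')
    return ('AWS3-HTTPS AWSAccessKeyId=%s,Algorithm=HmacSHA256,'
            'Signature=%s' % (access_key, signature))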
class TestS3SigV4Auth(BaseTestWithFixedDate):
maxDiff = None
def setUp(self):
super(TestS3SigV4Auth, self).setUp()
self.credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar', token='baz')
self.auth = botocore.auth.S3SigV4Auth(
self.credentials, 'ec2', 'eu-central-1')
self.request = AWSRequest(data=six.BytesIO(b"foo bar baz"))
self.request.method = 'PUT'
self.request.url = 'https://s3.eu-central-1.amazonaws.com/'
self.client_config = mock.Mock()
self.s3_config = {}
self.client_config.s3 = self.s3_config
self.request.context = {
'client_config': self.client_config
}
def test_resign_with_content_hash(self):
self.auth.add_auth(self.request)
original_auth = self.request.headers['Authorization']
self.auth.add_auth(self.request)
self.assertEqual(self.request.headers.get_all('Authorization'),
[original_auth])
def test_signature_is_not_normalized(self):
request = AWSRequest()
request.url = 'https://s3.amazonaws.com/bucket/foo/./bar/../bar'
request.method = 'GET'
credentials = botocore.credentials.Credentials('access_key',
'secret_key')
auth = botocore.auth.S3SigV4Auth(credentials, 's3', 'us-east-1')
auth.add_auth(request)
self.assertTrue(
request.headers['Authorization'].startswith('AWS4-HMAC-SHA256'))
def test_query_string_params_in_urls(self):
request = AWSRequest()
request.url = (
'https://s3.amazonaws.com/bucket?'
'marker=%C3%A4%C3%B6%C3%BC-01.txt&prefix'
)
request.data = {'Action': 'MyOperation'}
request.method = 'GET'
        # Check that the canonical query string is correctly formatted
        # by ensuring that query string parameters added to the
        # canonical query string are correctly encoded.
cqs = self.auth.canonical_query_string(request)
self.assertEqual('marker=%C3%A4%C3%B6%C3%BC-01.txt&prefix=', cqs)
def _test_blacklist_header(self, header, value):
request = AWSRequest()
request.url = 'https://s3.amazonaws.com/bucket/foo'
request.method = 'PUT'
request.headers[header] = value
credentials = botocore.credentials.Credentials('access_key',
'secret_key')
auth = botocore.auth.S3SigV4Auth(credentials, 's3', 'us-east-1')
auth.add_auth(request)
self.assertNotIn(header, request.headers['Authorization'])
def test_blacklist_expect_headers(self):
self._test_blacklist_header('expect', '100-continue')
def test_blacklist_trace_id(self):
self._test_blacklist_header('x-amzn-trace-id',
'Root=foo;Parent=bar;Sampleid=1')
def test_blacklist_headers(self):
self._test_blacklist_header('user-agent', 'botocore/1.4.11')
def test_context_sets_signing_region(self):
original_signing_region = 'eu-central-1'
new_signing_region = 'us-west-2'
self.auth.add_auth(self.request)
auth = self.request.headers['Authorization']
self.assertIn(original_signing_region, auth)
self.assertNotIn(new_signing_region, auth)
self.request.context = {'signing': {'region': new_signing_region}}
self.auth.add_auth(self.request)
auth = self.request.headers['Authorization']
self.assertIn(new_signing_region, auth)
self.assertNotIn(original_signing_region, auth)
def test_uses_sha256_if_config_value_is_true(self):
self.client_config.s3['payload_signing_enabled'] = True
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_does_not_use_sha256_if_config_value_is_false(self):
self.client_config.s3['payload_signing_enabled'] = False
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_uses_sha256_if_md5_unset(self):
self.request.context['has_streaming_input'] = True
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_uses_sha256_if_not_https(self):
self.request.context['has_streaming_input'] = True
self.request.headers.add_header('Content-MD5', 'foo')
self.request.url = 'http://s3.amazonaws.com/bucket'
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_uses_sha256_if_not_streaming_upload(self):
self.request.context['has_streaming_input'] = False
self.request.headers.add_header('Content-MD5', 'foo')
self.request.url = 'https://s3.amazonaws.com/bucket'
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_does_not_use_sha256_if_md5_set(self):
self.request.context['has_streaming_input'] = True
self.request.headers.add_header('Content-MD5', 'foo')
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_does_not_use_sha256_if_context_config_set(self):
self.request.context['payload_signing_enabled'] = False
self.request.headers.add_header('Content-MD5', 'foo')
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_sha256_if_context_set_on_http(self):
self.request.context['payload_signing_enabled'] = False
self.request.headers.add_header('Content-MD5', 'foo')
self.request.url = 'http://s3.amazonaws.com/bucket'
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_sha256_if_context_set_without_md5(self):
self.request.context['payload_signing_enabled'] = False
self.request.url = 'https://s3.amazonaws.com/bucket'
self.auth.add_auth(self.request)
sha_header = self.request.headers['X-Amz-Content-SHA256']
self.assertNotEqual(sha_header, 'UNSIGNED-PAYLOAD')
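# The tests above collectively pin down S3SigV4Auth's payload-signing
# decision. A hedged paraphrase of that decision table (illustrative,
# not botocore's actual code):
def _sketch_should_sign_payload(url, headers, context, s3_config):
    # An explicit client-config setting wins outright.
    configured = s3_config.get('payload_signing_enabled')
    if configured is not None:
        return configured
    # Skipping the payload hash requires both TLS and a Content-MD5.
    if not url.startswith('https') or 'Content-MD5' not in headers:
        return True
    # Otherwise honor a per-request opt-out, and leave streaming
    # uploads unsigned by default.
    if 'payload_signing_enabled' in context:
        return context['payload_signing_enabled']
    return not context.get('has_streaming_input', False)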
class TestSigV4(unittest.TestCase):
def setUp(self):
self.credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar')
def create_signer(self, service_name='myservice', region='us-west-2'):
auth = botocore.auth.SigV4Auth(
self.credentials, service_name, region)
return auth
def test_canonical_query_string(self):
request = AWSRequest()
request.url = (
'https://search-testdomain1-j67dwxlet67gf7ghwfmik2c67i.us-west-2.'
'cloudsearch.amazonaws.com/'
'2013-01-01/search?format=sdk&pretty=true&'
'q.options=%7B%22defaultOperator%22%3A%20%22and%22%2C%20%22'
'fields%22%3A%5B%22directors%5E10%22%5D%7D&q=George%20Lucas'
)
request.method = 'GET'
auth = self.create_signer('cloudsearchdomain', 'us-west-2')
actual = auth.canonical_query_string(request)
# Here 'q' should come before 'q.options'.
expected = ("format=sdk&pretty=true&q=George%20Lucas&q.options=%7B%22"
"defaultOperator%22%3A%20%22and%22%2C%20%22fields%22%3A%5B"
"%22directors%5E10%22%5D%7D")
self.assertEqual(actual, expected)
def test_thread_safe_timestamp(self):
request = AWSRequest()
request.url = (
'https://search-testdomain1-j67dwxlet67gf7ghwfmik2c67i.us-west-2.'
'cloudsearch.amazonaws.com/'
'2013-01-01/search?format=sdk&pretty=true&'
'q.options=%7B%22defaultOperator%22%3A%20%22and%22%2C%20%22'
'fields%22%3A%5B%22directors%5E10%22%5D%7D&q=George%20Lucas'
)
request.method = 'GET'
auth = self.create_signer('cloudsearchdomain', 'us-west-2')
with mock.patch.object(
botocore.auth.datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as mock_datetime:
original_utcnow = datetime.datetime(2014, 1, 1, 0, 0)
mock_datetime.utcnow.return_value = original_utcnow
# Go through the add_auth process once. This will attach
# a timestamp to the request at the beginning of auth.
auth.add_auth(request)
self.assertEqual(request.context['timestamp'], '20140101T000000Z')
# Ensure the date is in the Authorization header
self.assertIn('20140101', request.headers['Authorization'])
            # Now suppose the UTC time suddenly becomes the next day
mock_datetime.utcnow.return_value = datetime.datetime(
2014, 1, 2, 0, 0)
            # Helper methods like canonical_request and string_to_sign
            # should use the timestamp already attached to the request,
            # not the currently mocked time. This ensures there is no
            # timestamp mismatch when signing.
cr = auth.canonical_request(request)
self.assertIn('x-amz-date:20140101T000000Z', cr)
self.assertNotIn('x-amz-date:20140102T000000Z', cr)
sts = auth.string_to_sign(request, cr)
self.assertIn('20140101T000000Z', sts)
self.assertNotIn('20140102T000000Z', sts)
def test_payload_is_binary_file(self):
request = AWSRequest()
request.data = six.BytesIO(u'\u2713'.encode('utf-8'))
request.url = 'https://amazonaws.com'
auth = self.create_signer()
payload = auth.payload(request)
self.assertEqual(
payload,
'1dabba21cdad44541f6b15796f8d22978fc7ea10c46aeceeeeb66c23b3ac7604')
def test_payload_is_bytes_type(self):
request = AWSRequest()
request.data = u'\u2713'.encode('utf-8')
request.url = 'https://amazonaws.com'
auth = self.create_signer()
payload = auth.payload(request)
self.assertEqual(
payload,
'1dabba21cdad44541f6b15796f8d22978fc7ea10c46aeceeeeb66c23b3ac7604')
def test_payload_not_signed_if_disabled_in_context(self):
request = AWSRequest()
request.data = u'\u2713'.encode('utf-8')
request.url = 'https://amazonaws.com'
request.context['payload_signing_enabled'] = False
auth = self.create_signer()
payload = auth.payload(request)
self.assertEqual(payload, 'UNSIGNED-PAYLOAD')
def test_content_sha256_set_if_payload_signing_disabled(self):
request = AWSRequest()
request.data = six.BytesIO(u'\u2713'.encode('utf-8'))
request.url = 'https://amazonaws.com'
request.context['payload_signing_enabled'] = False
request.method = 'PUT'
auth = self.create_signer()
auth.add_auth(request)
sha_header = request.headers['X-Amz-Content-SHA256']
self.assertEqual(sha_header, 'UNSIGNED-PAYLOAD')
def test_collapse_multiple_spaces(self):
auth = self.create_signer()
original = HTTPHeaders()
original['foo'] = 'double space'
headers = auth.canonical_headers(original)
self.assertEqual(headers, 'foo:double space')
def test_trims_leading_trailing_spaces(self):
auth = self.create_signer()
original = HTTPHeaders()
original['foo'] = ' leading and trailing '
headers = auth.canonical_headers(original)
self.assertEqual(headers, 'foo:leading and trailing')
def test_strips_http_default_port(self):
request = AWSRequest()
request.url = 'http://s3.us-west-2.amazonaws.com:80/'
request.method = 'GET'
auth = self.create_signer('s3', 'us-west-2')
actual = auth.headers_to_sign(request)['host']
expected = 's3.us-west-2.amazonaws.com'
self.assertEqual(actual, expected)
def test_strips_https_default_port(self):
request = AWSRequest()
request.url = 'https://s3.us-west-2.amazonaws.com:443/'
request.method = 'GET'
auth = self.create_signer('s3', 'us-west-2')
actual = auth.headers_to_sign(request)['host']
expected = 's3.us-west-2.amazonaws.com'
self.assertEqual(actual, expected)
def test_strips_http_auth(self):
request = AWSRequest()
request.url = 'https://username:[email protected]/'
request.method = 'GET'
auth = self.create_signer('s3', 'us-west-2')
actual = auth.headers_to_sign(request)['host']
expected = 's3.us-west-2.amazonaws.com'
self.assertEqual(actual, expected)
def test_strips_default_port_and_http_auth(self):
request = AWSRequest()
request.url = 'http://username:[email protected]:80/'
request.method = 'GET'
auth = self.create_signer('s3', 'us-west-2')
actual = auth.headers_to_sign(request)['host']
expected = 's3.us-west-2.amazonaws.com'
self.assertEqual(actual, expected)
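# For reference while reading the SigV4 tests: the final signature is
# an HMAC-SHA256 of the string to sign under a key derived from the
# secret key, date, region, and service. The derivation below is the
# published SigV4 scheme, sketched here for clarity:
def _sketch_sigv4_signing_key(secret_key, date_stamp, region, service):
    import hmac
    from hashlib import sha256
    def sign(key, msg):
        return hmac.new(key, msg.encode('utf-8'), sha256).digest()
    k_date = sign(('AWS4' + secret_key).encode('utf-8'), date_stamp)
    k_region = sign(k_date, region)
    k_service = sign(k_region, service)
    return sign(k_service, 'aws4_request')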
class TestSigV4Resign(BaseTestWithFixedDate):
maxDiff = None
def setUp(self):
super(TestSigV4Resign, self).setUp()
self.credentials = botocore.credentials.Credentials(
access_key='foo', secret_key='bar', token='baz')
self.auth = botocore.auth.SigV4Auth(self.credentials,
'ec2', 'us-west-2')
self.request = AWSRequest()
self.request.method = 'PUT'
self.request.url = 'https://ec2.amazonaws.com/'
def test_resign_request_with_date(self):
self.request.headers['Date'] = 'Thu, 17 Nov 2005 18:49:58 GMT'
self.auth.add_auth(self.request)
original_auth = self.request.headers['Authorization']
self.auth.add_auth(self.request)
self.assertEqual(self.request.headers.get_all('Authorization'),
[original_auth])
def test_sigv4_without_date(self):
self.auth.add_auth(self.request)
original_auth = self.request.headers['Authorization']
self.auth.add_auth(self.request)
self.assertEqual(self.request.headers.get_all('Authorization'),
[original_auth])
class BasePresignTest(unittest.TestCase):
def get_parsed_query_string(self, request):
query_string_dict = parse_qs(urlsplit(request.url).query)
        # parse_qs sets each value in the dict to a list, but because
        # we know we won't have repeated keys, we simplify each entry
        # back to a single value.
for key in query_string_dict:
query_string_dict[key] = query_string_dict[key][0]
return query_string_dict
class TestS3SigV2Presign(BasePresignTest):
def setUp(self):
self.access_key = 'access_key'
self.secret_key = 'secret_key'
self.credentials = botocore.credentials.Credentials(self.access_key,
self.secret_key)
self.expires = 3000
self.auth = botocore.auth.HmacV1QueryAuth(
self.credentials, expires=self.expires)
self.current_epoch_time = 1427427247.465591
self.time_patch = mock.patch('time.time')
self.time_mock = self.time_patch.start()
self.time_mock.return_value = self.current_epoch_time
self.request = AWSRequest()
self.bucket = 'mybucket'
self.key = 'myobject'
self.path = 'https://s3.amazonaws.com/%s/%s' % (
self.bucket, self.key)
self.request.url = self.path
self.request.method = 'GET'
def tearDown(self):
self.time_patch.stop()
def test_presign_with_query_string(self):
self.request.url = (
u'https://foo-bucket.s3.amazonaws.com/image.jpg'
u'?response-content-disposition='
'attachment%3B%20filename%3D%22download.jpg%22')
self.auth.add_auth(self.request)
query_string = self.get_parsed_query_string(self.request)
        # We should still have kept the response-content-disposition
        # in the query string.
self.assertIn('response-content-disposition', query_string)
self.assertEqual(query_string['response-content-disposition'],
'attachment; filename="download.jpg"')
# But we should have also added the parts from the signer.
self.assertEqual(query_string['AWSAccessKeyId'], self.access_key)
def test_presign_no_headers(self):
self.auth.add_auth(self.request)
self.assertTrue(self.request.url.startswith(self.path + '?'))
query_string = self.get_parsed_query_string(self.request)
self.assertEqual(query_string['AWSAccessKeyId'], self.access_key)
self.assertEqual(query_string['Expires'],
str(int(self.current_epoch_time) + self.expires))
self.assertEqual(query_string['Signature'],
'ZRSgywstwIruKLTLt/Bcrf9H1K4=')
def test_presign_with_x_amz_headers(self):
self.request.headers['x-amz-security-token'] = 'foo'
self.request.headers['x-amz-acl'] = 'read-only'
self.auth.add_auth(self.request)
query_string = self.get_parsed_query_string(self.request)
self.assertEqual(query_string['x-amz-security-token'], 'foo')
self.assertEqual(query_string['x-amz-acl'], 'read-only')
self.assertEqual(query_string['Signature'],
'5oyMAGiUk1E5Ry2BnFr6cIS3Gus=')
def test_presign_with_content_headers(self):
self.request.headers['content-type'] = 'txt'
self.request.headers['content-md5'] = 'foo'
self.auth.add_auth(self.request)
query_string = self.get_parsed_query_string(self.request)
self.assertEqual(query_string['content-type'], 'txt')
self.assertEqual(query_string['content-md5'], 'foo')
self.assertEqual(query_string['Signature'],
'/YQRFdQGywXP74WrOx2ET/RUqz8=')
def test_presign_with_unused_headers(self):
self.request.headers['user-agent'] = 'botocore'
self.auth.add_auth(self.request)
query_string = self.get_parsed_query_string(self.request)
self.assertNotIn('user-agent', query_string)
self.assertEqual(query_string['Signature'],
'ZRSgywstwIruKLTLt/Bcrf9H1K4=')
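# HmacV1QueryAuth presigning, as exercised above, reuses the HMAC-V1
# canonical string but puts an absolute Expires epoch in the Date
# slot and moves the credentials into the query string. A hedged
# sketch for a request with no Content-MD5/Content-Type headers:
def _sketch_hmacv1_presign_params(access_key, secret_key, method,
                                  resource, expires_in, now):
    import base64
    import hmac
    from hashlib import sha1
    # int() truncation matches the str(int(...)) assertion above.
    expires = str(int(now) + expires_in)
    string_to_sign = '\n'.join([method, '', '', expires, resource])
    digest = hmac.new(secret_key.encode('utf-8'),
                      string_to_sign.encode('utf-8'), sha1).digest()
    return {'AWSAccessKeyId': access_key,
            'Expires': expires,
            'Signature': base64.b64encode(digest).decode('utf-8')}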
class TestSigV4Presign(BasePresignTest):
maxDiff = None
def setUp(self):
self.access_key = 'access_key'
self.secret_key = 'secret_key'
self.credentials = botocore.credentials.Credentials(self.access_key,
self.secret_key)
self.service_name = 'myservice'
self.region_name = 'myregion'
self.auth = botocore.auth.SigV4QueryAuth(
self.credentials, self.service_name, self.region_name, expires=60)
self.datetime_patcher = mock.patch.object(
botocore.auth.datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)
)
mocked_datetime = self.datetime_patcher.start()
mocked_datetime.utcnow.return_value = datetime.datetime(
2014, 1, 1, 0, 0)
def tearDown(self):
self.datetime_patcher.stop()
def test_presign_no_params(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://ec2.us-east-1.amazonaws.com/'
self.auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
self.assertEqual(
query_string,
{'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': ('access_key/20140101/myregion/'
'myservice/aws4_request'),
'X-Amz-Date': '20140101T000000Z',
'X-Amz-Expires': '60',
'X-Amz-Signature': ('c70e0bcdb4cd3ee324f71c78195445b878'
'8315af0800bbbdbbb6d05a616fb84c'),
'X-Amz-SignedHeaders': 'host'})
def test_operation_params_before_auth_params(self):
        # SigV4 requires the operation parameters to come before the
        # added auth parameters in the query string.
request = AWSRequest()
request.method = 'GET'
request.url = 'https://ec2.us-east-1.amazonaws.com/?Action=MyOperation'
self.auth.add_auth(request)
# Verify auth params come after the existing params.
self.assertIn(
'?Action=MyOperation&X-Amz', request.url)
def test_operation_params_before_auth_params_in_body(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://ec2.us-east-1.amazonaws.com/'
request.data = {'Action': 'MyOperation'}
self.auth.add_auth(request)
        # Same situation: the params from request.data come before the
        # auth params in the query string.
self.assertIn(
'?Action=MyOperation&X-Amz', request.url)
def test_presign_with_spaces_in_param(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://ec2.us-east-1.amazonaws.com/'
request.data = {'Action': 'MyOperation', 'Description': 'With Spaces'}
self.auth.add_auth(request)
        # Verify we encode spaces as '%20' and don't use '+'.
self.assertIn('Description=With%20Spaces', request.url)
def test_presign_with_empty_param_value(self):
request = AWSRequest()
request.method = 'POST'
        # Actual URL format for creating a multipart upload.
request.url = 'https://s3.amazonaws.com/mybucket/mykey?uploads'
self.auth.add_auth(request)
        # Verify that the 'uploads' param is still in the URL.
self.assertIn('uploads', request.url)
def test_s3_sigv4_presign(self):
auth = botocore.auth.S3SigV4QueryAuth(
self.credentials, self.service_name, self.region_name, expires=60)
request = AWSRequest()
request.method = 'GET'
request.url = (
'https://s3.us-west-2.amazonaws.com/mybucket/keyname/.bar')
auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
# We use a different payload:
self.assertEqual(auth.payload(request), 'UNSIGNED-PAYLOAD')
# which will result in a different X-Amz-Signature:
self.assertEqual(
query_string,
{'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': ('access_key/20140101/myregion/'
'myservice/aws4_request'),
'X-Amz-Date': '20140101T000000Z',
'X-Amz-Expires': '60',
'X-Amz-Signature': ('ac1b8b9e47e8685c5c963d75e35e8741d55251'
'cd955239cc1efad4dc7201db66'),
'X-Amz-SignedHeaders': 'host'})
def test_presign_with_security_token(self):
self.credentials.token = 'security-token'
auth = botocore.auth.S3SigV4QueryAuth(
self.credentials, self.service_name, self.region_name, expires=60)
request = AWSRequest()
request.method = 'GET'
request.url = 'https://ec2.us-east-1.amazonaws.com/'
auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
self.assertEqual(
query_string['X-Amz-Security-Token'], 'security-token')
def test_presign_where_body_is_json_bytes(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://myservice.us-east-1.amazonaws.com/'
request.data = b'{"Param": "value"}'
self.auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
expected_query_string = {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': (
'access_key/20140101/myregion/myservice/aws4_request'),
'X-Amz-Expires': '60',
'X-Amz-Date': '20140101T000000Z',
'X-Amz-Signature': (
'8e1d372d168d532313ce6df8f64a7dc51d'
'e6f312a9cfba6e5b345d8a771e839c'),
'X-Amz-SignedHeaders': 'host',
'Param': 'value'
}
self.assertEqual(query_string, expected_query_string)
def test_presign_where_body_is_json_string(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://myservice.us-east-1.amazonaws.com/'
request.data = '{"Param": "value"}'
self.auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
expected_query_string = {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': (
'access_key/20140101/myregion/myservice/aws4_request'),
'X-Amz-Expires': '60',
'X-Amz-Date': '20140101T000000Z',
'X-Amz-Signature': (
'8e1d372d168d532313ce6df8f64a7dc51d'
'e6f312a9cfba6e5b345d8a771e839c'),
'X-Amz-SignedHeaders': 'host',
'Param': 'value'
}
self.assertEqual(query_string, expected_query_string)
def test_presign_content_type_form_encoded_not_signed(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://myservice.us-east-1.amazonaws.com/'
request.headers['Content-Type'] = (
'application/x-www-form-urlencoded; charset=utf-8'
)
self.auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
signed_headers = query_string.get('X-Amz-SignedHeaders')
self.assertNotIn('content-type', signed_headers)
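# SigV4 query presigning moves the auth data into X-Amz-* query
# parameters instead of headers. A hedged outline of the parameter
# set these tests assert on (the signature itself comes from the
# usual canonical-request computation):
def _sketch_sigv4_presign_params(access_key, timestamp, region,
                                 service, expires,
                                 signed_headers='host'):
    scope = '/'.join([timestamp[:8], region, service, 'aws4_request'])
    return {
        'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
        'X-Amz-Credential': '%s/%s' % (access_key, scope),
        'X-Amz-Date': timestamp,
        'X-Amz-Expires': str(expires),
        'X-Amz-SignedHeaders': signed_headers,
        # 'X-Amz-Signature' is appended after signing.
    }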
class BaseS3PresignPostTest(unittest.TestCase):
def setUp(self):
self.access_key = 'access_key'
self.secret_key = 'secret_key'
self.credentials = botocore.credentials.Credentials(
self.access_key, self.secret_key)
self.service_name = 'myservice'
self.region_name = 'myregion'
self.bucket = 'mybucket'
self.key = 'mykey'
self.policy = {
"expiration": "2007-12-01T12:00:00.000Z",
"conditions": [
{"acl": "public-read"},
{"bucket": self.bucket},
["starts-with", "$key", self.key],
]
}
self.fields = {
'key': self.key,
'acl': 'public-read',
}
self.request = AWSRequest()
self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
self.request.method = 'POST'
self.request.context['s3-presign-post-fields'] = self.fields
self.request.context['s3-presign-post-policy'] = self.policy
class TestS3SigV2Post(BaseS3PresignPostTest):
def setUp(self):
super(TestS3SigV2Post, self).setUp()
self.auth = botocore.auth.HmacV1PostAuth(self.credentials)
self.current_epoch_time = 1427427247.465591
self.time_patch = mock.patch('time.time')
self.time_mock = self.time_patch.start()
self.time_mock.return_value = self.current_epoch_time
def tearDown(self):
self.time_patch.stop()
def test_presign_post(self):
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(
result_fields['AWSAccessKeyId'], self.credentials.access_key)
result_policy = json.loads(base64.b64decode(
result_fields['policy']).decode('utf-8'))
self.assertEqual(result_policy['expiration'],
'2007-12-01T12:00:00.000Z')
self.assertEqual(
result_policy['conditions'],
[{"acl": "public-read"},
{"bucket": "mybucket"},
["starts-with", "$key", "mykey"]])
self.assertIn('signature', result_fields)
def test_presign_post_with_security_token(self):
self.credentials.token = 'my-token'
self.auth = botocore.auth.HmacV1PostAuth(self.credentials)
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(result_fields['x-amz-security-token'], 'my-token')
def test_empty_fields_and_policy(self):
self.request = AWSRequest()
self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
self.request.method = 'POST'
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(
result_fields['AWSAccessKeyId'], self.credentials.access_key)
result_policy = json.loads(base64.b64decode(
result_fields['policy']).decode('utf-8'))
self.assertEqual(result_policy['conditions'], [])
self.assertIn('signature', result_fields)
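# HmacV1PostAuth, per the assertions above, base64-encodes the JSON
# policy document and signs it with HMAC-SHA1. A hedged sketch
# (illustrative helper, not botocore's implementation):
def _sketch_v2_post_fields(access_key, secret_key, policy_dict):
    import base64
    import hmac
    import json
    from hashlib import sha1
    policy = base64.b64encode(
        json.dumps(policy_dict).encode('utf-8')).decode('utf-8')
    signature = base64.b64encode(hmac.new(
        secret_key.encode('utf-8'), policy.encode('utf-8'),
        sha1).digest()).decode('utf-8')
    return {'AWSAccessKeyId': access_key,
            'policy': policy,
            'signature': signature}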
class TestS3SigV4Post(BaseS3PresignPostTest):
def setUp(self):
super(TestS3SigV4Post, self).setUp()
self.auth = botocore.auth.S3SigV4PostAuth(
self.credentials, self.service_name, self.region_name)
self.datetime_patcher = mock.patch.object(
botocore.auth.datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)
)
mocked_datetime = self.datetime_patcher.start()
mocked_datetime.utcnow.return_value = datetime.datetime(
2014, 1, 1, 0, 0)
def tearDown(self):
self.datetime_patcher.stop()
def test_presign_post(self):
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(result_fields['x-amz-algorithm'], 'AWS4-HMAC-SHA256')
self.assertEqual(
result_fields['x-amz-credential'],
'access_key/20140101/myregion/myservice/aws4_request')
self.assertEqual(
result_fields['x-amz-date'],
'20140101T000000Z')
result_policy = json.loads(base64.b64decode(
result_fields['policy']).decode('utf-8'))
self.assertEqual(result_policy['expiration'],
'2007-12-01T12:00:00.000Z')
self.assertEqual(
result_policy['conditions'],
[{"acl": "public-read"}, {"bucket": "mybucket"},
["starts-with", "$key", "mykey"],
{"x-amz-algorithm": "AWS4-HMAC-SHA256"},
{"x-amz-credential":
"access_key/20140101/myregion/myservice/aws4_request"},
{"x-amz-date": "20140101T000000Z"}])
self.assertIn('x-amz-signature', result_fields)
def test_presign_post_with_security_token(self):
self.credentials.token = 'my-token'
self.auth = botocore.auth.S3SigV4PostAuth(
self.credentials, self.service_name, self.region_name)
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(result_fields['x-amz-security-token'], 'my-token')
def test_empty_fields_and_policy(self):
self.request = AWSRequest()
self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
self.request.method = 'POST'
self.auth.add_auth(self.request)
result_fields = self.request.context['s3-presign-post-fields']
self.assertEqual(result_fields['x-amz-algorithm'], 'AWS4-HMAC-SHA256')
self.assertEqual(
result_fields['x-amz-credential'],
'access_key/20140101/myregion/myservice/aws4_request')
self.assertEqual(
result_fields['x-amz-date'],
'20140101T000000Z')
result_policy = json.loads(base64.b64decode(
result_fields['policy']).decode('utf-8'))
self.assertEqual(
result_policy['conditions'],
[{"x-amz-algorithm": "AWS4-HMAC-SHA256"},
{"x-amz-credential":
"access_key/20140101/myregion/myservice/aws4_request"},
{"x-amz-date": "20140101T000000Z"}])
self.assertIn('x-amz-signature', result_fields)
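# S3SigV4PostAuth, per the assertions above, injects x-amz-algorithm,
# x-amz-credential, and x-amz-date into both the fields and the policy
# conditions, then signs the base64-encoded policy with the derived
# SigV4 key and hex-encodes the result. A hedged sketch reusing the
# key derivation outlined earlier:
def _sketch_v4_post_signature(secret_key, policy_b64, date_stamp,
                              region, service):
    import hmac
    from hashlib import sha256
    key = _sketch_sigv4_signing_key(secret_key, date_stamp, region,
                                    service)
    return hmac.new(key, policy_b64.encode('utf-8'), sha256).hexdigest()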
|