# Copyright 2012-2014 ksyun.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Builtin event handlers.

This module contains builtin handlers for events emitted by kscore.
"""
import base64
import logging
import xml.etree.cElementTree
import copy
import re
import warnings

from kscore.compat import unquote, json, six, unquote_str, \
    ensure_bytes, get_md5, MD5_AVAILABLE
from kscore.docs.utils import AutoPopulatedParam
from kscore.docs.utils import HideParamFromOperations
from kscore.docs.utils import AppendParamDocumentation
from kscore.signers import add_generate_presigned_url
from kscore.signers import add_generate_presigned_post
from kscore.exceptions import ParamValidationError
from kscore.exceptions import AliasConflictParameterError
from kscore.exceptions import UnsupportedTLSVersionWarning
from kscore.utils import percent_encode, SAFE_CHARS
from kscore.utils import switch_host_with_param
from kscore import retryhandler
from kscore import utils
from kscore import translate
import kscore
import kscore.auth


logger = logging.getLogger(__name__)

REGISTER_FIRST = object()
REGISTER_LAST = object()

# From the S3 docs:
# The rules for bucket names in the US Standard region allow bucket names
# to be as long as 255 characters, and bucket names can contain any
# combination of uppercase letters, lowercase letters, numbers, periods
# (.), hyphens (-), and underscores (_).
VALID_BUCKET = re.compile(r'^[a-zA-Z0-9.\-_]{1,255}$')
VERSION_ID_SUFFIX = re.compile(r'\?versionId=[^\s]+$')


def check_for_200_error(response, **kwargs):
    if response is None:
        # A None response can happen if an exception is raised while
        # trying to retrieve the response. See Endpoint._get_response().
        return
    http_response, parsed = response
    if _looks_like_special_case_error(http_response):
        logger.debug("Error found for response with 200 status code, "
                     "errors: %s, changing status code to "
                     "500.", parsed)
        http_response.status_code = 500


def _looks_like_special_case_error(http_response):
    if http_response.status_code == 200:
        parser = xml.etree.cElementTree.XMLParser(
            target=xml.etree.cElementTree.TreeBuilder(),
            encoding='utf-8')
        parser.feed(http_response.content)
        root = parser.close()
        if root.tag == 'Error':
            return True
    return False
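

# Illustrative sketch of what the two handlers above work around (hedged; the
# response object and XML body below are made up, not captured traffic): S3
# can return HTTP 200 whose body is actually an <Error> document for
# copy-style operations.
#
#     class FakeResponse(object):
#         status_code = 200
#         content = b'<?xml version="1.0"?><Error><Code>InternalError</Code></Error>'
#
#     _looks_like_special_case_error(FakeResponse())   # -> True
#     check_for_200_error((FakeResponse(), {}))        # flips status_code to 500
#
# Rewriting the status code to 500 lets the normal needs-retry machinery
# treat the response as a retryable failure.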


def decode_console_output(parsed, **kwargs):
    if 'Output' in parsed:
        try:
            value = base64.b64decode(six.b(parsed['Output'])).decode('utf-8')
            parsed['Output'] = value
        except (ValueError, TypeError, AttributeError):
            logger.debug('Error decoding base64', exc_info=True)


def decode_quoted_jsondoc(value):
    try:
        value = json.loads(unquote(value))
    except (ValueError, TypeError):
        logger.debug('Error loading quoted JSON', exc_info=True)
    return value


def json_decode_template_body(parsed, **kwargs):
    if 'TemplateBody' in parsed:
        try:
            value = json.loads(parsed['TemplateBody'])
            parsed['TemplateBody'] = value
        except (ValueError, TypeError):
            logger.debug('error loading JSON', exc_info=True)


def calculate_md5(params, **kwargs):
    request_dict = params
    if request_dict['body'] and 'Content-MD5' not in params['headers']:
        body = request_dict['body']
        if isinstance(body, bytes):
            binary_md5 = _calculate_md5_from_bytes(body)
        else:
            binary_md5 = _calculate_md5_from_file(body)
        base64_md5 = base64.b64encode(binary_md5).decode('ascii')
        params['headers']['Content-MD5'] = base64_md5


def _calculate_md5_from_bytes(body_bytes):
    md5 = get_md5(body_bytes)
    return md5.digest()


def _calculate_md5_from_file(fileobj):
    start_position = fileobj.tell()
    md5 = get_md5()
    for chunk in iter(lambda: fileobj.read(1024 * 1024), b''):
        md5.update(chunk)
    fileobj.seek(start_position)
    return md5.digest()


def conditionally_calculate_md5(params, **kwargs):
    """Only add a Content-MD5 when not using sigv4"""
    signer = kwargs['request_signer']
    if signer.signature_version not in ['v4', 's3v4'] and MD5_AVAILABLE:
        calculate_md5(params, **kwargs)
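

# Illustrative sketch (not executed; a minimal request dict is assumed here):
#
#     params = {'body': b'hello', 'headers': {}}
#     calculate_md5(params)
#     params['headers']['Content-MD5']   # -> 'XUFAKrxLKna5cZ2REBfFkg=='
#
# conditionally_calculate_md5 performs the same work but skips the header
# for sigv4-signed requests, as its docstring notes.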


def validate_bucket_name(params, **kwargs):
    if 'Bucket' not in params:
        return
    bucket = params['Bucket']
    if VALID_BUCKET.search(bucket) is None:
        error_msg = (
            'Invalid bucket name "%s": Bucket name must match '
            'the regex "%s"' % (bucket, VALID_BUCKET.pattern))
        raise ParamValidationError(report=error_msg)


def sse_md5(params, **kwargs):
    """
    S3 server-side encryption requires the encryption key to be sent to the
    server base64 encoded, as well as a base64-encoded MD5 hash of the
    encryption key. This handler does both if the MD5 has not been set by
    the caller.
    """
    _sse_md5(params, 'SSECustomer')


def copy_source_sse_md5(params, **kwargs):
    """
    S3 server-side encryption requires the encryption key to be sent to the
    server base64 encoded, as well as a base64-encoded MD5 hash of the
    encryption key. This handler does both if the MD5 has not been set by
    the caller, specifically for the copy-source SSE-C key.
    """
    _sse_md5(params, 'CopySourceSSECustomer')


def _sse_md5(params, sse_member_prefix='SSECustomer'):
    if not _needs_s3_sse_customization(params, sse_member_prefix):
        return
    sse_key_member = sse_member_prefix + 'Key'
    sse_md5_member = sse_member_prefix + 'KeyMD5'
    key_as_bytes = params[sse_key_member]
    if isinstance(key_as_bytes, six.text_type):
        key_as_bytes = key_as_bytes.encode('utf-8')
    key_md5_str = base64.b64encode(
        get_md5(key_as_bytes).digest()).decode('utf-8')
    key_b64_encoded = base64.b64encode(key_as_bytes).decode('utf-8')
    params[sse_key_member] = key_b64_encoded
    params[sse_md5_member] = key_md5_str


def _needs_s3_sse_customization(params, sse_member_prefix):
    return (params.get(sse_member_prefix + 'Key') is not None and
            sse_member_prefix + 'KeyMD5' not in params)
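

# Illustrative sketch (not executed; a made-up 32-byte key for illustration):
#
#     params = {'SSECustomerAlgorithm': 'AES256', 'SSECustomerKey': b'k' * 32}
#     sse_md5(params)
#     # params['SSECustomerKey']    is now the base64-encoded key
#     # params['SSECustomerKeyMD5'] is now the base64-encoded MD5 of the raw key
#
# copy_source_sse_md5 applies the same transform to the
# CopySourceSSECustomer* members.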


def register_retries_for_service(service_data, session,
                                 service_name, **kwargs):
    loader = session.get_component('data_loader')
    endpoint_prefix = service_data.get('metadata', {}).get('endpointPrefix')
    if endpoint_prefix is None:
        logger.debug("Not registering retry handlers, could not find "
                     "endpoint prefix from model for service %s",
                     service_name)
        return
    config = _load_retry_config(loader, endpoint_prefix)
    if not config:
        return
    logger.debug("Registering retry handlers for service: %s", service_name)
    handler = retryhandler.create_retry_handler(
        config, endpoint_prefix)
    unique_id = 'retry-config-%s' % endpoint_prefix
    session.register('needs-retry.%s' % endpoint_prefix,
                     handler, unique_id=unique_id)
    _register_for_operations(config, session,
                             service_name=endpoint_prefix)


def _load_retry_config(loader, endpoint_prefix):
    original_config = loader.load_data('_retry')
    retry_config = translate.build_retry_config(
        endpoint_prefix, original_config['retry'],
        original_config.get('definitions', {}))
    return retry_config


def _register_for_operations(config, session, service_name):
    # There's certainly a tradeoff for registering the retry config
    # for the operations when the service is created. In practice,
    # there aren't a whole lot of per operation retry configs so
    # this is ok for now.
    for key in config:
        if key == '__default__':
            continue
        handler = retryhandler.create_retry_handler(config, key)
        unique_id = 'retry-config-%s-%s' % (service_name, key)
        session.register('needs-retry.%s.%s' % (service_name, key),
                         handler, unique_id=unique_id)


def disable_signing(**kwargs):
    """
    This handler disables request signing by setting the signer
    name to a special sentinel value.
    """
    return kscore.UNSIGNED


def add_expect_header(model, params, **kwargs):
    if model.http.get('method', '') not in ['PUT', 'POST']:
        return
    if 'body' in params:
        body = params['body']
        if hasattr(body, 'read'):
            # Any file like object will use an expect 100-continue
            # header regardless of size.
            logger.debug("Adding expect 100 continue header to request.")
            params['headers']['Expect'] = '100-continue'


def document_copy_source_form(section, event_name, **kwargs):
    if 'request-example' in event_name:
        parent = section.get_section('structure-value')
        param_line = parent.get_section('CopySource')
        value_portion = param_line.get_section('member-value')
        value_portion.clear_text()
        value_portion.write("'string' or {'Bucket': 'string', "
                            "'Key': 'string', 'VersionId': 'string'}")
    elif 'request-params' in event_name:
        param_section = section.get_section('CopySource')
        type_section = param_section.get_section('param-type')
        type_section.clear_text()
        type_section.write(':type CopySource: str or dict')
        doc_section = param_section.get_section('param-documentation')
        doc_section.clear_text()
        doc_section.write(
            "The name of the source bucket, key name of the source object, "
            "and optional version ID of the source object. You can either "
            "provide this value as a string or a dictionary. The "
            "string form is {bucket}/{key} or "
            "{bucket}/{key}?versionId={versionId} if you want to copy a "
            "specific version. You can also provide this value as a "
            "dictionary. The dictionary format is recommended over "
            "the string format because it is more explicit. The dictionary "
            "format is: {'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}."
            " Note that the VersionId key is optional and may be omitted."
        )


def handle_copy_source_param(params, **kwargs):
    """Convert CopySource param for CopyObject/UploadPartCopy.

    This handler will deal with two cases:

        * CopySource provided as a string. We'll make a best effort
          to URL encode the key name as required. This will require
          parsing the bucket and version id from the CopySource value
          and only encoding the key.
        * CopySource provided as a dict. In this case we're
          explicitly given the Bucket, Key, and VersionId so we're
          able to encode the key and ensure this value is serialized
          and correctly sent to S3.

    """
    source = params.get('CopySource')
    if source is None:
        # The call will eventually fail but we'll let the
        # param validator take care of this. It will
        # give a better error message.
        return
    if isinstance(source, six.string_types):
        params['CopySource'] = _quote_source_header(source)
    elif isinstance(source, dict):
        params['CopySource'] = _quote_source_header_from_dict(source)


def _quote_source_header_from_dict(source_dict):
    try:
        bucket = source_dict['Bucket']
        key = percent_encode(source_dict['Key'], safe=SAFE_CHARS + '/')
        version_id = source_dict.get('VersionId')
    except KeyError as e:
        raise ParamValidationError(
            report='Missing required parameter: %s' % str(e))
    final = '%s/%s' % (bucket, key)
    if version_id is not None:
        final += '?versionId=%s' % version_id
    return final


def _quote_source_header(value):
    result = VERSION_ID_SUFFIX.search(value)
    if result is None:
        return percent_encode(value, safe=SAFE_CHARS + '/')
    else:
        first, version_id = value[:result.start()], value[result.start():]
        return percent_encode(first, safe=SAFE_CHARS + '/') + version_id
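

# Illustrative sketch (not executed; bucket, key, and version id are made up):
#
#     params = {'CopySource': {'Bucket': 'mybucket',
#                              'Key': 'my key.txt',
#                              'VersionId': 'abc123'}}
#     handle_copy_source_param(params)
#     params['CopySource']   # -> 'mybucket/my%20key.txt?versionId=abc123'
#
# The string form is handled similarly: everything before a trailing
# '?versionId=...' suffix is percent encoded (with '/' kept safe), and the
# suffix itself is left untouched.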


def copy_snapshot_encrypted(params, request_signer, **kwargs):
    # The presigned URL facilitates copying an encrypted snapshot.
    # If the user does not provide this value, we will automatically
    # calculate it on behalf of the user and inject the PresignedUrl
    # into the request.
    # The params sent in the event don't quite sync up 100% so we're
    # renaming them here until they can be updated in the event.
    request_dict = params
    params = request_dict['body']
    if 'PresignedUrl' in params:
        # If the customer provided this value, then there's nothing for
        # us to do.
        return
    destination_region = request_signer._region_name
    params['DestinationRegion'] = destination_region
    # The request will be sent to the destination region, so we need
    # to create an endpoint to the source region and create a presigned
    # url based on the source endpoint.
    source_region = params['SourceRegion']
    # The better way to do this is to actually get the
    # endpoint_resolver and get the endpoint_url given the
    # source region. In this specific case, we know that
    # we can safely replace the dest region with the source
    # region because of the supported EC2 regions, but in
    # general this is not a safe assumption to make.
    # I think eventually we should try to plumb through something
    # that allows us to resolve endpoints from regions.
    request_dict_copy = copy.deepcopy(request_dict)
    request_dict_copy['url'] = request_dict['url'].replace(
        destination_region, source_region)
    request_dict_copy['method'] = 'GET'
    request_dict_copy['headers'] = {}
    presigned_url = request_signer.generate_presigned_url(
        request_dict_copy, region_name=source_region)
    params['PresignedUrl'] = presigned_url


def json_decode_policies(parsed, model, **kwargs):
    # Any time an IAM operation returns a policy document
    # it is a string that is json that has been urlencoded,
    # i.e. urlencode(json.dumps(policy_document)).
    # To give users something more useful, we will urldecode
    # this value and json.loads() the result so that they have
    # the policy document as a dictionary.
    output_shape = model.output_shape
    if output_shape is not None:
        _decode_policy_types(parsed, model.output_shape)


def _decode_policy_types(parsed, shape):
    # IAM consistently uses the policyDocumentType shape to indicate
    # strings that have policy documents.
    shape_name = 'policyDocumentType'
    if shape.type_name == 'structure':
        for member_name, member_shape in shape.members.items():
            if member_shape.type_name == 'string' and \
                    member_shape.name == shape_name and \
                    member_name in parsed:
                parsed[member_name] = decode_quoted_jsondoc(
                    parsed[member_name])
            elif member_name in parsed:
                _decode_policy_types(parsed[member_name], member_shape)
    if shape.type_name == 'list':
        shape_member = shape.member
        for item in parsed:
            _decode_policy_types(item, shape_member)
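

# Illustrative sketch (not executed; a made-up urlencoded policy document):
#
#     raw = '%7B%22Version%22%3A%20%222012-10-17%22%7D'
#     decode_quoted_jsondoc(raw)   # -> {'Version': '2012-10-17'}
#
# json_decode_policies walks the operation's output shape and applies this
# decoding to every member modeled as policyDocumentType.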


def parse_get_bucket_location(parsed, http_response, **kwargs):
    # s3.GetBucketLocation cannot be modeled properly. To
    # account for this we just manually parse the XML document.
    # The "parsed" passed in only has the ResponseMetadata
    # filled out. This handler will fill in the LocationConstraint
    # value.
    response_body = http_response.content
    parser = xml.etree.cElementTree.XMLParser(
        target=xml.etree.cElementTree.TreeBuilder(),
        encoding='utf-8')
    parser.feed(response_body)
    root = parser.close()
    region = root.text
    parsed['LocationConstraint'] = region


def base64_encode_user_data(params, **kwargs):
    if 'UserData' in params:
        if isinstance(params['UserData'], six.text_type):
            # Encode it to bytes if it is text.
            params['UserData'] = params['UserData'].encode('utf-8')
        params['UserData'] = base64.b64encode(
            params['UserData']).decode('utf-8')


def document_base64_encoding(param):
    description = ('**This value will be base64 encoded automatically. Do '
                   'not base64 encode this value prior to performing the '
                   'operation.**')
    append = AppendParamDocumentation(param, description)
    return append.append_documentation


def validate_ascii_metadata(params, **kwargs):
    """Verify that S3 Metadata keys and values only contain ASCII characters."""
    metadata = params.get('Metadata')
    if not metadata or not isinstance(metadata, dict):
        # We have to at least type check the metadata as a dict type
        # because this handler is called before param validation.
        # We'll go ahead and return because the param validator will
        # give a descriptive error message for us.
        # We might need a post-param validation event.
        return
    for key, value in metadata.items():
        try:
            key.encode('ascii')
            value.encode('ascii')
        except UnicodeEncodeError as e:
            error_msg = (
                'Non ascii characters found in S3 metadata '
                'for key "%s", value: "%s". \nS3 metadata can only '
                'contain ASCII characters. ' % (key, value)
            )
            raise ParamValidationError(
                report=error_msg)


def fix_route53_ids(params, model, **kwargs):
    """
    Check for and split apart Route53 resource IDs, setting
    only the last piece. This allows the output of one operation
    (e.g. ``'foo/1234'``) to be used as input in another
    operation (e.g. it expects just ``'1234'``).
    """
    input_shape = model.input_shape
    if not input_shape or not hasattr(input_shape, 'members'):
        return
    members = [name for (name, shape) in input_shape.members.items()
               if shape.name in ['ResourceId', 'DelegationSetId']]
    for name in members:
        if name in params:
            orig_value = params[name]
            params[name] = orig_value.split('/')[-1]
            logger.debug('%s %s -> %s', name, orig_value, params[name])


def inject_account_id(params, **kwargs):
    if params.get('accountId') is None:
        # Glacier requires accountId, but allows you
        # to specify '-' for the current owner's account.
        # We add this default value if the user does not
        # provide the accountId as a convenience.
        params['accountId'] = '-'


def add_glacier_version(model, params, **kwargs):
    request_dict = params
    request_dict['headers']['x-amz-glacier-version'] = model.metadata[
        'apiVersion']


def add_accept_header(model, params, **kwargs):
    if params['headers'].get('Accept', None) is None:
        request_dict = params
        request_dict['headers']['Accept'] = 'application/json'


def add_glacier_checksums(params, **kwargs):
    """Add glacier checksums to the http request.

    This will add two headers to the http request:

        * x-amz-content-sha256
        * x-amz-sha256-tree-hash

    These values will only be added if they are not present
    in the HTTP request.

    """
    request_dict = params
    headers = request_dict['headers']
    body = request_dict['body']
    if isinstance(body, six.binary_type):
        # If the user provided a bytes type instead of a file-like
        # object, we'll temporarily create a BytesIO object
        # so we can use the util functions to calculate the
        # checksums, which assume file-like objects. Note that
        # we're not actually changing the body in the request_dict.
        body = six.BytesIO(body)
    starting_position = body.tell()
    if 'x-amz-content-sha256' not in headers:
        headers['x-amz-content-sha256'] = utils.calculate_sha256(
            body, as_hex=True)
    body.seek(starting_position)
    if 'x-amz-sha256-tree-hash' not in headers:
        headers['x-amz-sha256-tree-hash'] = utils.calculate_tree_hash(body)
    body.seek(starting_position)


def document_glacier_tree_hash_checksum():
    doc = '''
        This is a required field.

        But if you prefer, you can also use kscore.utils.calculate_tree_hash()
        to compute it from a raw file by::

            checksum = calculate_tree_hash(open('your_file.txt', 'rb'))

        '''
    return AppendParamDocumentation('checksum', doc).append_documentation


def switch_host_machinelearning(request, **kwargs):
    switch_host_with_param(request, 'PredictEndpoint')


def check_openssl_supports_tls_version_1_2(**kwargs):
    import ssl
    try:
        openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
        if openssl_version_tuple[0] < 1 or openssl_version_tuple[2] < 1:
            warnings.warn(
                'Currently installed openssl version: %s does not '
                'support TLS 1.2, which is required for use of iot-data. '
                'Please use python installed with openssl version 1.0.1 or '
                'higher.' % (ssl.OPENSSL_VERSION),
                UnsupportedTLSVersionWarning
            )
    # We cannot check the openssl version on python2.6, so we should just
    # skip this convenience check.
    except AttributeError:
        pass


def change_get_to_post(request, **kwargs):
    # This is useful when we need to change a potentially large GET request
    # into a POST with x-www-form-urlencoded encoding.
    if request.method == 'GET' and '?' in request.url:
        request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
        request.method = 'POST'
        request.url, request.data = request.url.split('?', 1)
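

# Illustrative sketch (not executed; a hypothetical prepared request object):
#
#     request.method  # 'GET'
#     request.url     # 'https://host/2013-01-01/search?q=star+wars&size=10'
#     change_get_to_post(request)
#     request.method  # 'POST'
#     request.url     # 'https://host/2013-01-01/search'
#     request.data    # 'q=star+wars&size=10'
#     request.headers['Content-Type']  # 'application/x-www-form-urlencoded'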


def set_list_objects_encoding_type_url(params, context, **kwargs):
    if 'EncodingType' not in params:
        # We set this context so that we know it wasn't the customer that
        # requested the encoding.
        context['EncodingTypeAutoSet'] = True
        params['EncodingType'] = 'url'


def decode_list_object(parsed, context, **kwargs):
    if parsed.get('EncodingType') == 'url' and \
            context.get('EncodingTypeAutoSet'):
        # URL decode top-level keys in the response if present.
        top_level_keys = ['Delimiter', 'Marker', 'NextMarker']
        for key in top_level_keys:
            if key in parsed:
                parsed[key] = unquote_str(parsed[key])
        # URL decode nested keys from the response if present.
        nested_keys = [('Contents', 'Key'), ('CommonPrefixes', 'Prefix')]
        for (top_key, child_key) in nested_keys:
            if top_key in parsed:
                for member in parsed[top_key]:
                    member[child_key] = unquote_str(member[child_key])
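

# Illustrative sketch (not executed; a trimmed-down ListObjects response):
#
#     parsed = {'EncodingType': 'url',
#               'Contents': [{'Key': 'photos%2Fsummer%20trip%2Fbeach.jpg'}]}
#     decode_list_object(parsed, context={'EncodingTypeAutoSet': True})
#     parsed['Contents'][0]['Key']   # -> 'photos/summer trip/beach.jpg'
#
# The context flag set in set_list_objects_encoding_type_url ensures we only
# undo the url encoding when it was added on the customer's behalf.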


def convert_body_to_file_like_object(params, **kwargs):
    if 'Body' in params:
        if isinstance(params['Body'], six.string_types):
            params['Body'] = six.BytesIO(ensure_bytes(params['Body']))
        elif isinstance(params['Body'], six.binary_type):
            params['Body'] = six.BytesIO(params['Body'])


def _add_parameter_aliases(handler_list):
    # Mapping of original parameter to parameter alias.
    # The key is <service>.<operation>.parameter
    # The first part of the key is used for event registration.
    # The last part is the original parameter name and the value is the
    # alias to expose in documentation.
    aliases = {
        'ec2.*.Filter': 'Filters',
        'logs.CreateExportTask.from': 'fromTime',
        'cloudsearchdomain.Search.return': 'returnFields'
    }

    for original, new_name in aliases.items():
        event_portion, original_name = original.rsplit('.', 1)
        parameter_alias = ParameterAlias(original_name, new_name)

        # Add the handlers to the list of handlers.
        # One handler is to handle when users provide the alias.
        # The other handler is to update the documentation to show only
        # the alias.
        parameter_build_event_handler_tuple = (
            'before-parameter-build.' + event_portion,
            parameter_alias.alias_parameter_in_call,
            REGISTER_FIRST
        )
        docs_event_handler_tuple = (
            'docs.*.' + event_portion + '.complete-section',
            parameter_alias.alias_parameter_in_documentation)
        handler_list.append(parameter_build_event_handler_tuple)
        handler_list.append(docs_event_handler_tuple)


class ParameterAlias(object):
    def __init__(self, original_name, alias_name):
        self._original_name = original_name
        self._alias_name = alias_name

    def alias_parameter_in_call(self, params, model, **kwargs):
        if model.input_shape:
            # Only consider accepting the alias if it is modeled in the
            # input shape.
            if self._original_name in model.input_shape.members:
                if self._alias_name in params:
                    if self._original_name in params:
                        raise AliasConflictParameterError(
                            original=self._original_name,
                            alias=self._alias_name,
                            operation=model.name
                        )
                    # Remove the alias parameter value and use the old name
                    # instead.
                    params[self._original_name] = params.pop(self._alias_name)

    def alias_parameter_in_documentation(self, event_name, section, **kwargs):
        if event_name.startswith('docs.request-params'):
            if self._original_name not in section.available_sections:
                return
            # Replace the name for the parameter type
            param_section = section.get_section(self._original_name)
            param_type_section = param_section.get_section('param-type')
            self._replace_content(param_type_section)
            # Replace the name for the parameter description
            param_name_section = param_section.get_section('param-name')
            self._replace_content(param_name_section)
        elif event_name.startswith('docs.request-example'):
            section = section.get_section('structure-value')
            if self._original_name not in section.available_sections:
                return
            # Replace the name for the example
            param_section = section.get_section(self._original_name)
            self._replace_content(param_section)

    def _replace_content(self, section):
        content = section.getvalue().decode('utf-8')
        updated_content = content.replace(
            self._original_name, self._alias_name)
        section.clear_text()
        section.write(updated_content)
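

# Illustrative sketch (not executed): with the 'ec2.*.Filter' -> 'Filters'
# alias registered in _add_parameter_aliases above, a caller can pass the
# documented alias and alias_parameter_in_call moves it back to the modeled
# name before validation, e.g.
#
#     params = {'Filters': [{'Name': 'tag:Env', 'Values': ['prod']}]}
#     # after the before-parameter-build handler runs:
#     # params == {'Filter': [{'Name': 'tag:Env', 'Values': ['prod']}]}
#
# Passing both 'Filter' and 'Filters' raises AliasConflictParameterError.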


# This is a list of (event_name, handler).
# When a Session is created, everything in this list will be
# automatically registered with that Session.
BUILTIN_HANDLERS = [
    ('before-parameter-build.s3.UploadPart',
     convert_body_to_file_like_object, REGISTER_LAST),
    ('before-parameter-build.s3.PutObject',
     convert_body_to_file_like_object, REGISTER_LAST),
    ('creating-client-class', add_generate_presigned_url),
    ('creating-client-class.s3', add_generate_presigned_post),
    ('creating-client-class.iot-data', check_openssl_supports_tls_version_1_2),
    ('after-call.iam', json_decode_policies),
    ('after-call.ec2.GetConsoleOutput', decode_console_output),
    ('after-call.cloudformation.GetTemplate', json_decode_template_body),
    ('after-call.s3.GetBucketLocation', parse_get_bucket_location),
    ('before-parameter-build.s3', validate_bucket_name),
    ('before-parameter-build.s3.ListObjects',
     set_list_objects_encoding_type_url),
    ('before-call.s3.PutBucketTagging', calculate_md5),
    ('before-call.s3.PutBucketLifecycle', calculate_md5),
    ('before-call.s3.PutBucketLifecycleConfiguration', calculate_md5),
    ('before-call.s3.PutBucketCors', calculate_md5),
    ('before-call.s3.DeleteObjects', calculate_md5),
    ('before-call.s3.PutBucketReplication', calculate_md5),
    ('before-call.s3.PutObject', conditionally_calculate_md5),
    ('before-call.s3.UploadPart', conditionally_calculate_md5),
    ('before-call.s3.PutBucketAcl', conditionally_calculate_md5),
    ('before-call.s3.PutBucketLogging', conditionally_calculate_md5),
    ('before-call.s3.PutBucketNotification', conditionally_calculate_md5),
    ('before-call.s3.PutBucketPolicy', conditionally_calculate_md5),
    ('before-call.s3.PutBucketRequestPayment', conditionally_calculate_md5),
    ('before-call.s3.PutBucketVersioning', conditionally_calculate_md5),
    ('before-call.s3.PutBucketWebsite', conditionally_calculate_md5),
    ('before-call.s3.PutObjectAcl', conditionally_calculate_md5),
    ('before-parameter-build.s3.CopyObject',
     handle_copy_source_param),
    ('before-parameter-build.s3.UploadPartCopy',
     handle_copy_source_param),
    ('before-parameter-build.s3.CopyObject', validate_ascii_metadata),
    ('before-parameter-build.s3.PutObject', validate_ascii_metadata),
    ('before-parameter-build.s3.CreateMultipartUpload',
     validate_ascii_metadata),
    ('docs.*.s3.CopyObject.complete-section', document_copy_source_form),
    ('docs.*.s3.UploadPartCopy.complete-section', document_copy_source_form),
    ('before-call.s3', add_expect_header),
    ('before-call.glacier', add_glacier_version),
    ('before-call.apigateway', add_accept_header),
    ('before-call.glacier.UploadArchive', add_glacier_checksums),
    ('before-call.glacier.UploadMultipartPart', add_glacier_checksums),
    ('before-call.ec2.CopySnapshot', copy_snapshot_encrypted),
    ('request-created.machinelearning.Predict', switch_host_machinelearning),
    ('needs-retry.s3.UploadPartCopy', check_for_200_error, REGISTER_FIRST),
    ('needs-retry.s3.CopyObject', check_for_200_error, REGISTER_FIRST),
    ('needs-retry.s3.CompleteMultipartUpload', check_for_200_error,
     REGISTER_FIRST),
    ('service-data-loaded', register_retries_for_service),
    ('choose-signer.cognito-identity.GetId', disable_signing),
    ('choose-signer.cognito-identity.GetOpenIdToken', disable_signing),
    ('choose-signer.cognito-identity.UnlinkIdentity', disable_signing),
    ('choose-signer.cognito-identity.GetCredentialsForIdentity',
     disable_signing),
    ('choose-signer.sts.AssumeRoleWithSAML', disable_signing),
    ('choose-signer.sts.AssumeRoleWithWebIdentity', disable_signing),
    ('choose-signer.cognito-idp.ConfirmSignUp', disable_signing),
    ('choose-signer.cognito-idp.VerifyUserAttribute', disable_signing),
    ('choose-signer.cognito-idp.ForgotPassword', disable_signing),
    ('choose-signer.cognito-idp.SignUp', disable_signing),
    ('choose-signer.cognito-idp.UpdateUserAttributes', disable_signing),
    ('choose-signer.cognito-idp.ConfirmForgotPassword', disable_signing),
    ('choose-signer.cognito-idp.ResendConfirmationCode', disable_signing),
    ('choose-signer.cognito-idp.GetUserAttributeVerificationCode',
     disable_signing),
    ('choose-signer.cognito-idp.GetUser', disable_signing),
    ('choose-signer.cognito-idp.ChangePassword', disable_signing),
    ('choose-signer.cognito-idp.GetOpenIdConfiguration', disable_signing),
    ('choose-signer.cognito-idp.DeleteUser', disable_signing),
    ('choose-signer.cognito-idp.SetUserSettings', disable_signing),
    ('choose-signer.cognito-idp.GetJWKS', disable_signing),
    ('choose-signer.cognito-idp.DeleteUserAttributes', disable_signing),
    ('before-sign.s3', utils.fix_s3_host),
    ('before-parameter-build.s3.HeadObject', sse_md5),
    ('before-parameter-build.s3.GetObject', sse_md5),
    ('before-parameter-build.s3.PutObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', sse_md5),
    ('before-parameter-build.s3.CopyObject', copy_source_sse_md5),
    ('before-parameter-build.s3.CreateMultipartUpload', sse_md5),
    ('before-parameter-build.s3.UploadPart', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', sse_md5),
    ('before-parameter-build.s3.UploadPartCopy', copy_source_sse_md5),
    ('before-parameter-build.ec2.RunInstances', base64_encode_user_data),
    ('before-parameter-build.autoscaling.CreateLaunchConfiguration',
     base64_encode_user_data),
    ('before-parameter-build.route53', fix_route53_ids),
    ('before-parameter-build.glacier', inject_account_id),
    ('after-call.s3.ListObjects', decode_list_object),

    # Cloudsearchdomain search operation will be sent by HTTP POST
    ('request-created.cloudsearchdomain.Search',
     change_get_to_post),

    # Glacier documentation customizations
    ('docs.*.glacier.*.complete-section',
     AutoPopulatedParam('accountId', 'Note: this parameter is set to "-" by '
                        'default if no value is specified.')
     .document_auto_populated_param),
    ('docs.*.glacier.UploadArchive.complete-section',
     AutoPopulatedParam('checksum').document_auto_populated_param),
    ('docs.*.glacier.UploadMultipartPart.complete-section',
     AutoPopulatedParam('checksum').document_auto_populated_param),
    ('docs.request-params.glacier.CompleteMultipartUpload.complete-section',
     document_glacier_tree_hash_checksum()),

    # UserData base64 encoding documentation customizations
    ('docs.*.ec2.RunInstances.complete-section',
     document_base64_encoding('UserData')),
    ('docs.*.autoscaling.CreateLaunchConfiguration.complete-section',
     document_base64_encoding('UserData')),

    # EC2 CopySnapshot documentation customizations
    ('docs.*.ec2.CopySnapshot.complete-section',
     AutoPopulatedParam('PresignedUrl').document_auto_populated_param),
    ('docs.*.ec2.CopySnapshot.complete-section',
     AutoPopulatedParam('DestinationRegion').document_auto_populated_param),

    # S3 SSE documentation modifications
    ('docs.*.s3.*.complete-section',
     AutoPopulatedParam('SSECustomerKeyMD5').document_auto_populated_param),

    # S3 SSE Copy Source documentation modifications
    ('docs.*.s3.*.complete-section',
     AutoPopulatedParam(
         'CopySourceSSECustomerKeyMD5').document_auto_populated_param),

    # Add base64 information to Lambda
    ('docs.*.lambda.UpdateFunctionCode.complete-section',
     document_base64_encoding('ZipFile')),

    # The following S3 operations cannot actually accept a ContentMD5
    ('docs.*.s3.*.complete-section',
     HideParamFromOperations(
         's3', 'ContentMD5',
         ['DeleteObjects', 'PutBucketAcl', 'PutBucketCors',
          'PutBucketLifecycle', 'PutBucketLogging', 'PutBucketNotification',
          'PutBucketPolicy', 'PutBucketReplication', 'PutBucketRequestPayment',
          'PutBucketTagging', 'PutBucketVersioning', 'PutBucketWebsite',
          'PutObjectAcl']).hide_param)
]
_add_parameter_aliases(BUILTIN_HANDLERS)