# example_resources_stack.py
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.

SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os

from aws_cdk import (
    aws_lambda as lambda_,
    aws_iam as iam,
    aws_s3 as s3,
    aws_s3_deployment as s3_deployment,
    aws_dynamodb as dynamo,
    core
)

from .auth import AuthPolicy
  16. class ExampleResources(core.Stack):
  17. """
  18. Defines a set of resources to use with AWSCore's ScriptBehaviours and examples. The example resources are:
  19. * An S3 bucket with a text file
  20. * A python 'echo' lambda
  21. * A small dynamodb table with the a primary 'id': str key
  22. """
  23. def __init__(self, scope: core.Construct, id_: str, project_name: str, feature_name: str, **kwargs) -> None:
  24. super().__init__(scope, id_, **kwargs,
  25. description=f'Contains resources for the AWSCore examples as part of the '
  26. f'{project_name} project')
  27. self._project_name = project_name
  28. self._feature_name = feature_name
  29. self._policy = AuthPolicy(context=self).generate_admin_policy(stack=self)
  30. self._s3_bucket = self.__create_s3_bucket()
  31. self._lambda = self.__create_example_lambda()
  32. self._table = self.__create_dynamodb_table()
  33. self.__create_outputs()
  34. # Finally grant cross stack references
  35. self.__grant_access()
  36. def __grant_access(self):
  37. user_group = iam.Group.from_group_arn(
  38. self,
  39. f'{self._project_name}-{self._feature_name}-ImportedUserGroup',
  40. core.Fn.import_value(f'{self._project_name}:UserGroup')
  41. )
  42. admin_group = iam.Group.from_group_arn(
  43. self,
  44. f'{self._project_name}-{self._feature_name}-ImportedAdminGroup',
  45. core.Fn.import_value(f'{self._project_name}:AdminGroup')
  46. )
  47. # Provide the admin and user groups permissions to read the example S3 bucket.
  48. # Cannot use the grant_read method defined by the Bucket structure since the method tries to add to
  49. # the resource-based policy but the imported IAM groups (which are tokens from Fn.ImportValue) are
  50. # not valid principals in S3 bucket policies.
  51. # Check https://aws.amazon.com/premiumsupport/knowledge-center/s3-invalid-principal-in-policy-error/
  52. user_group.add_to_principal_policy(
  53. iam.PolicyStatement(
  54. actions=[
  55. "s3:GetBucket*",
  56. "s3:GetObject*",
  57. "s3:List*"
  58. ],
  59. effect=iam.Effect.ALLOW,
  60. resources=[self._s3_bucket.bucket_arn, f'{self._s3_bucket.bucket_arn}/*']
  61. )
  62. )
  63. admin_group.add_to_principal_policy(
  64. iam.PolicyStatement(
  65. actions=[
  66. "s3:GetBucket*",
  67. "s3:GetObject*",
  68. "s3:List*"
  69. ],
  70. effect=iam.Effect.ALLOW,
  71. resources=[self._s3_bucket.bucket_arn, f'{self._s3_bucket.bucket_arn}/*']
  72. )
  73. )
  74. # Provide the admin and user groups permissions to invoke the example Lambda function.
  75. # Cannot use the grant_invoke method defined by the Function structure since the method tries to add to
  76. # the resource-based policy but the imported IAM groups (which are tokens from Fn.ImportValue) are
  77. # not valid principals in Lambda function policies.
  78. user_group.add_to_principal_policy(
  79. iam.PolicyStatement(
  80. actions=[
  81. "lambda:InvokeFunction"
  82. ],
  83. effect=iam.Effect.ALLOW,
  84. resources=[self._lambda.function_arn]
  85. )
  86. )
  87. admin_group.add_to_principal_policy(
  88. iam.PolicyStatement(
  89. actions=[
  90. "lambda:InvokeFunction"
  91. ],
  92. effect=iam.Effect.ALLOW,
  93. resources=[self._lambda.function_arn]
  94. )
  95. )
  96. # Provide the admin and user groups permissions to read from the DynamoDB table.
  97. self._table.grant_read_data(user_group)
  98. self._table.grant_read_data(admin_group)
  99. def __create_s3_bucket(self) -> s3.Bucket:
  100. # Create a sample S3 bucket following S3 best practices
  101. # # See https://docs.aws.amazon.com/AmazonS3/latest/dev/security-best-practices.html
  102. # 1. Block all public access to the bucket
  103. # 2. Use SSE-S3 encryption. Explore encryption at rest options via
  104. # https://docs.aws.amazon.com/AmazonS3/latest/userguide/serv-side-encryption.html
  105. # 3. Enable Amazon S3 server access logging
  106. # https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerLogs.html
  107. server_access_logs_bucket = None
  108. if self.node.try_get_context('disable_access_log') != 'true':
  109. server_access_logs_bucket = s3.Bucket.from_bucket_name(
  110. self,
  111. f'{self._project_name}-{self._feature_name}-ImportedAccessLogsBucket',
  112. core.Fn.import_value(f"{self._project_name}:ServerAccessLogsBucket")
  113. )
  114. # Auto cleanup bucket and data if requested
  115. _remove_storage = self.node.try_get_context('remove_all_storage_on_destroy') == 'true'
  116. _removal_policy = core.RemovalPolicy.DESTROY if _remove_storage else core.RemovalPolicy.RETAIN
  117. example_bucket = s3.Bucket(
  118. self,
  119. f'{self._project_name}-{self._feature_name}-Example-S3bucket',
  120. auto_delete_objects=_remove_storage,
  121. block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
  122. encryption=s3.BucketEncryption.S3_MANAGED,
  123. removal_policy=_removal_policy,
  124. server_access_logs_bucket=
  125. server_access_logs_bucket if server_access_logs_bucket else None,
  126. server_access_logs_prefix=
  127. f'{self._project_name}-{self._feature_name}-{self.region}-AccessLogs' if server_access_logs_bucket else None
  128. )
  129. s3_deployment.BucketDeployment(
  130. self,
  131. f'{self._project_name}-{self._feature_name}-S3bucket-Deployment',
  132. destination_bucket=example_bucket,
  133. sources=[
  134. s3_deployment.Source.asset('example/s3_content')
  135. ],
  136. retain_on_delete=False
  137. )
  138. return example_bucket
  139. def __create_example_lambda(self) -> lambda_.Function:
  140. # create lambda function
  141. function = lambda_.Function(
  142. self,
  143. f'{self._project_name}-{self._feature_name}-Lambda-Function',
  144. runtime=lambda_.Runtime.PYTHON_3_8,
  145. handler="lambda-handler.main",
  146. code=lambda_.Code.asset(os.path.join(os.path.dirname(__file__), 'lambda'))
  147. )
  148. return function
  149. def __create_dynamodb_table(self) -> dynamo.Table:
  150. # create dynamo table
  151. # NB: CDK does not support seeding data, see simple table_seeder.py
  152. demo_table = dynamo.Table(
  153. self,
  154. f'{self._project_name}-{self._feature_name}-Table',
  155. partition_key=dynamo.Attribute(
  156. name="id",
  157. type=dynamo.AttributeType.STRING
  158. )
  159. )
  160. # Auto-delete the table when requested
  161. if self.node.try_get_context('remove_all_storage_on_destroy') == 'true':
  162. demo_table.apply_removal_policy(core.RemovalPolicy.DESTROY)
  163. return demo_table
  164. def __create_outputs(self) -> None:
  165. # Define exports
  166. # Export resource group
  167. self._s3_output = core.CfnOutput(
  168. self,
  169. id=f'ExampleBucketOutput',
  170. description='An example S3 bucket name to use with AWSCore ScriptBehaviors',
  171. export_name=f"{self.stack_name}:ExampleS3Bucket",
  172. value=self._s3_bucket.bucket_name)
  173. # Define exports
  174. # Export resource group
  175. self._lambda_output = core.CfnOutput(
  176. self,
  177. id=f'ExampleLambdaOutput',
  178. description='An example Lambda name to use with AWSCore ScriptBehaviors',
  179. export_name=f"{self.stack_name}::ExampleLambdaFunction",
  180. value=self._lambda.function_name)
  181. # Export DynamoDB Table
  182. self._table_output = core.CfnOutput(
  183. self,
  184. id=f'ExampleDynamoTableOutput',
  185. description='An example DynamoDB Table name to use with AWSCore ScriptBehaviors',
  186. export_name=f"{self.stack_name}:ExampleTable",
  187. value=self._table.table_name)
  188. # Export user policy
  189. self._user_policy = core.CfnOutput(
  190. self,
  191. id=f'ExampleUserPolicyOutput',
  192. description='A User policy to invoke example resources',
  193. export_name=f"{self.stack_name}:ExampleUserPolicy",
  194. value=self._policy.managed_policy_arn)