"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
from aws_cdk import (
    Stack
)
from constructs import Construct

from .real_time_data_processing import RealTimeDataProcessing
from .data_ingestion import DataIngestion
from .batch_processing import BatchProcessing
from .batch_analytics import BatchAnalytics
from .data_lake_integration import DataLakeIntegration
from .dashboard import Dashboard

  16. class AWSMetricsStack(Stack):
  17. """
  18. Create the feature stack for the AWSMetrics Gem.
  19. Please reference the CloudFormation template provided by the Game Analytics Pipeline for the
  20. full production ready solution. This CDK application deploys a simplified version of this pipeline as an example.
  21. https://docs.aws.amazon.com/solutions/latest/game-analytics-pipeline/template.html
  22. """
  23. def __init__(self,
  24. scope: Construct,
  25. id_: str,
  26. application_name: str,
  27. optional_features: dict,
  28. **kwargs) -> None:
  29. super().__init__(scope, id_, **kwargs)
  30. self._data_ingestion = DataIngestion(self, application_name)
  31. self._real_time_data_processing = RealTimeDataProcessing(
  32. self,
  33. input_stream_arn=self._data_ingestion.input_stream_arn,
  34. application_name=application_name
  35. )
  36. batch_processing_enabled = optional_features.get('batch_processing', False)
  37. server_access_logs_bucket = optional_features.get('server_access_logs_bucket')
  38. self._data_lake_integration = DataLakeIntegration(
  39. self,
  40. application_name=application_name,
  41. server_access_logs_bucket=server_access_logs_bucket
  42. ) if batch_processing_enabled else None
  43. self._batch_processing = BatchProcessing(
  44. self,
  45. input_stream_arn=self._data_ingestion.input_stream_arn,
  46. application_name=application_name,
  47. analytics_bucket_arn=self._data_lake_integration.analytics_bucket_arn,
  48. events_database_name=self._data_lake_integration.events_database_name,
  49. events_table_name=self._data_lake_integration.events_table_name
  50. ) if batch_processing_enabled else None
  51. self._batch_analytics = BatchAnalytics(
  52. self,
  53. application_name=application_name,
  54. analytics_bucket_name=self._data_lake_integration.analytics_bucket_name,
  55. events_database_name=self._data_lake_integration.events_database_name,
  56. events_table_name=self._data_lake_integration.events_table_name
  57. ) if batch_processing_enabled else None
  58. self._dashboard = Dashboard(
  59. self,
  60. input_stream_name=self._data_ingestion.input_stream_name,
  61. application_name=application_name,
  62. analytics_processing_lambda_name=self._real_time_data_processing.analytics_processing_lambda_name,
  63. delivery_stream_name=self._batch_processing.delivery_stream_name if batch_processing_enabled else '',
  64. events_processing_lambda_name=
  65. self._batch_processing.events_processing_lambda_name if batch_processing_enabled else ''
  66. )
  67. @property
  68. def data_ingestion_component(self):
  69. return self._data_ingestion
  70. @property
  71. def real_time_data_processing_component(self):
  72. return self._real_time_data_processing
  73. @property
  74. def dashboard_component(self):
  75. return self._dashboard
  76. @property
  77. def data_lake_integration_component(self):
  78. return self._data_lake_integration
  79. @property
  80. def batch_processing_component(self):
  81. return self._batch_processing
  82. @property
  83. def batch_analytics_component(self):
  84. return self._batch_analytics