Kanji
・ Cloud engineer / freelance ・ Born in 1993 ・ From Ehime Prefecture; lives in Shibuya-ku, Tokyo ・ 5 years of AWS experience ・ Profile details
Table of contents
AWS_DATA_PATH
/botocore/data
import os

# Point botocore at the service-definition files bundled in the current
# working directory (extracted from the automation attachment).
os.environ["AWS_DATA_PATH"] = os.getcwd() + "/botocore/data"
import os
import sys
from importlib import reload  # reload() is not a builtin in Python 3

import boto3
import botocore

# Put the current directory first on sys.path so the locally vendored
# boto3/botocore shadow the runtime-provided copies, then reload both
# packages so the newer versions actually take effect.
sys.path.insert(0, os.getcwd())
reload(botocore)
reload(boto3)
/opt/python/botocore/data
import os

# Tell botocore to also load service model files from the Lambda layer.
os.environ["AWS_DATA_PATH"] = "/opt/python/botocore/data"
/opt/python
$ {Botocore root directory}/data
~/.aws/models
# For Control Tower
botocore/data
└── controltower
    └── 2018-05-10
        ├── endpoint-rule-set-1.json.gz
        ├── paginators-1.json  # the APIs that can be used are listed here
        └── service-2.json.gz
aws_data_path
/tmp/${random character string}
{ "sys path": [ "/var/task", "/opt/python/lib/python3.8/site-packages", "/opt/python", "/var/runtime", "/var/lang/lib/python38.zip", "/var/lang/lib/python3.8", "/var/lang/lib/python3.8/lib-dynload", "/var/lang/lib/python3.8/site-packages", "/opt/python/lib/python3.8/site-packages", "/opt/python", "/tmp/4aa790cf-cb0b-48b6-80f0-3ed7f82f228c-2024-03-17-07-15-31" ] }
├── src
│   ├── customer_script.py
│   └── modules
│       └── requirements.txt
└── ssm.tf
import json
import logging
import os
import sys
from importlib import reload

import boto3
import botocore

AUTOMATION_NAME = "sample-automation"

logger = logging.getLogger(__name__)


class ListHandler(logging.Handler):
    """Logging handler that accumulates records as dicts in memory.

    SSM Automation only surfaces the step output, so log records are
    collected here and returned (or raised) from the handler instead of
    relying on a log stream.
    """

    def __init__(self):
        super().__init__()
        self.log_messages = []
        # Only used to render the record timestamp (same text as %(asctime)s).
        self.formatter = logging.Formatter()

    def emit(self, record):
        # Build the dict directly rather than formatting a JSON template and
        # re-parsing it with json.loads(): a message containing quotes,
        # backslashes or newlines would break that round-trip.
        self.log_messages.append(
            {
                "logLevel": record.levelname,
                "time": self.formatter.formatTime(record),
                "line": f"{record.filename}: {record.lineno}",
                "message": record.getMessage(),
            }
        )


def handler(event, context):
    """SSM Automation entry point.

    Verifies that the boto3/botocore packages shipped in the document
    attachment are picked up, then calls a newer API (controltower
    list_baselines) that the runtime's bundled SDK does not know about.
    Returns the collected log messages; on failure they are raised so they
    appear in the Automation failure output.
    """
    list_handler = ListHandler()
    logger.addHandler(list_handler)
    logger.setLevel(logging.DEBUG)

    # Let botocore find the service model files shipped in the attachment.
    os.environ["AWS_DATA_PATH"] = f"{os.getcwd()}/botocore/data"
    session = boto3.Session()
    logger.debug({"botocore paths": session._loader.search_paths})

    # Prefer the attached packages over the runtime-provided ones and reload
    # so the newer versions take effect.
    sys.path.insert(0, os.getcwd())
    reload(botocore)
    reload(boto3)

    logger.debug({"sys path": sys.path})
    logger.debug({"current directory": os.getcwd()})
    logger.debug({"child directory": os.listdir()})
    logger.debug({"botocore path": os.path.dirname(botocore.__file__)})
    logger.debug({"boto3 path": boto3.__file__})
    logger.debug({"boto3 version": boto3.__version__})
    logger.debug({"botocore path": botocore.__file__})
    logger.debug({"botocore version": botocore.__version__})

    try:
        client = session.client("controltower")
        logger.info(client.list_baselines())
    except Exception as e:
        logger.error(f"Failed to list_baselines: {e}")
        # Raising with the collected logs makes them visible in the
        # Automation failure output.
        raise Exception(list_handler.log_messages)

    # max length of ssm automation output is 100 KB(102400 characters)
    return list_handler.log_messages


if __name__ == "__main__":
    handler(event={}, context={})
boto3 botocore
#tfsec:ignore:aws-s3-encryption-customer-key
null_resource
dataarchive_file
aws_s3_object
aws cloudformation package
data "aws_caller_identity" "current" {} #tfsec:ignore:aws-s3-encryption-customer-key #tfsec:ignore:aws-s3-enable-bucket-encryption #tfsec:ignore:aws-s3-enable-bucket-logging resource "aws_s3_bucket" "bucket" { bucket = "sample-automation-${data.aws_caller_identity.current.account_id}" } resource "aws_s3_bucket_versioning" "bucket_versioning" { bucket = aws_s3_bucket.bucket.id versioning_configuration { status = "Enabled" } } resource "aws_s3_bucket_public_access_block" "bucket_public_access_block" { bucket = aws_s3_bucket.bucket.id block_public_acls = true block_public_policy = true ignore_public_acls = true restrict_public_buckets = true } resource "null_resource" "install_requirements" { triggers = { always_run = "${timestamp()}" } provisioner "local-exec" { command = "pip install -r ${path.root}/src/modules/requirements.txt -t ${path.root}/src/modules" } } data "archive_file" "file" { depends_on = [ null_resource.install_requirements ] type = "zip" source_dir = "${path.root}/src/modules" output_path = "${path.root}/src/modules.zip" } resource "aws_s3_object" "object" { bucket = aws_s3_bucket.bucket.id key = "modules.zip" source = data.archive_file.file.output_path } resource "aws_ssm_document" "runbook" { depends_on = [ aws_s3_object.object ] name = "sample-automation" document_type = "Automation" document_format = "JSON" attachments_source { key = "S3FileUrl" name = "modules.zip" values = [ "https://${aws_s3_bucket.bucket.bucket}.s3.amazonaws.com/modules.zip" ] } content = jsonencode({ assumeRole = aws_iam_role.role.arn schemaVersion = "0.3" description = "TestBoto3VersionUp" mainSteps = [ { name = "TestBoto3VersionUp" action = "aws:executeScript" inputs = { Runtime = "python3.8" Handler = "handler" Script = "${file("${path.root}/src/customer_script.py")}" Attachment = "modules.zip" } } ] files = { "modules.zip" = { checksums = { sha256 = filesha256(data.archive_file.file.output_path) } } } }) } resource "aws_iam_role" "role" { name = "sample-automation-role" 
assume_role_policy = jsonencode({ Version = "2012-10-17", Statement = [ { Action = "sts:AssumeRole", Effect = "Allow", Principal = { Service = "ssm.amazonaws.com" } } ] }) managed_policy_arns = [ "arn:aws:iam::aws:policy/service-role/AmazonSSMAutomationRole", "arn:aws:iam::aws:policy/AdministratorAccess" ] }
├── src
│   ├── function
│   │   └── main.py
│   └── layer
│       ├── python  # Terraform places the boto3/botocore package files under this directory
│       └── requirements.txt
└── lambda.tf
import logging
import os
import sys

import boto3
import botocore

logger = logging.getLogger(__name__)


def handler(event, context):
    """Lambda entry point.

    Confirms the layer-provided boto3/botocore are in use, then calls the
    controltower list_baselines API.
    """
    logger.setLevel(logging.DEBUG)

    # Service model files shipped in the layer.
    os.environ["AWS_DATA_PATH"] = "/opt/python/botocore/data"

    session = boto3.Session()
    _dump_environment(session)

    client = session.client("controltower")
    logger.info(client.list_baselines())


def _dump_environment(session):
    """Log where boto3/botocore were loaded from and their versions."""
    logger.debug({"botocore paths": session._loader.search_paths})
    logger.debug({"sys path": sys.path})
    logger.debug({"botocore path": os.path.dirname(botocore.__file__)})
    logger.debug({"boto3 path": boto3.__file__})
    logger.debug({"boto3 version": boto3.__version__})
    logger.debug({"botocore path": botocore.__file__})
    logger.debug({"botocore version": botocore.__version__})


if __name__ == "__main__":
    handler(event={}, context={})
data.archive_file
data "archive_file" "function" { depends_on = [null_resource.install_requirements] type = "zip" source_dir = "${path.root}/src/function" output_path = "${path.root}/src/function.zip" } resource "aws_lambda_function" "function" { function_name = "sample-function" filename = data.archive_file.function.output_path source_code_hash = data.archive_file.function.output_base64sha256 handler = "main.handler" runtime = "python3.11" role = aws_iam_role.role.arn # os.environ["AWS_DATA_PATH"] is OK even if you set the following environment variables # environment { # variables = { # AWS_DATA_PATH = "/opt/python/botocore/data" # } # } layers = [aws_lambda_layer_version.layer_version.arn] } resource "null_resource" "install_requirements" { triggers = { always_run = "${timestamp()}" } provisioner "local-exec" { command = "pip install -r ${path.root}/src/layer/requirements.txt -t ${path.root}/src/layer/python" } } data "archive_file" "layer" { depends_on = [null_resource.install_requirements] type = "zip" source_dir = "${path.root}/src/layer" output_path = "${path.root}/src/layer.zip" } resource "aws_lambda_layer_version" "layer_version" { layer_name = "sample-layer" filename = data.archive_file.layer.output_path compatible_runtimes = ["python3.11"] source_code_hash = data.archive_file.layer.output_base64sha256 } resource "aws_iam_role" "role" { name = "sample-lambda-role" assume_role_policy = jsonencode({ Version = "2012-10-17", Statement = [ { Action = "sts:AssumeRole", Effect = "Allow", Principal = { Service = "lambda.amazonaws.com" } } ] }) managed_policy_arns = [ "arn:aws:iam::aws:policy/AdministratorAccess" ] }