由于中国区的cloudfront自定义域名无法使用Amazon Certificate Manager中免费的SSL证书,如果需要使用需要自行购买。
在源中使用上述创建的s3桶的域名。
3. 编写aws lambda创建aws iam role
在iam中创建role lambda_cloudfront_create_invalidation 信任实体为lambda.amazonaws.com
role包含两个策略
aws 原生策略AWSLambdaVPCAccessExecutionRole
{
"Version": "2012-10-17",
"Statement": [
{
"Effect": "Allow",
"Action": [
"logs:CreateLogGroup",
"logs:CreateLogStream",
"logs:PutLogEvents",
"ec2:CreateNetworkInterface",
"ec2:DescribeNetworkInterfaces",
"ec2:DeleteNetworkInterface",
"ec2:AssignPrivateIpAddresses",
"ec2:UnassignPrivateIpAddresses"
],
"Resource": "*"
}
]
}
自定义策略cloudfront-createInvalidation
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "VisualEditor0",
"Effect": "Allow",
"Action": "cloudfront:CreateInvalidation",
"Resource": "*"
}
]
}
初始化python环境
pip install python-lambda
mkdir function_s3object_cloudfront_invalid
cd function_s3object_cloudfront_invalid
lambda init function_s3object_cloudfront_invalid
编辑function_s3object_cloudfront_invalid/config.yaml
region: cn-north-1
function_name: function_s3object_cloudfront_invalid
handler: service.handler
description: 当s3对象更新时候,将cloudfront的缓存失效
runtime: python3.8
# role 为上面创建的
role: lambda_cloudfront_create_invalidation

# S3 upload requires appropriate role with s3:PutObject permission
# (ex. basic_s3_upload), a destination bucket, and the key prefix
# bucket_name: 'example-bucket'
# s3_key_prefix: 'path/to/file/'

# if access key and secret are left blank, boto will use the credentials
# defined in the [default] section of ~/.aws/credentials.
aws_access_key_id:
aws_secret_access_key:

# dist_directory: dist
# timeout: 15
# memory_size: 512
# concurrency: 500

# Experimental Environment variables
#environment_variables:
#    env_1: foo
#    env_2: baz

# If `tags` is uncommented then tags will be set at creation or update
# time. During an update all other tags will be removed except the tags
# listed here.

# Build options
build:
  source_directories: lib # a comma delimited list of directories in your project root that contains source to package.
编辑function_s3object_cloudfront_invalid/lib/setting.py
#!/usr/bin/env python
from pytz import timezone, utc
from datetime import datetime
import logging.config
import os
# Logging configuration (dictConfig schema version 1): one console
# handler at INFO level, attached only to the "my_module" logger.
LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
        # timestamp - logger name - level - message
        "simple": {"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"},
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "level": "INFO",
            "formatter": "simple",
        },
    },
    "loggers": {
        "my_module": {
            "level": "INFO",
            "handlers": ["console"],
            # keep records from bubbling up to the root logger
            "propagate": 0,
        },
    },
}
logging.config.dictConfig(LOGGING)
def custom_time(*args):
    """Return the current time in Asia/Shanghai as a ``time.struct_time``.

    Installed below as ``logging.Formatter.converter`` so that log
    timestamps are rendered in China Standard Time instead of the Lambda
    runtime's default. Asia/Shanghai is a fixed UTC+8 zone (no DST), so
    the stdlib ``datetime.timezone`` suffices — this avoids the
    deprecated ``datetime.utcnow()`` plus the pytz localize/astimezone
    round-trip of the original implementation.

    The *args are ignored; logging passes (record, datefmt-style) extras.
    """
    # Local import so the module-level `from pytz import timezone` name
    # is not shadowed.
    from datetime import timedelta, timezone as dt_timezone
    return datetime.now(dt_timezone(timedelta(hours=8))).timetuple()
logging.Formatter.converter = custom_time
# Maps an S3 bucket name (which doubles as the CloudFront alternate
# domain name in this setup) to the CloudFront distribution ID whose
# cache is invalidated when objects in that bucket change.
# Replace the placeholder with your real distribution ID (e.g. "E2ABC3DEFGHIJ").
# NOTE: the original tutorial text left bare (non-string) placeholder
# identifiers here, which raised NameError at import time.
CLOUDFRONT = {
    "my-test.xxx.cn": "REPLACE_WITH_CLOUDFRONT_DISTRIBUTION_ID",
}
编辑function_s3object_cloudfront_invalid/service.py
# -*- coding: utf-8 -*-
import boto3
import logging
from lib import setting
import time
logger = logging.getLogger("my_module")
def build_invalidation_paths(records):
    """Return CloudFront invalidation paths ("/<key>") for S3 event records.

    S3 URL-encodes object keys in event notifications (a space arrives
    as "+", non-ASCII bytes as %XX escapes), so each key is decoded
    with ``unquote_plus`` before the leading "/" is prepended —
    otherwise the invalidation would target the encoded, non-existent
    path and the real object would stay cached.
    """
    from urllib.parse import unquote_plus
    return [f"/{unquote_plus(rec['s3']['object']['key'])}" for rec in records]


def s3_obj_info(event):
    """Create one CloudFront invalidation covering every object in *event*.

    The distribution ID is looked up in ``setting.CLOUDFRONT`` by the
    event's bucket name (bucket name == CloudFront alternate domain in
    this setup).

    :param event: S3 notification event dict as delivered to the handler.
    :return: the invalidation ID string, or None when the event carried
             no records.
    :raises KeyError: if the bucket has no entry in setting.CLOUDFRONT.
    """
    records = event.get("Records") or []
    if not records:
        # Nothing to invalidate; calling CreateInvalidation with an
        # empty path list would be rejected anyway.
        logger.info("event has no Records, skip invalidation")
        return None
    # All records in a single notification come from the same bucket,
    # so the first record's bucket is used for the lookup.
    s3_bucket = records[0]["s3"]["bucket"]["name"]
    for record in records:
        event_name = record["eventName"]
        s3_obj = record["s3"]["object"]["key"]
        logger.info(f"{event_name=},{s3_bucket=},{s3_obj=}")
    paths = build_invalidation_paths(records)
    logger.info(f"需要失效的文件:{s3_bucket=},{paths=}")
    cf = boto3.client('cloudfront')
    distribution_id = setting.CLOUDFRONT[s3_bucket]
    res = cf.create_invalidation(
        DistributionId=distribution_id,
        InvalidationBatch={
            'Paths': {
                'Quantity': len(paths),
                'Items': paths
            },
            # CallerReference must be unique per request; a
            # sub-second timestamp is sufficient at this call rate.
            'CallerReference': str(time.time()).replace(".", "")
        }
    )
    invalidation_id = res['Invalidation']['Id']
    logger.info(f'创建失效完成:{invalidation_id=}')
    return invalidation_id
def handler(event, context):
    """Lambda entry point for S3 object-change notifications.

    Delegates to ``s3_obj_info`` and returns the invalidation ID so it
    appears in the Lambda invocation result instead of being silently
    discarded (the original wrapper dropped the return value).

    :param event: S3 notification event dict.
    :param context: Lambda context object (unused).
    :return: invalidation ID string, or None when no records were present.
    """
    return s3_obj_info(event)
编辑function_s3object_cloudfront_invalid/requirements.txt
boto3==1.18.21
pytz==2018.9
上传代码到aws lambda
lambda deploy --requirements requirements.txt

4. 设置S3桶的通知事件到lambda
事件类型
目标为刚刚上传的lambda



