Added lambda auto config

This commit is contained in:
2024-04-25 12:54:31 +02:00
parent 2cdc051191
commit 0bc1265b68
4 changed files with 241 additions and 31 deletions

View File

@@ -0,0 +1,92 @@
import os
import json
import boto3
from typing import Optional
from pydantic import BaseModel
from redirects_base import Redirects
# Module-level state, initialized lazily on first invocation and reused
# across warm Lambda invocations of the same execution environment.
s3_client = None  # boto3 S3 client; created in lambda_handler on cold start
bucket_config = ''  # bucket holding redirects.json (env BUCKET_CONFIG)
bucket_data = ''  # bucket holding the content objects (env BUCKET_DATA)
class S3Bucket(BaseModel):
    """Bucket section of an S3 event-notification record."""

    name: str
    ownerIdentity: dict
    arn: str
class S3Object(BaseModel):
    """Object section of an S3 event-notification record.

    eTag, size and sequencer are not present on every event type,
    hence optional with a None default.
    """

    key: str
    # Optional[X] already means "X | None" — the original
    # Optional[str | None] spelled the None twice for no effect.
    eTag: Optional[str] = None
    size: Optional[int] = None
    sequencer: Optional[str] = None
class S3Event(BaseModel):
    """`s3` section of an event-notification record: bucket + object."""

    s3SchemaVersion: str
    bucket: S3Bucket
    object: S3Object
class Record(BaseModel):
    """One entry of the event's "Records" list, as delivered by S3."""

    eventName: str
    eventSource: str  # expected to be "aws:s3"; checked in lambda_handler
    eventTime: str
    s3: S3Event
def lambda_handler(event: dict, context):
global s3_client, bucket_config, bucket_data
if s3_client is None:
print("Init Function")
bucket_config = os.environ.get('BUCKET_CONFIG', 'standout-config')
bucket_data = os.environ.get('BUCKET_DATA', 'standout-data')
print(f'Bucket Config: {bucket_config}')
print(f' Bucket Data: {bucket_data}')
s3_client = boto3.client('s3')
## Download redirects file
redirects = None
try:
resp = s3_client.get_object(
Bucket=bucket_config,
Key='redirects.json'
)
redirects = Redirects(**json.load(resp['Body']))
except s3_client.exceptions.NoSuchKey as e:
print(e)
# Oppure pagina "siamo spiacenti ma il contenuto non e' disponibile"
return {
"statusCode": 404
}
# Proces records
for r in event["Records"]:
record = Record(**r)
if record.eventSource != "aws:s3":
return False
print(f"Action: {record.eventName}")
print(f"Object:{record.s3}")
match record.eventName:
case "ObjectCreated:Put" | "ObjectCreated:Post":
print(f"Object add: {record.s3.object.key}")
key_components = record.s3.object.key.split('/')
# capire il numero di key components, aggiornare il modello
return True
case "ObjectCreated:Copy":
print(f"Object copy: {record.s3.object.key}")
return True
case "s3:ObjectRemoved:*":
print(f"Object remove: {record.s3.object.key}")
return True
case _:
print("Unknown action")
if __name__ == "__main__":
    # Local smoke test only — requires AWS credentials; an empty event
    # carries no "Records" and will not exercise the record handling.
    lambda_handler({}, None)

View File

@@ -0,0 +1,55 @@
from typing import Dict, Optional
from pydantic import BaseModel
class Content(BaseModel):
    """One piece of redirect content: an S3 object key, optionally a URL."""

    type: str  # dispatch discriminator; "s3" in the demo below
    key: str
    # Optional[X] already means "X | None", so Optional[str | None] was
    # redundant; the missing default also made url a *required* field even
    # though callers pass url=None explicitly.  Adding "= None" is backward
    # compatible and matches the sibling Content model in lambda_redirect.
    url: Optional[str] = None
class Tag(BaseModel):
    """A customer tag: either a single Content or a dict of named faces."""

    status: str
    content: Content | Dict[str, Content] | None = None
class Customer(BaseModel):
    """A customer with its activation status and its tags, keyed by tag id."""

    status: str
    tags: Dict[str, Tag] | None = None
class Redirects(BaseModel):
    """Root of redirects.json: customers keyed by customer id."""

    customers: Dict[str, Customer]
if __name__ == "__main__":
    # Demo: build a small redirects tree and dump it as pretty JSON.
    single_face = Tag(
        status="active",
        content=Content(type="s3", key="foo", url=None),
    )
    multi_face = Tag(
        status="active",
        content={
            "face1": Content(type="s3", key="contentface1", url="foo"),
            "face2": Content(type="s3", key="contentface2", url="bar"),
        },
    )
    r = Redirects(
        customers={
            "cust1": Customer(
                status="active",
                tags={"tag1": single_face, "tag2": multi_face},
            )
        }
    )
    r.customers["cust2"] = Customer(status="inactive", tags=None)
    print(r.model_dump_json(indent=2))

View File

@@ -2,12 +2,23 @@ import os
import boto3
import json
import boto3.exceptions
from botocore.exceptions import ClientError
from pydantic import BaseModel
from typing import Optional
# Module-level state, initialized lazily and reused across warm invocations.
s3_client = None  # boto3 S3 client; created inside lambda_handler
bucket_config = ''  # bucket holding redirects.json (env BUCKET_CONFIG)
bucket_data = ''  # bucket holding the content objects (env BUCKET_DATA)
class RequestParams(BaseModel):
    """Query-string parameters of a redirect request."""

    id: str  # used below to look up the customer in redirects.json
    tag_id: str  # selects the tag inside the customer entry
    # selects one face when the tag content is a multi-face dict
    face_id: Optional[str] = None
class Content(BaseModel):
    """One piece of redirect content; "type" dispatches to "s3" or "url"."""

    type: str
    key: str
    url: Optional[str] = None
def lambda_handler(event: dict, context):
global s3_client, bucket_config, bucket_data
@@ -17,11 +28,9 @@ def lambda_handler(event: dict, context):
bucket_data = os.environ.get('BUCKET_DATA', 'standout-data')
print(f'Bucket Config: {bucket_config}')
print(f' Bucket Data: {bucket_data}')
s3_client = boto3.client('s3')
for x in s3_client.list_buckets()['Buckets']:
print(f"{x['Name']}: {x['CreationDate'].isoformat()}")
## Download redirects file
try:
resp = s3_client.get_object(
Bucket=bucket_config,
@@ -33,32 +42,34 @@ def lambda_handler(event: dict, context):
return {
"statusCode": 404
}
try:
redirects = json.load(resp["Body"])
params = event.get('queryStringParameters', {})
customer = redirects.get(params['id'], {})
tag = customer.get(params['tag_id'], {})
## Parse request and get content
try:
redirects = json.load(resp['Body'])
params = RequestParams(**event.get('queryStringParameters', {}))
customer = redirects.get(params.id, {})
tag = customer.get(params.tag_id, {})
content = tag.get('content', None)
dest = None
if content and isinstance(content, dict) and not "type" in content.keys():
dest = content[params['face_id']]
else:
dest = content
if dest and isinstance(dest, dict):
match dest.get('type', 's3'):
# In case of multi face tag select the correct face
if isinstance(content, dict) and params.face_id:
content = content.get(params.face_id, None)
if content is None:
return {
"statusCode": 404
}
content = Content(**content)
match content.type:
case "s3":
try:
key = f'{params['id']}/{params['tag_id']}/{dest['key']}'
response = s3_client.generate_presigned_url('get_object',
key = f'{params.id}/{params.tag_id}/{content.key}'
final_redirect = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_data,
'Key': key},
ExpiresIn=120)
except ClientError as e:
print(e)
finally:
dest = response
case "url":
final_redirect = content.url
except json.decoder.JSONDecodeError as je:
@@ -68,6 +79,11 @@ def lambda_handler(event: dict, context):
}
except KeyError as ke:
print(ke)
return {
"statusCode": 404
}
except Exception as e:
print(e)
return {
"statusCode": 500
}
@@ -76,7 +92,7 @@ def lambda_handler(event: dict, context):
"statusCode": 301,
"headers": {
"Cache-Control": "no-cache",
"Location": str(dest)
"Location": str(final_redirect)
}
}

View File

@@ -140,14 +140,20 @@ resource "aws_iam_role" "iam_for_lambda" {
data "archive_file" "lambda_standout_code" {
type = "zip"
source_file = "./lambda_redirect/lambda_redirect.py"
output_path = "./lambda_redirect/standout_lambda_function.zip"
source_dir = "./lambda_redirect"
output_path = "./lambda_zip/standout_lambda_redirect.zip"
}
data "archive_file" "lambda_standout_config_code" {
type = "zip"
source_dir = "./lambda_config"
output_path = "./lambda_zip/standout_lambda_config.zip"
}
resource "aws_lambda_function" "lambda_standout_redirect" {
# If the file is not in the current working directory you will need to include a
# path.module in the filename.
filename = "./lambda_redirect/standout_lambda_function.zip"
filename = "./lambda_zip/standout_lambda_redirect.zip"
function_name = "standout-redirect"
role = aws_iam_role.iam_for_lambda.arn
handler = "lambda_redirect.lambda_handler"
@@ -166,6 +172,47 @@ resource "aws_lambda_function" "lambda_standout_redirect" {
}
}
resource "aws_lambda_function" "lambda_standout_config" {
  # If the file is not in the current working directory you will need to
  # include a path.module in the filename.
  filename      = "./lambda_zip/standout_lambda_config.zip"
  function_name = "standout-config"
  role          = aws_iam_role.iam_for_lambda.arn
  handler       = "lambda_config.lambda_handler"
  # BUG FIX: the hash must track this function's own archive.  It previously
  # referenced data.archive_file.lambda_standout_code (the *redirect* lambda's
  # zip), so changes to the config lambda's code would never trigger a
  # redeploy of this function.
  source_code_hash = data.archive_file.lambda_standout_config_code.output_base64sha256
  runtime          = "python3.12"
  timeout          = 10
  environment {
    variables = {
      BUCKET_CONFIG = aws_s3_bucket.s3_standout_config.bucket,
      BUCKET_DATA   = aws_s3_bucket.s3_standout.bucket
    }
  }
}
# Add S3 trigger to config lambda
resource "aws_lambda_permission" "lambda_config_s3_trigger_allow" {
statement_id = "AllowExecutionFromS3Bucket"
action = "lambda:InvokeFunction"
function_name = aws_lambda_function.lambda_standout_config.arn
principal = "s3.amazonaws.com"
source_arn = aws_s3_bucket.s3_standout.arn
}
resource "aws_s3_bucket_notification" "bucket_notification" {
bucket = aws_s3_bucket.s3_standout.id
lambda_function {
lambda_function_arn = aws_lambda_function.lambda_standout_config.arn
events = ["s3:ObjectCreated:*", "s3:ObjectRemoved:*"]
}
}
# Create the API Gateway that triggers the lambda, and connect them
resource "aws_apigatewayv2_api" "api_standout_gateway" {
name = "standout-api"