Compare commits

..

11 Commits

15 changed files with 5621 additions and 198 deletions

5
.gitignore vendored
View File

@@ -165,8 +165,9 @@ cython_debug/
**/.terraform/*
# .tfstate files
*.tfstate
*.tfstate.*
#*.tfstate
#*.tfstate.*
*.plan
# Crash log files
crash.log

32
.terraform.lock.hcl generated
View File

@@ -21,24 +21,24 @@ provider "registry.terraform.io/hashicorp/archive" {
}
provider "registry.terraform.io/hashicorp/aws" {
version = "5.45.0"
version = "5.51.1"
constraints = "~> 5.0"
hashes = [
"h1:4Vgk51R7iTY1oczaTQDG+DkA9nE8TmjlUtecqXX6qDU=",
"zh:1379bcf45aef3d486ee18b4f767bfecd40a0056510d26107f388be3d7994c368",
"zh:1615a6f5495acfb3a0cb72324587261dd4d72711a3cc51aff13167b14531501e",
"zh:18b69a0f33f8b1862fbd3f200756b7e83e087b73687085f2cf9c7da4c318e3e6",
"zh:2c5e7aecd197bc3d3b19290bad8cf4c390c2c6a77bb165da4e11f53f2dfe2e54",
"zh:3794da9bef97596e3bc60e12cdd915bda5ec2ed62cd1cd93723d58b4981905fe",
"zh:40a5e45ed91801f83db76dffd467dcf425ea2ca8642327cf01119601cb86021c",
"zh:4abfc3f53d0256a7d5d1fa5e931e4601b02db3d1da28f452341d3823d0518f1a",
"zh:4eb0e98078f79aeb06b5ff6115286dc2135d12a80287885698d04036425494a2",
"zh:75470efbadea4a8d783642497acaeec5077fc4a7f3df3340defeaa1c7de29bf7",
"zh:8861a0b4891d5fa2fa7142f236ae613cea966c45b5472e3915a4ac3abcbaf487",
"zh:8bf6f21cd9390b742ca0b4393fde92616ca9e6553fb75003a0999006ad233d35",
"h1:ESfxP2tCO6IZldSQnepXmIm+x+VtaQt/bKgGjYE+0BY=",
"zh:03d524b70ab300d90dc4dccad0c28b18d797b8986722b7a93e40a41500450eaa",
"zh:04dbcb7ab52181a784877c409f6c882df34bda686d8c884d511ebd4abf493f0c",
"zh:2b068f7838e0f3677829258df05d8b9d73fe6434a1a809f8710956cc1c01ea03",
"zh:41a4b1e4adbf7c90015ebff17a719fc08133b8a2c4dcefd2fa281552126e59a8",
"zh:48b1adf57f695a72c88c598f99912171ef7067638fd63fb0c6ad3fa397b3f7c3",
"zh:5c2fb26ecb83adac90d06dcf5f97edbc944824c2821816b1653e1a2b9d37b3c4",
"zh:93df05f53702df829d9b9335e559ad8b313808dbd2fad8b2ff14f176732e693d",
"zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425",
"zh:ad73008a044e75d337acda910fb54d8b81a366873c8a413fec1291034899a814",
"zh:bf261713b0b8bebfe8c199291365b87d9043849f28a2dc764bafdde73ae43693",
"zh:da3bafa1fd830be418dfcc730e85085fe67c0d415c066716f2ac350a2306f40a",
"zh:b5da39898602e44551b56e2803a42d92ea7115e35b1792efbf6649da37ef597b",
"zh:b7ab7f743f864ed8d479a7cb04fd3ce00c376f867ee5b53c4c1acaef6e286c54",
"zh:e7e7b2d8ee486415481a25ac7bdded20bd2897d5dd0790741798f31935b9528d",
"zh:e8008e3f5ef560fd9004d1ed1738f0f53e99b0ce961d967e95fc7c02e5954e4e",
"zh:f1296f648b8608ffa930b52519b00ed01eebedde9fdaf94205b365536e6c3916",
"zh:f8539960fd978a54990740ee984c6f7f743c9c32c7734e2601e92abfe54367e9",
"zh:fd182e6e20bb52982752a5d8c4b16887565f413a9d50d9d394d2c06eea8a195e",
]
}

9
deploy-dev.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Deploy the StandOut stack to the Development environment.
# Exit immediately on any command failure, unset variable, or pipe error,
# so a failed workspace-select or plan never lets a stale plan be applied.
set -euo pipefail

# Select Development workspace
terraform workspace select dev

# Plan the current execution with variables and verify.
# The short git hash (suffixed with -dev) tags the deployed build.
GIT_HASH=$(git rev-parse --short HEAD)
terraform plan -var-file dev.tfvars -var "git_version=${GIT_HASH}-dev" -out plans/dev.plan

# Apply the plan produced above
terraform apply plans/dev.plan

9
deploy-prod.sh Executable file
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Deploy the StandOut stack to the Production environment.
# Exit immediately on any command failure, unset variable, or pipe error,
# so a failed workspace-select or plan never lets a stale plan be applied.
set -euo pipefail

# Select Production workspace (the original comment wrongly said "Development")
terraform workspace select prod

# Plan the current execution with variables and verify.
# The short git hash tags the deployed build (no -dev suffix in prod).
GIT_HASH=$(git rev-parse --short HEAD)
terraform plan -var-file prod.tfvars -var "git_version=${GIT_HASH}" -out plans/prod.plan

# Apply the plan produced above
terraform apply plans/prod.plan

View File

@@ -1,15 +1,18 @@
import os
import json
import boto3
from typing import Optional
from uuid import uuid4
from pydantic import BaseModel
import pydantic_core
from typing import Dict, Optional
from redirects_base import Content, Customer, Redirects, Tag
s3_client = None
bucket_config = ''
bucket_data = ''
function_url = ''
git_version = ''
api_verson = ''
s3_client = None
redirects: Redirects | None = None
class S3Bucket(BaseModel):
name: str
@@ -34,63 +37,107 @@ class Record(BaseModel):
s3: S3Event
def lambda_handler(event: dict, context):
global s3_client, bucket_config, bucket_data
global s3_client, bucket_config, bucket_data, function_url, redirects, git_version, api_version
if s3_client is None:
print("Init Function")
bucket_config = os.environ.get('BUCKET_CONFIG', 'standout-config')
bucket_data = os.environ.get('BUCKET_DATA', 'standout-data')
print(f'Bucket Config: {bucket_config}')
function_url = os.environ.get('FUNCTION_URL', 'https://api.standout.it/dev')
git_version = os.environ.get('GIT_VERSION', '00000000')
api_version = os.environ.get('API_VERSION', 'v1')
print(f' Version: {git_version}')
print(f' Bucket Data: {bucket_data}')
print(f'Bucket Config: {bucket_config}')
s3_client = boto3.client('s3')
## Download redirects file
redirects: Redirects
try:
if context is not None:
resp = s3_client.get_object(
Bucket=bucket_config,
Key='redirects.json'
)
redirects = Redirects.model_validate_json(resp['Body'].read())
else:
with open('/home/emanuele/dev/StandOut/lambda_config/redirects.json', 'r') as f:
redirects = Redirects.model_validate_json(f.read())
except s3_client.exceptions.NoSuchKey as e:
print(e)
# Oppure pagina "siamo spiacenti ma il contenuto non e' disponibile"
return {
"statusCode": 404
}
# Proces records
### Process records
for r in event["Records"]:
record = Record(**r)
if record.eventSource != "aws:s3":
return False
## Stampa info di debug
print(f"Action: {record.eventName}")
print(f"Object: {record.s3}")
print(f"Object: {record.s3.object}")
# splitta la chiave per capire la directory
keys = record.s3.object.key.split('/')
keys.reverse()
## Ritorna se la chiave non e' un file
if record.s3.object.key.endswith('/') and record.s3.object.size == 0:
print(f"Skip, folder only: {record.s3.object.key}")
continue
## Scarica il redirects solo il primo giro
if redirects is None:
try:
redirects = downloadRedirects(client = s3_client, context = context)
redirects.version = git_version
except s3_client.exceptions.NoSuchKey as e:
print(e)
return False
match record.eventName:
case "ObjectCreated:Put" | "ObjectCreated:CompleteMultipartUpload":
print(f"ObjectCreated: {record.s3.object.key}")
processAdd(record=record, redirects=redirects, client=s3_client)
case "ObjectRemoved:Delete":
processDelete(record=record, redirects=redirects)
case "ObjectCreated:Copy":
print(f"Object copy: {record.s3.object.key}")
case _:
print("Unknown action")
if redirects is not None:
if context is not None:
resp = s3_client.put_object(Bucket=bucket_config,
Key='redirects.json',
Body=redirects.model_dump_json(indent=2))
print(f"New redirects version: {resp['ETag']}")
else:
with open('/home/emanuele/dev/StandOut/lambda_config/redirects.json', 'w') as f:
f.write(redirects.model_dump_json(indent=2))
else:
print("No Action")
return True
def downloadRedirects(client, context) -> Redirects:
if context is not None:
resp = client.get_object(
Bucket=bucket_config,
Key='redirects.json'
)
return Redirects.model_validate_json(resp['Body'].read())
else:
with open('/home/emanuele/dev/StandOut/lambda_config/redirects.json', 'r') as f:
return Redirects.model_validate_json(f.read(), strict=False)
def getObjectKeys(record: Record) -> list[str]:
keys = [v for v in record.s3.object.key.split('/') if v != '']
keys.reverse()
return keys
def processAdd(record: Record, redirects: Redirects, client) -> None:
# splitta la chiave per capire la directory
keys = getObjectKeys(record=record)
# crea il primo utente se necessario o selezionalo
cust_key = keys.pop()
cust_name = keys.pop()
# cerca la chiave utente dal nome e nel caso non sia presente creane uno nuovo
cust_id = searchCustomerKey(redirects=redirects, cust_name=cust_name, default=uuid4().hex)
assert(cust_id is not None) # uuid cannot return none
if redirects.customers is None:
redirects.customers = {cust_key: Customer(status="active")}
if cust_key not in redirects.customers.keys():
redirects.customers[cust_key] = Customer(status="active")
redirects.customers = {cust_id: Customer(name=cust_name, status="active", tags=None)}
if cust_id not in redirects.customers.keys():
redirects.customers[cust_id] = Customer(name=cust_name, status="active", tags=None)
# Aggiunto solo un cliente
if len(keys) == 0:
break
c = redirects.customers[cust_key]
c = redirects.customers[cust_id]
if len(keys) == 0 or not c:
return
# crea un tag per l'utente, con contenuto nullo o selezionalo
tag_key = keys.pop()
@@ -100,49 +147,83 @@ def lambda_handler(event: dict, context):
c.tags[tag_key] = Tag(status="active", content=None)
# Aggiunta anche una chiave
if len(keys) == 0:
break
t = c.tags[tag_key]
if len(keys) == 0 or not t:
return
# crea un contenuto per il tag a seconda della lunghezza della chiave
# Crea un contenuto per il tag a seconda della lunghezza della chiave
file_name = keys[0]
if file_name == "url.txt":
with s3_client.get_object(Bucket=bucket_data, Key=record.s3.object.key)['Body'] as url_file:
content = Content(type='url', key=file_name, url=url_file.readline().decode().strip())
with client.get_object(Bucket=bucket_data, Key=record.s3.object.key)['Body'] as url_file:
content = Content(type='url', key=file_name, url=url_file.readline().decode().strip(), tag_url=None)
else:
content = Content(type='s3', key=file_name, url=None)
content = Content(type='s3', key=file_name, url=None, tag_url=None)
# Aggiungi il contenuto a una faccia o al tag a seconda della lunghezza del path
match len(keys):
case 2:
content.tag_url = generateTagUrl(cust_id, tag_key, face_id=keys[1])
if t.content is None or isinstance(t.content, Content):
t.content = {keys[1]: content}
elif isinstance(t.content, dict):
t.content[keys[1]] = content
case 1:
content.tag_url = generateTagUrl(cust_id, tag_key, None)
t.content = content
case _:
print("Too long keys")
case "ObjectCreated:Copy":
print(f"Object copy: {record.s3.object.key}")
print(f"ObjectCreated: {record.s3.object.key}")
case "s3:ObjectRemoved:Delete":
print(f"Object remove: {record.s3.object.key}")
def generateTagUrl(cust_id: str, tag_id: str, face_id: str | None) -> str:
return f"{function_url}/{api_version}?id={cust_id}&tag_id={tag_id}{f"&face_id={face_id}" if face_id else ""}"
case _:
print("Unknown action")
def searchCustomerKey(redirects: Redirects, cust_name: str, default: str | None) -> str | None:
if redirects.customers is None:
return default
for k,v in redirects.customers.items():
if v is not None and v.name == cust_name:
return k
return default
if context is not None:
resp = s3_client.put_object(Bucket=bucket_config,
Key='redirects.json',
Body=redirects.model_dump_json(indent=2))
print(f"New redirects version: {resp['ETag']}")
def processDelete(record: Record, redirects: Redirects) -> None:
keys = getObjectKeys(record=record)
if not redirects.customers:
return
cust_name = keys.pop()
cust_key = searchCustomerKey(redirects=redirects, cust_name=cust_name, default=None)
if cust_key is None:
return
customer = redirects.customers.get(cust_key, None)
if len(keys) == 0 or not customer:
redirects.customers.pop(cust_key, None)
return
if not customer.tags:
return
tag_key = keys.pop()
tag = customer.tags.get(tag_key, None)
if len(keys) == 0 or not tag:
customer.tags.pop(tag_key, None)
return
match len(keys):
case 2:
if not tag.content or not isinstance(tag.content, Dict):
return
tag.content.pop(keys[1], None)
case 1:
if isinstance(tag.content, Dict) and keys[0] in tag.content.keys():
tag.content[keys[0]] = None
else:
with open('/home/emanuele/dev/StandOut/lambda_config/redirects.json', 'w') as f:
f.write(redirects.model_dump_json(indent=2))
return True
tag.content = None
case _:
print("Unexpected")
return
print(f"Object Remove: {record.s3.object.key}")
if __name__ == "__main__":
with open('/home/emanuele/dev/StandOut/lambda_config/test.json', 'r') as f:

View File

@@ -1,51 +1,36 @@
{
"customers": {
"customer1": {
"8eb763ceacec418cb48c81dae40bcde0": {
"status": "active",
"name": "cliente-test",
"tags": {
"tag1": {
"status": "active",
"content": {
"type": "url",
"key": "url.txt",
"url": "https://grafana.etss.it/d/LbON5PkGz/power?orgId=1&from=now-12h&to=now&refresh=30s"
"url": "https://www.instagram.com/lisavarano?igsh=M2V6ZHhqMG91cmF1&utm_source=qr",
"tag_url": "https://sel2p8wy6c.execute-api.eu-west-1.amazonaws.com/dev/api?id=8eb763ceacec418cb48c81dae40bcde0&tag_id=tag1"
}
},
"tag2": {
"status": "active",
"content": {
"type": "s3",
"key": "file.txt",
"url": null
"key": "f3",
"url": null,
"tag_url": "https://api.standout.it/dev/api?id=8eb763ceacec418cb48c81dae40bcde0&tag_id=tag2"
}
}
}
},
"customer2": {
"0ef1d0acb1bf4ebab77f5a8c9c56d6b5": {
"status": "active",
"name": "foo",
"tags": {
"tag1": {
"help": {
"status": "active",
"content": {
"face1": {
"type": "s3",
"key": "file.txt",
"url": null
}
}
}
}
},
"customer3": {
"status": "active",
"tags": {
"tag1": {
"status": "active",
"content": {
"type": "s3",
"key": "VID20240116160134.mp4",
"url": null
}
"content": null
}
}
}

View File

@@ -4,52 +4,18 @@ from pydantic import BaseModel
class Content(BaseModel):
type: str
key: str
url: Optional[str | None] = None
url: Optional[str]
tag_url: Optional[str]
class Tag(BaseModel):
status: str
content: Content | Dict[str, Content] | None = None
content: Optional[Content | Dict[str, Optional[Content]]]
class Customer(BaseModel):
status: str
tags: Dict[str, Tag] | None = None
name: str
tags: Optional[Dict[str, Optional[Tag]] ]
class Redirects(BaseModel):
customers: Dict[str, Customer] | None = None
if __name__ == "__main__":
r = Redirects (
customers = {
"cust1": Customer(
status="active",
tags= {
"tag1" : Tag (
status="active",
content= Content(
type="s3",
key="foo",
url=None
)
),
"tag2" : Tag(
status="active",
content = {
"face1" : Content(
type="s3",
key="contentface1",
url = "foo"
),
"face2": Content(
type="s3",
key="contentface2",
url = "bar"
),
}
)
}
)
}
)
r.customers['cust2'] = Customer(status="inactive", tags=None)
print(r.model_dump_json(indent=2))
version: str
customers: Dict[str, Optional[Customer]] | None = None

View File

@@ -27,8 +27,8 @@
"arn": "arn:aws:s3:::standout-data"
},
"object": {
"key": "customer1/tag2/file.txt",
"size": 2844326,
"key": "cliente-test/tag2/f3/",
"size": 0,
"eTag": "7039e5338840f289d0510dc9149bf0b5",
"sequencer": "00662A206F99CD2E09"
}

View File

@@ -8,6 +8,8 @@ from typing import Optional
s3_client = None
bucket_config = ''
bucket_data = ''
git_version = ''
api_version = ''
class RequestParams(BaseModel):
id: str
@@ -20,14 +22,19 @@ class Content(BaseModel):
url: Optional[str] = None
def lambda_handler(event: dict, context):
global s3_client, bucket_config, bucket_data
global s3_client, bucket_config, bucket_data, git_version, api_version
if s3_client is None:
print("Init Function")
bucket_config = os.environ.get('BUCKET_CONFIG', 'standout-config')
bucket_data = os.environ.get('BUCKET_DATA', 'standout-data')
print(f'Bucket Config: {bucket_config}')
git_version = os.environ.get('GIT_VERSION', '00000000')
api_version = os.environ.get('API_VERSION', 'v1')
print(f' Version: {git_version}')
print(f' Bucket Data: {bucket_data}')
print(f'Bucket Config: {bucket_config}')
s3_client = boto3.client('s3')
## Download redirects file
@@ -48,8 +55,10 @@ def lambda_handler(event: dict, context):
redirects = json.load(resp['Body'])
params = RequestParams(**event.get('queryStringParameters', {}))
customer = redirects.get(params.id, {})
tag = customer.get(params.tag_id, {})
customers = redirects.get('customers', {})
customer = customers.get(params.id, {})
tags = customer.get('tags', {})
tag = tags.get(params.tag_id, {})
content = tag.get('content', None)
# In case of multi face tag select the correct face
@@ -63,7 +72,8 @@ def lambda_handler(event: dict, context):
content = Content(**content)
match content.type:
case "s3":
key = f'{params.id}/{params.tag_id}/{content.key}'
file_name = content.key.replace('+',' ') # remove slugify of filename
key = f'{customer['name']}/{params.tag_id}{'/'+params.face_id if params.face_id else ''}/{file_name}'
final_redirect = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_data,
'Key': key},
@@ -71,7 +81,6 @@ def lambda_handler(event: dict, context):
case "url":
final_redirect = content.url
except json.decoder.JSONDecodeError as je:
print(je)
return {
@@ -104,8 +113,8 @@ if __name__ == "__main__":
"httpMethod": "GET",
"queryStringParameters": {
"id": "customer1",
"tag_id": "tag3",
"face_id": "face1"
"tag_id": "tag2",
"face_id": "face2"
},
}

View File

@@ -9,23 +9,24 @@ terraform {
# Configure the AWS Provider
provider "aws" {
region = "eu-west-1"
region = var.region
profile = "StandOut_Terraform"
}
# Create a VPC
resource "aws_vpc" "vpc_standout" {
cidr_block = "10.0.0.0/16"
cidr_block = var.env == "dev" ? "10.0.0.0/16" : "10.10.0.0/16"
}
# create an s3 bucket for config
resource "aws_s3_bucket" "s3_standout_config" {
bucket = "standout-config"
bucket = "standout-config-${var.env}"
force_destroy = false
}
# create an s3 bucket for data
resource "aws_s3_bucket" "s3_standout" {
bucket = "standout-data"
bucket = "standout-data-${var.env}"
force_destroy = true
}
@@ -134,30 +135,35 @@ data "aws_iam_policy_document" "lambda_role" {
}
resource "aws_iam_role" "iam_for_lambda" {
name = "iam_for_lambda"
name = "iam_for_lambda-${var.env}"
assume_role_policy = data.aws_iam_policy_document.lambda_role.json
}
data "archive_file" "lambda_standout_code" {
resource "aws_iam_role_policy_attachment" "iam_for_lambda_allow_logs" {
role = aws_iam_role.iam_for_lambda.name
policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
}
data "archive_file" "lambda_standout_redirect_code" {
type = "zip"
source_dir = "./lambda_redirect"
output_path = "./lambda_zip/standout_lambda_redirect.zip"
output_path = "./lambda_zip/standout_lambda_redirect-${var.env}.zip"
}
data "archive_file" "lambda_standout_config_code" {
type = "zip"
source_dir = "./lambda_config"
output_path = "./lambda_zip/standout_lambda_config.zip"
output_path = "./lambda_zip/standout_lambda_config-${var.env}.zip"
}
data "archive_file" "lambda_layer_deps" {
type = "zip"
source_dir = "./lambda_layer"
output_path = "./lambda_zip/lambda_layer.zip"
output_path = "./lambda_zip/lambda_layer-${var.env}.zip"
}
resource "aws_lambda_layer_version" "lambda_layer" {
filename = "./lambda_zip/lambda_layer.zip"
filename = "./lambda_zip/lambda_layer-${var.env}.zip"
layer_name = "lambda_deps"
compatible_runtimes = ["python3.12"]
}
@@ -165,12 +171,12 @@ resource "aws_lambda_layer_version" "lambda_layer" {
resource "aws_lambda_function" "lambda_standout_redirect" {
# If the file is not in the current working directory you will need to include a
# path.module in the filename.
filename = "./lambda_zip/standout_lambda_redirect.zip"
function_name = "standout-redirect"
filename = "./lambda_zip/standout_lambda_redirect-${var.env}.zip"
function_name = "standout-redirect-${var.env}"
role = aws_iam_role.iam_for_lambda.arn
handler = "lambda_redirect.lambda_handler"
source_code_hash = data.archive_file.lambda_standout_code.output_base64sha256
source_code_hash = data.archive_file.lambda_standout_redirect_code.output_base64sha256
runtime = "python3.12"
@@ -178,10 +184,18 @@ resource "aws_lambda_function" "lambda_standout_redirect" {
timeout = 10
logging_config {
log_format = "Text"
log_group = aws_cloudwatch_log_group.standout_lambda_config_logs.name
}
environment {
variables = {
BUCKET_CONFIG = aws_s3_bucket.s3_standout_config.bucket,
BUCKET_DATA = aws_s3_bucket.s3_standout.bucket
GIT_VERSION = var.git_version
API_VERSION = var.api_version
env = var.env
}
}
}
@@ -189,12 +203,12 @@ resource "aws_lambda_function" "lambda_standout_redirect" {
resource "aws_lambda_function" "lambda_standout_config" {
# If the file is not in the current working directory you will need to include a
# path.module in the filename.
filename = "./lambda_zip/standout_lambda_config.zip"
function_name = "standout-config"
filename = "./lambda_zip/standout_lambda_config-${var.env}.zip"
function_name = "standout-config-${var.env}"
role = aws_iam_role.iam_for_lambda.arn
handler = "lambda_config.lambda_handler"
source_code_hash = data.archive_file.lambda_standout_code.output_base64sha256
source_code_hash = data.archive_file.lambda_standout_config_code.output_base64sha256
runtime = "python3.12"
@@ -202,14 +216,34 @@ resource "aws_lambda_function" "lambda_standout_config" {
timeout = 10
logging_config {
log_format = "Text"
log_group = aws_cloudwatch_log_group.standout_lambda_redirect_logs.name
}
environment {
variables = {
BUCKET_CONFIG = aws_s3_bucket.s3_standout_config.bucket,
BUCKET_DATA = aws_s3_bucket.s3_standout.bucket
FUNCTION_URL = var.redirect_url
GIT_VERSION = var.git_version
API_VERSION = var.api_version
ENV = var.env
}
}
}
# Create and manage log groups retention
resource "aws_cloudwatch_log_group" "standout_lambda_redirect_logs" {
name = "standout-lambda-redirect-logs-${var.env}"
retention_in_days = 14
}
resource "aws_cloudwatch_log_group" "standout_lambda_config_logs" {
name = "standout-lambda-config-logs-${var.env}"
retention_in_days = 30
}
# Add S3 trigger to config lambda
resource "aws_lambda_permission" "lambda_config_s3_trigger_allow" {
statement_id = "AllowExecutionFromS3Bucket"
@@ -231,7 +265,7 @@ resource "aws_s3_bucket_notification" "bucket_notification" {
# create API gateway for lambda triger and connect
resource "aws_apigatewayv2_api" "api_standout_gateway" {
name = "standout-api"
name = "standout-api-${var.env}"
protocol_type = "HTTP"
}
@@ -248,18 +282,33 @@ resource "aws_apigatewayv2_integration" "api_standout_integration" {
resource "aws_apigatewayv2_stage" "api_standout_lambda_stage" {
api_id = aws_apigatewayv2_api.api_standout_gateway.id
name = "dev"
name = var.env
auto_deploy = true
}
resource "aws_apigatewayv2_route" "api_standout_route" {
api_id = aws_apigatewayv2_api.api_standout_gateway.id
route_key = "GET /api"
route_key = "GET /${var.api_version}"
target = "integrations/${aws_apigatewayv2_integration.api_standout_integration.id}"
}
resource "aws_apigatewayv2_domain_name" "api_standout_domain_name" {
domain_name = var.domain_name
domain_name_configuration {
certificate_arn = var.ssl_certificate
endpoint_type = "REGIONAL"
security_policy = "TLS_1_2"
}
}
resource "aws_apigatewayv2_api_mapping" "api_standout_domain_mapping" {
api_id = aws_apigatewayv2_api.api_standout_gateway.id
domain_name = aws_apigatewayv2_domain_name.api_standout_domain_name.id
stage = aws_apigatewayv2_stage.api_standout_lambda_stage.id
}
resource "aws_lambda_permission" "api_lambda_permission" {
statement_id = "AllowExecutionFromAPIGateway"
action = "lambda:InvokeFunction"
@@ -269,5 +318,7 @@ resource "aws_lambda_permission" "api_lambda_permission" {
source_arn = "${aws_apigatewayv2_api.api_standout_gateway.execution_arn}/*/*"
}
# create a route 53 configuration
# Output relevant variables
output "api_mapping_domain_name" {
value = aws_apigatewayv2_api_mapping.api_standout_domain_mapping
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

34
variable.tf Normal file
View File

@@ -0,0 +1,34 @@
# Input variables for the StandOut infrastructure.
# Per-environment values are supplied via dev.tfvars / prod.tfvars
# (see deploy-dev.sh / deploy-prod.sh).

variable "region" {
  description = "AWS region all resources are created in."
  type        = string
  default     = "eu-west-1"
}

variable "env" {
  description = "Deployment environment name (e.g. dev, prod); used to suffix resource names."
  type        = string
  default     = "dev"
}

variable "redirect_url" {
  description = "Public base URL of the redirect API, passed to the config lambda as FUNCTION_URL."
  type        = string
  default     = "https://nfc.infostandout.com"
}

variable "domain_name" {
  description = "Custom domain name attached to the API Gateway."
  type        = string
  default     = "nfc.infostandout.com"
}

variable "ssl_certificate" {
  description = "ARN of the ACM certificate for the API Gateway custom domain. Must be set per environment — the empty default is not a valid ARN."
  type        = string
  default     = ""
}

variable "git_version" {
  description = "Short git hash identifying the deployed build; injected by the deploy scripts."
  type        = string
  default     = "00000000"
}

variable "api_version" {
  description = "API version segment used in the gateway route (GET /<api_version>)."
  type        = string
  default     = "v1"
}