Commit efe049a

Merge branch 'master' into simplify-update-item-call

2 parents 4092810 + ebb0205

542 files changed (+22658 / -2997 lines)
.github/scripts/build-assets.py

Lines changed: 29 additions & 4 deletions
```diff
@@ -24,13 +24,13 @@
 
 #Move static assets
 Path(dest_root, 'assets').mkdir(parents=True, exist_ok=False)
-data_files = ['design-patterns/cloudformation/lab.yaml',
-              'design-patterns/cloudformation/C9.yaml',
-              'design-patterns/cloudformation/UserData.sh',
+data_files = ['design-patterns/cloudformation/C9.yaml',
               'design-patterns/cloudformation/UserDataC9.sh',
               'event-driven/event-driven-cfn.yaml',
               'static/files/hands-on-labs/migration-env-setup.yaml',
-              'static/files/hands-on-labs/migration-dms-setup.yaml']
+              'static/files/hands-on-labs/migration-dms-setup.yaml',
+              'static/files/dynamodb-opensearch-zetl/dynamodb-opensearch-setup.yaml'
+              ]
 for inp_file in data_files:
     src_file = os.path.join(pkg_root, inp_file)
     head, tail = ntpath.split(src_file)
@@ -60,6 +60,31 @@
         workshop_zip.write(scenario2)
 shutil.move(os.path.join(os.getcwd(), 'scenario-solutions.zip'), os.path.join(dest_root, 'assets', 'scenario-solutions.zip'))
 
+#Create LHOL zETL ZIP
+os.chdir(os.path.join(pkg_root, 'static', 'files', 'dynamodb-opensearch-zetl'))
+with ZipFile('OpenSearchPipeline.zip', 'w') as workshop_zip:
+    for pyscript in glob.glob('./OpenSearchPipeline/*'):
+        workshop_zip.write(pyscript)
+shutil.move(os.path.join(os.getcwd(), 'OpenSearchPipeline.zip'), os.path.join(dest_root, 'assets', 'OpenSearchPipeline.zip'))
+
+
+#Create Game-Player-Data Python Scripts ZIP
+os.chdir(os.path.join(pkg_root, 'game-player-data'))
+with ZipFile('battle-royale.zip', 'w') as workshop_zip:
+    for pyscript in glob.glob('./scripts/*.py'):
+        workshop_zip.write(pyscript)
+    for js_script in glob.glob('./scripts/*.json'):
+        workshop_zip.write(js_script)
+shutil.move(os.path.join(os.getcwd(), 'battle-royale.zip'), os.path.join(dest_root, 'assets', 'battle-royale.zip'))
+
+#Create Global Serverless ZIP
+os.chdir(os.path.join(pkg_root, 'global-serverless'))
+with ZipFile('global-serverless.zip', 'w') as workshop_zip:
+    for data_file in glob.glob('global-serverless/*'):
+        workshop_zip.write(data_file)
+    workshop_zip.write('global-serverless/.chalice/config.json')
+shutil.move(os.path.join(os.getcwd(), 'global-serverless.zip'), os.path.join(dest_root, 'assets', 'global-serverless.zip'))
+
 
 #Create Event Driven ZIPs
 zips_to_make = ['MapLambdaPackage', 'ReduceLambdaPackage', 'StateLambdaPackage', 'GeneratorLambdaPackage']
```
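Each of the three new blocks repeats the same pattern: `chdir` into the source folder, zip a glob of files, then move the archive into the published `assets/` directory. A standalone sketch of that pattern (the function name and the paths in the commented usage are hypothetical, for illustration only):

```python
import glob
import os
import shutil
from zipfile import ZipFile

def package_assets(src_dir, pattern, zip_name, dest_root):
    """Zip files matching `pattern` under `src_dir` and move the archive
    into the site's assets directory, mirroring build-assets.py."""
    os.chdir(src_dir)
    with ZipFile(zip_name, 'w') as workshop_zip:
        for path in glob.glob(pattern):
            workshop_zip.write(path)
    shutil.move(os.path.join(os.getcwd(), zip_name),
                os.path.join(dest_root, 'assets', zip_name))

# Hypothetical usage mirroring the global-serverless block:
# package_assets('/path/to/repo/global-serverless', 'global-serverless/*',
#                'global-serverless.zip', '/path/to/public')
```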

.github/workflows/main.yml

Lines changed: 14 additions & 2 deletions
```diff
@@ -27,14 +27,18 @@ jobs:
         with:
           aws-region: us-east-1
           role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }}
+      - name: S3SyncStaticWeb
+        run: aws s3 sync ./global-serverless/web/ s3://amazon-dynamodb-labs-static/static/global-serverless-application/web/
+      - name: StaticYearReplacer
+        run: sed -i "s/<<PRESENTYEAR>>/$(date +%Y)/" ./static/files/visualizer/index.html
+      - name: S3SyncVisualizer
+        run: aws s3 sync ./static/files/visualizer/ s3://amazon-dynamodb-labs-static/static/visualizer/
       - name: Pull preview build
         run: aws s3 sync s3://amazon-dynamodb-labs-static/build/ . && chmod +x preview_build
       - name: Build Assets
         run: python3 ./.github/scripts/build-assets.py
       - name: S3Sync
         run: aws s3 sync public/assets/ s3://$STEP_S3_BUCKET/assets/ --delete
-      - name: SetS3Acl
-        run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/lab.yaml
       - name: SetS3Acl
         run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/C9.yaml
       - name: SetS3AclED1
@@ -51,3 +55,11 @@ jobs:
         run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/migration-env-setup.yaml
       - name: SetS3AclLHOLDMS
         run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/migration-dms-setup.yaml
+      - name: SetS3AclLHOLzETL
+        run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/OpenSearchPipeline.zip
+      - name: SetS3AclLHOLzETLYaml
+        run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/dynamodb-opensearch-setup.yaml
+      - name: SetS3AclLGME1
+        run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/battle-royale.zip
+      - name: SetS3AclLMR
+        run: aws s3api put-object-acl --grant-read uri=http://acs.amazonaws.com/groups/global/AllUsers --bucket $STEP_S3_BUCKET --key assets/global-serverless.zip
```
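Each new SetS3Acl step grants anonymous read on one freshly published asset. A quick way to confirm the grant took effect, shown here as an optional spot check (this command is not part of the workflow, and assumes the same `$STEP_S3_BUCKET` variable):

```bash
# Confirm the AllUsers read grant on one of the newly public assets
aws s3api get-object-acl --bucket "$STEP_S3_BUCKET" --key assets/global-serverless.zip \
  --query "Grants[?Grantee.URI=='http://acs.amazonaws.com/groups/global/AllUsers'].Permission"
```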

.gitignore

Lines changed: 1 addition & 0 deletions
```diff
@@ -5,3 +5,4 @@ __pycache__/
 .python-version
 build
 .hugo_build.lock
+preview_build
```

README.md

Lines changed: 6 additions & 1 deletion
```diff
@@ -1,4 +1,4 @@
-# Amazon DynamoDB Labs
+# Amazon DynamoDB Labs / Amazon DynamoDB Immersion Day
 The repo for https://catalog.workshops.aws/dynamodb-labs/en-US , formerly https://amazon-dynamodb-labs.com
 
 ### Dev:
@@ -23,5 +23,10 @@ Make a pull request with changes. PRs will be automatically checked to make sure
 
 On merge to master, a GitHub action will deploy the assets to amazon-dynamodb-labs.com and verify the build to ensure the markdown and other files are correctly formatted. From there, a maintainer must manually pull the changes and push to https://catalog.workshops.aws/dynamodb-labs/en-US
 
+#### Internal maintainer: sync changes to internal copy of this repo
+This public repo is pushed to the internal repo in workshop studio using a combination of rsync (we assume you are on macOS) and git. The file `sync.sh` copies the source files to the WS repo folder; after that, follow the internal README.md to complete the sync.
+1. Run sync.sh to sync the public repo to the amazon-dynamodb-immersion-day repo, e.g. `./sync.sh -d /Users/$USER/workspace/amazon-dynamodb-immersion-day`. Choose y to sync the files.
+2. Change into the directory for amazon-dynamodb-immersion-day (the workshop studio version), open README.md, and follow the instructions there to git add and push the changes internally. Note that some assets, specifically the LEDA central account resources, are only authored on the internal repo, and they have a separate set of commands to push updates because they are assets that must live in a special S3 bucket owned by WS.
+
 ## License
 This project is licensed under the Apache-2.0 License.
```

content/authors.en.md

Lines changed: 11 additions & 2 deletions
```diff
@@ -1,5 +1,5 @@
 ---
-title: "Contributors to Amazon DynamoDB Labs"
+title: "Contributors to the Immersion Day"
 hidden: false
 chapter: true
 description: "Our editors and hall of fame."
@@ -13,8 +13,17 @@ weight: 100
 1. Sean Shriver ([switch180](https://github.com/switch180)) - Ported the whole lab to amazon-dynamodb-labs.com with a custom Hugo theme. Made the "bullet-proof" CloudFormation template for the lab. Updated the hands on lab to Python3
 1. Daniel Yoder ([danielsyoder](https://github.com/danielsyoder)) - The brains behind amazon-dynamodb-labs.com and the co-creator of the design scenarios
 
-### 2023 additions
+### 2024 additions
+The Generative AI workshop LBED was released in early 2024:
+1. John Terhune - ([@terhunej](https://github.com/terhunej)) - Primary author
+1. Zhang Xin - ([@SEZ9](https://github.com/SEZ9)) - Content contributor and original author of a lab that John used as the basis of LBED
+1. Sean Shriver - ([@switch180](https://github.com/switch180)) - Editor, tech reviewer, and merger
+
+The LSQL relational migration lab was released in late 2024:
+1. Robert McCauley - ([robm26](https://github.com/robm26)) - Primary author
+1. Sean Shriver - ([@switch180](https://github.com/switch180)) - Editor, tech reviewer, and merger
 
+### 2023 additions
 The serverless event driven architecture lab was added in 2023:
 
 1. Lucas Rettenmeier ([@rettenls](https://github.com/rettenls)) - Workshop creator for re\:Invent 2021
```
New file — 62 additions & 0 deletions

---
title: "5. Summary and Clean Up"
date: 2023-12-01T00:00:00-00:00
weight: 20
chapter: true
---

Congratulations! You have made it to the end of the workshop.

In this workshop you explored capturing item-level changes on a DynamoDB table using DynamoDB Streams and Kinesis Data Streams. In this instance, you wrote the previous version of updated items to a different DynamoDB table. By applying these same techniques, you can build complex event-driven solutions that are triggered by changes to items stored in DynamoDB.

If you used an account provided by Workshop Studio, you do not need to do any cleanup. The account terminates when the event is over.

If you used your own account, please remove the following resources:

* The Lambda function event source mappings:

```bash
UUID_1=`aws lambda list-event-source-mappings --function-name create-order-history-kds --query 'EventSourceMappings[].UUID' --output text`
UUID_2=`aws lambda list-event-source-mappings --function-name create-order-history-ddbs --query 'EventSourceMappings[].UUID' --output text`
aws lambda delete-event-source-mapping --uuid ${UUID_1}
aws lambda delete-event-source-mapping --uuid ${UUID_2}
```
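If either function ever ends up with more than one mapping, `--output text` returns all the UUIDs on a single line and the delete calls above will fail. A small loop handles that case; this is an optional sketch using the same CLI commands:

```bash
# Delete every event source mapping attached to the two lab functions
for fn in create-order-history-kds create-order-history-ddbs; do
  for uuid in $(aws lambda list-event-source-mappings --function-name "$fn" \
      --query 'EventSourceMappings[].UUID' --output text); do
    aws lambda delete-event-source-mapping --uuid "$uuid"
  done
done
```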
* The AWS Lambda functions created during the labs:

```bash
aws lambda delete-function --function-name create-order-history-ddbs
aws lambda delete-function --function-name create-order-history-kds
```

* The Amazon Kinesis data stream created during the labs:

```bash
aws kinesis delete-stream --stream-name Orders
```

* The Amazon DynamoDB tables created in the Getting Started section of the lab:

```bash
aws dynamodb delete-table --table-name Orders
aws dynamodb delete-table --table-name OrdersHistory
```

* The Amazon SQS queues created during the labs:

```bash
aws sqs delete-queue --queue-url https://sqs.${REGION}.amazonaws.com/${ACCOUNT_ID}/orders-ddbs-dlq
aws sqs delete-queue --queue-url https://sqs.${REGION}.amazonaws.com/${ACCOUNT_ID}/orders-kds-dlq
```

* The IAM policies attached to the IAM execution roles you created:

![Delete IAM Policies](/static/images/change-data-capture/cleanup/delete-policies-one.png)

![Delete IAM Policies](/static/images/change-data-capture/cleanup/delete-policies-two.png)

* The AWS IAM execution roles created for the Lambda functions:

![Delete IAM Roles](/static/images/change-data-capture/cleanup/delete-roles.png)

This should wrap up the cleanup process.
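To double-check that nothing was left behind, these read-only listing commands (standard AWS CLI, offered here as an optional sketch) should no longer show any of the lab resources:

```bash
# None of these listings should include the Orders/OrdersHistory resources
aws dynamodb list-tables --query 'TableNames' --output text
aws lambda list-functions --query 'Functions[].FunctionName' --output text
aws kinesis list-streams --query 'StreamNames' --output text
aws sqs list-queues --queue-name-prefix orders
```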
New file — 114 additions & 0 deletions

---
title: "Configure Lambda Function"
date: 2023-12-01T00:00:00-00:00
weight: 120
chapter: true
---

Configure your Lambda function to copy changed records from the Orders table's DynamoDB stream to the OrdersHistory table by doing the following.

1. Go to the IAM dashboard in the AWS Management Console and inspect the IAM policy (i.e. **AWSLambdaMicroserviceExecutionRole...**) that was created along with the create-order-history-ddbs Lambda function.

![AWS Lambda function console](/static/images/change-data-capture/ex1/iam-edit-policy.png)

```json
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dynamodb:DeleteItem",
                "dynamodb:GetItem",
                "dynamodb:PutItem",
                "dynamodb:Scan",
                "dynamodb:UpdateItem"
            ],
            "Resource": "arn:aws:dynamodb:{aws-region}:{aws-account-id}:table/*"
        }
    ]
}
```

2. Edit the policy and replace the existing statement with the following IAM policy statement.

```json
{
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dynamodb:DescribeStream",
                "dynamodb:GetRecords",
                "dynamodb:GetShardIterator",
                "dynamodb:ListStreams"
            ],
            "Resource": "arn:aws:dynamodb:{aws-region}:{aws-account-id}:table/Orders/stream/*"
        },
        {
            "Effect": "Allow",
            "Action": "dynamodb:PutItem",
            "Resource": "arn:aws:dynamodb:{aws-region}:{aws-account-id}:table/OrdersHistory"
        },
        {
            "Effect": "Allow",
            "Action": "sqs:SendMessage",
            "Resource": "arn:aws:sqs:{aws-region}:{aws-account-id}:orders-ddbs-dlq"
        }
    ]
}
```

The updated IAM policy gives the create-order-history-ddbs Lambda function the permissions required to read events from the Orders DynamoDB stream, write new items to the OrdersHistory DynamoDB table, and send messages to the orders-ddbs-dlq SQS queue.

::alert[Replace **{aws-region}** and **{aws-account-id}** in the policy statement above with the correct values for your AWS Region and your AWS account ID.]
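If you prefer the CLI to the console policy editor, the same edit can be made by publishing a new default version of the managed policy. This is an equivalent sketch, not the lab's prescribed path; the policy ARN below is a placeholder for the full ARN of the **AWSLambdaMicroserviceExecutionRole...** policy, and `policy.json` is assumed to hold the document above:

```bash
# Publish the edited document as the new default version of the managed policy
aws iam create-policy-version \
  --policy-arn arn:aws:iam::{aws-account-id}:policy/{policy-name} \
  --policy-document file://policy.json \
  --set-as-default
```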
3. Go to the Lambda function console editor. Select **Layers**, then select **Add a layer**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/select-layer.png)

![AWS Lambda function console](/static/images/change-data-capture/ex1/add-layer.png)

4. Select **Specify an ARN** and enter the Lambda layer ARN below.

```bash
arn:aws:lambda:{aws-region}:017000801446:layer:AWSLambdaPowertoolsPythonV2:58
```

5. Click **Verify**, then select **Add**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/specify-layer.png)

::alert[Replace {aws-region} with the ID of the AWS Region you are currently working in.]

6. Go to the configuration section of the Lambda console editor. Select **Environment variables**, then select **Edit**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/edit-env-var.png)

7. Add a new environment variable called **ORDERS_HISTORY_DB** and set its value to **OrdersHistory**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/new-env-var.png)
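The function code itself is not part of this diff. Purely as an illustration of how a handler might consume **ORDERS_HISTORY_DB**, here is a minimal hypothetical sketch (it assumes the stream is configured to include old images; the actual lab code may differ):

```python
import os

import boto3
from boto3.dynamodb.types import TypeDeserializer

# ORDERS_HISTORY_DB is the environment variable configured in step 7
table = boto3.resource('dynamodb').Table(os.environ['ORDERS_HISTORY_DB'])
deserializer = TypeDeserializer()

def lambda_handler(event, context):
    # Each record carries the DynamoDB stream event for one item change
    for record in event['Records']:
        if record['eventName'] == 'MODIFY':
            # OldImage is DynamoDB-typed JSON; convert to plain Python types
            old_image = record['dynamodb']['OldImage']
            item = {k: deserializer.deserialize(v) for k, v in old_image.items()}
            table.put_item(Item=item)
```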
8. Select **Triggers**, then select **Add trigger**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/triggers.png)

9. Select **DynamoDB** as the trigger source.
10. Select the **Orders** DynamoDB table.
11. Set the **Batch size** to **10** and leave all other values unchanged.

![AWS Lambda function console](/static/images/change-data-capture/ex1/trigger-config.png)

12. Click **Additional settings** to expand the section.
13. Provide the ARN of the **orders-ddbs-dlq** SQS queue you created earlier.

```bash
arn:aws:sqs:{aws-region}:{aws-account-id}:orders-ddbs-dlq
```

14. Set the **Retry attempts** to **3**.

![AWS Lambda function console](/static/images/change-data-capture/ex1/trigger-settings.png)

15. Select **Add**.
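Steps 8–15 can also be expressed as a single AWS CLI call. This is an equivalent sketch rather than the lab's prescribed path; `{stream-label}` is a placeholder for the actual label of the Orders table's stream ARN:

```bash
# Create the DynamoDB stream trigger with a batch size of 10, three retries,
# and the DLQ as the on-failure destination
aws lambda create-event-source-mapping \
  --function-name create-order-history-ddbs \
  --event-source-arn arn:aws:dynamodb:{aws-region}:{aws-account-id}:table/Orders/stream/{stream-label} \
  --starting-position LATEST \
  --batch-size 10 \
  --maximum-retry-attempts 3 \
  --destination-config '{"OnFailure":{"Destination":"arn:aws:sqs:{aws-region}:{aws-account-id}:orders-ddbs-dlq"}}'
```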
New file — 29 additions & 0 deletions

---
title: "Create Dead Letter Queue"
date: 2023-12-01T00:00:00-00:00
weight: 110
chapter: true
---

If the Lambda function is not able to successfully process a record it receives from the DynamoDB stream, the Lambda service can write the metadata for the failed record to a dead letter queue (DLQ) so the reason for the failure can be investigated and resolved.

Create an [Amazon SQS Dead Letter Queue](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html) named **orders-ddbs-dlq** for your Lambda function trigger using the AWS CLI command below.

```bash
aws sqs create-queue --queue-name orders-ddbs-dlq
```

Sample output:

```
{
    "QueueUrl": "https://sqs.{aws-region}.amazonaws.com/{aws-account-id}/orders-ddbs-dlq"
}
```

Later you will need the queue ARN. Use the command below, modifying the queue URL after *--queue-url* to match the result of the previous command, and then save the ARN for later use.

```bash
aws sqs get-queue-attributes --attribute-names "QueueArn" --query 'Attributes.QueueArn' --output text \
  --queue-url "https://sqs.{aws-region}.amazonaws.com/{aws-account-id}/orders-ddbs-dlq"
```
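Alternatively, the two steps can be chained so the ARN lands directly in a shell variable — a convenience sketch, equivalent to the commands above:

```bash
# Create the queue and capture its URL, then look up and print the ARN
QUEUE_URL=$(aws sqs create-queue --queue-name orders-ddbs-dlq \
  --query 'QueueUrl' --output text)
DLQ_ARN=$(aws sqs get-queue-attributes --queue-url "$QUEUE_URL" \
  --attribute-names QueueArn --query 'Attributes.QueueArn' --output text)
echo "$DLQ_ARN"
```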
