Skip to content
This repository was archived by the owner on Aug 9, 2023. It is now read-only.

Commit 166120a

Browse files
committed
Merge tag 'v3.0.3' into release
2 parents 988b270 + c3130b1 commit 166120a

File tree

77 files changed

+3516
-2929
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

77 files changed

+3516
-2929
lines changed

.gitignore

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
.DS_Store
21
.idea
32
/.idea/markdown-navigator.xml
43
/.idea/markdown-navigator/profiles_settings.xml
@@ -58,4 +57,7 @@ __pycache__
5857
publish
5958
launch.sh
6059
LICENSE-*
61-
src/templates/tests
60+
src/templates/tests
61+
/aws-genomics-workflows.iml
62+
_ignore
63+
dist/

README.md

Lines changed: 50 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,55 @@
44

55
This repository is the source code for [Genomics Workflows on AWS](https://docs.opendata.aws/genomics-workflows). It contains markdown documents that are used to build the site as well as source code (CloudFormation templates, scripts, etc) that can be used to deploy AWS infrastructure for running genomics workflows.
66

7+
If you want to get the latest version of these solutions up and running quickly, it is recommended that you deploy stacks using the launch buttons available via the [hosted guide](https://docs.opendata.aws/genomics-workflows).
8+
9+
If you want to customize these solutions, you can create your own distribution using the instructions below.
10+
11+
## Creating your own distribution
12+
13+
Clone the repo
14+
15+
```bash
16+
git clone https://github.com/aws-samples/aws-genomics-workflows.git
17+
```
18+
19+
Create an S3 bucket in your AWS account to use for the distribution deployment
20+
21+
```bash
22+
aws s3 mb s3://<dist-bucketname>
23+
```
24+
25+
Create and deploy a distribution from source
26+
27+
```bash
28+
cd aws-genomics-workflows
29+
bash _scripts/deploy.sh --deploy-region <region> --asset-profile <profile-name> --asset-bucket s3://<dist-bucketname> test
30+
```
31+
32+
This will create a `dist` folder in the root of the project with subfolders `dist/artifacts` and `dist/templates` that will be uploaded to the S3 bucket you created above.
33+
34+
Use `--asset-profile` option to specify an AWS profile to use to make the deployment.
35+
36+
**Note**: the region set for `--deploy-region` should match the region the bucket `<dist-bucketname>` is created in.
37+
38+
You can now use your deployed distribution to launch stacks using the AWS CLI. For example, to launch the GWFCore stack:
39+
40+
```bash
41+
TEMPLATE_ROOT_URL=https://<dist-bucketname>.s3-<region>.amazonaws.com/test/templates
42+
43+
aws cloudformation create-stack \
44+
--region <region> \
45+
--stack-name <stackname> \
46+
--template-url $TEMPLATE_ROOT_URL/gwfcore-root.template.yaml \
47+
--capabilities CAPABILITY_IAM CAPABILITY_AUTO_EXPAND \
48+
--parameters \
49+
ParameterKey=VpcId,ParameterValue=<vpc-id> \
50+
ParameterKey=SubnetIds,ParameterValue=\"<subnet-id-1>,<subnet-id-2>,...\" \
51+
ParameterKey=ArtifactBucketName,ParameterValue=<dist-bucketname> \
52+
ParameterKey=TemplateRootUrl,ParameterValue=$TEMPLATE_ROOT_URL
53+
54+
```
55+
756
## Building the documentation
857

958
The documentation is built using mkdocs.
@@ -19,7 +68,7 @@ This will create a `conda` environment called `mkdocs`
1968
Build the docs:
2069

2170
```bash
22-
$ source activate mkdocs
71+
$ conda activate mkdocs
2372
$ mkdocs build
2473
```
2574

_scripts/configure-deploy.sh

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,11 @@ set -e
88
mkdir -p $HOME/.aws
99
cat << EOF > $HOME/.aws/config
1010
[default]
11+
region = us-east-1
1112
output = json
1213
1314
[profile asset-publisher]
15+
region = us-east-1
1416
role_arn = ${ASSET_ROLE_ARN}
1517
credential_source = Environment
1618
EOF

_scripts/deploy.sh

Lines changed: 123 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,52 @@
22

33
set -e
44

5-
bash _scripts/make-artifacts.sh
5+
bash _scripts/make-dist.sh
66
mkdocs build
77

8+
SITE_BUCKET=s3://docs.opendata.aws/genomics-workflows
89
ASSET_BUCKET=s3://aws-genomics-workflows
9-
ASSET_STAGE=${1:-production}
10-
10+
ASSET_STAGE=test
11+
ASSET_PROFILE=asset-publisher
12+
DEPLOY_REGION=us-east-1
13+
14+
PARAMS=""
15+
while (( "$#" )); do
16+
case "$1" in
17+
--site-bucket)
18+
SITE_BUCKET=$2
19+
shift 2
20+
;;
21+
--asset-bucket)
22+
ASSET_BUCKET=$2
23+
shift 2
24+
;;
25+
--asset-profile)
26+
ASSET_PROFILE=$2
27+
shift 2
28+
;;
29+
--deploy-region)
30+
DEPLOY_REGION=$2
31+
shift 2
32+
;;
33+
--) # end optional argument parsing
34+
shift
35+
break
36+
;;
37+
-*|--*=)
38+
echo "Error: unsupported argument $1" >&2
39+
exit 1
40+
;;
41+
*) # positional arguments
42+
PARAMS="$PARAMS $1"
43+
shift
44+
;;
45+
esac
46+
done
47+
48+
eval set -- "$PARAMS"
49+
50+
ASSET_STAGE=${1:-$ASSET_STAGE}
1151

1252
function s3_uri() {
1353
BUCKET=$1
@@ -21,74 +61,112 @@ function s3_uri() {
2161
echo "${BUCKET%/}/${PREFIX:1}"
2262
}
2363

64+
function s3_sync() {
65+
local source=$1
66+
local destination=$2
2467

25-
function artifacts() {
26-
# root level is always "latest"
27-
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH "artifacts")
28-
29-
echo "publishing artifacts: $S3_URI"
68+
echo "syncing ..."
69+
echo " from: $source"
70+
echo " to: $destination"
3071
aws s3 sync \
31-
--profile asset-publisher \
72+
--profile $ASSET_PROFILE \
73+
--region $DEPLOY_REGION \
3274
--acl public-read \
3375
--delete \
34-
./artifacts \
35-
$S3_URI
36-
76+
--metadata commit=$(git rev-parse HEAD) \
77+
$source \
78+
$destination
79+
}
80+
81+
function publish() {
82+
local source=$1
83+
local destination=$2
84+
3785
if [[ $USE_RELEASE_TAG && ! -z "$TRAVIS_TAG" ]]; then
38-
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH $TRAVIS_TAG "artifacts")
39-
40-
echo "publishing artifacts: $S3_URI"
41-
aws s3 sync \
42-
--profile asset-publisher \
43-
--acl public-read \
44-
--delete \
45-
./artifacts \
46-
$S3_URI
86+
# create explicit pinned versions "latest" and TRAVIS_TAG
87+
# pin the TRAVIS_TAG first, since the files are modified inplace
88+
# "latest" will inherit the TRAVIS_TAG value
89+
echo "PINNED VERSION: $TRAVIS_TAG"
90+
for version in $TRAVIS_TAG latest; do
91+
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH $version $destination)
92+
93+
if [[ "$destination" == "templates" ]]; then
94+
# pin distribution template and artifact paths in cfn templates
95+
pin_version $version templates $source
96+
pin_version $version artifacts $source
97+
fi
98+
99+
s3_sync $source $S3_URI
100+
done
101+
elif [[ $ASSET_STAGE == "test" ]]; then
102+
echo "PINNED VERSION: $ASSET_STAGE"
103+
version=$ASSET_STAGE
104+
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH $destination)
105+
106+
if [[ "$destination" == "templates" ]]; then
107+
# pin distribution template and artifact paths in cfn templates
108+
pin_version $version templates $source
109+
pin_version $version artifacts $source
110+
fi
111+
112+
s3_sync $source $S3_URI
113+
else
114+
echo "unknown publish target"
115+
exit 1
47116
fi
117+
118+
}
119+
120+
121+
function pin_version() {
122+
# locates parameters in cfn templates files in {folder} that need to be version pinned
123+
# using the locator pattern: "{asset}\s{2}# dist: {action}"
124+
# replaces the locator pattern with: "{version}/{asset} #"
125+
local version=$1
126+
local asset=$2
127+
local folder=$3
128+
129+
echo "PINNING VERSIONS"
130+
for file in `grep -irl "$asset # dist: pin_version" $folder`; do
131+
echo "pinning '$asset' as '$version/$asset' in '$file'"
132+
sed -i'' -e "s|$asset # dist: pin_version|$version/$asset #|g" $file
133+
done
48134
}
49135

136+
137+
function artifacts() {
138+
139+
publish ./dist/artifacts artifacts
140+
141+
}
142+
143+
50144
function templates() {
51-
# root level is always "latest"
52-
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH "templates")
53145

54-
echo "publishing templates: $S3_URI"
55-
aws s3 sync \
56-
--profile asset-publisher \
57-
--acl public-read \
58-
--delete \
59-
--metadata commit=$(git rev-parse HEAD) \
60-
./src/templates \
61-
$S3_URI
62-
63-
if [[ $USE_RELEASE_TAG && ! -z "$TRAVIS_TAG" ]]; then
64-
S3_URI=$(s3_uri $ASSET_BUCKET $ASSET_STAGE_PATH $TRAVIS_TAG "templates")
65-
66-
echo "publishing templates: $S3_URI"
67-
aws s3 sync \
68-
--profile asset-publisher \
69-
--acl public-read \
70-
--delete \
71-
--metadata commit=$(git rev-parse HEAD) \
72-
./src/templates \
73-
$S3_URI
74-
fi
146+
publish ./dist/templates templates
147+
75148
}
76149

150+
77151
function site() {
78152
echo "publishing site"
79153
aws s3 sync \
154+
--region $DEPLOY_REGION \
80155
--acl public-read \
81156
--delete \
157+
--metadata commit=$(git rev-parse HEAD) \
82158
./site \
83-
s3://docs.opendata.aws/genomics-workflows
159+
$SITE_BUCKET
84160
}
85161

162+
86163
function all() {
87164
artifacts
88165
templates
89166
site
90167
}
91168

169+
92170
echo "DEPLOYMENT STAGE: $ASSET_STAGE"
93171
case $ASSET_STAGE in
94172
production)

_scripts/make-artifacts.sh

Lines changed: 0 additions & 58 deletions
This file was deleted.

0 commit comments

Comments
 (0)