Merge pull request docker-archive#495 from docker/add-latest
Always upload a latest.json for all channels
FrenchBen authored Dec 13, 2016
2 parents 6b4b434 + b8bde76 commit 6287236
Showing 5 changed files with 112 additions and 4 deletions.
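In effect, every release channel now exposes a stable latest.json object next to the versioned template it points at. A hedged sketch of the URLs consumers can rely on after this change; the bucket name is a placeholder for the S3_BUCKET_NAME constant used in utils.py, and the version is only an example:

bucket = "<S3_BUCKET_NAME>"
channel = "beta"
# Versioned templates are uploaded by the existing release flow (e.g. aws-v1.12.3-beta10.json);
# this commit additionally publishes a channel-level copy at a fixed name:
aws_latest = u"https://{}.s3.amazonaws.com/aws/{}/latest.json".format(bucket, channel)
azure_latest = u"https://{}.s3.amazonaws.com/azure/{}/latest.json".format(bucket, channel)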
2 changes: 1 addition & 1 deletion aws/release/files/aws_release.py
@@ -112,4 +112,4 @@ def main():


if __name__ == '__main__':
    main()
    main()
36 changes: 36 additions & 0 deletions aws/release/files/publish.py
@@ -0,0 +1,36 @@
#!/usr/bin/env python
import json
import argparse
import re

from utils import publish_cfn_template


CFN_TEMPLATE = '/home/docker/docker_for_aws.template'
CFN_CLOUD_TEMPLATE = '/home/docker/docker_for_aws_cloud.template'


def main():
    parser = argparse.ArgumentParser(description='Release Docker for AWS')
    parser.add_argument('-r', '--release_version',
                        dest='release_version', required=True,
                        help="Docker release version (e.g. 1.12.3-beta10)")
    parser.add_argument('-c', '--channel',
                        dest='channel', default="beta", required=True,
                        help="release channel (beta, alpha, rc, nightly)")
    args = parser.parse_args()

    release_version = args.release_version
    release_channel = args.channel
    docker_for_aws_version = u"aws-v{}".format(release_version)
    print("\nVariables")
    print(u"release_channel={}".format(release_channel))
    print(u"release_version={}".format(release_version))

    s3_latest_url = publish_cfn_template(release_channel, docker_for_aws_version)
    print("------------------")
    print(u"Finished. Latest CloudFormation \n\t URL={0}".format(s3_latest_url))
    print("------------------")

if __name__ == '__main__':
    main()
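A hedged sketch of how this new AWS publish step might be driven during a release; the version string is illustrative, and the direct call simply mirrors what main() does above:

# Example invocation (version is illustrative):
#     python publish.py -r 1.12.3-beta10 -c beta
# Roughly equivalent to calling the helper directly:
from utils import publish_cfn_template

latest_url = publish_cfn_template("beta", u"aws-v1.12.3-beta10")
print(latest_url)  # e.g. https://<bucket>.s3.amazonaws.com/aws/beta/latest.json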
20 changes: 19 additions & 1 deletion aws/release/files/utils.py
@@ -167,7 +167,7 @@ def set_ami_public(ami_list):


def upload_cfn_template(release_channel, cloudformation_template_name, tempfile, cfn_type=''):

    # upload to s3, make public, return s3 URL
    s3_host_name = u"https://{}.s3.amazonaws.com".format(S3_BUCKET_NAME)
    s3_path = u"aws/{}/{}".format(release_channel, cloudformation_template_name)
@@ -198,6 +198,24 @@ def upload_cfn_template(release_channel, cloudformation_template_name, tempfile,

    return s3_full_url

def publish_cfn_template(release_channel, docker_for_aws_version):
    # upload to s3, make public, return s3 URL
    s3_host_name = u"https://{}.s3.amazonaws.com".format(S3_BUCKET_NAME)
    s3_path = u"aws/{}/{}.json".format(release_channel, docker_for_aws_version)

    print(u"Update the latest.json file to the release of {} in {} channel.".format(docker_for_aws_version, release_channel))
    latest_name = "latest.json"
    s3_path_latest = u"aws/{}/{}".format(release_channel, latest_name)
    s3_full_url = u"{}/{}".format(s3_host_name, s3_path_latest)

    s3conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET_NAME)

    print(u"Copy Cloudformation template from {} to {} s3 bucket".format(s3_path, s3_path_latest))
    srckey = bucket.get_key(s3_path)
    dstkey = bucket.new_key(s3_path_latest)
    srckey.copy(S3_BUCKET_NAME, dstkey, preserve_acl=True, validate_dst_bucket=True)
    return s3_full_url

def upload_ami_list(ami_list_json, docker_version):

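The copy at the end of publish_cfn_template happens server-side in S3 (no download and re-upload) and, with preserve_acl=True, keeps the public ACL that the versioned template was uploaded with. A minimal post-publish sanity check, assuming utils.py exposes the same boto constants it uses internally:

import boto

from utils import AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, S3_BUCKET_NAME

s3conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
bucket = s3conn.get_bucket(S3_BUCKET_NAME)
latest = bucket.get_key(u"aws/beta/latest.json")  # channel is an example
if latest is None:
    raise RuntimeError("latest.json was not published for the beta channel")
print(latest.generate_url(expires_in=0, query_auth=False))  # plain public URL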
35 changes: 35 additions & 0 deletions azure/release/files/publish.py
@@ -0,0 +1,35 @@
#!/usr/bin/env python
import json
import argparse
import re

from utils import publish_rg_template


CFN_TEMPLATE = '/home/docker/editions.template'


def main():
    parser = argparse.ArgumentParser(description='Release Docker for Azure')
    parser.add_argument('-r', '--release_version',
                        dest='release_version', required=True,
                        help="Docker release version (e.g. 1.12.3-beta10)")
    parser.add_argument('-c', '--channel',
                        dest='channel', default="beta", required=True,
                        help="release channel (beta, alpha, rc, nightly)")
    args = parser.parse_args()

    release_version = args.release_version
    release_channel = args.channel
    docker_for_azure_version = u"azure-v{}".format(release_version)
    print("\nVariables")
    print(u"release_channel={}".format(release_channel))
    print(u"release_version={}".format(release_version))

    s3_latest_url = publish_rg_template(release_channel, docker_for_azure_version)
    print("------------------")
    print(u"Finished. Latest Azure template \n\t URL={0}".format(s3_latest_url))
    print("------------------")

if __name__ == '__main__':
    main()
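The Azure entry point is driven the same way; a hedged example with an illustrative version:

# Example invocation:
#     python publish.py -r 1.12.3-beta10 -c beta
from utils import publish_rg_template

latest_url = publish_rg_template("beta", u"azure-v1.12.3-beta10")
print(latest_url)  # e.g. https://<bucket>.s3.amazonaws.com/azure/beta/latest.json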
23 changes: 21 additions & 2 deletions azure/release/files/utils.py
@@ -42,15 +42,15 @@ def buildCustomData(data_file):
    return customData

def upload_rg_template(release_channel, cloudformation_template_name, tempfile, cfn_type=''):

    # upload to s3, make public, return s3 URL
    s3_host_name = u"https://{}.s3.amazonaws.com".format(S3_BUCKET_NAME)
    s3_path = u"azure/{}/{}".format(release_channel, cloudformation_template_name)
    latest_name = "latest.json"
    if cfn_type:
        latest_name = "{}-latest.json".format(cfn_type)

    s3_path_latest = u"aws/{}/{}".format(release_channel, latest_name)
    s3_path_latest = u"azure/{}/{}".format(release_channel, latest_name)
    s3_full_url = u"{}/{}".format(s3_host_name, s3_path)

    s3conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
@@ -73,6 +73,25 @@ def upload_rg_template(release_channel, cloudformation_template_name, tempfile,

    return s3_full_url

def publish_rg_template(release_channel, docker_for_azure_version):
    # upload to s3, make public, return s3 URL
    s3_host_name = u"https://{}.s3.amazonaws.com".format(S3_BUCKET_NAME)
    s3_path = u"azure/{}/{}.json".format(release_channel, docker_for_azure_version)

    print(u"Update the latest.json file to the release of {} in {} channel.".format(docker_for_azure_version, release_channel))
    latest_name = "latest.json"
    s3_path_latest = u"azure/{}/{}".format(release_channel, latest_name)
    s3_full_url = u"{}/{}".format(s3_host_name, s3_path_latest)

    s3conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET_NAME)

    print(u"Copy Azure template from {} to {} s3 bucket".format(s3_path, s3_path_latest))
    srckey = bucket.get_key(s3_path)
    dstkey = bucket.new_key(s3_path_latest)
    srckey.copy(S3_BUCKET_NAME, dstkey, preserve_acl=True, validate_dst_bucket=True)
    return s3_full_url

def create_rg_template(vhd_sku, vhd_version, offer_id, release_channel, docker_version,
                       docker_for_azure_version, edition_version, cfn_template, cloudformation_template_name):
    # check if file exists before opening.
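publish_rg_template is a near-copy of the AWS helper, differing only in the azure/ key prefix and the template naming. A hedged sketch of a shared helper both editions could call; the name publish_latest_template is hypothetical, and the sketch assumes it would live next to the existing helpers so boto and the S3 constants are already in scope:

def publish_latest_template(release_channel, edition_version, prefix):
    # prefix is "aws" or "azure"; copies <prefix>/<channel>/<edition_version>.json
    # to <prefix>/<channel>/latest.json server-side and returns the latest URL.
    s3_path = u"{}/{}/{}.json".format(prefix, release_channel, edition_version)
    s3_path_latest = u"{}/{}/latest.json".format(prefix, release_channel)
    s3conn = boto.connect_s3(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
    bucket = s3conn.get_bucket(S3_BUCKET_NAME)
    srckey = bucket.get_key(s3_path)
    dstkey = bucket.new_key(s3_path_latest)
    srckey.copy(S3_BUCKET_NAME, dstkey, preserve_acl=True, validate_dst_bucket=True)
    return u"https://{}.s3.amazonaws.com/{}".format(S3_BUCKET_NAME, s3_path_latest)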
