From d8278ec8aee8259ac884eb33e1840d02a7645d37 Mon Sep 17 00:00:00 2001 From: Bryan Boreham Date: Thu, 8 Jun 2017 16:08:23 +0000 Subject: [PATCH 1/4] Pass all Go build tags to tests --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8866684374..2f88f86713 100644 --- a/Makefile +++ b/Makefile @@ -124,7 +124,7 @@ shell: $(SCOPE_BACKEND_BUILD_UPTODATE) /bin/bash tests: $(SCOPE_BACKEND_BUILD_UPTODATE) $(CODECGEN_TARGETS) prog/staticui/staticui.go prog/externalui/externalui.go - ./tools/test -no-go-get + ./tools/test -no-go-get -tags $(GO_BUILD_TAGS) lint: $(SCOPE_BACKEND_BUILD_UPTODATE) ./tools/lint From 6acb397d5dd0f7c7b7763de90fc5c38720b32668 Mon Sep 17 00:00:00 2001 From: Bryan Boreham Date: Mon, 19 Jun 2017 13:45:01 +0000 Subject: [PATCH 2/4] Install yapf and flake8 required by build-tools lint --- backend/Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 1b5333b53b..c11d99768c 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,10 +1,10 @@ FROM ubuntu:yakkety ENV GOPATH /go ENV GOVERSION 1.8.3 -ENV PATH /go/bin:/usr/local/go/bin:/usr/bin:/bin:/usr/sbin:/sbin +ENV PATH /go/bin:/usr/local/go/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin ENV SCOPE_SKIP_UI_ASSETS true RUN apt-get update && \ - apt-get install -y libpcap-dev python-requests time file shellcheck git gcc-arm-linux-gnueabihf curl build-essential && \ + apt-get install -y libpcap-dev python-requests time file shellcheck git gcc-arm-linux-gnueabihf curl build-essential python-pip && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* RUN curl -Ls https://storage.googleapis.com/golang/go${GOVERSION}.linux-amd64.tar.gz | tar xz -C /usr/local RUN go clean -i net && \ @@ -22,5 +22,6 @@ RUN go get -tags netgo \ github.com/client9/misspell/cmd/misspell && \ chmod a+wr --recursive /usr/local/go && \ rm -rf /go/pkg/ /go/src/ +RUN pip install yapf==0.16.2 flake8==3.3.0 COPY build.sh / ENTRYPOINT ["/build.sh"] From ae9d708dbc8314331f1df86d0a3cdb2441cab1df Mon Sep 17 00:00:00 2001 From: Bryan Boreham Date: Mon, 19 Jun 2017 14:05:55 +0000 Subject: [PATCH 3/4] Reformat Python examples to placate linter --- extras/example/app/app.py | 104 +++++++++++++++++--------------- extras/example/client/client.py | 87 +++++++++++++------------- extras/example/echo/echo.py | 17 +++--- extras/example/trace_app/app.py | 70 +++++++++++---------- 4 files changed, 148 insertions(+), 130 deletions(-) diff --git a/extras/example/app/app.py b/extras/example/app/app.py index 398cbb0fff..01798a3e97 100644 --- a/extras/example/app/app.py +++ b/extras/example/app/app.py @@ -1,8 +1,6 @@ -import os import socket import sys import requests -import random import threading import logging import argparse @@ -18,68 +16,78 @@ sessions = threading.local() args = None + def do_redis(): - redis.incr('hits') - return redis.get('hits') + redis.incr('hits') + return redis.get('hits') + def do_qotd(): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - s.settimeout(args.timeout) - s.connect((args.qotd, 4446)) - s.send("Hello") - return s.recv(1024) - finally: - s.close() + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.settimeout(args.timeout) + s.connect((args.qotd, 4446)) + s.send("Hello") + return s.recv(1024) + finally: + s.close() + def do_search(): - if getattr(sessions, 'session', None) == None: - sessions.session = requests.Session() - r = sessions.session.get(args.search, timeout=args.timeout) - return 
r.text + if getattr(sessions, 'session', None) is None: + sessions.session = requests.Session() + r = sessions.session.get(args.search, timeout=args.timeout) + return r.text + def do_echo(text): - if getattr(sessions, 'session', None) == None: - sessions.session = requests.Session() - r = sessions.session.get(args.echo, data=text, timeout=args.timeout) - return r.text + if getattr(sessions, 'session', None) is None: + sessions.session = requests.Session() + r = sessions.session.get(args.echo, data=text, timeout=args.timeout) + return r.text + def ignore_error(f): - try: - return str(f()) - except: - logging.error("Error executing function", exc_info=sys.exc_info()) - return "Error" + try: + return str(f()) + except: + logging.error("Error executing function", exc_info=sys.exc_info()) + return "Error" + # this root is for the tracing demo @app.route('/hello') def hello(): - qotd_msg = do_qotd() - qotd_msg = do_echo(qotd_msg) - return qotd_msg + qotd_msg = do_qotd() + qotd_msg = do_echo(qotd_msg) + return qotd_msg + # this is for normal demos @app.route('/') def root(): - counter_future = pool.submit(do_redis) - search_future = pool.submit(do_search) - qotd_future = pool.submit(do_qotd) - echo_future = pool.submit(lambda: do_echo("foo")) - result = 'Hello World! I have been seen %s times.' % ignore_error(counter_future.result) - result += ignore_error(search_future.result) - result += ignore_error(qotd_future.result) - result += ignore_error(echo_future.result) - return result + counter_future = pool.submit(do_redis) + search_future = pool.submit(do_search) + qotd_future = pool.submit(do_qotd) + echo_future = pool.submit(lambda: do_echo("foo")) + result = 'Hello World! I have been seen %s times.' % ignore_error( + counter_future.result) + result += ignore_error(search_future.result) + result += ignore_error(qotd_future.result) + result += ignore_error(echo_future.result) + return result + if __name__ == "__main__": - parser = argparse.ArgumentParser() - parser.add_argument('-redis', default="redis.weave.local") - parser.add_argument('-search', default="http://search.weave.local:80/") - parser.add_argument('-qotd', default="qotd.weave.local") - parser.add_argument('-echo', default="http://echo.weave.local:80/") - parser.add_argument('-timeout', default=0.5, type=float) - args = parser.parse_args() - - logging.basicConfig(format='%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s', level=logging.INFO) - WSGIRequestHandler.protocol_version = "HTTP/1.1" - app.run(host="0.0.0.0", port=80, debug=True) + parser = argparse.ArgumentParser() + parser.add_argument('-redis', default="redis.weave.local") + parser.add_argument('-search', default="http://search.weave.local:80/") + parser.add_argument('-qotd', default="qotd.weave.local") + parser.add_argument('-echo', default="http://echo.weave.local:80/") + parser.add_argument('-timeout', default=0.5, type=float) + args = parser.parse_args() + + logfmt = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s' + logging.basicConfig(format=logfmt, level=logging.INFO) + WSGIRequestHandler.protocol_version = "HTTP/1.1" + app.run(host="0.0.0.0", port=80, debug=True) diff --git a/extras/example/client/client.py b/extras/example/client/client.py index b184d23620..3ff169e399 100644 --- a/extras/example/client/client.py +++ b/extras/example/client/client.py @@ -7,53 +7,58 @@ import socket import sys + def do_request(s, args): - addrs = socket.getaddrinfo(args.target, args.port) - addrs = [a - for a in addrs - if a[0] == socket.AF_INET] - if 
len(addrs) <= 0: - logging.info("Could not resolve %s", args.target) - return - addr = random.choice(addrs) - url = "http://%s:%d%s" % (addr[4][0], args.port, args.path) - s.get(url, timeout=args.timeout) - logging.info("Did request %s", url) + addrs = socket.getaddrinfo(args.target, args.port) + addrs = [a for a in addrs if a[0] == socket.AF_INET] + if len(addrs) <= 0: + logging.info("Could not resolve %s", args.target) + return + addr = random.choice(addrs) + url = "http://%s:%d%s" % (addr[4][0], args.port, args.path) + s.get(url, timeout=args.timeout) + logging.info("Did request %s", url) + def do_requests(args): - s = requests.Session() - while True: - try: - if args.persist: - do_request(s, args) - else: - do_request(requests.Session(), args) - except: - logging.error("Error doing request", exc_info=sys.exc_info()) + s = requests.Session() + while True: + try: + if args.persist: + do_request(s, args) + else: + do_request(requests.Session(), args) + except: + logging.error("Error doing request", exc_info=sys.exc_info()) - time.sleep(args.period) + time.sleep(args.period) def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-target', default="frontend.weave.local") - parser.add_argument('-port', default=80, type=int) - parser.add_argument('-path', default="/") - parser.add_argument('-concurrency', default=1, type=int) - parser.add_argument('-persist', default=True, type=bool) - parser.add_argument('-timeout', default=1.0, type=float) - parser.add_argument('-period', default=0.1, type=float) - args = parser.parse_args() - - logging.info("Starting %d threads, targeting %s", args.concurrency, args.target) - threads = [threading.Thread(target=do_requests, args=(args,)) - for i in range(args.concurrency)] - for thread in threads: - thread.start() - for thread in threads: - thread.join() - logging.info("Exiting") + parser = argparse.ArgumentParser() + parser.add_argument('-target', default="frontend.weave.local") + parser.add_argument('-port', default=80, type=int) + parser.add_argument('-path', default="/") + parser.add_argument('-concurrency', default=1, type=int) + parser.add_argument('-persist', default=True, type=bool) + parser.add_argument('-timeout', default=1.0, type=float) + parser.add_argument('-period', default=0.1, type=float) + args = parser.parse_args() + + logging.info("Starting %d threads, targeting %s", args.concurrency, + args.target) + threads = [ + threading.Thread(target=do_requests, args=(args, )) + for i in range(args.concurrency) + ] + for thread in threads: + thread.start() + for thread in threads: + thread.join() + logging.info("Exiting") + if __name__ == "__main__": - logging.basicConfig(format='%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s', level=logging.INFO) - main() + logfmt = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s' + logging.basicConfig(format=logfmt, level=logging.INFO) + main() diff --git a/extras/example/echo/echo.py b/extras/example/echo/echo.py index fd2564cb5a..99eb6f0246 100644 --- a/extras/example/echo/echo.py +++ b/extras/example/echo/echo.py @@ -1,9 +1,3 @@ -import os -import socket -import sys -import random -import time -import threading import logging from flask import Flask @@ -12,11 +6,14 @@ app = Flask(__name__) + @app.route('/') def echo(): - return request.data + return request.data + if __name__ == "__main__": - logging.basicConfig(format='%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s', level=logging.INFO) - WSGIRequestHandler.protocol_version = "HTTP/1.0" - 
app.run(host="0.0.0.0", port=80, debug=True) + logfmt = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s' + logging.basicConfig(format=logfmt, level=logging.INFO) + WSGIRequestHandler.protocol_version = "HTTP/1.0" + app.run(host="0.0.0.0", port=80, debug=True) diff --git a/extras/example/trace_app/app.py b/extras/example/trace_app/app.py index beff4e287f..40c9e440b6 100644 --- a/extras/example/trace_app/app.py +++ b/extras/example/trace_app/app.py @@ -1,4 +1,3 @@ -import os import socket import sys import requests @@ -18,52 +17,61 @@ searchapps = ['http://searchapp:8080/'] + def do_redis(): - redis.incr('hits') - return redis.get('hits') + redis.incr('hits') + return redis.get('hits') + def do_qotd(): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - try: - s.connect(("qotd.weave.local", 4446)) - s.send("Hello") - return s.recv(1024) - finally: - s.close() + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.connect(("qotd.weave.local", 4446)) + s.send("Hello") + return s.recv(1024) + finally: + s.close() + def do_search(): - if getattr(sessions, 'session', None) == None: - sessions.session = requests.Session() - r = sessions.session.get(random.choice(searchapps)) - return r.text + if getattr(sessions, 'session', None) is None: + sessions.session = requests.Session() + r = sessions.session.get(random.choice(searchapps)) + return r.text + def do_echo(text): - r = requests.get("http://echo.weave.local/", data=text) - return r.text + r = requests.get("http://echo.weave.local/", data=text) + return r.text + def ignore_error(f): - try: - return str(f()) - except: - logging.error("Error executing function", exc_info=sys.exc_info()) - return "Error" + try: + return str(f()) + except: + logging.error("Error executing function", exc_info=sys.exc_info()) + return "Error" + # this root is for the tracing demo @app.route('/hello') def hello(): - qotd_msg = do_qotd() - qotd_msg = do_echo(qotd_msg) - return qotd_msg + qotd_msg = do_qotd() + qotd_msg = do_echo(qotd_msg) + return qotd_msg + # this is for normal demos @app.route('/') def root(): - #counter_future = pool.submit(do_redis) - #search_future = pool.submit(do_search) - result = do_echo(do_qotd()) - return result + # counter_future = pool.submit(do_redis) + # search_future = pool.submit(do_search) + result = do_echo(do_qotd()) + return result + if __name__ == "__main__": - logging.basicConfig(format='%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s', level=logging.INFO) - WSGIRequestHandler.protocol_version = "HTTP/1.1" - app.run(host="0.0.0.0", port=80, debug=True) + logfmt = '%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s' + logging.basicConfig(format=logfmt, level=logging.INFO) + WSGIRequestHandler.protocol_version = "HTTP/1.1" + app.run(host="0.0.0.0", port=80, debug=True) From 1eeb4d0d80f2c02805b531c09e88658fb72b78a8 Mon Sep 17 00:00:00 2001 From: Bryan Boreham Date: Thu, 13 Jul 2017 16:18:44 +0000 Subject: [PATCH 4/4] Squashed 'tools/' changes from 0d6d4da..74dc626 74dc626 Merge pull request #108 from weaveworks/disable-apt-daily b4f1d91 Merge pull request #107 from weaveworks/docker-17-update 7436aa1 Override apt daily job to not run immediately on boot 7980f15 Merge pull request #106 from weaveworks/document-docker-install-role f741e53 Bump to Docker 17.06 from CE repo 61796a1 Update Docker CE Debian repo details 0d86f5e Allow for Docker package to be named docker-ce 065c68d Document selection of Docker installation role. 
3809053 Just --porcelain; it defaults to v1 11400ea Merge pull request #105 from weaveworks/remove-weaveplugin-remnants b8b4d64 remove weaveplugin remnants 35099c9 Merge pull request #104 from weaveworks/pull-docker-py cdd48fc Pull docker-py to speed tests/builds up. e1c6c24 Merge pull request #103 from weaveworks/test-build-tags d5d71e0 Add -tags option so callers can pass in build tags 8949b2b Merge pull request #98 from weaveworks/git-status-tag ac30687 Merge pull request #100 from weaveworks/python_linting 4b125b5 Pin yapf & flake8 versions 7efb485 Lint python linting function 444755b Swap diff direction to reflect changes required c5b2434 Install flake8 & yapf 5600eac Lint python in build-tools repo 0b02ca9 Add python linting c011c0d Merge pull request #79 from kinvolk/schu/python-shebang 6577d07 Merge pull request #99 from weaveworks/shfmt-version 00ce0dc Use git status instead of diff to add 'WIP' tag 411fd13 Use shfmt v1.3.0 instead of latest from master. 31d069d Change Python shebang to `#!/usr/bin/env python` git-subtree-dir: tools git-subtree-split: 74dc626b6de3ffb38591510f7cb7bc2db33743c4 --- build/golang/Dockerfile | 3 +- circle.yml | 8 +- config_management/README.md | 22 + config_management/group_vars/all | 4 +- .../dev-tools/files/apt-daily.timer.conf | 2 + .../roles/dev-tools/tasks/main.yml | 8 + .../docker-configuration/files/docker.conf | 2 +- .../tasks/debian.yml | 35 ++ .../docker-from-docker-ce-repo/tasks/main.yml | 35 +- .../tasks/redhat.yml | 29 + .../tasks/debian.yml | 2 +- .../roles/weave-net-utilities/tasks/main.yml | 9 + dependencies/cross_versions.py | 118 ++-- dependencies/list_versions.py | 503 ++++++++++-------- image-tag | 2 +- integration/config.sh | 5 +- lint | 18 +- sched | 2 +- scheduler/main.py | 269 +++++----- test | 12 +- 20 files changed, 650 insertions(+), 438 deletions(-) create mode 100644 config_management/roles/dev-tools/files/apt-daily.timer.conf create mode 100644 config_management/roles/docker-from-docker-ce-repo/tasks/debian.yml create mode 100644 config_management/roles/docker-from-docker-ce-repo/tasks/redhat.yml diff --git a/build/golang/Dockerfile b/build/golang/Dockerfile index 012b1f88e1..8ef1d2b04c 100644 --- a/build/golang/Dockerfile +++ b/build/golang/Dockerfile @@ -14,7 +14,8 @@ RUN apt-get update && \ unzip && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* RUN pip install attrs pyhcl -RUN curl -fsSL -o shfmt https://github.com/mvdan/sh/releases/download/v1.3.0/shfmt_v1.3.0_linux_amd64 && \ +RUN curl -fsSLo shfmt https://github.com/mvdan/sh/releases/download/v1.3.0/shfmt_v1.3.0_linux_amd64 && \ + echo "b1925c2c405458811f0c227266402cf1868b4de529f114722c2e3a5af4ac7bb2 shfmt" | sha256sum -c && \ chmod +x shfmt && \ mv shfmt /usr/bin RUN go clean -i net && \ diff --git a/circle.yml b/circle.yml index 0c8d371dee..976a68cc9e 100644 --- a/circle.yml +++ b/circle.yml @@ -14,9 +14,10 @@ dependencies: - mkdir -p $(dirname $SRCDIR) - cp -r $(pwd)/ $SRCDIR - | - curl -fsSL -o shfmt https://github.com/mvdan/sh/releases/download/v1.3.0/shfmt_v1.3.0_linux_amd64 && \ - chmod +x shfmt && \ - sudo mv shfmt /usr/bin + curl -fsSLo shfmt https://github.com/mvdan/sh/releases/download/v1.3.0/shfmt_v1.3.0_linux_amd64 && \ + echo "b1925c2c405458811f0c227266402cf1868b4de529f114722c2e3a5af4ac7bb2 shfmt" | sha256sum -c && \ + chmod +x shfmt && \ + sudo mv shfmt /usr/bin - | cd $SRCDIR; go get \ @@ -24,6 +25,7 @@ dependencies: github.com/golang/lint/golint \ github.com/kisielk/errcheck \ github.com/fatih/hclfmt + - pip install yapf==0.16.2 flake8==3.3.0 test: 
override: diff --git a/config_management/README.md b/config_management/README.md index e98b9b4e08..bf1f6f65f8 100644 --- a/config_management/README.md +++ b/config_management/README.md @@ -113,6 +113,28 @@ N.B.: `--ssh-extra-args` is used to provide: * `StrictHostKeyChecking=no`: as VMs come and go, the same IP can be used by a different machine, so checking the host's SSH key may fail. Note that this introduces a risk of a man-in-the-middle attack. * `UserKnownHostsFile=/dev/null`: if you previously connected a VM with the same IP but a different public key, and added it to `~/.ssh/known_hosts`, SSH may still fail to connect, hence we use `/dev/null` instead of `~/.ssh/known_hosts`. + +### Docker installation role + +Various ways to install Docker are provided: + +- `docker-from-docker-ce-repo` +- `docker-from-docker-repo` +- `docker-from-get.docker.com` +- `docker-from-tarball` + +each producing a slightly different outcome, which can be useful for testing various setup scenarios. + +The `docker-install` role selects one of the above ways to install Docker based on the `docker_install_role` variable. +The default value for this variable is configured in `group_vars/all`. +You can however override it with whichever role you would want to run by passing the name of the role as a key-value pair in `extra-vars`, e.g.: + +``` +ansible-playbook .yml \ + --extra-vars "docker_install_role=docker-from-docker-ce-repo" +``` + + ## Resources * [https://www.vagrantup.com/docs/provisioning/ansible.html](https://www.vagrantup.com/docs/provisioning/ansible.html) diff --git a/config_management/group_vars/all b/config_management/group_vars/all index 05f5dbc399..d728cce80b 100644 --- a/config_management/group_vars/all +++ b/config_management/group_vars/all @@ -1,8 +1,8 @@ --- go_version: 1.8.1 terraform_version: 0.8.5 -docker_version: 1.11.2 -docker_install_role: 'docker-from-get.docker.com' +docker_version: 17.06 +docker_install_role: 'docker-from-docker-ce-repo' kubernetes_version: 1.6.1 kubernetes_cni_version: 0.5.1 kubernetes_token: '123456.0123456789123456' diff --git a/config_management/roles/dev-tools/files/apt-daily.timer.conf b/config_management/roles/dev-tools/files/apt-daily.timer.conf new file mode 100644 index 0000000000..bd19c61f9e --- /dev/null +++ b/config_management/roles/dev-tools/files/apt-daily.timer.conf @@ -0,0 +1,2 @@ +[Timer] +Persistent=false diff --git a/config_management/roles/dev-tools/tasks/main.yml b/config_management/roles/dev-tools/tasks/main.yml index a9cb99ddda..96ac3a2199 100644 --- a/config_management/roles/dev-tools/tasks/main.yml +++ b/config_management/roles/dev-tools/tasks/main.yml @@ -38,3 +38,11 @@ dest: /usr/bin mode: 0555 creates: /usr/bin/terraform + +# Ubuntu runs an apt update process that will run on first boot from image. +# This is of questionable value when the machines are only going to live for a few minutes. +# If you leave them on they will run the process daily. +# Also we have seen the update process create a 'defunct' process which then throws off Weave Net smoke-test checks. +# So, we override the 'persistent' setting so it will still run at the scheduled time but will not try to catch up on first boot. 
+- name: copy apt daily override + copy: src=apt-daily.timer.conf dest=/etc/systemd/system/apt-daily.timer.d/ diff --git a/config_management/roles/docker-configuration/files/docker.conf b/config_management/roles/docker-configuration/files/docker.conf index 6d02b55e51..626d8022b3 100644 --- a/config_management/roles/docker-configuration/files/docker.conf +++ b/config_management/roles/docker-configuration/files/docker.conf @@ -1,3 +1,3 @@ [Service] ExecStart= -ExecStart=/usr/bin/docker daemon -H fd:// -H unix:///var/run/alt-docker.sock -H tcp://0.0.0.0:2375 -s overlay --insecure-registry "weave-ci-registry:5000" +ExecStart=/usr/bin/dockerd -H fd:// -H unix:///var/run/alt-docker.sock -H tcp://0.0.0.0:2375 -s overlay --insecure-registry "weave-ci-registry:5000" diff --git a/config_management/roles/docker-from-docker-ce-repo/tasks/debian.yml b/config_management/roles/docker-from-docker-ce-repo/tasks/debian.yml new file mode 100644 index 0000000000..3e2ae1270c --- /dev/null +++ b/config_management/roles/docker-from-docker-ce-repo/tasks/debian.yml @@ -0,0 +1,35 @@ +--- +# Debian / Ubuntu specific: + +- name: install dependencies for docker repository + package: + name: "{{ item }}" + state: present + with_items: + - apt-transport-https + - ca-certificates + +- name: add apt key for the docker repository + apt_key: + keyserver: hkp://ha.pool.sks-keyservers.net:80 + id: 9DC858229FC7DD38854AE2D88D81803C0EBFCD88 + state: present + register: apt_key_docker_repo + +- name: add docker's apt repository ({{ ansible_distribution | lower }}-{{ ansible_distribution_release }}) + apt_repository: + repo: deb https://download.docker.com/linux/ubuntu {{ ansible_lsb.codename|lower }} stable + state: present + register: apt_docker_repo + +- name: update apt's cache + apt: + update_cache: yes + when: apt_key_docker_repo.changed or apt_docker_repo.changed + +- name: install docker-engine + package: + name: "{{ item }}" + state: present + with_items: + - docker-ce={{ docker_version }}* diff --git a/config_management/roles/docker-from-docker-ce-repo/tasks/main.yml b/config_management/roles/docker-from-docker-ce-repo/tasks/main.yml index ea9a3fa462..0acb6d8c92 100644 --- a/config_management/roles/docker-from-docker-ce-repo/tasks/main.yml +++ b/config_management/roles/docker-from-docker-ce-repo/tasks/main.yml @@ -1,29 +1,10 @@ -# Docker installation from Docker's CentOS Community Edition -# See also: https://docs.docker.com/engine/installation/linux/centos/ +--- +# Set up Docker +# See also: https://docs.docker.com/engine/installation/linux/ubuntulinux/#install -- name: remove all potentially pre existing packages - yum: - name: '{{ item }}' - state: absent - with_items: - - docker - - docker-common - - container-selinux - - docker-selinux - - docker-engine +# Distribution-specific tasks: +- include: debian.yml + when: ansible_os_family == "Debian" -- name: install yum-utils - yum: - name: yum-utils - state: present - -- name: add docker ce repo - command: yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo - -# Note that Docker CE versions do not follow regular Docker versions, but look -# like, for example: "17.03.0.el7" -- name: install docker - yum: - name: 'docker-ce-{{ docker_version }}' - update_cache: yes - state: present +- include: redhat.yml + when: ansible_os_family == "RedHat" diff --git a/config_management/roles/docker-from-docker-ce-repo/tasks/redhat.yml b/config_management/roles/docker-from-docker-ce-repo/tasks/redhat.yml new file mode 100644 index 0000000000..ea9a3fa462 
--- /dev/null +++ b/config_management/roles/docker-from-docker-ce-repo/tasks/redhat.yml @@ -0,0 +1,29 @@ +# Docker installation from Docker's CentOS Community Edition +# See also: https://docs.docker.com/engine/installation/linux/centos/ + +- name: remove all potentially pre existing packages + yum: + name: '{{ item }}' + state: absent + with_items: + - docker + - docker-common + - container-selinux + - docker-selinux + - docker-engine + +- name: install yum-utils + yum: + name: yum-utils + state: present + +- name: add docker ce repo + command: yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo + +# Note that Docker CE versions do not follow regular Docker versions, but look +# like, for example: "17.03.0.el7" +- name: install docker + yum: + name: 'docker-ce-{{ docker_version }}' + update_cache: yes + state: present diff --git a/config_management/roles/docker-from-get.docker.com/tasks/debian.yml b/config_management/roles/docker-from-get.docker.com/tasks/debian.yml index 97b5b7a330..7444194e57 100644 --- a/config_management/roles/docker-from-get.docker.com/tasks/debian.yml +++ b/config_management/roles/docker-from-get.docker.com/tasks/debian.yml @@ -5,4 +5,4 @@ shell: curl -sSL https://get.docker.com/gpg | sudo apt-key add - - name: install docker - shell: 'curl -sSL https://get.docker.com/ | sed -e s/docker-engine/docker-engine={{ docker_version }}*/ | sh' + shell: 'curl -sSL https://get.docker.com/ | sed -e s/docker-engine/docker-engine={{ docker_version }}*/ -e s/docker-ce/docker-ce={{ docker_version }}*/ | sh' diff --git a/config_management/roles/weave-net-utilities/tasks/main.yml b/config_management/roles/weave-net-utilities/tasks/main.yml index 8903859786..6883d23aad 100644 --- a/config_management/roles/weave-net-utilities/tasks/main.yml +++ b/config_management/roles/weave-net-utilities/tasks/main.yml @@ -45,3 +45,12 @@ - alpine - aanand/docker-dnsutils - weaveworks/hello-world + +- name: docker pull docker-py which is used by tests + docker_image: + name: joffrey/docker-py + tag: '{{ item }}' + state: present + with_items: + - '1.8.1' + - '1.9.0-rc2' diff --git a/dependencies/cross_versions.py b/dependencies/cross_versions.py index bc93cf3175..dd920f0ef4 100755 --- a/dependencies/cross_versions.py +++ b/dependencies/cross_versions.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Generate the cross product of latest versions of Weave Net's dependencies: # - Go @@ -20,74 +20,72 @@ from itertools import product # See also: /usr/include/sysexits.h -_ERROR_RUNTIME=1 -_ERROR_ILLEGAL_ARGS=64 +_ERROR_RUNTIME = 1 +_ERROR_ILLEGAL_ARGS = 64 + def _usage(error_message=None): - if error_message: - stderr.write('ERROR: ' + error_message + linesep) - stdout.write(linesep.join([ - 'Usage:', - ' cross_versions.py [OPTION]...', - 'Examples:', - ' cross_versions.py', - ' cross_versions.py -r', - ' cross_versions.py --rc', - ' cross_versions.py -l', - ' cross_versions.py --latest', - 'Options:', - '-l/--latest Include only the latest version of each major and minor versions sub-tree.', - '-r/--rc Include release candidate versions.', - '-h/--help Prints this!', - '' - ])) + if error_message: + stderr.write('ERROR: ' + error_message + linesep) + stdout.write( + linesep.join([ + 'Usage:', ' cross_versions.py [OPTION]...', 'Examples:', + ' cross_versions.py', ' cross_versions.py -r', + ' cross_versions.py --rc', ' cross_versions.py -l', + ' cross_versions.py --latest', 'Options:', + '-l/--latest Include only the latest version of each major and' + ' minor 
versions sub-tree.', + '-r/--rc Include release candidate versions.', + '-h/--help Prints this!', '' + ])) + def _validate_input(argv): - try: - config = { - 'rc': False, - 'latest': False - } - opts, args = getopt(argv, 'hlr', ['help', 'latest', 'rc']) - for opt, value in opts: - if opt in ('-h', '--help'): - _usage() - exit() - if opt in ('-l', '--latest'): - config['latest'] = True - if opt in ('-r', '--rc'): - config['rc'] = True - if len(args) != 0: - raise ValueError('Unsupported argument(s): %s.' % args) - return config - except GetoptError as e: - _usage(str(e)) - exit(_ERROR_ILLEGAL_ARGS) - except ValueError as e: - _usage(str(e)) - exit(_ERROR_ILLEGAL_ARGS) + try: + config = {'rc': False, 'latest': False} + opts, args = getopt(argv, 'hlr', ['help', 'latest', 'rc']) + for opt, value in opts: + if opt in ('-h', '--help'): + _usage() + exit() + if opt in ('-l', '--latest'): + config['latest'] = True + if opt in ('-r', '--rc'): + config['rc'] = True + if len(args) != 0: + raise ValueError('Unsupported argument(s): %s.' % args) + return config + except GetoptError as e: + _usage(str(e)) + exit(_ERROR_ILLEGAL_ARGS) + except ValueError as e: + _usage(str(e)) + exit(_ERROR_ILLEGAL_ARGS) + def _versions(dependency, config): - return map(str, - filter_versions( - get_versions_from(DEPS[dependency]['url'], DEPS[dependency]['re']), - DEPS[dependency]['min'], - **config - ) - ) + return map(str, + filter_versions( + get_versions_from(DEPS[dependency]['url'], + DEPS[dependency]['re']), + DEPS[dependency]['min'], **config)) + def cross_versions(config): - docker_versions = _versions('docker', config) - k8s_versions = _versions('kubernetes', config) - return product(docker_versions, k8s_versions) + docker_versions = _versions('docker', config) + k8s_versions = _versions('kubernetes', config) + return product(docker_versions, k8s_versions) + def main(argv): - try: - config = _validate_input(argv) - print(linesep.join('\t'.join(triple) for triple in cross_versions(config))) - except Exception as e: - print(str(e)) - exit(_ERROR_RUNTIME) + try: + config = _validate_input(argv) + print(linesep.join('\t'.join(triple) + for triple in cross_versions(config))) + except Exception as e: + print(str(e)) + exit(_ERROR_RUNTIME) + if __name__ == '__main__': - main(argv[1:]) + main(argv[1:]) diff --git a/dependencies/list_versions.py b/dependencies/list_versions.py index 3b756cd19b..e008ecfef4 100755 --- a/dependencies/list_versions.py +++ b/dependencies/list_versions.py @@ -1,17 +1,32 @@ -#!/usr/bin/python +#!/usr/bin/env python # List all available versions of Weave Net's dependencies: # - Go # - Docker # - Kubernetes # -# Depending on the parameters passed, it can gather the equivalent of the below bash one-liners: -# git ls-remote --tags https://github.com/golang/go | grep -oP '(?<=refs/tags/go)[\.\d]+$' | sort --version-sort -# git ls-remote --tags https://github.com/golang/go | grep -oP '(?<=refs/tags/go)[\.\d]+rc\d+$' | sort --version-sort | tail -n 1 -# git ls-remote --tags https://github.com/docker/docker | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+$' | sort --version-sort -# git ls-remote --tags https://github.com/docker/docker | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+\-rc\d*$' | sort --version-sort | tail -n 1 -# git ls-remote --tags https://github.com/kubernetes/kubernetes | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+$' | sort --version-sort -# git ls-remote --tags https://github.com/kubernetes/kubernetes | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+\-beta\.\d+$' | sort --version-sort | tail -n 1 +# Depending 
on the parameters passed, it can gather the equivalent of the below +# bash one-liners: +# git ls-remote --tags https://github.com/golang/go \ +# | grep -oP '(?<=refs/tags/go)[\.\d]+$' \ +# | sort --version-sort +# git ls-remote --tags https://github.com/golang/go \ +# | grep -oP '(?<=refs/tags/go)[\.\d]+rc\d+$' \ +# | sort --version-sort \ +# | tail -n 1 +# git ls-remote --tags https://github.com/docker/docker \ +# | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+$' \ +# | sort --version-sort +# git ls-remote --tags https://github.com/docker/docker \ +# | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+\-rc\d*$' \ +# | sort --version-sort \ +# | tail -n 1 +# git ls-remote --tags https://github.com/kubernetes/kubernetes \ +# | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+$' \ +# | sort --version-sort +# git ls-remote --tags https://github.com/kubernetes/kubernetes \ +# | grep -oP '(?<=refs/tags/v)\d+\.\d+\.\d+\-beta\.\d+$' \ +# | sort --version-sort | tail -n 1 # # Dependencies: # - python @@ -23,7 +38,7 @@ from os import linesep, path from sys import argv, exit, stdout, stderr from getopt import getopt, GetoptError -from subprocess import Popen, PIPE, STDOUT +from subprocess import Popen, PIPE from pkg_resources import parse_version from itertools import groupby from six.moves import filter @@ -31,236 +46,298 @@ import re # See also: /usr/include/sysexits.h -_ERROR_RUNTIME=1 -_ERROR_ILLEGAL_ARGS=64 - -_TAG_REGEX='^[0-9a-f]{40}\s+refs/tags/%s$' -_VERSION='version' -DEPS={ - 'go': { - 'url': 'https://github.com/golang/go', - 're': 'go(?P<%s>[\d\.]+(?:rc\d)*)' % _VERSION, - 'min': None - }, - 'docker': { - 'url': 'https://github.com/docker/docker', - 're': 'v(?P<%s>\d+\.\d+\.\d+(?:\-rc\d)*)' % _VERSION, - # Weave Net only works with Docker from 1.10.0 onwards, so we ignore all previous versions: - 'min': '1.10.0' - }, - 'kubernetes': { - 'url': 'https://github.com/kubernetes/kubernetes', - 're': 'v(?P<%s>\d+\.\d+\.\d+(?:\-beta\.\d)*)' % _VERSION, - # Weave Kube requires Kubernetes 1.4.2+, so we ignore all previous versions: - 'min': '1.4.2' - } +_ERROR_RUNTIME = 1 +_ERROR_ILLEGAL_ARGS = 64 + +_TAG_REGEX = '^[0-9a-f]{40}\s+refs/tags/%s$' +_VERSION = 'version' +DEPS = { + 'go': { + 'url': 'https://github.com/golang/go', + 're': 'go(?P<%s>[\d\.]+(?:rc\d)*)' % _VERSION, + 'min': None + }, + 'docker': { + 'url': 'https://github.com/docker/docker', + 're': 'v(?P<%s>\d+\.\d+\.\d+(?:\-rc\d)*)' % _VERSION, + # Weave Net only works with Docker from 1.10.0 onwards, so we ignore + # all previous versions: + 'min': '1.10.0', + }, + 'kubernetes': { + 'url': 'https://github.com/kubernetes/kubernetes', + 're': 'v(?P<%s>\d+\.\d+\.\d+(?:\-beta\.\d)*)' % _VERSION, + # Weave Kube requires Kubernetes 1.4.2+, so we ignore all previous + # versions: + 'min': '1.4.2', + } } + class Version(object): - ''' Helper class to parse and manipulate (sort, filter, group) software versions. 
''' - def __init__(self, version): - self.version = version - self.digits = [int(x) if x else 0 for x in re.match('(\d*)\.?(\d*)\.?(\d*).*?', version).groups()] - self.major, self.minor, self.patch = self.digits - self.__parsed = parse_version(version) - self.is_rc = self.__parsed.is_prerelease - def __lt__ (self, other): - return self.__parsed.__lt__(other.__parsed) - def __gt__ (self, other): - return self.__parsed.__gt__(other.__parsed) - def __le__ (self, other): - return self.__parsed.__le__(other.__parsed) - def __ge__ (self, other): - return self.__parsed.__ge__(other.__parsed) - def __eq__ (self, other): - return self.__parsed.__eq__(other.__parsed) - def __ne__ (self, other): - return self.__parsed.__ne__(other.__parsed) - def __str__(self): - return self.version - def __repr__(self): - return self.version + ''' Helper class to parse and manipulate (sort, filter, group) software + versions. ''' + + def __init__(self, version): + self.version = version + self.digits = [ + int(x) if x else 0 + for x in re.match('(\d*)\.?(\d*)\.?(\d*).*?', version).groups() + ] + self.major, self.minor, self.patch = self.digits + self.__parsed = parse_version(version) + self.is_rc = self.__parsed.is_prerelease + + def __lt__(self, other): + return self.__parsed.__lt__(other.__parsed) + + def __gt__(self, other): + return self.__parsed.__gt__(other.__parsed) + + def __le__(self, other): + return self.__parsed.__le__(other.__parsed) + + def __ge__(self, other): + return self.__parsed.__ge__(other.__parsed) + + def __eq__(self, other): + return self.__parsed.__eq__(other.__parsed) + + def __ne__(self, other): + return self.__parsed.__ne__(other.__parsed) + + def __str__(self): + return self.version + + def __repr__(self): + return self.version + def _read_go_version_from_dockerfile(): - # Read Go version from weave/build/Dockerfile - dockerfile_path = path.join(path.dirname(path.dirname(path.dirname(path.realpath(__file__)))), 'build', 'Dockerfile') - with open(dockerfile_path, 'r') as f: - for line in f: - m = re.match('^FROM golang:(\S*)$', line) - if m: - return m.group(1) - raise RuntimeError("Failed to read Go version from weave/build/Dockerfile. You may be running this script from somewhere else than weave/tools.") + # Read Go version from weave/build/Dockerfile + dockerfile_path = path.join( + path.dirname(path.dirname(path.dirname(path.realpath(__file__)))), + 'build', 'Dockerfile') + with open(dockerfile_path, 'r') as f: + for line in f: + m = re.match('^FROM golang:(\S*)$', line) + if m: + return m.group(1) + raise RuntimeError( + "Failed to read Go version from weave/build/Dockerfile." + " You may be running this script from somewhere else than weave/tools." + ) + def _try_set_min_go_version(): - ''' Set the current version of Go used to build Weave Net's containers as the minimum version. ''' - try: - DEPS['go']['min'] = _read_go_version_from_dockerfile() - except IOError as e: - stderr.write('WARNING: No minimum Go version set. Root cause: %s%s' % (e, linesep)) + ''' Set the current version of Go used to build Weave Net's containers as + the minimum version. ''' + try: + DEPS['go']['min'] = _read_go_version_from_dockerfile() + except IOError as e: + stderr.write('WARNING: No minimum Go version set. 
Root cause: %s%s' % + (e, linesep)) + def _sanitize(out): - return out.decode('ascii').strip().split(linesep) + return out.decode('ascii').strip().split(linesep) + def _parse_tag(tag, version_pattern, debug=False): - ''' Parse Git tag output's line using the provided `version_pattern`, e.g.: - >>> _parse_tag('915b77eb4efd68916427caf8c7f0b53218c5ea4a refs/tags/v1.4.6', 'v(?P\d+\.\d+\.\d+(?:\-beta\.\d)*)') - '1.4.6' - ''' - pattern = _TAG_REGEX % version_pattern - m = re.match(pattern, tag) - if m: - return m.group(_VERSION) - elif debug: - stderr.write('ERROR: Failed to parse version out of tag [%s] using [%s].%s' % (tag, pattern, linesep)) + ''' Parse Git tag output's line using the provided `version_pattern`, e.g.: + >>> _parse_tag( + '915b77eb4efd68916427caf8c7f0b53218c5ea4a refs/tags/v1.4.6', + 'v(?P\d+\.\d+\.\d+(?:\-beta\.\d)*)') + '1.4.6' + ''' + pattern = _TAG_REGEX % version_pattern + m = re.match(pattern, tag) + if m: + return m.group(_VERSION) + elif debug: + stderr.write( + 'ERROR: Failed to parse version out of tag [%s] using [%s].%s' % + (tag, pattern, linesep)) + def get_versions_from(git_repo_url, version_pattern): - ''' Get release and release candidates' versions from the provided Git repository. ''' - git = Popen(shlex.split('git ls-remote --tags %s' % git_repo_url), stdout=PIPE) - out, err = git.communicate() - status_code = git.returncode - if status_code != 0: - raise RuntimeError('Failed to retrieve git tags from %s. Status code: %s. Output: %s. Error: %s' % (git_repo_url, status_code, out, err)) - return list(filter(None, (_parse_tag(line, version_pattern) for line in _sanitize(out)))) + ''' Get release and release candidates' versions from the provided Git + repository. ''' + git = Popen( + shlex.split('git ls-remote --tags %s' % git_repo_url), stdout=PIPE) + out, err = git.communicate() + status_code = git.returncode + if status_code != 0: + raise RuntimeError('Failed to retrieve git tags from %s. ' + 'Status code: %s. Output: %s. Error: %s' % + (git_repo_url, status_code, out, err)) + return list( + filter(None, (_parse_tag(line, version_pattern) + for line in _sanitize(out)))) + def _tree(versions, level=0): - ''' Group versions by major, minor and patch version digits. ''' - if not versions or level >= len(versions[0].digits): - return # Empty versions or no more digits to group by. - versions_tree = [] - for _, versions_group in groupby(versions, lambda v: v.digits[level]): - subtree = _tree(list(versions_group), level+1) - if subtree: - versions_tree.append(subtree) - # Return the current subtree if non-empty, or the list of "leaf" versions: - return versions_tree if versions_tree else versions + ''' Group versions by major, minor and patch version digits. ''' + if not versions or level >= len(versions[0].digits): + return # Empty versions or no more digits to group by. + versions_tree = [] + for _, versions_group in groupby(versions, lambda v: v.digits[level]): + subtree = _tree(list(versions_group), level + 1) + if subtree: + versions_tree.append(subtree) + # Return the current subtree if non-empty, or the list of "leaf" versions: + return versions_tree if versions_tree else versions + def _is_iterable(obj): - ''' - Check if the provided object is an iterable collection, i.e. not a string, e.g. 
a list, a generator: - >>> _is_iterable('string') - False - >>> _is_iterable([1, 2, 3]) - True - >>> _is_iterable((x for x in [1, 2, 3])) - True - ''' - return hasattr(obj, '__iter__') and not isinstance(obj, str) + ''' + Check if the provided object is an iterable collection, i.e. not a string, + e.g. a list, a generator: + >>> _is_iterable('string') + False + >>> _is_iterable([1, 2, 3]) + True + >>> _is_iterable((x for x in [1, 2, 3])) + True + ''' + return hasattr(obj, '__iter__') and not isinstance(obj, str) + def _leaf_versions(tree, rc): - ''' - Recursively traverse the versions tree in a depth-first fashion, - and collect the last node of each branch, i.e. leaf versions. - ''' - versions = [] - if _is_iterable(tree): - for subtree in tree: - versions.extend(_leaf_versions(subtree, rc)) - if not versions: - if rc: - last_rc = next(filter(lambda v: v.is_rc, reversed(tree)), None) - last_prod = next(filter(lambda v: not v.is_rc, reversed(tree)), None) - if last_rc and last_prod and (last_prod < last_rc): - versions.extend([last_prod, last_rc]) - elif not last_prod: - versions.append(last_rc) - else: - # Either there is no RC, or we ignore the RC as older than the latest production version: - versions.append(last_prod) - else: - versions.append(tree[-1]) - return versions + ''' + Recursively traverse the versions tree in a depth-first fashion, + and collect the last node of each branch, i.e. leaf versions. + ''' + versions = [] + if _is_iterable(tree): + for subtree in tree: + versions.extend(_leaf_versions(subtree, rc)) + if not versions: + if rc: + last_rc = next(filter(lambda v: v.is_rc, reversed(tree)), None) + last_prod = next( + filter(lambda v: not v.is_rc, reversed(tree)), None) + if last_rc and last_prod and (last_prod < last_rc): + versions.extend([last_prod, last_rc]) + elif not last_prod: + versions.append(last_rc) + else: + # Either there is no RC, or we ignore the RC as older than + # the latest production version: + versions.append(last_prod) + else: + versions.append(tree[-1]) + return versions + def filter_versions(versions, min_version=None, rc=False, latest=False): - ''' Filter provided versions - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version=None, latest=False, rc=False) - [1.0.0, 1.0.1, 1.1.1, 2.0.0] - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version=None, latest=True, rc=False) - [1.0.1, 1.1.1, 2.0.0] - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version=None, latest=False, rc=True) - [1.0.0-beta.1, 1.0.0, 1.0.1, 1.1.1, 1.1.2-rc1, 2.0.0] - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version='1.1.0', latest=False, rc=True) - [1.1.1, 1.1.2-rc1, 2.0.0] - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version=None, latest=True, rc=True) - [1.0.1, 1.1.1, 1.1.2-rc1, 2.0.0] - - >>> filter_versions(['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], min_version='1.1.0', latest=True, rc=True) - [1.1.1, 1.1.2-rc1, 2.0.0] - ''' - versions = sorted([Version(v) for v in versions]) - if min_version: - min_version = Version(min_version) - versions = [v for v in versions if v >= min_version] - if not rc: - versions = [v for v in versions if not v.is_rc] - if latest: - versions_tree = _tree(versions) - return _leaf_versions(versions_tree, rc) - else: - return versions + ''' Filter provided versions + + >>> 
filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version=None, latest=False, rc=False) + [1.0.0, 1.0.1, 1.1.1, 2.0.0] + + >>> filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version=None, latest=True, rc=False) + [1.0.1, 1.1.1, 2.0.0] + + >>> filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version=None, latest=False, rc=True) + [1.0.0-beta.1, 1.0.0, 1.0.1, 1.1.1, 1.1.2-rc1, 2.0.0] + + >>> filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version='1.1.0', latest=False, rc=True) + [1.1.1, 1.1.2-rc1, 2.0.0] + + >>> filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version=None, latest=True, rc=True) + [1.0.1, 1.1.1, 1.1.2-rc1, 2.0.0] + + >>> filter_versions( + ['1.0.0-beta.1', '1.0.0', '1.0.1', '1.1.1', '1.1.2-rc1', '2.0.0'], + min_version='1.1.0', latest=True, rc=True) + [1.1.1, 1.1.2-rc1, 2.0.0] + ''' + versions = sorted([Version(v) for v in versions]) + if min_version: + min_version = Version(min_version) + versions = [v for v in versions if v >= min_version] + if not rc: + versions = [v for v in versions if not v.is_rc] + if latest: + versions_tree = _tree(versions) + return _leaf_versions(versions_tree, rc) + else: + return versions + def _usage(error_message=None): - if error_message: - stderr.write('ERROR: ' + error_message + linesep) - stdout.write(linesep.join([ - 'Usage:', - ' list_versions.py [OPTION]... [DEPENDENCY]', - 'Examples:', - ' list_versions.py go', - ' list_versions.py -r docker', - ' list_versions.py --rc docker', - ' list_versions.py -l kubernetes', - ' list_versions.py --latest kubernetes', - 'Options:', - '-l/--latest Include only the latest version of each major and minor versions sub-tree.', - '-r/--rc Include release candidate versions.', - '-h/--help Prints this!', - '' - ])) + if error_message: + stderr.write('ERROR: ' + error_message + linesep) + stdout.write( + linesep.join([ + 'Usage:', ' list_versions.py [OPTION]... [DEPENDENCY]', + 'Examples:', ' list_versions.py go', + ' list_versions.py -r docker', + ' list_versions.py --rc docker', + ' list_versions.py -l kubernetes', + ' list_versions.py --latest kubernetes', 'Options:', + '-l/--latest Include only the latest version of each major and' + ' minor versions sub-tree.', + '-r/--rc Include release candidate versions.', + '-h/--help Prints this!', '' + ])) + def _validate_input(argv): - try: - config = { - 'rc': False, - 'latest': False - } - opts, args = getopt(argv, 'hlr', ['help', 'latest', 'rc']) - for opt, value in opts: - if opt in ('-h', '--help'): - _usage() - exit() - if opt in ('-l', '--latest'): - config['latest'] = True - if opt in ('-r', '--rc'): - config['rc'] = True - if len(args) != 1: - raise ValueError('Please provide a dependency to get versions of. Expected 1 argument but got %s: %s.' % (len(args), args)) - dependency=args[0].lower() - if dependency not in DEPS.keys(): - raise ValueError('Please provide a valid dependency. Supported one dependency among {%s} but got: %s.' 
% (', '.join(DEPS.keys()), dependency)) - return dependency, config - except GetoptError as e: - _usage(str(e)) - exit(_ERROR_ILLEGAL_ARGS) - except ValueError as e: - _usage(str(e)) - exit(_ERROR_ILLEGAL_ARGS) + try: + config = {'rc': False, 'latest': False} + opts, args = getopt(argv, 'hlr', ['help', 'latest', 'rc']) + for opt, value in opts: + if opt in ('-h', '--help'): + _usage() + exit() + if opt in ('-l', '--latest'): + config['latest'] = True + if opt in ('-r', '--rc'): + config['rc'] = True + if len(args) != 1: + raise ValueError('Please provide a dependency to get versions of.' + ' Expected 1 argument but got %s: %s.' % + (len(args), args)) + dependency = args[0].lower() + if dependency not in DEPS.keys(): + raise ValueError( + 'Please provide a valid dependency.' + ' Supported one dependency among {%s} but got: %s.' % + (', '.join(DEPS.keys()), dependency)) + return dependency, config + except GetoptError as e: + _usage(str(e)) + exit(_ERROR_ILLEGAL_ARGS) + except ValueError as e: + _usage(str(e)) + exit(_ERROR_ILLEGAL_ARGS) + def main(argv): - try: - dependency, config = _validate_input(argv) - if dependency == 'go': - _try_set_min_go_version() - versions = get_versions_from(DEPS[dependency]['url'], DEPS[dependency]['re']) - versions = filter_versions(versions, DEPS[dependency]['min'], **config) - print(linesep.join(map(str, versions))) - except Exception as e: - print(str(e)) - exit(_ERROR_RUNTIME) + try: + dependency, config = _validate_input(argv) + if dependency == 'go': + _try_set_min_go_version() + versions = get_versions_from(DEPS[dependency]['url'], + DEPS[dependency]['re']) + versions = filter_versions(versions, DEPS[dependency]['min'], **config) + print(linesep.join(map(str, versions))) + except Exception as e: + print(str(e)) + exit(_ERROR_RUNTIME) + if __name__ == '__main__': - main(argv[1:]) + main(argv[1:]) diff --git a/image-tag b/image-tag index d1fd2f728f..31f023dac0 100755 --- a/image-tag +++ b/image-tag @@ -4,6 +4,6 @@ set -o errexit set -o nounset set -o pipefail -WORKING_SUFFIX=$(if ! git diff --exit-code --quiet HEAD >&2; then echo "-WIP"; else echo ""; fi) +WORKING_SUFFIX=$(if git status --porcelain | grep -qE '^(?:[^?][^ ]|[^ ][^?])\s'; then echo "-WIP"; else echo ""; fi) BRANCH_PREFIX=$(git rev-parse --abbrev-ref HEAD) echo "${BRANCH_PREFIX//\//-}-$(git rev-parse --short HEAD)$WORKING_SUFFIX" diff --git a/integration/config.sh b/integration/config.sh index 6bf208606b..5419219203 100644 --- a/integration/config.sh +++ b/integration/config.sh @@ -115,11 +115,8 @@ rm_containers() { start_suite() { for host in $HOSTS; do [ -z "$DEBUG" ] || echo "Cleaning up on $host: removing all containers and resetting weave" - PLUGIN_ID=$(docker_on "$host" ps -aq --filter=name=weaveplugin) - PLUGIN_FILTER="cat" - [ -n "$PLUGIN_ID" ] && PLUGIN_FILTER="grep -v $PLUGIN_ID" # shellcheck disable=SC2046 - rm_containers "$host" $(docker_on "$host" ps -aq 2>/dev/null | $PLUGIN_FILTER) + rm_containers "$host" $(docker_on "$host" ps -aq 2>/dev/null) run_on "$host" "docker network ls | grep -q ' weave ' && docker network rm weave" || true weave_on "$host" reset 2>/dev/null done diff --git a/lint b/lint index b5bdf3b1f9..63c5066185 100755 --- a/lint +++ b/lint @@ -113,7 +113,7 @@ lint_sh() { local filename="$1" local lint_result=0 - if ! diff -u <(shfmt -i 4 "${filename}") "${filename}"; then + if ! 
diff -u "${filename}" <(shfmt -i 4 "${filename}"); then lint_result=1 echo "${filename}: run shfmt -i 4 -w ${filename}" fi @@ -153,6 +153,21 @@ lint_md() { return $lint_result } +lint_py() { + local filename="$1" + local lint_result=0 + + if yapf --diff "${filename}" | grep -qE '^[+-]'; then + lint_result=1 + echo "${filename}: run yapf --in-place ${filename}" + else + # Only run flake8 if yapf passes, since they pick up a lot of similar issues + flake8 "${filename}" || lint_result=1 + fi + + return $lint_result +} + lint() { filename="$1" ext="${filename##*\.}" @@ -179,6 +194,7 @@ lint() { sh) lint_sh "${filename}" || lint_result=1 ;; tf) lint_tf "${filename}" || lint_result=1 ;; md) lint_md "${filename}" || lint_result=1 ;; + py) lint_py "${filename}" || lint_result=1 ;; esac spell_check "${filename}" || lint_result=1 diff --git a/sched b/sched index 72eeee652a..a282558f11 100755 --- a/sched +++ b/sched @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python import sys, string, urllib import requests import optparse diff --git a/scheduler/main.py b/scheduler/main.py index 4ed887563a..3b540b54a3 100644 --- a/scheduler/main.py +++ b/scheduler/main.py @@ -19,157 +19,188 @@ # observations faster. alpha = 0.3 + class Test(ndb.Model): - total_run_time = ndb.FloatProperty(default=0.) # Not total, but a EWMA - total_runs = ndb.IntegerProperty(default=0) - - def parallelism(self): - name = self.key.string_id() - m = re.search('(\d+)_test.sh$', name) - if m is None: - return 1 - else: - return int(m.group(1)) - - def cost(self): - p = self.parallelism() - logging.info("Test %s has parallelism %d and avg run time %s", self.key.string_id(), p, self.total_run_time) - return self.parallelism() * self.total_run_time + total_run_time = ndb.FloatProperty(default=0.) 
# Not total, but a EWMA + total_runs = ndb.IntegerProperty(default=0) + + def parallelism(self): + name = self.key.string_id() + m = re.search('(\d+)_test.sh$', name) + if m is None: + return 1 + else: + return int(m.group(1)) + + def cost(self): + p = self.parallelism() + logging.info("Test %s has parallelism %d and avg run time %s", + self.key.string_id(), p, self.total_run_time) + return self.parallelism() * self.total_run_time + class Schedule(ndb.Model): - shards = ndb.JsonProperty() + shards = ndb.JsonProperty() + @app.route('/record//', methods=['POST']) @ndb.transactional def record(test_name, runtime): - test = Test.get_by_id(test_name) - if test is None: - test = Test(id=test_name) - test.total_run_time = (test.total_run_time * (1-alpha)) + (float(runtime) * alpha) - test.total_runs += 1 - test.put() - return ('', 204) - -@app.route('/schedule///', methods=['POST']) + test = Test.get_by_id(test_name) + if test is None: + test = Test(id=test_name) + test.total_run_time = (test.total_run_time * + (1 - alpha)) + (float(runtime) * alpha) + test.total_runs += 1 + test.put() + return ('', 204) + + +@app.route( + '/schedule///', methods=['POST']) def schedule(test_run, shard_count, shard): - # read tests from body - test_names = flask.request.get_json(force=True)['tests'] - - # first see if we have a scedule already - schedule_id = "%s-%d" % (test_run, shard_count) - schedule = Schedule.get_by_id(schedule_id) - if schedule is not None: + # read tests from body + test_names = flask.request.get_json(force=True)['tests'] + + # first see if we have a scedule already + schedule_id = "%s-%d" % (test_run, shard_count) + schedule = Schedule.get_by_id(schedule_id) + if schedule is not None: + return flask.json.jsonify(tests=schedule.shards[str(shard)]) + + # if not, do simple greedy algorithm + test_times = ndb.get_multi( + ndb.Key(Test, test_name) for test_name in test_names) + + def avg(test): + if test is not None: + return test.cost() + return 1 + + test_times = [(test_name, avg(test)) + for test_name, test in zip(test_names, test_times)] + test_times_dict = dict(test_times) + test_times.sort(key=operator.itemgetter(1)) + + shards = {i: [] for i in xrange(shard_count)} + while test_times: + test_name, time = test_times.pop() + + # find shortest shard and put it in that + s, _ = min( + ((i, sum(test_times_dict[t] for t in shards[i])) + for i in xrange(shard_count)), + key=operator.itemgetter(1)) + + shards[s].append(test_name) + + # atomically insert or retrieve existing schedule + schedule = Schedule.get_or_insert(schedule_id, shards=shards) return flask.json.jsonify(tests=schedule.shards[str(shard)]) - # if not, do simple greedy algorithm - test_times = ndb.get_multi(ndb.Key(Test, test_name) for test_name in test_names) - def avg(test): - if test is not None: - return test.cost() - return 1 - test_times = [(test_name, avg(test)) for test_name, test in zip(test_names, test_times)] - test_times_dict = dict(test_times) - test_times.sort(key=operator.itemgetter(1)) - - shards = {i: [] for i in xrange(shard_count)} - while test_times: - test_name, time = test_times.pop() - - # find shortest shard and put it in that - s, _ = min(((i, sum(test_times_dict[t] for t in shards[i])) - for i in xrange(shard_count)), key=operator.itemgetter(1)) - - shards[s].append(test_name) - - # atomically insert or retrieve existing schedule - schedule = Schedule.get_or_insert(schedule_id, shards=shards) - return flask.json.jsonify(tests=schedule.shards[str(shard)]) FIREWALL_REGEXES = [ - 
re.compile(r'^(?P\w+)-allow-(?P\w+)-(?P\d+)-(?P\d+)$'), - re.compile(r'^(?P\w+)-(?P\d+)-(?P\d+)-allow-(?P[\w\-]+)$'), + re.compile( + r'^(?P\w+)-allow-(?P\w+)-(?P\d+)-(?P\d+)$' + ), + re.compile(r'^(?P\w+)-(?P\d+)-(?P\d+)-allow-' + r'(?P[\w\-]+)$'), ] NAME_REGEXES = [ - re.compile(r'^host(?P\d+)-(?P\d+)-(?P\d+)$'), - re.compile(r'^test-(?P\d+)-(?P\d+)-(?P\d+)$'), + re.compile(r'^host(?P\d+)-(?P\d+)-(?P\d+)$'), + re.compile(r'^test-(?P\d+)-(?P\d+)-(?P\d+)$'), ] + def _matches_any_regex(name, regexes): - for regex in regexes: - matches = regex.match(name) - if matches: - return matches + for regex in regexes: + matches = regex.match(name) + if matches: + return matches + PROJECTS = [ - ('weaveworks/weave', 'weave-net-tests', 'us-central1-a', True), - ('weaveworks/weave', 'positive-cocoa-90213', 'us-central1-a', True), - ('weaveworks/scope', 'scope-integration-tests', 'us-central1-a', False), + ('weaveworks/weave', 'weave-net-tests', 'us-central1-a', True), + ('weaveworks/weave', 'positive-cocoa-90213', 'us-central1-a', True), + ('weaveworks/scope', 'scope-integration-tests', 'us-central1-a', False), ] + @app.route('/tasks/gc') def gc(): - # Get list of running VMs, pick build id out of VM name - credentials = GoogleCredentials.get_application_default() - compute = discovery.build('compute', 'v1', credentials=credentials) + # Get list of running VMs, pick build id out of VM name + credentials = GoogleCredentials.get_application_default() + compute = discovery.build('compute', 'v1', credentials=credentials) + + for repo, project, zone, gc_fw in PROJECTS: + gc_project(compute, repo, project, zone, gc_fw) - for repo, project, zone, gc_fw in PROJECTS: - gc_project(compute, repo, project, zone, gc_fw) + return "Done" - return "Done" def gc_project(compute, repo, project, zone, gc_fw): - logging.info("GCing %s, %s, %s", repo, project, zone) - # Get list of builds, filter down to running builds: - running = _get_running_builds(repo) - # Stop VMs for builds that aren't running: - _gc_compute_engine_instances(compute, project, zone, running) - # Remove firewall rules for builds that aren't running: - if gc_fw: - _gc_firewall_rules(compute, project, running) + logging.info("GCing %s, %s, %s", repo, project, zone) + # Get list of builds, filter down to running builds: + running = _get_running_builds(repo) + # Stop VMs for builds that aren't running: + _gc_compute_engine_instances(compute, project, zone, running) + # Remove firewall rules for builds that aren't running: + if gc_fw: + _gc_firewall_rules(compute, project, running) + def _get_running_builds(repo): - result = urlfetch.fetch('https://circleci.com/api/v1/project/%s' % repo, - headers={'Accept': 'application/json'}) - assert result.status_code == 200 - builds = json.loads(result.content) - running = {build['build_num'] for build in builds if not build.get('stop_time')} - logging.info("Runnings builds: %r", running) - return running + result = urlfetch.fetch( + 'https://circleci.com/api/v1/project/%s' % repo, + headers={'Accept': 'application/json'}) + assert result.status_code == 200 + builds = json.loads(result.content) + running = { + build['build_num'] + for build in builds if not build.get('stop_time') + } + logging.info("Runnings builds: %r", running) + return running + def _get_hosts_by_build(instances): - host_by_build = collections.defaultdict(list) - for instance in instances['items']: - matches = _matches_any_regex(instance['name'], NAME_REGEXES) - if not matches: - continue - 
host_by_build[int(matches.group('build'))].append(instance['name']) - logging.info("Running VMs by build: %r", host_by_build) - return host_by_build + host_by_build = collections.defaultdict(list) + for instance in instances['items']: + matches = _matches_any_regex(instance['name'], NAME_REGEXES) + if not matches: + continue + host_by_build[int(matches.group('build'))].append(instance['name']) + logging.info("Running VMs by build: %r", host_by_build) + return host_by_build + def _gc_compute_engine_instances(compute, project, zone, running): - instances = compute.instances().list(project=project, zone=zone).execute() - if 'items' not in instances: - return - host_by_build = _get_hosts_by_build(instances) - stopped = [] - for build, names in host_by_build.iteritems(): - if build in running: - continue - for name in names: - stopped.append(name) - logging.info("Stopping VM %s", name) - compute.instances().delete(project=project, zone=zone, instance=name).execute() - return stopped + instances = compute.instances().list(project=project, zone=zone).execute() + if 'items' not in instances: + return + host_by_build = _get_hosts_by_build(instances) + stopped = [] + for build, names in host_by_build.iteritems(): + if build in running: + continue + for name in names: + stopped.append(name) + logging.info("Stopping VM %s", name) + compute.instances().delete( + project=project, zone=zone, instance=name).execute() + return stopped + def _gc_firewall_rules(compute, project, running): - firewalls = compute.firewalls().list(project=project).execute() - if 'items' not in firewalls: - return - for firewall in firewalls['items']: - matches = _matches_any_regex(firewall['name'], FIREWALL_REGEXES) - if not matches: - continue - if int(matches.group('build')) in running: - continue - logging.info("Deleting firewall rule %s", firewall['name']) - compute.firewalls().delete(project=project, firewall=firewall['name']).execute() + firewalls = compute.firewalls().list(project=project).execute() + if 'items' not in firewalls: + return + for firewall in firewalls['items']: + matches = _matches_any_regex(firewall['name'], FIREWALL_REGEXES) + if not matches: + continue + if int(matches.group('build')) in running: + continue + logging.info("Deleting firewall rule %s", firewall['name']) + compute.firewalls().delete( + project=project, firewall=firewall['name']).execute() diff --git a/test b/test index 1497e7a2ae..c87bdd0739 100755 --- a/test +++ b/test @@ -33,9 +33,13 @@ while [ $# -gt 0 ]; do shift 1 ;; "-netgo") - TAGS="-tags netgo" + TAGS="netgo" shift 1 ;; + "-tags") + TAGS="$2" + shift 2 + ;; "-p") PARALLEL=true shift 1 @@ -51,7 +55,7 @@ while [ $# -gt 0 ]; do esac done -GO_TEST_ARGS=($TAGS -cpu 4 -timeout $TIMEOUT) +GO_TEST_ARGS=(-tags "${TAGS[@]}" -cpu 4 -timeout $TIMEOUT) if [ -n "$SLOW" ] || [ -n "$CIRCLECI" ]; then SLOW=true @@ -74,7 +78,7 @@ fail=0 if [ -z "$TESTDIRS" ]; then # NB: Relies on paths being prefixed with './'. 
- TESTDIRS=($(git ls-files -- '*_test.go' | grep -vE '^(vendor|prog|experimental)/' | xargs -n1 dirname | sort -u | sed -e 's|^|./|')) + TESTDIRS=($(git ls-files -- '*_test.go' | grep -vE '^(vendor|experimental)/' | xargs -n1 dirname | sort -u | sed -e 's|^|./|')) else # TESTDIRS on the right side is not really an array variable, it # is just a string with spaces, but it is written like that to @@ -97,7 +101,7 @@ go test -i "${GO_TEST_ARGS[@]}" "${TESTDIRS[@]}" run_test() { local dir=$1 if [ -z "$NO_GO_GET" ]; then - go get -t "$TAGS" "$dir" + go get -t -tags "${TAGS[@]}" "$dir" fi local GO_TEST_ARGS_RUN=("${GO_TEST_ARGS[@]}")
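Note on how the build-tag plumbing above fits together: patch 1 makes the Makefile pass the project's Go build tags to the test driver, and patch 4's tools/test change adds a -tags option that forwards them to go test and go get. Below is a minimal sketch of that flow, not part of the patches themselves; it assumes GO_BUILD_TAGS would expand to something like "netgo" (its Makefile definition is outside this diff), and the TIMEOUT value is purely illustrative since tools/test sets its own default.

    # Makefile (patch 1) invokes the driver roughly as:
    #     ./tools/test -no-go-get -tags $(GO_BUILD_TAGS)
    # tools/test (patch 4) captures that value in TAGS via the new "-tags" case
    # and threads it into the go tool invocations:
    TAGS="netgo"        # example value; whatever GO_BUILD_TAGS expanded to
    TIMEOUT=300s        # illustrative; tools/test defines its own timeout
    GO_TEST_ARGS=(-tags "${TAGS[@]}" -cpu 4 -timeout $TIMEOUT)
    go test -i "${GO_TEST_ARGS[@]}" ./...
    # and, unless -no-go-get was passed, each test package is also fetched
    # with the same tags:
    #     go get -t -tags "${TAGS[@]}" "$dir"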