Follow package dependency graph from changed packages. #2498

Merged: 2 commits, Oct 7, 2016
10 changes: 5 additions & 5 deletions scripts/generate_json_docs.py
@@ -26,6 +26,7 @@
from parinx.errors import MethodParsingException
import six

from script_utils import PROJECT_ROOT
from verify_included_modules import get_public_modules


@@ -601,7 +602,7 @@ def main():
parser.add_argument('--tag', help='The version of the documentation.',
default='master')
parser.add_argument('--basepath', help='Path to the library.',
default=os.path.join(os.path.dirname(__file__), '..'))
default=PROJECT_ROOT)
parser.add_argument('--show-toc', help='Prints partial table of contents',
default=False)
args = parser.parse_args()
@@ -635,18 +636,17 @@ def main():
}
}

BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
BASE_JSON_DOCS_DIR = os.path.join(BASE_DIR, 'docs', 'json')
BASE_JSON_DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs', 'json')

DOCS_BUILD_DIR = os.path.join(BASE_DIR, 'docs', '_build')
DOCS_BUILD_DIR = os.path.join(PROJECT_ROOT, 'docs', '_build')
JSON_DOCS_DIR = os.path.join(DOCS_BUILD_DIR, 'json', args.tag)
LIB_DIR = os.path.abspath(args.basepath)

library_dir = os.path.join(LIB_DIR, 'google', 'cloud')
public_mods = get_public_modules(library_dir,
base_package='google.cloud')

generate_module_docs(public_mods, JSON_DOCS_DIR, BASE_DIR, toc)
generate_module_docs(public_mods, JSON_DOCS_DIR, PROJECT_ROOT, toc)
generate_doc_types_json(public_mods,
os.path.join(JSON_DOCS_DIR, 'types.json'))
package_files(JSON_DOCS_DIR, DOCS_BUILD_DIR, BASE_JSON_DOCS_DIR)
8 changes: 4 additions & 4 deletions scripts/make_datastore_grpc.py
@@ -20,13 +20,13 @@
import sys
import tempfile

from script_utils import PROJECT_ROOT

ROOT_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
PROTOS_DIR = os.path.join(ROOT_DIR, 'googleapis-pb')

PROTOS_DIR = os.path.join(PROJECT_ROOT, 'googleapis-pb')
PROTO_PATH = os.path.join(PROTOS_DIR, 'google', 'datastore',
'v1', 'datastore.proto')
GRPC_ONLY_FILE = os.path.join(ROOT_DIR, 'datastore',
GRPC_ONLY_FILE = os.path.join(PROJECT_ROOT, 'datastore',
'google', 'cloud', 'datastore',
'_generated', 'datastore_grpc_pb2.py')
GRPCIO_VIRTUALENV = os.getenv('GRPCIO_VIRTUALENV')
3 changes: 2 additions & 1 deletion scripts/run_pylint.py
@@ -30,6 +30,7 @@
import sys

from script_utils import get_affected_files
from script_utils import PROJECT_ROOT


IGNORED_DIRECTORIES = [
@@ -44,7 +45,7 @@
os.path.join('google', 'cloud', '__init__.py'),
'setup.py',
]
SCRIPTS_DIR = os.path.abspath(os.path.dirname(__file__))
SCRIPTS_DIR = os.path.join(PROJECT_ROOT, 'scripts')
PRODUCTION_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_default')
TEST_RC = os.path.join(SCRIPTS_DIR, 'pylintrc_reduced')
TEST_DISABLED_MESSAGES = [
17 changes: 13 additions & 4 deletions scripts/run_unit_tests.py
@@ -27,15 +27,15 @@
import sys

from script_utils import check_output
from script_utils import follow_dependencies
from script_utils import get_changed_packages
from script_utils import in_travis
from script_utils import in_travis_pr
from script_utils import local_diff_branch
from script_utils import PROJECT_ROOT
from script_utils import travis_branch


PROJECT_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
IGNORED_DIRECTORIES = (
'appveyor',
'docs',
@@ -127,6 +127,12 @@ def get_test_packages():
any filtering)
* Just use all packages

An additional check is done in the cases where a diff is computed (i.e.
using the local remote and local branch environment variables, or on Travis).
Once the filtered list of **changed** packages is found, the package
dependency graph is used to add any additional packages which depend on
the changed packages.

:rtype: list
:returns: A list of all package directories where tests
need to be run.
@@ -140,9 +146,12 @@
verify_packages(args.packages, all_packages)
return sorted(args.packages)
elif local_diff is not None:
return get_changed_packages('HEAD', local_diff, all_packages)
changed_packages = get_changed_packages(
'HEAD', local_diff, all_packages)
return follow_dependencies(changed_packages, all_packages)
elif in_travis():
return get_travis_directories(all_packages)
changed_packages = get_travis_directories(all_packages)
return follow_dependencies(changed_packages, all_packages)
else:
return all_packages

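To make the new selection flow in get_test_packages() concrete, here is a small standalone sketch (illustrative only, not taken from the diff). The real helpers are passed in as stand-in callables so the snippet runs on its own; the package names and the hard-coded assumption that datastore depends on core are hypothetical.

def select_test_packages(cli_packages, local_diff, on_travis, all_packages,
                         get_changed, get_travis_dirs, follow):
    # Mirrors the branch order above: explicit packages win and are not
    # expanded; diff-based selections are expanded along the dependency graph.
    if cli_packages:
        return sorted(cli_packages)
    if local_diff is not None:
        return follow(get_changed('HEAD', local_diff, all_packages), all_packages)
    if on_travis:
        return follow(get_travis_dirs(all_packages), all_packages)
    return all_packages

print(select_test_packages(
    cli_packages=None, local_diff='origin/master', on_travis=False,
    all_packages=['core', 'datastore', 'storage'],
    get_changed=lambda base, diff, pkgs: ['core'],
    get_travis_dirs=lambda pkgs: [],
    follow=lambda changed, pkgs: sorted(set(changed) | {'datastore'})))
# Prints ['core', 'datastore']: the changed package plus its dependent.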
117 changes: 117 additions & 0 deletions scripts/script_utils.py
@@ -16,15 +16,21 @@

from __future__ import print_function

import ast
import os
import subprocess


PROJECT_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
LOCAL_REMOTE_ENV = 'GOOGLE_CLOUD_TESTING_REMOTE'
LOCAL_BRANCH_ENV = 'GOOGLE_CLOUD_TESTING_BRANCH'
IN_TRAVIS_ENV = 'TRAVIS'
TRAVIS_PR_ENV = 'TRAVIS_PULL_REQUEST'
TRAVIS_BRANCH_ENV = 'TRAVIS_BRANCH'
INST_REQS_KWARG = 'install_requires'
REQ_VAR = 'REQUIREMENTS'
PACKAGE_PREFIX = 'google-cloud-'


def in_travis():
@@ -226,3 +232,114 @@ def get_affected_files(allow_limited=True):
result = subprocess.check_output(['git', 'ls-files'])

return result.rstrip('\n').split('\n'), diff_base


def get_required_packages(file_contents):
"""Get required packages from a ``setup.py`` file.

Makes the following assumptions:

* ``install_requires=REQUIREMENTS`` occurs in the call to
``setup()`` in the ``file_contents``.
* The text ``install_requires`` occurs nowhere else in the file.
* The text ``REQUIREMENTS`` only appears when being passed to
``setup()`` (as above) and when being defined.
* The ``REQUIREMENTS`` variable is a list and the text from the
``setup.py`` file containing that list can be parsed using
``ast.literal_eval()``.

:type file_contents: str
:param file_contents: The contents of a ``setup.py`` file.

:rtype: list
:returns: The list of required packages.
:raises: :class:`~exceptions.ValueError` if the file is in an
unexpected format.
"""
# Make sure the only ``install_requires`` happens in the
# call to setup()
if file_contents.count(INST_REQS_KWARG) != 1:
raise ValueError('Expected only one use of keyword',
INST_REQS_KWARG, file_contents)
# Make sure the only usage of ``install_requires`` is to set
# install_requires=REQUIREMENTS.
keyword_stmt = INST_REQS_KWARG + '=' + REQ_VAR
if file_contents.count(keyword_stmt) != 1:
raise ValueError('Expected keyword to be set with variable',
INST_REQS_KWARG, REQ_VAR, file_contents)
# Split the file on the ``REQUIREMENTS`` variable while asserting that
# it appears exactly twice.
_, reqs_section, _ = file_contents.split(REQ_VAR)
# Find ``REQUIREMENTS`` list variable defined in ``reqs_section``.
reqs_begin = reqs_section.index('[')
reqs_end = reqs_section.index(']') + 1

# Convert the text to an actual list, but make sure no
# locals or globals can be used.
reqs_list_text = reqs_section[reqs_begin:reqs_end]
# We use literal_eval() because it limits to evaluating
# strings that only consist of a few Python literals: strings,
# numbers, tuples, lists, dicts, booleans, and None.
requirements = ast.literal_eval(reqs_list_text)

# Take the list of requirements and keep only the package name
# from each requirement, dropping any version specifier.
result = []
for required in requirements:
parts = required.split()
result.append(parts[0])
return result
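As a quick illustration of the assumptions listed in the docstring, the following standalone snippet (not part of the diff) walks a made-up setup.py through the same parsing steps get_required_packages() performs; the file contents and package names are invented.

import ast

SETUP_PY_CONTENTS = """
REQUIREMENTS = [
    'google-cloud-core >= 0.20.0',
    'grpcio >= 1.0.0',
]

setup(
    name='google-cloud-datastore',
    install_requires=REQUIREMENTS,
)
"""

# ``REQUIREMENTS`` appears exactly twice (definition and keyword use), so
# splitting on it yields three pieces and the middle one holds the list.
_, reqs_section, _ = SETUP_PY_CONTENTS.split('REQUIREMENTS')
reqs_list_text = reqs_section[reqs_section.index('['):reqs_section.index(']') + 1]
requirements = ast.literal_eval(reqs_list_text)
print([required.split()[0] for required in requirements])
# Prints ['google-cloud-core', 'grpcio']: names only, version specifiers dropped.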


def get_dependency_graph(package_list):
"""Get a directed graph of package dependencies.

:type package_list: list
:param package_list: The list of **all** valid packages.

:rtype: dict
:returns: A dictionary where keys are packages and values are
the set of packages that depend on the key.
"""
result = {package: set() for package in package_list}
for package in package_list:
setup_file = os.path.join(PROJECT_ROOT, package,
'setup.py')
with open(setup_file, 'r') as file_obj:
file_contents = file_obj.read()

requirements = get_required_packages(file_contents)
for requirement in requirements:
if not requirement.startswith(PACKAGE_PREFIX):
continue
_, req_package = requirement.split(PACKAGE_PREFIX)
req_package = req_package.replace('-', '_')
result[req_package].add(package)

return result
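The reverse-dependency shape described in the docstring can be seen with a small in-memory stand-in (illustrative only, not part of the diff): instead of reading each package's setup.py from disk, the requirements are hard-coded for three hypothetical packages.

PACKAGE_REQUIREMENTS = {
    'core': [],
    'datastore': ['google-cloud-core >= 0.20.0'],
    'storage': ['google-cloud-core >= 0.20.0'],
}
PACKAGE_PREFIX = 'google-cloud-'

graph = {package: set() for package in PACKAGE_REQUIREMENTS}
for package, requirements in PACKAGE_REQUIREMENTS.items():
    for requirement in requirements:
        name = requirement.split()[0]            # drop the version specifier
        if not name.startswith(PACKAGE_PREFIX):
            continue
        _, req_package = name.split(PACKAGE_PREFIX)
        graph[req_package.replace('-', '_')].add(package)

print(graph['core'])   # {'datastore', 'storage'} (set order may vary)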


def follow_dependencies(subset, package_list):
"""Get a directed graph of package dependencies.

:type subset: list
:param subset: List of a subset of package names.

:type package_list: list
:param package_list: The list of **all** valid packages.

:rtype: list
:returns: An expanded list of packages containing everything
in ``subset`` and any packages that depend on those.
"""
dependency_graph = get_dependency_graph(package_list)

curr_pkgs = None
updated_pkgs = set(subset)
while curr_pkgs != updated_pkgs:
curr_pkgs = updated_pkgs
updated_pkgs = set(curr_pkgs)
for package in curr_pkgs:
updated_pkgs.update(dependency_graph[package])

return sorted(curr_pkgs)
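Finally, a minimal sketch of the fixed-point loop in follow_dependencies(), run against the same hypothetical reverse graph as in the previous sketch (not taken from the diff): the selected set keeps growing until adding dependents changes nothing.

DEPENDENTS = {
    'core': {'datastore', 'storage'},   # packages that depend on 'core'
    'datastore': set(),
    'storage': set(),
}

def expand(subset, dependents):
    selected = set(subset)
    while True:
        grown = set(selected)
        for package in selected:
            grown |= dependents[package]
        if grown == selected:           # stable: nothing new was added
            return sorted(selected)
        selected = grown

print(expand(['core'], DEPENDENTS))     # ['core', 'datastore', 'storage']
print(expand(['storage'], DEPENDENTS))  # ['storage']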
8 changes: 4 additions & 4 deletions scripts/verify_included_modules.py
@@ -24,10 +24,10 @@

from sphinx.ext.intersphinx import fetch_inventory

from script_utils import PROJECT_ROOT

BASE_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
DOCS_DIR = os.path.join(BASE_DIR, 'docs')

DOCS_DIR = os.path.join(PROJECT_ROOT, 'docs')
IGNORED_PREFIXES = ('test_', '_')
IGNORED_MODULES = frozenset([
'google.cloud.__init__',
@@ -153,7 +153,7 @@ def verify_modules(build_root='_build'):

public_mods = set()
for package in PACKAGES:
library_dir = os.path.join(BASE_DIR, package, 'google', 'cloud')
library_dir = os.path.join(PROJECT_ROOT, package, 'google', 'cloud')
package_mods = get_public_modules(library_dir,
base_package='google.cloud')
public_mods.update(package_mods)