Skip to content

Commit

Permalink
feat(builtin): add LinkablePackageInfo to pkg_npm, js_library & ts_li…
Browse files Browse the repository at this point in the history
  • Loading branch information
gregmagolan committed Mar 28, 2020
1 parent 66db579 commit 1023852
Show file tree
Hide file tree
Showing 9 changed files with 243 additions and 112 deletions.
65 changes: 55 additions & 10 deletions internal/js_library/js_library.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.

"""js_library allows defining a set of javascript sources and assigning a module_name and module_root.
"""js_library allows defining a set of javascript sources and assigning a package_name.
DO NOT USE - this is not fully designed, and exists only to enable testing within this repo.
"""

load("//internal/providers:linkable_package_info.bzl", "LinkablePackageInfo")

_AMD_NAMES_DOC = """Mapping from require module names to global variables.
This allows devmode JS sources to load unnamed UMD bundles from third-party libraries."""

Expand Down Expand Up @@ -46,23 +48,66 @@ def write_amd_names_shim(actions, amd_names_shim, targets):
amd_names_shim_content += "define(\"%s\", function() { return %s });\n" % n
actions.write(amd_names_shim, amd_names_shim_content)

def _impl(ctx):
    """Implementation of the _js_library rule.

    Validates srcs, then returns DefaultInfo/AmdNamesInfo providers and,
    when package_name is set, a LinkablePackageInfo so the linker can place
    these sources into the node_modules tree under that package name.
    """
    if not ctx.files.srcs:
        fail("No srcs specified")

    # All srcs must be either source files or generated files; the linkable
    # path below differs between the two, so mixing them is rejected.
    source_files = ctx.files.srcs[0].is_source
    for src in ctx.files.srcs:
        if src.is_source != source_files:
            fail("Mixing of source and generated files not allowed")

    sources_depset = depset(ctx.files.srcs)

    result = [
        DefaultInfo(
            files = sources_depset,
            runfiles = ctx.runfiles(files = ctx.files.srcs),
        ),
        AmdNamesInfo(names = ctx.attr.amd_names),
    ]

    if ctx.attr.package_name:
        # Source files are linked from the workspace; generated files must be
        # addressed under the bazel-bin output root instead. Empty path
        # segments are filtered out so we don't produce "//" or a leading "/".
        if source_files:
            path = "/".join([p for p in [ctx.label.workspace_root, ctx.label.package] if p])
        else:
            path = "/".join([p for p in [ctx.bin_dir.path, ctx.label.workspace_root, ctx.label.package] if p])
        result.append(LinkablePackageInfo(
            package_name = ctx.attr.package_name,
            path = path,
            files = sources_depset,
        ))

    return result

# Private rule wrapped by the js_library macro below. The macro rejects a
# user-supplied module_name, so module_name/module_root here exist only to
# feed the legacy ts_library module_mapping aspect.
_js_library = rule(
    implementation = _impl,
    attrs = {
        # npm package name under which srcs are linked into node_modules
        "package_name": attr.string(),
        "srcs": attr.label_list(allow_files = True),
        "amd_names": attr.string_dict(doc = _AMD_NAMES_DOC),
        # module_name for legacy ts_library module_mapping support
        # TODO: remove once legacy module_mapping is removed
        "module_name": attr.string(),
        "module_root": attr.string(),
    },
)

def js_library(
        name,
        srcs,
        amd_names = {},
        package_name = None,
        **kwargs):
    """Groups a set of JavaScript sources and assigns a linkable package name.

    Args:
      name: target name
      srcs: JavaScript source files
      amd_names: mapping from require module names to global variables
        (see _AMD_NAMES_DOC)
      package_name: npm package name under which srcs are linked into the
        node_modules tree via LinkablePackageInfo
      **kwargs: additional arguments forwarded to the underlying rule
    """

    # module_name was replaced by package_name; fail loudly so callers
    # migrate instead of silently losing their mapping.
    module_name = kwargs.pop("module_name", None)
    if module_name:
        fail("use package_name instead of module_name in target //%s:%s" % (native.package_name(), name))
    _js_library(
        name = name,
        srcs = srcs,
        amd_names = amd_names,
        package_name = package_name,
        # module_name for legacy ts_library module_mapping support
        # TODO: remove once legacy module_mapping is removed
        module_name = package_name,
        **kwargs
    )
2 changes: 1 addition & 1 deletion internal/linker/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ This means you need a workflow like `npm link` to symlink the package from the `
Under Bazel, we have exactly this monorepo feature. But, we want users to have a better experience than lerna: they shouldn't need to run any tool other than `bazel test` or `bazel run` and they expect programs to work, even when they `require()` some local package from the monorepo.

To make this seamless, we run a linker as a separate program inside the Bazel action, right before node.
It does essentially the same job as Lerna: make sure there is a `$PWD/node_modules` tree and that all the semantics from Bazel (such as `module_name`/`module_root` attributes) are mapped to the node module resolution algorithm, so that the node runtime behaves the same way as if the packages had been installed from npm.
It does essentially the same job as Lerna: make sure there is a `$PWD/node_modules` tree and that all the semantics from Bazel (such as the `LinkablePackageInfo` provider) are mapped to the node module resolution algorithm, so that the node runtime behaves the same way as if the packages had been installed from npm.

Note that the behavior of the linker depends on whether the package to link was declared as:

Expand Down
75 changes: 43 additions & 32 deletions internal/linker/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -127,15 +127,16 @@ Include as much of the build output as you can without disclosing anything confi
const fromManifest = runfiles.lookupDirectory(root);
if (fromManifest)
return fromManifest;
// Account for Bazel --legacy_external_runfiles
// which look like 'my_wksp/external/npm/node_modules'
if (yield exists(path.join('external', root))) {
log_verbose('found legacy_external_runfiles, switching root to', path.join('external', root));
if (runfiles.execroot) {
// Under execroot there is an external folder in the root which looks
// like 'my_wksp/external/npm/node_modules'
return path.join('external', root);
}
// The repository should be layed out in the parent directory
// since bazel sets our working directory to the repository where the build is happening
return path.join('..', root);
else {
// Under runfiles, the repository should be laid out in the parent directory
// since bazel sets our working directory to the repository where the build is happening
return path.join('..', root);
}
});
}
class Runfiles {
Expand Down Expand Up @@ -424,26 +425,12 @@ Include as much of the build output as you can without disclosing anything confi
const [modulesManifest] = args;
let { bin, root, modules, workspace } = JSON.parse(fs.readFileSync(modulesManifest));
modules = modules || {};
log_verbose(`module manifest: workspace ${workspace}, bin ${bin}, root ${root} with first-party packages\n`, modules);
log_verbose(`module manifest '${modulesManifest}': workspace ${workspace}, bin ${bin}, root ${root} with first-party packages\n`, modules);
const rootDir = yield resolveRoot(root, runfiles);
log_verbose('resolved root', root, 'to', rootDir);
log_verbose('cwd', process.cwd());
// Bazel starts actions with pwd=execroot/my_wksp
const workspaceDir = path.resolve('.');
// Convert from runfiles path
// this_wksp/path/to/file OR other_wksp/path/to/file
// to execroot path
// path/to/file OR external/other_wksp/path/to/file
function toWorkspaceDir(p) {
if (p === workspace) {
return '.';
}
// The manifest is written with forward slash on all platforms
if (p.startsWith(workspace + '/')) {
return p.substring(workspace.length + 1);
}
return path.join('external', p);
}
// Create the $pwd/node_modules directory that node will resolve from
yield symlink(rootDir, 'node_modules');
process.chdir(rootDir);
Expand All @@ -455,20 +442,44 @@ Include as much of the build output as you can without disclosing anything confi
yield mkdirp(path.dirname(m.name));
if (m.link) {
const [root, modulePath] = m.link;
const externalPrefix = 'external/';
let target = '<package linking failed>';
switch (root) {
case 'bin':
// If we are in the execroot then add the bin path to the target; otherwise
// we are in runfiles and the bin path should be omitted.
// FIXME(#1196)
target = runfiles.execroot ? path.join(workspaceAbs, bin, toWorkspaceDir(modulePath)) :
path.join(workspaceAbs, toWorkspaceDir(modulePath));
break;
case 'src':
target = path.join(workspaceAbs, toWorkspaceDir(modulePath));
case 'execroot':
if (runfiles.execroot) {
target = path.posix.join(workspaceAbs, modulePath);
}
else {
// If under runfiles, convert from execroot path to runfiles path.
// First strip the bin portion if it exists:
let runfilesPath = modulePath;
if (runfilesPath.startsWith(`${bin}/`)) {
runfilesPath = runfilesPath.slice(bin.length + 1);
}
else if (runfilesPath === bin) {
runfilesPath = '';
}
// Next replace `external/` with `../` if it exists:
if (runfilesPath.startsWith(externalPrefix)) {
runfilesPath = `../${runfilesPath.slice(externalPrefix.length)}`;
}
target = path.posix.join(workspaceAbs, runfilesPath);
}
break;
case 'runfiles':
target = runfiles.resolve(modulePath) || '<runfiles resolution failed>';
// Transform execroot path to the runfiles manifest path so that
// it can be resolved with runfiles.resolve()
let runfilesPath = modulePath;
if (runfilesPath.startsWith(`${bin}/`)) {
runfilesPath = runfilesPath.slice(bin.length + 1);
}
if (runfilesPath.startsWith(externalPrefix)) {
runfilesPath = runfilesPath.slice(externalPrefix.length);
}
else {
runfilesPath = `${workspace}/${runfilesPath}`;
}
target = runfiles.resolve(runfilesPath) || '<runfiles resolution failed>';
break;
}
yield symlink(target, m.name);
Expand Down
59 changes: 36 additions & 23 deletions internal/linker/link_node_modules.bzl
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ linker, which uses the mappings to link a node_modules directory for
runtimes to locate all the first-party packages.
"""

load("//internal/providers:linkable_package_info.bzl", "LinkablePackageInfo")
load("//internal/providers:npm_package_info.bzl", "NpmPackageInfo")

def _debug(vars, *args):
Expand All @@ -36,16 +37,21 @@ def add_arg(args, arg):

def _link_mapping(label, mappings, k, v):
    """Decides whether the mapping k -> v should be written into mappings.

    Returns True when the new mapping should be recorded (possibly
    overwriting an existing entry), False when the existing entry wins.
    Fails the build on an irreconcilable conflict between two mappings
    for the same package name.
    """
    if k in mappings and mappings[k] != v:
        # Allow all other mappings to win over legacy "_tslibrary"
        if mappings[k][0] == "_tslibrary":
            return True
        elif v[0] == "_tslibrary":
            return False
        if v[1] == mappings[k][1]:
            # Allow "execroot" to win over "runfiles"
            # For example,
            # mappings[k] = ["runfiles", "angular/packages/compiler"]
            # v = ["execroot", "angular/packages/compiler"]
            if mappings[k][0] == "runfiles":
                return True
            elif v[0] == "runfiles":
                return False
        fail(("conflicting mapping at '%s': '%s' maps to both %s and %s" % (label, k, mappings[k], v)), "deps")
    else:
        return True

Expand All @@ -61,7 +67,7 @@ def write_node_modules_manifest(ctx, extra_data = []):
# We always map the workspace to itself to support absolute require like
# import from 'my_wksp/path/to/file'
# and it's always a bin dependency, TODO: let user control via package.json
ctx.workspace_name: ["bin", ctx.workspace_name],
ctx.workspace_name: ["execroot", ctx.bin_dir.path],
}
node_modules_root = ""

Expand All @@ -78,6 +84,10 @@ def write_node_modules_manifest(ctx, extra_data = []):
# ...first-party packages to be linked into the node_modules tree
for k, v in getattr(dep, _ASPECT_RESULT_NAME, {}).items():
if _link_mapping(dep.label, mappings, k, v):
# Special case for ts_library module_name for legacy behavior and for AMD name
# work-around. Do not propagate tslibrary root type to runtime.
if v[0] == "_tslibrary":
v = ["execroot", v[1]]
_debug(ctx.var, "Linking %s: %s" % (k, v))
mappings[k] = v

Expand Down Expand Up @@ -110,6 +120,7 @@ def _get_module_mappings(target, ctx):
"""
mappings = {}

# Propagate transitive mappings
for name in _MODULE_MAPPINGS_DEPS_NAMES:
for dep in getattr(ctx.rule.attr, name, []):
for k, v in getattr(dep, _ASPECT_RESULT_NAME, {}).items():
Expand All @@ -124,29 +135,31 @@ def _get_module_mappings(target, ctx):
v = ["runfiles", v[1]]
if _link_mapping(target.label, mappings, k, v):
mappings[k] = v
_debug(ctx.var, "target %s propagating module mapping %s: %s" % (dep, k, v))
_debug(ctx.var, "target %s propagating module mapping %s: %s" % (dep.label, k, v))

if not getattr(ctx.rule.attr, "module_name", None) and not getattr(ctx.rule.attr, "module_root", None):
# Look for LinkablePackageInfo mapping in this node
if not LinkablePackageInfo in target:
# No mappings contributed here, short-circuit with the transitive ones we collected
_debug(ctx.var, "No module_name or module_root attr for", target.label)
_debug(ctx.var, "No LinkablePackageInfo for", target.label)
return mappings

# When building a mapping for use at runtime, we need paths to be relative to
# the runfiles directory. This requires the workspace_name to be prefixed on
# each module root.
workspace_name = target.label.workspace_name if target.label.workspace_name else ctx.workspace_name

mn = getattr(ctx.rule.attr, "module_name", target.label.name)
mr = "%s/%s" % (workspace_name, target.label.package)

# since our module mapping is currently based on attribute names,
# allow a special one to instruct the linker that the package has no output
# directory and is therefore meant to be used as sources.
# TODO: This belongs in a different mechanism like a package.json field.
if getattr(ctx.rule.attr, "module_from_src", False):
mr = ["src", mr]
else:
mr = ["bin", mr]
linkable_package_info = target[LinkablePackageInfo]

mn = linkable_package_info.package_name
mr = ["execroot", linkable_package_info.path]

# Special case for ts_library module_name for legacy behavior and for AMD name work-around
# Tag the mapping as "_tslibrary" so it can be overridden by any other mapping if found.
#
# In short, ts_library module_name attribute results in both setting the AMD name (which
# desired and necessary in devmode which outputs UMD) and in making a linkable mapping. Because
# of this, you can get in the situation where a ts_library module_name and a downstream pkg_npm
# package_name conflict and result in duplicate mappings. This work-around will make this
# situation work however it is not a recommended pattern since a ts_library can be a dep of a
# pkg_npm but not vice versa at the moment since ts_library cannot handle directory artifacts as
# deps.
if hasattr(linkable_package_info, "_tslibrary") and linkable_package_info._tslibrary:
mr[0] = "_tslibrary"

if _link_mapping(target.label, mappings, mn, mr):
_debug(ctx.var, "target %s adding module mapping %s: %s" % (target.label, mn, mr))
Expand Down
Loading

0 comments on commit 1023852

Please sign in to comment.