diff --git a/.travis.yml b/.travis.yml
index e8928c3..4f4b95e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,14 +8,9 @@ os:
- osx
env:
- - V=0.4.4
- - V=0.4.3
- - V=0.4.2
- - V=0.4.1
- - V=0.4.0
- - V=0.3.2
- - V=0.3.1
-
+ - V=0.5.4
+ - V=0.5.3
+  # Bazel versions below 0.5.3 are not compatible
before_install:
- OS=linux
@@ -35,33 +30,7 @@ before_install:
- rm -f install.sh
script:
- - |
- bazel \
- --output_base=$HOME/.cache/bazel \
- --batch \
- --host_jvm_args=-Xmx500m \
- --host_jvm_args=-Xms500m \
- test \
- --verbose_failures \
- --sandbox_debug \
- --spawn_strategy=standalone \
- --genrule_strategy=standalone \
- --local_resources=400,1,1.0 \
- //... \
- $FLAGS
- bazel \
- --output_base=$HOME/.cache/bazel \
- --batch \
- --host_jvm_args=-Xmx500m \
- --host_jvm_args=-Xms500m \
- run \
- --verbose_failures \
- --sandbox_debug \
- --spawn_strategy=standalone \
- --genrule_strategy=standalone \
- --local_resources=400,1,1.0 \
- //examples/foo \
- $FLAGS
+ - make test_all
notifications:
email: false
diff --git a/CHANGES.md b/CHANGES.md
new file mode 100644
index 0000000..8953707
--- /dev/null
+++ b/CHANGES.md
@@ -0,0 +1,9 @@
+v0.4.0 (Thu Sep 21 2017)
+
+This is a complete rewrite of `rules_node`. In prior releases, `npm`
+was used to install dependencies; this has been replaced with `yarn`.
+The mechanism for associating external (npm) node modules was
+previously based on assembling a `NODE_PATH` environment variable.
+This has been replaced by code that constructs a fresh `node_modules/`
+tree for each `node_binary` rule, which has much better hermeticity
+characteristics.
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e06e30a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,7 @@
+test_all:
+ (cd tests/helloworld && bazel test //:helloworld_test)
+ (cd tests/lyrics && bazel test //:lyrics_test)
+ (cd tests/express && bazel test //:server_test)
+ (cd tests/namespace && bazel test //:question_test)
+ (cd tests/typescript && bazel test //:typescript_test)
+ (cd tests/mocha && bazel test //:test)
diff --git a/README.md b/README.md
index 65ca819..b9725bb 100644
--- a/README.md
+++ b/README.md
@@ -1,201 +1,360 @@
-| Bazel | NodeJs |
+| Bazel | NodeJs | Yarn |
# `rules_node` [](https://travis-ci.org/pubref/rules_node)
-Put `rules_node` in your `WORKSPACE` and load the main repository
-dependencies. This will download the nodejs toolchain including
-`node` (6.6.x) and `npm`.
-
-```python
-git_repository(
- name = "org_pubref_rules_node",
- remote = "https://github.com/pubref/rules_node.git",
- commit = "{HEAD}",
-)
-
-load("@org_pubref_rules_node//node:rules.bzl", "node_repositories")
-
-node_repositories()
-```
-
# Rules
| Rule | Description |
| ---: | :---------- |
| [node_repositories](#node_repositories) | Install node toolchain. |
-| [npm_repository](#npm_repository) | Install a set of npm dependencies. |
-| [node_library](#node_library) | Define a local npm module. |
-| [node_binary](#node_binary) | Build or execute a nodejs script. |
+| [yarn_modules](#yarn_modules) | Install a set of node_modules dependencies using yarn. |
+| [node_module](#node_module) | Define a node module from a set of source files and a main (or index) source file. |
+| [node_binary](#node_binary) | Build a node_modules tree and execute an entrypoint module script. |
| [mocha_test](#mocha_test) | Run a mocha test script. |
## node_repositories
-WORKSPACE rule that downloads and configures the node toolchain.
+WORKSPACE rule that downloads and configures node based on your
+operating system. Includes `node` (7.10.1) and `yarn` (1.0.1).
+
+```python
+RULES_NODE_COMMIT = '...' # Update to current HEAD
+RULES_NODE_SHA256 = '...'
+
+http_archive(
+ name = "org_pubref_rules_node",
+ url = "https://github.com/pubref/rules_node/archive/%s.zip" % RULES_NODE_COMMIT,
+ strip_prefix = "rules_node-%s" % RULES_NODE_COMMIT,
+ sha256 = RULES_NODE_SHA256,
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories")
-## npm_repository
+node_repositories()
+```
+
+## yarn_modules
-Install a set of npm dependencies into a `node_modules` folder as an
-external workspace. For example:
+Install a set of module dependencies into an external workspace
+(named `yarn_modules` here). Requires either a `package.json` file or
+a `deps` dictionary as input.
```python
# In WORKSPACE
-load("@org_pubref_rules_node//node:rules.bzl", "npm_repository")
+load("@org_pubref_rules_node//node:rules.bzl", "yarn_modules")
+
+# Use a package.json file as input. The location of the
+# package.json file is arbitrary.
+yarn_modules(
+ name = "yarn_modules",
+ package_json = "//:package.json",
+)
-npm_repository(
- name = "npm_react_stack",
+# Shortcut form without a separate package.json file
+yarn_modules(
+ name = "yarn_modules",
deps = {
"react": "15.3.2",
"react-dom": "15.3.2",
},
- sha256 = "dedabd07bf8399ef5bd6032e87a3ea17eef08183d8766ccedaef63d7707283b6",
)
```
-You can then refer to `@npm_react_stack//:modules` in the `modules`
-attribute of a `node_binary` or `node_library` rule.
-
-#### About the sha256 option
+How It Works:
-`sha256` is optional. The expected value is the output of `sha256sum
-node_modules.tar` (linux) or `shasum -a256 node_modules.tar` (osx),
-where `node_modules.tar` is an archive file created from the aggregate
-contents of the `node_modules` folder created by `npm install` (and
-where (hopefully) all non-deterministic bits (timestamps, variable
-data) have been stripped out).
+1. Create an external workspace `@yarn_modules` at `$(bazel info
+output_base)/external/yarn_modules`.
+2. Invoke `yarn install` to create a `node_modules` folder and
+populate it with the necessary dependencies.
-There is no convenient way to determine this sha256 other than by
-attempting to install it against a false value (for example: `sha256 =
-"foo"`), at which point bazel will print the expected value. You can
-then copy-paste that output into your `WORKSPACE` file.
+3. Read the generated `yarn.lock` file, parse it, and write out a
+   `@yarn_modules//:BUILD` file.  This file contains a `node_module`
+   rule for each entry in the `yarn.lock` file, a `node_module` rule
+   with the special name `_all_`, and an `sh_binary` rule for each
+   executable script in the `node_modules/.bin` folder (see the
+   sketch below).
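+
+For illustration, the generated `@yarn_modules//:BUILD` might look
+roughly like this (module names, versions, and the `sh_binary` name
+here are hypothetical):
+
+```python
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_module")
+
+# One node_module rule per yarn.lock entry
+node_module(
+    name = "react",
+    version = "15.3.2",
+    ...
+)
+
+# Special aggregate rule providing access to all installed modules
+node_module(
+    name = "_all_",
+    deps = [
+        ":react",
+        ...
+    ],
+)
+
+# One sh_binary per executable script in node_modules/.bin
+sh_binary(
+    name = "some_tool_bin",
+    ...
+)
+```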
-*This assumes you trust the network and the origin of the files* (only
-you can determine this). By setting a `sha256`, you can guard against
-the code changing, but you are not guarding against a malicious
-attacker sneaking in bogus code in the first place.
+> Note 1: You can inspect all the targets by running `bazel query '@yarn_modules//:*'`.
-> Note: the `WORKSPACE` for `rules_node` itself is not yet using the
-> sha256 option as there seems to be remaining non-determinism that
-> renders it flaky.
+> Note 2: The workspace name `yarn_modules` is arbitrary; choose
+whatever you like *other than* `node_modules` (that one doesn't work).
-#### What gets removed before determining the sha256?
+At this point you can use these rule targets as `deps` for your
+`node_module` rules. *Example*:
-In order to make npm deterministic it is necessary to:
-
-1. Remove all file timestamps and user/group information from
- node_modules.
+```python
+node_module(
+ name = "my_module",
+ package_json = "package.json",
+ srcs = glob(["**/*.js"]),
+ deps = [
+ "@yarn_modules//:_all_",
+ ],
+)
+```
-2. Make sure the keys in `package.json` are sorted.
+### yarn_modules attributes
-3. Remove custom npm-related generated fields in `package.json` files
- that carry non-deterministic data.
+| | Type | Name | Description |
+| --- | --- | --- | --- |
+| optional | `label` | `package_json` | A `package.json` file containing the dependencies that should be installed. |
+| optional | `string_dict` | `deps` | A mapping of `name` --> `version` for the dependencies that should be installed. |
-If you find that the
-[default list of blacklisted/excluded attributes](node/internal/npm_repository.bzl)
-is either too aggressive or too lax, it can be configured via the
-`exclude_package_json_keys` attribute.
+> Either `package_json` or `deps` must be present, but not both.
-## node_library
+## node_module
-This rule accepts a list of `srcs` (`*.js`) and other configuration
-attributes. When depended upon, it generates a `package.json` file
-describing the module and the `npm install`'s it in a local
-`node_modules` folder within `bazel-bin`. The name of the module is
-taken by munging the package label, substituting `/` (slash) with `-`
-(dash). For example:
+BUILD file rule that creates a folder which conforms to the nodejs
+[Folders as Modules](https://nodejs.org/api/modules.html#modules_folders_as_modules)
+packaging structure. *Example*:
```python
-load("//node:rules.bzl", "node_library")
-
-node_library(
- name = "baz",
+node_module(
+ name = "my_module",
main = "index.js",
srcs = [
- "qux.js",
+ "lib/util.js",
+ "lib/math.js",
+ ],
+ version = "1.2.0",
+ description = "Example node module",
+ deps = [
+ "@yarn_modules//:lodash",
+ "@yarn_modules//:fs-extra",
],
+)
+```
+
+When used in a `node_binary` rule, this ultimately materializes to:
+
+```
+node_modules/my_module
+node_modules/my_module/package.json
+node_modules/my_module/index.js
+node_modules/my_module/lib/util.js
+node_modules/my_module/lib/math.js
+node_modules/lodash
+node_modules/fs-extra
+```
+
+When used by other `node_module` rules, you can import the module as:
+
+```javascript
+const myModule = require("my_module");
+```
+
+There are three basic ways to create a `node_module` rule:
+
+### 1. Creating a `node_module` with a `package.json` file
+
+```python
+node_module(
+ name = "my_module_1",
+ package_json = "package.json", # label to the 'package.json' file to use directly
)
```
-This will be installed as:
+In this scenario, it is assumed that the `package.json` file has an
+entry that specifies the `main` entrypoint (or not, if you follow the
+[Files as Modules](https://nodejs.org/api/modules.html#modules_file_modules)
+pattern).
-```sh
-INFO: From NpmInstallLocal examples/baz/lib/node_modules/examples-baz/package.json:
-/private/var/tmp/_bazel_user/178d7438552046b1be3cba61fe7b75a8/execroot/rules_node/bazel-out/local-fastbuild/bin/examples/baz/lib
-`-- examples-baz@0.0.0
+### 2. Creating a `node_module` with a label to the `main` entrypoint source file
+
+```python
+node_module(
+ name = "my_module_2",
+ main = "app.js", # label to the entrypoint file for the module
+ version = "1.0.0", # optional arguments to populate the generated package.json file
+ ...
+)
+```
+
+In this scenario, a `package.json` file will be generated for the
+module that specifies the file you provide to the `main` attribute.
+
+### 3. Creating a `node_module` with a label to the `index.js` entrypoint source file
+
+```python
+node_module(
+ name = "my_module_3",
+ index = "index.js", # label to the 'index.js' file to use as the index
+)
```
-The local modules can be `require()`'d in another module as follows:
+> In this scenario, no `package.json` file is generated.
+
+### Module dependencies
+
+Build up a dependency tree via the `deps` attribute:
-```js
-var baz = require("examples-baz");
-console.log('Hello, ' + baz());
```
+node_module(
+ name = "my_module_3",
+ ...
+ deps = [
+ "@yarn_modules//:_all_", # special token '_all_' to have access to all modules
+ ":my_module_1",
+ ],
+)
+```
+
+### Core node_module attributes
+
+| | Type | Name | Default | Description |
+| ---: | :--- | :--- | :--- | :--- |
+| optional | `label` | `package_json` | `None` | Explicitly name a `package.json` file to use for the module. |
+| optional | `label` | `main` | `None` | Source file named in the generated package.json `main` property. |
+| optional | `label` | `index` | `None` | Source file to be used as the index file (suppresses generation of a `package.json` file). |
+| optional | `label_list` | `srcs` | `[]` | Source files to be included in the module. |
+| optional | `label_list` | `deps` | `[]` | `node_module` rule dependencies. |
+
+
+### node_module attributes that affect the name of the module
+
+For reference, by default a `node_module` rule `//src/js:my_module`
+generates `node_modules/src/js/my_module`.
+
+| | Type | Name | Default | Description |
+| ---: | :--- | :--- | :--- | :--- |
+| optional | `string` | `namespace` | `None` | See note 1 below. |
+| optional | `string` | `module_name` | `${ctx.label.package}/${ctx.label.name}` | See note 2 below. |
+| optional | `string` | `separator` | `/` | See note 3 below. |
+
+1 Use this to scope the module with an organization prefix. *Example*: `namespace = '@foo'` generates `node_modules/@foo/my_module` (the namespace replaces the package path).
+
+2 Overrides the module name. *Example*: `module_name = 'barbaz'` with the namespace above generates `node_modules/@foo/barbaz`.
+
+3 *Example*: `separator = '-'` generates `node_modules/src-js-my_module`.
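+
+Putting these together, a hypothetical rule (labels and names here
+are illustrative):
+
+```python
+# src/js/BUILD
+node_module(
+    name = "my_module",
+    namespace = "@foo",      # organization prefix; replaces the package path
+    module_name = "barbaz",  # overrides the default name
+    srcs = glob(["**/*.js"]),
+)
+# materializes as node_modules/@foo/barbaz
+```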
+
+### node_module attributes that affect the generated `package.json`
-This packaging/install cycle occurs on demand and is a nicer way to
-develop nodejs applications with clear dependency requirements. Bazel
-makes this very clean and convenient.
+These are only relevant if you don't explicitly name a `package.json` file.
+
+| | Type | Name | Default | Description |
+| ---: | :--- | :--- | :--- | :--- |
+| optional | `string` | `version` | `1.0.0` | Version string. |
+| optional | `string` | `url` | `None` | URL where the module tgz archive was resolved. |
+| optional | `string` | `sha1` | `None` | SHA-1 hash of the resolved tgz archive. |
+| optional | `string` | `description` | `None` | Module description. |
+
+### node_module attributes that affect the relative path of files included in the module
+
+| | Type | Name | Default | Description |
+| ---: | :--- | :--- | :--- | :--- |
+| optional | `string` | `layout` | `relative` | Changes the way files are included in the module. One of `relative`, `workspace`, or `flat`. |
+
+Consider a file with the label `//src/js/my_module:app.js`. With
+`layout = 'relative'` (the default), the location of the file becomes
+`node_modules/src/js/my_module/app.js` (skylark: `file.short_path`
+relative to `module_name`). Under `layout = 'workspace'`, it becomes
+`node_modules/src/js/my_module/src/js/my_module/app.js` (skylark:
+`file.path`). With `layout = 'flat'`, all files are copied into the
+module root by basename. The `workspace` layout is relevant primarily
+for protocol buffers, where the generated sources import their own
+dependencies relative to the workspace; this structure needs to be
+preserved in the generated module.
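+
+A minimal sketch of the `workspace` layout, assuming the BUILD file
+lives at `src/js/my_module/BUILD`:
+
+```python
+# src/js/my_module/BUILD
+node_module(
+    name = "my_module",
+    srcs = ["app.js"],
+    layout = "workspace",
+    # app.js is copied to <module root>/src/js/my_module/app.js here,
+    # rather than <module root>/app.js under the default 'relative' layout
+)
+```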
## node_binary
-Creates an executable script that will run the file named in the
-`main` attribute. Paths to dependent `node_library` and
-`@npm_repository//:modules` labels are used to construct a `NODE_PATH`
-environment variable that the `node` executable will use to fulfill
-`require` dependencies.
+The `node_binary` rule builds a `node_modules/` tree based on its
+`node_module` dependencies and writes a script to execute a module
+entrypoint.
```python
load("@org_pubref_rules_node//node:rules.bzl", "node_binary")
+node_binary(
+ name = "foo",
+ entrypoint = ":my_module_1",
+)
+```
+
+In the example above, we specify the label of a `node_module` to use
+as the entrypoint.
+
+```python
node_binary(
name = "foo",
main = "foo.js",
- modules = [
- "@npm_react_stack//:modules",
+ deps = [
+        ":my_module_1",
],
)
```
+In this second example, we specify a file to use as the entrypoint.
+Under the hood, a `node_module` named `foo_module` is generated for
+the single `main` file `foo.js`, making this equivalent to the first
+example.
+
+
+### Output structure of files generated for a `node_binary` rule
+
+A `node_binary` rule named `foo` will create a folder having exactly two entries:
+
+1. An executable shell script named `foo`.
+1. A folder named `foo_bundle/` that bundles up all the needed files.
+
+Within `foo_bundle/`, there will also be exactly two entries:
+
+1. The `node` executable itself.
+1. The `node_modules/` folder with all the built/copied modules.
+
+The bash shell script `foo` performs the following:
+
+`cd $(dirname $0)/foo_bundle && exec node node_modules/entrypoint`
+
+
+### Building a deployable bundle
+
+To generate a tarred, gzipped archive of the above example that you
+can ship as a single file, invoke `bazel build
+:{target}_bundle.tgz`. This is similar in intent to the Java
+`{target}_deploy.jar` implicit build rule.
+
+```sh
+$ bazel build :foo_bundle.tgz
+Target //:foo_bundle.tgz up-to-date:
+ bazel-bin/foo_bundle.tgz
+$ du -h bazel-bin/foo_bundle.tgz
+33M bazel-bin/foo_bundle.tgz
+```
## mocha_test
-Runs a mocha test identified by the start script given in `main`.
-External modules dependencies can be listed in the `modules`
-attribute, while internal module dependencies are named in the `deps`
-attribute. Additional arguments to the `mocha` script runner can be
-listed in the `mocha_args` attribute.
+Runs a mocha test identified by the start script given in `main` or
+the module given in `entrypoint`.
-```python
-load("@org_pubref_rules_node//node:rules.bzl", "mocha_test")
+> Note: The mocha_test rule depends on `@mocha_modules//:_all_`, so
+> you'll need to add this dependency in your `WORKSPACE` file:
-mocha_test(
- name = "foo_test",
- main = "foo_test.js",
- modules = [
- "@npm_underscore//:modules",
- ],
- deps = [
- "//examples/baz",
- ],
- mocha_args = [
- "--reporter=dot",
- ]
+```python
+yarn_modules(
+ name = "mocha_modules",
+ deps = {
+ "mocha": "3.5.3",
+ }
)
```
-> Note: to use the mocha_test rules, you'll need to add npm_mocha as a
-> dependency in your `WORKSPACE` file:
+```python
+mocha_test(
+ name = "test",
+ main = "test.js",
+)
-```
-npm_repository(
- name = "npm_mocha",
- deps = {
- "mocha": "3.5.0", # update as needed
- },
+
+mocha_test(
+    name = "module_test",
+    entrypoint = ":my_module",
)
```
+
+## Conclusion
+
+That's it! Please refer to the various workspaces in `tests/` and the source for more detail.
diff --git a/WORKSPACE b/WORKSPACE
index badd93e..a1ac749 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -1,53 +1,5 @@
workspace(name = "org_pubref_rules_node")
-load("//node:rules.bzl", "node_repositories", "npm_repository", "bower_repository")
-node_repositories()
-
-npm_repository(
- name = "npm_glob",
- deps = {
- "glob": "7.1.0",
- },
- #sha256 = "0d694720f9d942d334a45230fdf55ff20e4c78bff8adb67fba99d6d62e27df84",
-)
-
-npm_repository(
- name = "npm_react_stack",
- deps = {
- "react": "15.3.2",
- "react-dom": "15.3.2",
- },
- #sha256 = "fa7f0306841e8f03de78bd4b80f0da525238cf95cb360d7072013ca5fe7215e0",
-)
-
-npm_repository(
- name = "npm_mocha",
- deps = {
- "mocha": "3.1.0",
- },
- #sha256 = "9b48987065bb42003bab81b4538afa9ac194d217d8e2e770a5cba782249f7dc8",
-)
+load("//node:rules.bzl", "node_repositories")
-npm_repository(
- name = "npm_underscore",
- deps = {
- "underscore": "1.8.3",
- },
- #sha256 = "7c413345ad4f97024258e5d9fda40e26be0f2c2b73987d13f03352b5c489b1a8",
-)
-
-npm_repository(
- name = "npm_bower",
- deps = {
- "bower": "1.7.9",
- },
- #sha256 = "7f85a05c00a86b0f9cfd8d58ad61d0447bd9235d13a743ede87af1ca5509403f",
-)
-
-bower_repository(
- name = "bower_react_stack",
- deps = {
- "react": "15.3.2",
- },
- #sha256 = "9779fcd247213b898d53473d4cc884f8b2d64b7d8021f56dd54a6dcd5f1bf845",
-)
+node_repositories()
diff --git a/examples/bar/BUILD b/examples/bar/BUILD
deleted file mode 100644
index 1d7f3db..0000000
--- a/examples/bar/BUILD
+++ /dev/null
@@ -1,6 +0,0 @@
-load("//node:rules.bzl", "node_binary", "node_library")
-
-node_binary(
- name = "bar",
- main = "bar.js",
-)
diff --git a/examples/bar/bar.js b/examples/bar/bar.js
deleted file mode 100644
index aafa1c4..0000000
--- a/examples/bar/bar.js
+++ /dev/null
@@ -1,3 +0,0 @@
-module.exports = function() {
- return "Bar!";
-};
diff --git a/examples/baz/BUILD b/examples/baz/BUILD
deleted file mode 100644
index 6126db6..0000000
--- a/examples/baz/BUILD
+++ /dev/null
@@ -1,12 +0,0 @@
-package(default_visibility = ["//visibility:public"])
-
-load("//node:rules.bzl", "node_binary", "node_library")
-
-node_library(
- name = "baz",
- main = "index.js",
- srcs = [
- "qux.js"
- ],
- modules = ["@npm_glob//:modules"],
-)
diff --git a/examples/baz/index.js b/examples/baz/index.js
deleted file mode 100644
index a7386d0..0000000
--- a/examples/baz/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-var qux = require("./qux.js");
-
-module.exports = function() {
- return "Baz!! (and " + qux() + ")";
-};
diff --git a/examples/baz/qux.js b/examples/baz/qux.js
deleted file mode 100644
index e8147d5..0000000
--- a/examples/baz/qux.js
+++ /dev/null
@@ -1,5 +0,0 @@
-var glob = require("glob");
-
-module.exports = function() {
- return "Qux!!!";
-};
diff --git a/examples/foo/BUILD b/examples/foo/BUILD
deleted file mode 100644
index 67efd4c..0000000
--- a/examples/foo/BUILD
+++ /dev/null
@@ -1,25 +0,0 @@
-load("//node:rules.bzl", "node_binary", "mocha_test")
-
-node_binary(
- name = "foo",
- main = "foo.js",
- modules = [
- "@npm_react_stack//:modules",
- ],
- deps = [
- "//examples/baz",
- ]
-)
-
-mocha_test(
- name = "foo_test",
- main = "foo_test.js",
- size = "small",
- modules = ["@npm_underscore//:modules"],
- deps = [
- "//examples/baz",
- ],
- mocha_args = [
- "--reporter=dot"
- ]
-)
diff --git a/examples/foo/foo.js b/examples/foo/foo.js
deleted file mode 100644
index 5c4698a..0000000
--- a/examples/foo/foo.js
+++ /dev/null
@@ -1,19 +0,0 @@
-// stdlib dependency
-var process = require("process");
-// npm_repository module dependency
-var react = require("react");
-// local module dependency
-var baz = require("examples-baz");
-// relative file dependency
-var bar = require("../bar/bar.js");
-
-console.log('****************************************************************');
-console.log('Hello, Foo and ' + bar() + " and " + baz());
-console.log('****************************************************************');
-
-// console.log("filename:", __filename);
-// console.log("dirname:", __dirname);
-// console.log("process.versions:", process.versions);
-// console.log("process.argv ", process.argv);
-// console.log("require paths:", module.paths);
-// console.log("env:", process.env);
diff --git a/examples/foo/foo_test.js b/examples/foo/foo_test.js
deleted file mode 100644
index 92134c8..0000000
--- a/examples/foo/foo_test.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var assert = require('assert');
-var baz = require('examples-baz');
-var _ = require('underscore');
-
-describe('Array', function() {
- describe('#indexOf()', function() {
- it('should return -1 when the value is not present', function() {
- assert.equal(-1, [1,2,3].indexOf(4));
- });
- });
-});
-
-describe('baz', function() {
- describe('#value', function() {
- it('should return a function', function() {
- assert.equal("function", typeof(baz));
- //assert.ok(baz.indexOf("baz") != -1);
- });
- it('should resolve to string', function() {
- assert.equal("string", typeof(baz()));
- //assert.ok(baz.indexOf("baz") != -1);
- });
- it('should resolve to module name', function() {
- assert.ok(baz().match(/.*[bB]az.*/));
- });
- });
-});
diff --git a/node/BUILD b/node/BUILD
index 3e880d6..b5e0682 100644
--- a/node/BUILD
+++ b/node/BUILD
@@ -1,5 +1,5 @@
package(default_visibility = ["//visibility:public"])
exports_files([
- "package.json.tpl",
+ "internal/node_launcher.sh",
])
diff --git a/node/internal/bower_repository.bzl b/node/internal/bower_repository.bzl
deleted file mode 100644
index ae21879..0000000
--- a/node/internal/bower_repository.bzl
+++ /dev/null
@@ -1,89 +0,0 @@
-load("//node:internal/dar.bzl", "dar_attrs", "dar_execute")
-load("//node:internal/dson.bzl", "dson_attrs", "dson_execute")
-load("//node:internal/sha256.bzl", "sha256_attrs", "sha256_execute")
-load("//node:internal/node_utils.bzl", "execute", "node_attrs")
-
-BUILD_FILE = """package(default_visibility = ["//visibility:public"])
-filegroup(
- name = "components",
- srcs = glob(["{components_path}/**/*"]),
-)
-exports_files(["{components_path}"])
-"""
-
-
-_bower_repository_attrs = node_attrs + dar_attrs + dson_attrs + sha256_attrs + {
- "bower": attr.label(
- default = Label("@npm_bower//:node_modules/bower/bin/bower"),
- single_file = True,
- allow_files = True,
- executable = True,
- cfg = "host",
- ),
-
- # dar_attrs redefines
- "dar_filename": attr.string(
- default = "bower_components",
- ),
- "dar_root": attr.string(
- default = "bower_components",
- ),
-
- # dson_attrs redefines
- "dson_path": attr.string(
- default = "bower_components",
- ),
- "dson_filenames": attr.string_list(
- default = ["bower.json", ".bower.json"],
- ),
- "dson_exclude_keys": attr.string_list(
- default = [
- "__dummy_entry_to_prevent_empty_list__",
- ],
- ),
-
- "registry": attr.string(),
- "deps": attr.string_dict(mandatory = True),
-}
-
-def _bower_repository_impl(ctx):
- node = ctx.path(ctx.attr.node)
- nodedir = node.dirname.dirname
- bower = ctx.path(ctx.attr.bower)
-
- bower_json = ['{']
- bower_json.append('"name": "%s"' % ctx.name)
- if ctx.attr.registry:
- bower_json.append('"registry": "%s"' % ctx.attr.registry)
- bower_json.append('}')
- ctx.file("bower.json", "\n".join(bower_json))
-
- cmd = [
- node,
- bower,
- "install",
- ]
-
- modules = []
- for k, v in ctx.attr.deps.items():
- if v:
- modules.append("%s#%s" % (k, v))
- else:
- modules.append(k)
- cmd += modules
-
- output = execute(ctx, cmd).stdout
-
- if ctx.attr.sha256:
- dson_execute(ctx, dson_path = "bower_components")
- dar_execute(ctx, dar_root = "bower_components")
- sha256_execute(ctx, "bower_components.tar")
-
- ctx.file("BUILD", BUILD_FILE.format(
- components_path = "bower_components",
- ))
-
-bower_repository = repository_rule(
- implementation = _bower_repository_impl,
- attrs = _bower_repository_attrs,
-)
diff --git a/node/internal/dar.bzl b/node/internal/dar.bzl
deleted file mode 100644
index 50c0830..0000000
--- a/node/internal/dar.bzl
+++ /dev/null
@@ -1,38 +0,0 @@
-load("//node:internal/node_utils.bzl", "execute")
-
-dar_attrs = {
- "dar": attr.label(
- default = Label("//node:tools/dar.py"),
- single_file = True,
- allow_files = True,
- cfg = "host",
- ),
- "dar_filename": attr.string(
- default = "node_modules",
- ),
- "dar_root": attr.string(
- default = "lib/node_modules",
- ),
-}
-
-def dar_execute(ctx, dar_root=None):
- python = ctx.which("python")
- if not python:
- fail("python not found (is it present in your PATH?)")
-
- dar_filename = ctx.attr.dar_filename
- dar_file = "%s.tar" % ctx.attr.dar_filename
- dar_py = ctx.path(ctx.attr.dar)
- if not dar_root:
- dar_root=ctx.attr.dar_root
- tarfile = "%s.tar" % dar_filename
-
- cmd = [
- python,
- dar_py,
- "--output", tarfile,
- "--file", "%s=%s" % (dar_filename, dar_root),
- ]
-
- #print("dar: %s" % cmd)
- return execute(ctx, cmd)
diff --git a/node/internal/dson.bzl b/node/internal/dson.bzl
deleted file mode 100644
index 0fe021e..0000000
--- a/node/internal/dson.bzl
+++ /dev/null
@@ -1,61 +0,0 @@
-load("//node:internal/node_utils.bzl", "execute")
-
-dson_attrs = {
- "dson": attr.label(
- default = Label("//node:tools/dson.py"),
- single_file = True,
- allow_files = True,
- cfg = "host",
- ),
- "dson_path": attr.string(
- default = "lib/node_modules",
- ),
- "dson_filenames": attr.string_list(
- default = ["package.json"],
- ),
- "dson_exclude_keys": attr.string_list(
- default = [
- "_args",
- "_from",
- "_inCache",
- "_installable",
- "_nodeVersion",
- "_npmOperationalInternal",
- "_npmUser",
- "_npmVersion",
- "_phantomChildren",
- "_resolved",
- "_requested",
- "_requiredBy",
- "_where",
- ],
- ),
-}
-
-def dson_execute(ctx, dson_path=None):
- python = ctx.which("python")
- if not python:
- fail("python not found (is it present in your PATH?)")
- dson_py = ctx.path(ctx.attr.dson)
- if not dson_path:
- dson_path = ctx.attr.dson_path
- cmd = [
- python,
- dson_py,
- "--path", "%s/%s" % (ctx.path(""), dson_path),
- "--verbose", "--verbose",
- ]
-
- for filename in ctx.attr.dson_filenames:
- cmd += ["--filename", filename]
-
- for key in ctx.attr.dson_exclude_keys:
- cmd += ["--exclude", key]
-
- #print("dson: %s" % cmd)
-
- result = execute(ctx, cmd)
-
- #print("dson-out: %s" % result.stdout)
-
- return result
diff --git a/node/internal/mocha_test.bzl b/node/internal/mocha_test.bzl
index a299c2b..653960b 100644
--- a/node/internal/mocha_test.bzl
+++ b/node/internal/mocha_test.bzl
@@ -1,81 +1,54 @@
-_js_filetype = FileType([".js"])
-
-_modules_filetype = FileType(["node_modules"])
-
-BASH_TEMPLATE = """
-#!/usr/bin/env bash
-set -e
-
-# Resolve to 'this' node instance if other scripts
-# have '/usr/bin/env node' shebangs
-export PATH={node_bin_path}:$PATH
-
-# Used by NPM
-export NODE_PATH={node_paths}
-
-# Run it
-"{node}" "{mocha}" {mocha_args} "{script_path}" $@
-"""
-
-def _get_node_modules_dir_from_binfile(file):
- bin = str(file)
- parts = bin.partition("[source]]")
- prefix = parts[0][len("Artifact:["):]
- suffix_parts = parts[2].split("/")
- #print("prefix: %s, suffix_parts: %s" % (prefix, suffix_parts))
- return "/".join([prefix] + suffix_parts[0:2] + ["node_modules"])
-
-def _get_node_modules_dir_from_package_json(file):
- filename = str(file)
- parts = filename.split("]")
- prefix = parts[0][len("Artifact:[["):]
- middle = parts[1]
- suffix = parts[2].split("/")
- d = "/".join([prefix, middle] + suffix[0:-3] + ["node_modules"])
- return d
-
-def mocha_test_impl(ctx):
- inputs = []
- srcs = []
- script = ctx.file.main
- node = ctx.file._node
- mocha = ctx.file.mocha
- node_paths = []
- node_paths.append(_get_node_modules_dir_from_binfile(mocha))
-
- for file in ctx.files.modules:
- #print("file: %s" % file)
- if not file.basename.endswith("node_modules"):
- fail("npm_dependency should be a path to a node_modules/ directory.")
- node_paths += [_get_node_modules_dir_from_binfile(file)]
-
- for dep in ctx.attr.deps:
- lib = dep.node_library
- srcs += lib.transitive_srcs
- inputs += [lib.package_json, lib.npm_package_json]
- node_paths += [_get_node_modules_dir_from_package_json(lib.package_json)]
- for file in lib.transitive_node_modules:
- node_paths += [file.path]
- inputs.append(file)
-
- node_paths = list(set(node_paths))
+load("//node:internal/node_module.bzl", "node_module")
+load("//node:internal/node_binary.bzl", "copy_modules", "binary_attrs")
+
+
+def _create_launcher(ctx, output_dir, node, mocha):
+ entry_module = ctx.attr.entrypoint.node_module
+ entrypoint = '%s_test/node_modules/%s' % (ctx.label.name, entry_module.name)
+
+ cmd = [
+ node.short_path,
+ ] + ctx.attr.node_args + [
+ mocha.short_path,
+ ] + ctx.attr.mocha_args + [
+ entrypoint,
+ ] + ctx.attr.script_args + [
+ '$@',
+ ]
+
+ lines = [
+ '#!/usr/bin/env bash',
+ 'set -e',
+ ' '.join(cmd)
+ ]
ctx.file_action(
output = ctx.outputs.executable,
executable = True,
- content = BASH_TEMPLATE.format(
- node_paths = ":".join(node_paths),
- node = node.short_path,
- node_bin_path = node.dirname,
- script_path = script.short_path,
- mocha = mocha.path,
- mocha_args = " ".join(ctx.attr.mocha_args),
- ),
+ content = '\n'.join(lines),
)
- #print("node_paths %s" % "\n".join(node_paths))
- runfiles = [node, script, mocha] + inputs + srcs
+def mocha_test_impl(ctx):
+ output_dir = ctx.label.name + '_test'
+ node = ctx.executable._node
+ mocha = ctx.executable._mocha_bin
+
+ all_deps = ctx.attr.deps + [ctx.attr.entrypoint]
+ files = copy_modules(ctx, output_dir, all_deps)
+
+ _create_launcher(ctx, output_dir, node, mocha)
+
+ mocha_deps_all = ctx.attr._mocha_deps.node_module
+ transitive_mocha_files = mocha_deps_all.files.to_list()
+ for dep in mocha_deps_all.transitive_deps:
+ transitive_mocha_files += dep.files.to_list()
+
+ runfiles = [
+ node,
+ mocha,
+ ctx.outputs.executable
+ ] + transitive_mocha_files + files
return struct(
runfiles = ctx.runfiles(
@@ -84,40 +57,46 @@ def mocha_test_impl(ctx):
),
)
-mocha_test = rule(
+
+_mocha_test = rule(
mocha_test_impl,
- attrs = {
- "main": attr.label(
- single_file = True,
- allow_files = True,
- #allow_files = _js_filetype,
- ),
- "data": attr.label_list(
- allow_files = True,
- cfg = "data",
- ),
- "deps": attr.label_list(
- providers = ["node_library"],
- ),
- "modules": attr.label_list(
- allow_files = _modules_filetype,
- ),
- "_node": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:node_tool"),
- single_file = True,
+ attrs = binary_attrs + {
+ "_mocha_bin": attr.label(
+ default = Label("@mocha_modules//:mocha_bin"),
allow_files = True,
executable = True,
cfg = "host",
),
- "mocha": attr.label(
- default = Label("@npm_mocha//:bin/mocha"),
- allow_files = True,
- single_file = True,
- ),
- "mocha_modules": attr.label(
- default = Label("@npm_mocha//:modules"),
+ "_mocha_deps": attr.label(
+ providers = ["node_module"],
+ default = Label("@mocha_modules//:_all_"),
),
"mocha_args": attr.string_list(),
},
test = True,
)
+
+
+def mocha_test(name = None, main = None, entrypoint = None, node_args = [], mocha_args = [], deps = [], visibility = None, size = "small", **kwargs):
+
+ if not entrypoint:
+ if not main:
+ fail('Either an entrypoint node_module or a main script file must be specified')
+ entrypoint = name + '_module'
+ node_module(
+ name = entrypoint,
+ main = main,
+ deps = [],
+ visibility = visibility,
+ **kwargs
+ )
+
+ _mocha_test(
+ name = name,
+ entrypoint = entrypoint,
+ deps = deps,
+ size = size,
+ node_args = node_args,
+ mocha_args = mocha_args,
+ visibility = visibility,
+ )
diff --git a/node/internal/node_binary.bzl b/node/internal/node_binary.bzl
index 9963b72..0d80f14 100644
--- a/node/internal/node_binary.bzl
+++ b/node/internal/node_binary.bzl
@@ -1,112 +1,231 @@
-_js_filetype = FileType([".js"])
-_modules_filetype = FileType(["node_modules"])
+load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar", "pkg_deb")
+load("//node:internal/node_module.bzl", "node_module")
-BASH_TEMPLATE = """
-#!/usr/bin/env bash
-set -e
+_node_filetype = FileType(['.js', '.node'])
-# Resolve to 'this' node instance if other scripts
-# have '/usr/bin/env node' shebangs
-export PATH={node_bin_path}:$PATH
+def _get_relative_dirname(file):
+ return file.path[0:-len(file.short_path)]
-# Used by NPM
-export NODE_PATH={node_paths}
-# Run it but wrap all calls to paths in a call to find. The call to find will
-# search recursively through the filesystem to find the appropriate runfiles
-# directory if that is necessary.
-cd $(find . | grep -m 1 "{node_bin}" | sed 's|{node_bin}$||') && exec "{node_bin}" "{script_path}" $@
-"""
+def _get_filename_relative_to_module(module, file):
+ name = module.name
+ parts = file.path.partition(name)
+ return '/'.join(parts[1:])
-def _get_node_modules_dir_from_package_json(file):
- filename = str(file)
- parts = filename.split("]")
- prefix = parts[0][len("Artifact:[["):]
- middle = parts[1]
- suffix = parts[2].split("/")
- d = "/".join([prefix, middle] + suffix[0:-3] + ["node_modules"])
- return d
+def _copy_module(ctx, output_dir, module):
+ if len(module.files) == 0:
+ return []
+ inputs = []
+ outputs = []
-def _get_node_modules_dir_from_sourcefile(file):
- bin = str(file)
- parts = bin.partition("[source]]")
- prefix = parts[0][len("Artifact:["):]
- suffix_parts = parts[2].split("/")
- return "/".join([prefix] + suffix_parts)
+ script_file = ctx.new_file('%s/copy_%s.sh' % (output_dir, module.identifier))
+ script_lines = []
+ for src in module.files:
+ inputs.append(src)
+ dst_filename = _get_filename_relative_to_module(module, src)
+ dst = ctx.new_file('%s/node_modules/%s' % (output_dir, dst_filename))
+ outputs.append(dst)
+ script_lines.append('cp %s %s' % (src.path, dst.path))
-def node_binary_impl(ctx):
- inputs = []
- srcs = []
- script = ctx.file.main
- node = ctx.file._node
- node_paths = []
-
- for file in ctx.files.modules:
- if not file.basename.endswith("node_modules"):
- fail("npm_dependency should be a path to a node_modules/ directory.")
- node_paths += [_get_node_modules_dir_from_sourcefile(file)]
-
- for dep in ctx.attr.deps:
- lib = dep.node_library
- srcs += lib.transitive_srcs
- inputs += [lib.package_json, lib.npm_package_json]
- node_paths += [_get_node_modules_dir_from_package_json(lib.package_json)]
- for file in lib.transitive_node_modules:
- inputs.append(file)
- node_paths += [file.path]
-
- node_paths = list(set(node_paths))
+ ctx.file_action(
+ output = script_file,
+ content = '\n'.join(script_lines),
+ executable = True,
+ )
+
+ ctx.action(
+ mnemonic = 'CopyModuleWith%sFiles' % len(outputs),
+ inputs = inputs + [script_file],
+ outputs = outputs,
+ command = script_file.path,
+ )
+
+ return outputs
+
+# NOTE(pcj): I tried in vain to make a version of this based on
+# symlinks, either of the folders or the files themselves. Maybe you
+# can get that figured out.
+def copy_modules(ctx, output_dir, deps):
+ outputs = []
+ for dep in deps:
+ module = dep.node_module
+ outputs += _copy_module(ctx, output_dir, module)
+ for module in module.transitive_deps:
+ outputs += _copy_module(ctx, output_dir, module)
+ return outputs
+
+
+def _create_launcher(ctx, output_dir, node):
+ entry_module = ctx.attr.entrypoint.node_module
+ entrypoint = 'node_modules/%s' % entry_module.name
+
+ # cd $(dirname $0)/bundle and exec node node_modules/foo
+ cmd = [
+ 'cd $ROOT/%s' % output_dir,
+ '&&',
+ 'exec',
+ 'node',
+ ] + ctx.attr.node_args + [
+ entrypoint,
+ ] + ctx.attr.script_args + [
+ '$@',
+ ]
+
+ lines = [
+ '#!/usr/bin/env bash',
+ 'set -e',
+
+ # Set the execution root to the same directory where the
+        # script lives. We know for sure that the node executable and
+        # node_modules dir will also be close by, since we
+        # specifically built them here (this means we don't have to go
+        # through backflips to figure out what run context we're in).
+ 'ROOT=$(dirname $0)',
+
+ # Resolve to this node instance if other scripts have
+ # '/usr/bin/env node' shebangs
+ 'export PATH="$ROOT:$PATH"',
+
+ ' '.join(cmd)
+ ]
ctx.file_action(
output = ctx.outputs.executable,
executable = True,
- content = BASH_TEMPLATE.format(
- node_bin = node.path,
- script_path = script.path,
- node_bin_path = node.dirname,
- node_paths = ":".join(node_paths),
- ),
+ content = '\n'.join(lines),
)
- #print("node_paths %s" % "\n".join(node_paths))
- runfiles = [node, script] + inputs + srcs
+def node_binary_impl(ctx):
+ output_dir = ctx.label.name + '_bundle'
+
+ manifest_file = ctx.new_file('%s/node_modules/manifest.json' % output_dir)
+ json = {}
+ all_deps = ctx.attr.deps + [ctx.attr.entrypoint]
+ files = copy_modules(ctx, output_dir, all_deps)
+
+ dependencies = {}
+ for dep in all_deps:
+ module = dep.node_module
+ dependencies[module.name] = module.version
+ json['dependencies'] = struct(**dependencies)
+
+ manifest_content = struct(**json)
+
+ ctx.file_action(
+ output = manifest_file,
+ content = manifest_content.to_json(),
+ )
+
+ node = ctx.new_file('%s/node' % output_dir)
+ ctx.action(
+ mnemonic = 'CopyNode',
+ inputs = [ctx.executable._node],
+ outputs = [node],
+ command = 'cp %s %s' % (ctx.executable._node.path, node.path),
+ )
+
+ _create_launcher(ctx, output_dir, node)
+
+ runfiles = [node, manifest_file, ctx.outputs.executable] + files
+ files = runfiles if ctx.attr.export_files else []
return struct(
runfiles = ctx.runfiles(
files = runfiles,
collect_data = True,
),
+ node_binary = struct(
+ files = runfiles,
+ )
)
-node_binary = rule(
+binary_attrs = {
+ 'entrypoint': attr.label(
+ providers = ['node_module'],
+ mandatory = True,
+ ),
+ 'deps': attr.label_list(
+ providers = ['node_module'],
+ ),
+ 'node_args': attr.string_list(
+ ),
+ 'script_args': attr.string_list(
+ ),
+ '_node': attr.label(
+ default = Label('@node//:node'),
+ single_file = True,
+ allow_files = True,
+ executable = True,
+ cfg = 'host',
+ ),
+}
+
+
+_node_binary = rule(
node_binary_impl,
- attrs = {
- "main": attr.label(
- single_file = True,
- allow_files = True,
- #allow_files = _js_filetype,
- ),
- "data": attr.label_list(
- allow_files = True,
- cfg = "data",
- ),
- "deps": attr.label_list(
- providers = ["node_library"],
- ),
- "modules": attr.label_list(
- allow_files = _modules_filetype,
- ),
- "_node": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:node_tool"),
- single_file = True,
- allow_files = True,
- executable = True,
- cfg = "host",
+ attrs = binary_attrs + {
+ 'export_files': attr.bool(
+ default = False,
),
},
executable = True,
)
+
+
+def node_binary_files_impl(ctx):
+ return struct(
+ files = depset(ctx.attr.target.node_binary.files),
+ )
+
+_node_binary_files = rule(
+ node_binary_files_impl,
+ attrs = {
+ 'target': attr.label(
+ providers = ['node_binary'],
+ mandatory = True,
+ ),
+ },
+)
+
+def node_binary(name = None, main = None, entrypoint = None, version = None, node_args = [], deps = [], extension = 'tgz', visibility = None, **kwargs):
+
+ if not entrypoint:
+ if not main:
+ fail('Either an entrypoint node_module or a main script file must be specified')
+ entrypoint = name + '_module'
+ node_module(
+ name = entrypoint,
+ main = main,
+ deps = [],
+ version = version,
+ visibility = visibility,
+ **kwargs
+ )
+
+ _node_binary(
+ name = name,
+ entrypoint = entrypoint,
+ deps = deps,
+ export_files = name.endswith('_bundle.tgz'),
+ node_args = node_args,
+ visibility = visibility,
+ )
+
+ _node_binary_files(
+ name = name + '_files',
+ target = name,
+ visibility = visibility,
+ )
+
+ pkg_tar(
+ name = name + '_bundle',
+ extension = extension,
+ package_dir = name,
+ srcs = [name + '_files'],
+ visibility = visibility,
+ strip_prefix = '.',
+ )
diff --git a/node/internal/node_library.bzl b/node/internal/node_library.bzl
deleted file mode 100644
index 2f53d21..0000000
--- a/node/internal/node_library.bzl
+++ /dev/null
@@ -1,183 +0,0 @@
-_js_filetype = FileType([".js"])
-_modules_filetype = FileType(["node_modules"])
-
-def _get_node_modules_dir(file, include_node_modules = True):
- filename = str(file)
- parts = filename.split("]")
- prefix = parts[0][len("Artifact:[["):]
- middle = parts[1]
- suffix = parts[2].split("/")
- components = [prefix, middle] + suffix[0:-1]
- if include_node_modules:
- components.append("node_modules")
- d = "/".join(components)
- return d
-
-
-def _get_lib_name(ctx):
- name = ctx.label.name
- parts = ctx.label.package.split("/")
- if (len(parts) == 0) or (name != parts[-1]):
- parts.append(name)
- if ctx.attr.use_prefix:
- parts.insert(0, ctx.attr.prefix)
- return "-".join(parts)
-
-
-def _copy_to_namespace(base, file):
- steps = []
- src = file.path
- dst = file.basename
- short_parts = file.short_path.split('/')
- if short_parts:
- dst_dir = "/".join(short_parts[0:-1])
- dst = dst_dir + "/" + dst
- steps.append("mkdir -p %s/%s" % (base, dst_dir))
- steps.append("cp -f %s %s/%s" % (src, base, dst))
- return steps
-
-
-def node_library_impl(ctx):
- node = ctx.executable._node
- npm = ctx.executable._npm
- modules = ctx.attr.modules
-
- lib_name = _get_lib_name(ctx)
- stage_name = lib_name + ".npmfiles"
-
- srcs = ctx.files.srcs
- script = ctx.file.main
- if not script and len(srcs) > 0:
- script = srcs[0]
-
- package_json_template_file = ctx.file.package_json_template_file
- package_json_file = ctx.new_file(stage_name + "/package.json")
- npm_package_json_file = ctx.new_file("lib/node_modules/%s/package.json" % lib_name)
-
- transitive_srcs = []
- transitive_node_modules = []
-
- files = []
- for d in ctx.attr.data:
- for file in d.files:
- files.append(file)
-
- for dep in ctx.attr.deps:
- lib = dep.node_library
- transitive_srcs += lib.transitive_srcs
- transitive_node_modules += lib.transitive_node_modules
-
- ctx.template_action(
- template = package_json_template_file,
- output = package_json_file,
- substitutions = {
- "%{name}": lib_name,
- "%{main}": script.short_path if script else "",
- "%{version}": ctx.attr.version,
- "%{description}": ctx.attr.d,
- },
- )
-
- npm_prefix_parts = _get_node_modules_dir(package_json_file, False).split("/")
- npm_prefix = "/".join(npm_prefix_parts[0:-1])
- staging_dir = "/".join([npm_prefix, stage_name])
-
- cmds = []
- cmds += ["mkdir -p %s" % staging_dir]
-
- if script:
- cmds += _copy_to_namespace(staging_dir, script)
- for src in srcs:
- cmds += _copy_to_namespace(staging_dir, src)
- for file in files:
- cmds += _copy_to_namespace(staging_dir, file)
-
- install_cmd = [
- node.path,
- npm.path,
- "install",
- #"--verbose",
- "--global", # remember you need --global + --prefix
- "--prefix",
- npm_prefix,
- ]
-
- install_cmd.append(staging_dir)
- cmds.append(" ".join(install_cmd))
-
- #print("cmds: \n%s" % "\n".join(cmds))
-
- ctx.action(
- mnemonic = "NpmInstallLocal",
- inputs = [node, npm, package_json_file, script] + srcs,
- outputs = [npm_package_json_file],
- command = " && ".join(cmds),
- )
-
- return struct(
- files = set(srcs),
- runfiles = ctx.runfiles(
- files = srcs,
- collect_default = True,
- ),
- node_library = struct(
- name = lib_name,
- label = ctx.label,
- srcs = srcs,
- transitive_srcs = srcs + transitive_srcs,
- transitive_node_modules = ctx.files.modules + transitive_node_modules,
- package_json = npm_package_json_file,
- npm_package_json = npm_package_json_file,
- ),
- )
-
-node_library = rule(
- node_library_impl,
- attrs = {
- "srcs": attr.label_list(
- allow_files = _js_filetype,
- ),
- "version": attr.string(
- default = "0.0.0",
- ),
- "main": attr.label(
- mandatory = False,
- single_file = True,
- allow_files = _js_filetype,
- ),
- "d": attr.string(
- default = "No description provided.",
- ),
- "data": attr.label_list(
- allow_files = True,
- cfg = "data",
- ),
- "deps": attr.label_list(
- providers = ["node_library"],
- ),
- "modules": attr.label_list(
- allow_files = _modules_filetype,
- ),
- "package_json_template_file": attr.label(
- single_file = True,
- allow_files = True,
- default = Label("//node:package.json.tpl"),
- ),
- "prefix": attr.string(default = "workspace"),
- "use_prefix": attr.bool(default = False),
- "_node": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:node_tool"),
- single_file = True,
- allow_files = True,
- executable = True,
- cfg = "host",
- ),
- "_npm": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:npm_tool"),
- single_file = True,
- allow_files = True,
- executable = True,
- cfg = "host",
- ),
- },
-)
diff --git a/node/internal/node_module.bzl b/node/internal/node_module.bzl
new file mode 100644
index 0000000..e6463b3
--- /dev/null
+++ b/node/internal/node_module.bzl
@@ -0,0 +1,273 @@
+# https://nodejs.org/api/modules.html#modules_folders_as_modules
+
+_node_filetype = FileType([".js", ".node"])
+
+def _relname(ctx, root_file, file):
+ #print("getting relative name for %s rel %s" % (file.path, root_file.path))
+ # If file is in the workspace root, just return the name
+ if file.dirname == ".":
+ return file.short_path
+ parts = file.path.partition(root_file.dirname)
+ # If the file.path does not contain root_file.dirname, try the
+ # label.package...
+ if not len(parts[2]):
+ # However, if the label.package is empty, file is in the
+ # workspace root (so just use the basename)
+ if not ctx.label.package:
+ return file.basename
+ parts = file.path.partition(ctx.label.package)
+ if not len(parts[2]):
+ print("failed relative name for %s rel %s" % (file.path, root_file.path))
+ return parts[2]
+
+
+def _get_package_dependencies(module_deps):
+ dependencies = {}
+ for dep in module_deps:
+ module = dep.node_module
+ dependencies[module.name] = module.version
+ return struct(**dependencies)
+
+
+def _get_module_name(ctx):
+ parts = []
+ # namespace attribute takes precedence...
+ if ctx.attr.namespace:
+ parts.append(ctx.attr.namespace)
+ # else use the package name, but only if non-empty
+ elif ctx.label.package:
+ parts += ctx.label.package.split("/")
+ # finally, use the module_name or label name
+ parts.append(ctx.attr.module_name or ctx.label.name)
+ return ctx.attr.separator.join(parts)
+
+
+def _create_package_json(ctx, name, files):
+ output_file = ctx.new_file("%s/package.json" % name)
+
+ json = {
+ "name": name,
+ "version": ctx.attr.version,
+ "description": ctx.attr.description,
+ "url": ctx.attr.url,
+ "sha1": ctx.attr.sha1,
+ }
+
+ if len(files) > 0:
+ json["files"] = list(depset([_get_path_for_module_file(ctx, output_file, file, {}) for file in files]))
+
+ if ctx.attr.main:
+ json["main"] = ctx.file.main.basename
+
+ # Add dependencies if they exist
+ if (ctx.attr.deps):
+ json["dependencies"] = _get_package_dependencies(ctx.attr.deps)
+ if (ctx.attr.dev_deps):
+ json["devDependencies"] = _get_package_dependencies(ctx.attr.dev_deps)
+
+ content = struct(**json)
+
+ ctx.file_action(
+ output = output_file,
+ content = content.to_json(),
+ )
+
+ return output_file
+
+
+def _get_transitive_modules(deps, key):
+ modules = depset()
+ for dep in deps:
+ module = dep.node_module
+ modules += [module]
+ modules += getattr(module, key)
+ return modules
+
+
+def _get_path_for_module_file(ctx, root_file, file, sourcemap):
+ """Compute relative output path for file relative to root_file Return
+ the return ad as side-effect store the mapping of file.path -->
+ relative_path in the given sourcemap dict.
+ """
+
+ path = None
+ if ctx.attr.layout == 'relative':
+ path = _relname(ctx, root_file, file)
+ elif ctx.attr.layout == 'workspace':
+ path = file.short_path
+ elif ctx.attr.layout == 'flat':
+ path = file.basename
+ else:
+ fail("Unexpected layout: " + ctx.attr.layout)
+ sourcemap[file.path] = path
+ return path
+
+
+def _copy_file(ctx, src, dst):
+ ctx.action(
+ mnemonic = "CopyFileToNodeModule",
+ inputs = [src],
+ outputs = [dst],
+ command = "cp %s %s" % (src.path, dst.path),
+ )
+ return dst
+
+
+def _node_module_impl(ctx):
+ name = _get_module_name(ctx)
+ outputs = []
+
+ files = [] + ctx.files.srcs
+ if ctx.file.main:
+ files.append(ctx.file.main)
+
+ package_json = ctx.file.package_json
+
+    # The presence of an index file suppresses generation of the
+    # package.json file, unless one is explicitly provided or a
+    # 'main' file is given.
+ if len(files) > 0 and not package_json:
+ if ctx.attr.main or not ctx.file.index:
+ package_json = _create_package_json(ctx, name, files)
+ if package_json:
+ outputs.append(package_json)
+
+ root_file = package_json or ctx.file.index
+ if len(files) > 0 and not root_file:
+ fail("A module with source files must be created from (1) a package.json file, (2) a 'main' file, or (3) an 'index' file. None of these were present.")
+
+ index_file = None
+ if ctx.file.index:
+ dst = ctx.new_file("%s/index.%s" % (name, ctx.file.index.extension))
+ outputs.append(_copy_file(ctx, ctx.file.index, dst))
+ index_file = dst
+
+ sourcemap = {}
+ for src in files:
+ dst = ctx.new_file("%s/%s" % (name, _get_path_for_module_file(ctx, root_file, src, sourcemap)))
+ outputs.append(_copy_file(ctx, src, dst))
+
+ return struct(
+ files = depset(outputs),
+ node_module = struct(
+ identifier = name.replace(ctx.attr.separator, '_'),
+ name = name,
+ version = ctx.attr.version,
+ url = ctx.attr.url,
+ sha1 = ctx.attr.sha1,
+ description = ctx.attr.description,
+ package_json = package_json,
+ root = root_file,
+ sourcemap = sourcemap,
+ index = index_file,
+ files = depset(outputs),
+ sources = depset(files),
+ transitive_deps = _get_transitive_modules(ctx.attr.deps, "transitive_deps"),
+ transitive_dev_deps = _get_transitive_modules(ctx.attr.dev_deps, "transitive_dev_deps"),
+ ),
+ )
+
+
+node_module = rule(
+ implementation = _node_module_impl,
+ attrs = {
+ # An organizational prefix for the module, for example
+ # '@types' in '@types/node'.
+ "namespace": attr.string(
+ ),
+
+ # A string that, if present, will be used for the module name.
+        # If absent, defaults to ctx.label.name.
+ "module_name": attr.string(
+ ),
+
+        # Separator used to join the parts of the scoped module name
+        # (the default is '/'). For example, if you have a node_module
+        # rule 'fs-super' in src/main/js with separator '-', the module
+        # name will be 'src-main-js-fs-super' UNLESS you specify a
+        # namespace '@bazel', in which case it becomes
+        # '@bazel/fs-super'.
+ "separator": attr.string(
+ default = "/",
+ ),
+
+ # A string that determines how files are placed within the
+ # module. With 'flat', all files are copied into the root of
+ # the module using File.basename. With 'relative', files are
+ # copied into the module relative to the BUILD file containing
+ # the node_module rule (this is the default). With
+ # 'workspace', files are copied into the module using
+ # File.short_path, causing them to be relative to the
+ # WORKSPACE.
+ "layout": attr.string(
+ values = ["relative", "workspace"],
+ default = "relative",
+ ),
+
+ # A set of source files to include in the module.
+ "srcs": attr.label_list(
+ allow_files = True,
+ ),
+
+ # A file that will be used for the package.json at the root of
+ # the module. If not present, one will be generated UNLESS an
+ # index file is provided.
+ "package_json": attr.label(
+ allow_files = FileType(["package.json"]),
+ single_file = True,
+ ),
+
+ # Additional data files to be included in the module, but
+ # excluded from the package.json 'files' attribute.
+ "data": attr.label_list(
+ allow_files = True,
+ cfg = "data",
+ ),
+
+ # Module dependencies.
+ "deps": attr.label_list(
+ providers = ["node_module"],
+ ),
+
+ # Development-only module dependencies.
+ "dev_deps": attr.label_list(
+ providers = ["node_module"],
+ ),
+
+ # Module version
+ "version": attr.string(
+ default = "1.0.0",
+ ),
+
+        # Module URL (location where the module was originally loaded
+        # from).
+ "url": attr.string(
+ ),
+
+ # Sha1 hash for the tgz that it was loaded from.
+ "sha1": attr.string(
+ ),
+
+ # Package description.
+ "description": attr.string(
+ default = "No description provided",
+ ),
+
+ # File that should be named as the package.json 'main'
+ # attribute.
+ "main": attr.label(
+ #allow_files = _node_filetype,
+ allow_files = True,
+ mandatory = False,
+ single_file = True,
+ ),
+
+ # File that should be copied to the module root as 'index.js'.
+ # If the index file is present and no 'main' is provided, a
+ # package.json file will not be generated.
+ "index": attr.label(
+ allow_files = _node_filetype,
+ single_file = True,
+ ),
+ },
+)
diff --git a/node/internal/node_repositories.bzl b/node/internal/node_repositories.bzl
index 23d8c72..43f4afc 100644
--- a/node/internal/node_repositories.bzl
+++ b/node/internal/node_repositories.bzl
@@ -1,81 +1,129 @@
-NODE_TOOLCHAIN_BUILD_FILE = """
+# The node_repository_impl taken from Alex Eagle's rules_nodejs :)
+#
+# Copyright 2017 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Install NodeJS when the user runs node_repositories() from their WORKSPACE.
+
+We fetch a specific version of Node to ensure builds are hermetic.
+We then create a repository @node which provides the
+node binary to other rules.
+"""
+
+YARN_BUILD_FILE_CONTENT = """
package(default_visibility = [ "//visibility:public" ])
exports_files([
- "bin/node",
- "bin/npm",
+ "bin/yarn",
+ "bin/yarn.js",
])
-filegroup(
- name = "node_tool",
- srcs = [ "bin/node" ],
-)
-filegroup(
- name = "npm_tool",
- srcs = [ "bin/npm" ],
-)
"""
-def _mirror_path(ctx, workspace_root, path):
- src = '/'.join([workspace_root, path])
- dst = '/'.join([ctx.path('.'), path])
- ctx.symlink(src, dst)
-
-
-def _node_toolchain_impl(ctx):
- os = ctx.os.name
- if os == 'linux':
- noderoot = ctx.path(ctx.attr._linux).dirname
- elif os == 'mac os x':
- noderoot = ctx.path(ctx.attr._darwin).dirname
- else:
- fail("Unsupported operating system: " + os)
-
- _mirror_path(ctx, noderoot, "bin")
- _mirror_path(ctx, noderoot, "include")
- _mirror_path(ctx, noderoot, "lib")
- _mirror_path(ctx, noderoot, "share")
-
- ctx.file("WORKSPACE", "workspace(name = '%s')" % ctx.name)
- ctx.file("BUILD", NODE_TOOLCHAIN_BUILD_FILE)
- ctx.file("BUILD.bazel", NODE_TOOLCHAIN_BUILD_FILE)
-
-
-_node_toolchain = repository_rule(
- _node_toolchain_impl,
- attrs = {
- "_linux": attr.label(
- default = Label("@nodejs_linux_amd64//:WORKSPACE"),
- allow_files = True,
- single_file = True,
- ),
- "_darwin": attr.label(
- default = Label("@nodejs_darwin_amd64//:WORKSPACE"),
- allow_files = True,
- single_file = True,
- ),
- },
+YARN_LOCKFILE_BUILD_FILE_CONTENT = """
+package(default_visibility = [ "//visibility:public" ])
+exports_files([
+ "index.js",
+])
+"""
+
+NODE_BUILD_FILE_CONTENT = """
+package(default_visibility = ["//visibility:public"])
+exports_files([
+ "{0}",
+ "{1}",
+])
+alias(name = "node", actual = "{0}")
+alias(name = "npm", actual = "{1}")
+"""
+
+
+def _node_repository_impl(repository_ctx):
+ version = repository_ctx.attr.node_version
+ sha256 = repository_ctx.attr.linux_sha256
+ arch = "linux-x64"
+ node = "bin/node"
+ npm = "bin/npm"
+ compression_format = "tar.xz"
+
+ os_name = repository_ctx.os.name.lower()
+ if os_name.startswith("mac os"):
+ arch = "darwin-x64"
+ sha256 = repository_ctx.attr.darwin_sha256
+ elif os_name.find("windows") != -1:
+ arch = "win-x64"
+ node = "node.exe"
+ npm = "npm.cmd"
+ compression_format = "zip"
+ sha256 = repository_ctx.attr.windows_sha256
+
+ prefix = "node-v%s-%s" % (version, arch)
+ url = "https://nodejs.org/dist/v{version}/{prefix}.{compression_format}".format(
+ version = version,
+ prefix = prefix,
+ compression_format = compression_format,
+ )
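+    # e.g. https://nodejs.org/dist/v7.10.1/node-v7.10.1-linux-x64.tar.xz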
+
+ repository_ctx.download_and_extract(
+ url = url,
+ stripPrefix = prefix,
+ sha256 = sha256,
+ )
+
+ repository_ctx.file("BUILD.bazel", content = NODE_BUILD_FILE_CONTENT.format(node, npm))
+
+
+_node_repository = repository_rule(
+ _node_repository_impl,
+ attrs = {
+ "node_version": attr.string(
+ default = "7.10.1",
+ ),
+ "linux_sha256": attr.string(
+ default = "7b0e9d1af945671a0365a64ee58a2b0d72b3632a1cebe6b5bd75094b93627bf3",
+ ),
+ "darwin_sha256": attr.string(
+ default = "d67d2eb9456aab925416ad58aa18b9680e66a4bcc243a89b22e646f7fffc4ff9",
+ ),
+ "windows_sha256": attr.string(
+ default = "a03512d8f17d8312c6fece68a9c20aaa8e2268de18edfea847aa6a35af3a95ba",
+ ),
+ },
)
-def node_repositories(version="6.6.0",
- linux_sha256="c22ab0dfa9d0b8d9de02ef7c0d860298a5d1bf6cae7413fb18b99e8a3d25648a",
- darwin_sha256="c8d1fe38eb794ca46aacf6c8e90676eec7a8aeec83b4b09f57ce503509e7a19f"):
+
+def node_repositories(yarn_version="v1.0.1",
+ yarn_sha256="6b00b5e0a7074a512d39d2d91ba6262dde911d452617939ca4be4a700dd77cf1",
+ **kwargs):
+
native.new_http_archive(
- name = "nodejs_linux_amd64",
- url = "https://nodejs.org/dist/v{version}/node-v{version}-linux-x64.tar.gz".format(version=version),
- type = "tar.gz",
- strip_prefix = "node-v{version}-linux-x64".format(version=version),
- sha256 = linux_sha256,
- build_file_content = "",
+ name = "yarn",
+ url = "https://github.com/yarnpkg/yarn/releases/download/{yarn_version}/yarn-{yarn_version}.tar.gz".format(
+ yarn_version = yarn_version,
+ ),
+ sha256 = yarn_sha256,
+ strip_prefix="yarn-%s" % yarn_version,
+ build_file_content = YARN_BUILD_FILE_CONTENT,
)
native.new_http_archive(
- name = "nodejs_darwin_amd64",
- url = "https://nodejs.org/dist/v{version}/node-v{version}-darwin-x64.tar.gz".format(version=version),
- type = "tar.gz",
- strip_prefix = "node-v{version}-darwin-x64".format(version=version),
- sha256 = darwin_sha256,
- build_file_content = "",
+ name = "yarnpkg_lockfile",
+ url = "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.0.0.tgz",
+ sha256 = "472add7ad141c75811f93dca421e2b7456045504afacec814b0565f092156250",
+ strip_prefix="package",
+ build_file_content = YARN_LOCKFILE_BUILD_FILE_CONTENT,
)
- _node_toolchain(
- name = "org_pubref_rules_node_toolchain",
+ _node_repository(
+ name = "node",
+ **kwargs
)
diff --git a/node/internal/node_utils.bzl b/node/internal/node_utils.bzl
index 6c5ed6e..304d3ba 100644
--- a/node/internal/node_utils.bzl
+++ b/node/internal/node_utils.bzl
@@ -1,6 +1,6 @@
node_attrs = {
"node": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:bin/node"),
+ default = Label("@node//:node"),
single_file = True,
allow_files = True,
executable = True,
@@ -8,8 +8,8 @@ node_attrs = {
),
}
-def execute(ctx, cmds):
- result = ctx.execute(cmds)
+def execute(ctx, cmds, **kwargs):
+ result = ctx.execute(cmds, **kwargs)
if result.return_code:
- fail(" ".join(cmds) + "failed: %s" %(result.stderr))
+ fail(" ".join(cmds) + "failed: \nSTDOUT:\n%s\nSTDERR:\n%s" % (result.stdout, result.stderr))
return result
diff --git a/node/internal/npm_repository.bzl b/node/internal/npm_repository.bzl
deleted file mode 100644
index 3b7ccf8..0000000
--- a/node/internal/npm_repository.bzl
+++ /dev/null
@@ -1,74 +0,0 @@
-load("//node:internal/dar.bzl", "dar_attrs", "dar_execute")
-load("//node:internal/dson.bzl", "dson_attrs", "dson_execute")
-load("//node:internal/sha256.bzl", "sha256_attrs", "sha256_execute")
-load("//node:internal/node_utils.bzl", "execute", "node_attrs")
-
-BUILD_FILE = """package(default_visibility = ["//visibility:public"])
-filegroup(
- name = "modules",
- srcs = ["{modules_path}"],
-)
-exports_files(["{modules_path}"])
-exports_files(glob(["bin/*"]))
-"""
-
-_npm_repository_attrs = node_attrs + dar_attrs + dson_attrs + sha256_attrs + {
- "npm": attr.label(
- default = Label("@org_pubref_rules_node_toolchain//:bin/npm"),
- single_file = True,
- allow_files = True,
- executable = True,
- cfg = "host",
- ),
- "registry": attr.string(),
- "deps": attr.string_dict(mandatory = True),
-}
-
-def _npm_repository_impl(ctx):
- node = ctx.path(ctx.attr.node)
- nodedir = node.dirname.dirname
- npm = ctx.path(ctx.attr.npm)
- modules_path = ctx.attr.dar_root
-
- modules = []
- for k, v in ctx.attr.deps.items():
- if v:
- modules.append("%s@%s" % (k, v))
- else:
- modules.append(k)
-
- cmd = [
- node,
- npm,
- "install",
- #"--loglevel", "silly", # info
- "--prefix", ctx.path(""),
- "--nodedir=%s" % nodedir,
- "--global"
- ]
-
- if ctx.attr.registry:
- cmd += ["--registry", ctx.attr.registry]
-
- cmd += modules
-
- output = execute(ctx, cmd).stdout
- #print("npm install output: %s" % output)
-
- if str(modules_path) != "node_modules":
- execute(ctx, ["ln", "-s", modules_path, "node_modules"])
-
- if ctx.attr.sha256:
- dson_execute(ctx, dson_path = "node_modules")
- dar_execute(ctx, dar_root = "node_modules")
- sha256_execute(ctx, "node_modules.tar")
-
-
- ctx.file("BUILD", BUILD_FILE.format(
- modules_path = modules_path,
- ))
-
-npm_repository = repository_rule(
- implementation = _npm_repository_impl,
- attrs = _npm_repository_attrs,
-)
diff --git a/node/internal/parse_yarn_lock.js b/node/internal/parse_yarn_lock.js
new file mode 100644
index 0000000..74b9f31
--- /dev/null
+++ b/node/internal/parse_yarn_lock.js
@@ -0,0 +1,159 @@
+'use strict';
+
+const fs = require('fs');
+const lockfile = require('@yarnpkg/lockfile');
+
+let file = fs.readFileSync('yarn.lock', 'utf8');
+let json = lockfile.parse(file);
+
+if (json.type !== 'success') {
+ throw new Error('Lockfile parse failed: ' + JSON.stringify(json, null, 2));
+}
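+
+// Each lockfile entry is keyed by 'name@range' and carries (at least) a
+// 'version', an optional 'dependencies' map, and a 'resolved' URL of the
+// form <tarball-url>#<sha1>, e.g. (hypothetical sha1):
+//   https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#abc123
+// parseResolved() below splits 'resolved' into entry.url and entry.sha1.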
+
+const entries = Object.keys(json.object).map(key => makeEntry(key, json.object[key]));
+const cache = new Map();
+
+print("");
+print("package(default_visibility = ['//visibility:public'])");
+print("load('@org_pubref_rules_node//node:rules.bzl', 'node_module', 'node_binary')");
+
+entries.forEach(entry => printNodeModule(entry));
+
+printNodeModules(cache);
+
+entries.forEach(entry => parsePackageJson(entry));
+
+print("");
+print("# EOF");
+
+function makeEntry(key, entry) {
+ parseName(key, entry);
+ parseResolved(entry);
+ return entry;
+}
+
+function parseName(key, entry) {
+ // can be 'foo@1.0.0' or something like '@types/foo@1.0.0'
+ const at = key.lastIndexOf('@');
+ entry.id = key;
+ entry.name = key.slice(0, at);
+
+ const label = entry.name.replace('@', 'at-');
+ entry.label = label;
+}
+
+function parseResolved(entry) {
+ const resolved = entry.resolved;
+ if (resolved) {
+ const tokens = resolved.split("#");
+ entry.url = tokens[0];
+ entry.sha1 = tokens[1];
+ }
+}
+
+function printDownloadMeta(entry) {
+ print("# <-- " + [entry.sha1,entry.name,entry.url].join("|"));
+}
+
+function printJson(entry) {
+ JSON.stringify(entry, null, 2).split("\n").forEach(line => print("# " + line));
+}
+
+function printNodeModule(entry) {
+ print(``);
+ printJson(entry);
+ const prev = cache.get(entry.name);
+ if (prev) {
+ print(`## Skipped ${entry.id} (${entry.name} resolves to ${prev.id})`);
+ return;
+ }
+ print(`node_module(`);
+ print(` name = "${entry.name}",`);
+ print(` version = "${entry.version}",`);
+ print(` url = "${entry.url}",`);
+ print(` sha1 = "${entry.sha1}",`);
+ print(` package_json = "node_modules/${entry.name}/package.json",`);
+ print(` srcs = glob(["node_modules/${entry.name}/**/*"], exclude = ["node_modules/${entry.name}/package.json"]),`);
+
+ if (entry.dependencies) {
+ print(` deps = [`);
+ Object.keys(entry.dependencies).forEach(module => {
+ print(` ":${module}",`);
+ });
+ print(` ],`);
+ }
+ print(`)`);
+
+ cache.set(entry.name, entry);
+}
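+
+// For a resolved underscore@1.8.3 entry, the emitted rule looks roughly
+// like this (hypothetical sha1, srcs elided):
+//
+//   node_module(
+//       name = "underscore",
+//       version = "1.8.3",
+//       url = "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz",
+//       sha1 = "...",
+//       package_json = "node_modules/underscore/package.json",
+//       srcs = glob(["node_modules/underscore/**/*"], ...),
+//   )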
+
+function printNodeModules(map) {
+ print(``);
+  print(`# Pseudo-module that aggregates every module in the set`);
+ print(`node_module(`);
+ print(` name = "_all_",`);
+ print(` deps = [`);
+ for (let entry of map.values()) {
+ print(` ":${entry.name}",`);
+ }
+ print(` ],`);
+ print(`)`);
+}
+
+function parsePackageJson(entry) {
+ const pkg = require(`./node_modules/${entry.name}/package`);
+ if (Array.isArray(pkg.bin)) {
+    // Not expected in practice: an array-valued pkg.bin is not handled here.
+ } else if (typeof pkg.bin === 'string') {
+ printNodeModuleShBinary(entry, pkg, entry.name, pkg.bin);
+ } else if (typeof pkg.bin === 'object') {
+ Object.keys(pkg.bin).forEach(key => printNodeModuleShBinary(entry, pkg, key, pkg.bin[key]));
+ }
+}
+
+function printNodeModuleShBinary(entry, pkg, name, path) {
+ print(``);
+ print(`sh_binary(`);
+ print(` name = "${name}_bin",`); // dont want sh_binary 'mkdirp' to conflict
+ print(` srcs = [":node_modules/.bin/${name}"],`);
+ print(` data = [`);
+ print(` ":${entry.name}",`); // must always depend on self
+ if (pkg.dependencies) {
+ Object.keys(pkg.dependencies).forEach(dep_name => {
+ const dep_entry = cache.get(dep_name);
+ if (!dep_entry) {
+ throw new Error('Cannot find dependency entry for ' + dep_name);
+ }
+ print(` ":${dep_entry.name}",`);
+ });
+ }
+ print(` ],`);
+ print(`)`);
+}
+
+function printNodeModuleBinary(entry, pkg, name, path) {
+ if (path.indexOf("./") === 0) {
+ path = path.slice(2);
+ }
+ print(``);
+ print(`sh_binary(`);
+ print(` name = "${entry.name}_${name}",`);
+ print(` srcs = [":node_modules/${entry.name}/${path}"],`);
+ print(` data = [`);
+ print(` ":${entry.name}",`); // must always depend on self
+ if (pkg.dependencies) {
+ Object.keys(pkg.dependencies).forEach(dep_name => {
+ const dep_entry = cache.get(dep_name);
+ if (!dep_entry) {
+ throw new Error('Cannot find dependency entry for ' + dep_name);
+ }
+ print(` ":${dep_entry.name}",`);
+ });
+ }
+ print(` ],`);
+ print(`)`);
+}
+
+function print(msg) {
+ console.log(msg);
+}
diff --git a/node/internal/yarn_modules.bzl b/node/internal/yarn_modules.bzl
new file mode 100644
index 0000000..7dda3c4
--- /dev/null
+++ b/node/internal/yarn_modules.bzl
@@ -0,0 +1,93 @@
+load("//node:internal/sha256.bzl", "sha256_attrs", "sha256_execute")
+load("//node:internal/node_utils.bzl", "execute")
+
+def _create_package_json_content(ctx):
+ content = {
+ "name": ctx.name,
+ "version": "1.0.0",
+ }
+ dependencies = {}
+ for name, version in ctx.attr.deps.items():
+ dependencies[name] = version
+ content["dependencies"] = struct(**dependencies)
+ return struct(**content)
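+
+# For example, with name = "yarn_modules" and deps = {"underscore": "1.8.3"},
+# the generated package.json is roughly:
+#   {"name": "yarn_modules", "version": "1.0.0",
+#    "dependencies": {"underscore": "1.8.3"}}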
+
+
+def _download_and_extract_module(ctx, entry):
+ name = entry["name"]
+ url = entry["url"]
+ print("downloading %s to node_modules/%s, stripping '%s'" % (url, name, 'package'))
+ ctx.download_and_extract(
+ url,
+ output = "node_modules/" + name,
+ stripPrefix = "package",
+ )
+
+
+def _yarn_modules_impl(ctx):
+
+ # Preconditions
+ if not (ctx.attr.package_json or ctx.attr.deps):
+ fail("You must provide either a package.json file OR specify deps (got none!)")
+ if ctx.attr.package_json and ctx.attr.deps:
+ fail("You must specify a package.json file OR deps (not both!)")
+
+ # Gather required resources
+ node = ctx.path(ctx.attr._node)
+ parse_yarn_lock_js = ctx.path(ctx.attr._parse_yarn_lock_js)
+ yarn_js = ctx.path(ctx.attr._yarn_js)
+
+ # Copy over or create the package.json file
+ if ctx.attr.package_json:
+ package_json_file = ctx.path(ctx.attr.package_json)
+ execute(ctx, ["cp", package_json_file, "package.json"])
+ else:
+ ctx.file("package.json", _create_package_json_content(ctx).to_json())
+
+ # Copy the parse_yarn_lock script and yarn.js over here.
+ execute(ctx, ["cp", parse_yarn_lock_js, "parse_yarn_lock.js"])
+ execute(ctx, ["cp", yarn_js, "yarn.js"])
+
+ # Build node_modules via 'yarn install'
+ execute(ctx, [node, yarn_js, "install"], quiet = True)
+
+    # Fetch the @yarnpkg/lockfile module into node_modules so that
+    # parse_yarn_lock.js can require it below.
+ ctx.download_and_extract(
+ url = "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.0.0.tgz",
+ output = "node_modules/@yarnpkg/lockfile",
+ sha256 = "472add7ad141c75811f93dca421e2b7456045504afacec814b0565f092156250",
+ stripPrefix = "package",
+ )
+
+ # Run the script and save the stdout to our BUILD file(s)
+ result = execute(ctx, [node, "parse_yarn_lock.js"], quiet = True)
+ ctx.file("BUILD", result.stdout)
+ ctx.file("BUILD.bazel", result.stdout)
+
+
+yarn_modules = repository_rule(
+ implementation = _yarn_modules_impl,
+ attrs = {
+ "_node": attr.label(
+            # FIXME(pcj): This is going to be invalid on Windows
+ default = Label("@node//:bin/node"),
+ single_file = True,
+ allow_files = True,
+ executable = True,
+ cfg = "host",
+ ),
+ "_parse_yarn_lock_js": attr.label(
+ default = Label("//node:internal/parse_yarn_lock.js"),
+ single_file = True,
+ ),
+ "_yarn_js": attr.label(
+ default = Label("@yarn//:bin/yarn.js"),
+ single_file = True,
+ ),
+ "package_json": attr.label(
+ mandatory = False,
+ allow_files = FileType(["package.json"]),
+ ),
+ "deps": attr.string_dict(mandatory = False),
+ }
+)
diff --git a/node/package.json.tpl b/node/package.json.tpl
deleted file mode 100644
index 5143b69..0000000
--- a/node/package.json.tpl
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "%{name}",
- "main": "%{main}",
- "version": "%{version}",
- "description": "%{description}"
-}
diff --git a/node/rules.bzl b/node/rules.bzl
index 6422fc9..83abe92 100644
--- a/node/rules.bzl
+++ b/node/rules.bzl
@@ -1,6 +1,5 @@
load("//node:internal/node_repositories.bzl", "node_repositories")
-load("//node:internal/npm_repository.bzl", "npm_repository")
-load("//node:internal/bower_repository.bzl", "bower_repository")
+load("//node:internal/yarn_modules.bzl", "yarn_modules")
+load("//node:internal/node_module.bzl", "node_module")
load("//node:internal/node_binary.bzl", "node_binary")
-load("//node:internal/node_library.bzl", "node_library")
load("//node:internal/mocha_test.bzl", "mocha_test")
diff --git a/node/tools/BUILD b/node/tools/BUILD
deleted file mode 100644
index a63f0e2..0000000
--- a/node/tools/BUILD
+++ /dev/null
@@ -1,9 +0,0 @@
-py_test(
- name = "dson_test",
- size = "small",
- srcs = [
- "dson.py",
- "dson_test.py",
- ],
- main = "dson_test.py",
-)
diff --git a/node/tools/dar.py b/node/tools/dar.py
deleted file mode 100644
index 3760707..0000000
--- a/node/tools/dar.py
+++ /dev/null
@@ -1,563 +0,0 @@
-# Copyright 2016 The Bazel Authors, @pcj. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# In order to have a self-contained script with no import
-# dependencies, this file is a merger of the following two files, with
-# the gflags dependency removed and replaced with standard argparse
-# library.
-#
-# https://github.com/bazelbuild/bazel/blob/master/tools/build_defs/pkg/archive.py
-# https://github.com/bazelbuild/bazel/blob/master/tools/build_defs/pkg/build_tar.py
-#
-"""Deterministic archive manipulation library."""
-
-import argparse
-import os
-import os.path
-import subprocess
-import sys
-import tarfile
-import tempfile
-
-from StringIO import StringIO
-
-
-class SimpleArFile(object):
- """A simple AR file reader.
-
- This enable to read AR file (System V variant) as described
- in https://en.wikipedia.org/wiki/Ar_(Unix).
-
- The standard usage of this class is:
-
- with SimpleArFile(filename) as ar:
- nextFile = ar.next()
- while nextFile:
- print nextFile.filename
- nextFile = ar.next()
-
- Upon error, this class will raise a ArError exception.
- """
-
- # TODO(dmarting): We should use a standard library instead but python 2.7
- # does not have AR reading library.
-
- class ArError(Exception):
- pass
-
- class SimpleArFileEntry(object):
- """Represent one entry in a AR archive.
-
- Attributes:
- filename: the filename of the entry, as described in the archive.
- timestamp: the timestamp of the file entry.
- owner_id, group_id: numeric id of the user and group owning the file.
- mode: unix permission mode of the file
- size: size of the file
- data: the content of the file.
- """
-
- def __init__(self, f):
- self.filename = f.read(16).strip()
- if self.filename.endswith('/'): # SysV variant
- self.filename = self.filename[:-1]
- self.timestamp = int(f.read(12).strip())
- self.owner_id = int(f.read(6).strip())
- self.group_id = int(f.read(6).strip())
- self.mode = int(f.read(8).strip(), 8)
- self.size = int(f.read(10).strip())
- pad = f.read(2)
- if pad != '\x60\x0a':
- raise SimpleArFile.ArError('Invalid AR file header')
- self.data = f.read(self.size)
-
- MAGIC_STRING = '!\n'
-
- def __init__(self, filename):
- self.filename = filename
-
- def __enter__(self):
- self.f = open(self.filename, 'rb')
- if self.f.read(len(self.MAGIC_STRING)) != self.MAGIC_STRING:
- raise self.ArError('Not a ar file: ' + self.filename)
- return self
-
- def __exit__(self, t, v, traceback):
- self.f.close()
-
- def next(self):
- """Read the next file. Returns None when reaching the end of file."""
- # AR sections are two bit aligned using new lines.
- if self.f.tell() % 2 != 0:
- self.f.read(1)
- # An AR sections is at least 60 bytes. Some file might contains garbage
- # bytes at the end of the archive, ignore them.
- if self.f.tell() > os.fstat(self.f.fileno()).st_size - 60:
- return None
- return self.SimpleArFileEntry(self.f)
-
-
-class TarFileWriter(object):
- """A wrapper to write tar files."""
-
- class Error(Exception):
- pass
-
- def __init__(self, name, compression=''):
- print("tarfile name %s" % name)
- if compression in ['tgz', 'gz']:
- mode = 'w:gz'
- elif compression in ['bzip2', 'bz2']:
- mode = 'w:bz2'
- else:
- mode = 'w:'
- print("tarfile mode %s" % mode)
- # Support xz compression through xz... until we can use Py3
- self.xz = compression in ['xz', 'lzma']
- self.name = name
- self.tar = tarfile.open(name=name, mode=mode)
- #self.tar = tarfile.open(name="foo", mode='w:')
- self.members = set([])
- self.directories = set([])
-
- def __enter__(self):
- return self
-
- def __exit__(self, t, v, traceback):
- self.close()
-
- def add_dir(self,
- name,
- path,
- uid=0,
- gid=0,
- uname='',
- gname='',
- mtime=0,
- mode=None,
- depth=100):
- """Recursively add a directory.
-
- Args:
- name: the destination path of the directory to add.
- path: the path of the directory to add.
- uid: owner user identifier.
- gid: owner group identifier.
- uname: owner user names.
- gname: owner group names.
- mtime: modification time to put in the archive.
- mode: unix permission mode of the file, default 0644 (0755).
- depth: maximum depth to recurse in to avoid infinite loops
- with cyclic mounts.
-
- Raises:
- TarFileWriter.Error: when the recursion depth has exceeded the
- `depth` argument.
- """
- if not (name == '.' or name.startswith('/') or name.startswith('./')):
- name = './' + name
- if os.path.isdir(path):
- # Remove trailing '/' (index -1 => last character)
- if name[-1] == '/':
- name = name[:-1]
- # Add the x bit to directories to prevent non-traversable directories.
- # The x bit is set only to if the read bit is set.
- dirmode = (mode | ((0o444 & mode) >> 2)) if mode else mode
- self.add_file(name + '/',
- tarfile.DIRTYPE,
- uid=uid,
- gid=gid,
- uname=uname,
- gname=gname,
- mtime=mtime,
- mode=dirmode)
- if depth <= 0:
- raise self.Error('Recursion depth exceeded, probably in '
- 'an infinite directory loop.')
- # Iterate over the sorted list of file so we get a deterministic result.
- filelist = os.listdir(path)
- filelist.sort()
- for f in filelist:
- new_name = os.path.join(name, f)
- new_path = os.path.join(path, f)
- self.add_dir(new_name, new_path, uid, gid, uname, gname, mtime, mode,
- depth - 1)
- else:
- self.add_file(name,
- tarfile.REGTYPE,
- file_content=path,
- uid=uid,
- gid=gid,
- uname=uname,
- gname=gname,
- mtime=mtime,
- mode=mode)
-
- def _addfile(self, info, fileobj=None):
- """Add a file in the tar file if there is no conflict."""
- if not info.name.endswith('/') and info.type == tarfile.DIRTYPE:
- # Enforce the ending / for directories so we correctly deduplicate.
- info.name += '/'
- if info.name not in self.members:
- self.tar.addfile(info, fileobj)
- self.members.add(info.name)
- elif info.type != tarfile.DIRTYPE:
- print('Duplicate file in archive: %s, '
- 'picking first occurrence' % info.name)
-
- def add_file(self,
- name,
- kind=tarfile.REGTYPE,
- content=None,
- link=None,
- file_content=None,
- uid=0,
- gid=0,
- uname='',
- gname='',
- mtime=0,
- mode=None):
- """Add a file to the current tar.
-
- Args:
- name: the name of the file to add.
- kind: the type of the file to add, see tarfile.*TYPE.
- content: a textual content to put in the file.
- link: if the file is a link, the destination of the link.
- file_content: file to read the content from. Provide either this
- one or `content` to specifies a content for the file.
- uid: owner user identifier.
- gid: owner group identifier.
- uname: owner user names.
- gname: owner group names.
- mtime: modification time to put in the archive.
- mode: unix permission mode of the file, default 0644 (0755).
- """
- if file_content and os.path.isdir(file_content):
- # Recurse into directory
- self.add_dir(name, file_content, uid, gid, uname, gname, mtime, mode)
- return
- if not (name == '.' or name.startswith('/') or name.startswith('./')):
- name = './' + name
- if kind == tarfile.DIRTYPE:
- name = name.rstrip('/')
- if name in self.directories:
- return
-
- components = name.rsplit('/', 1)
- if len(components) > 1:
- d = components[0]
- self.add_file(d,
- tarfile.DIRTYPE,
- uid=uid,
- gid=gid,
- uname=uname,
- gname=gname,
- mtime=mtime,
- mode=0o755)
- tarinfo = tarfile.TarInfo(name)
- tarinfo.mtime = mtime
- tarinfo.uid = uid
- tarinfo.gid = gid
- tarinfo.uname = uname
- tarinfo.gname = gname
- tarinfo.type = kind
- if mode is None:
- tarinfo.mode = 0o644 if kind == tarfile.REGTYPE else 0o755
- else:
- tarinfo.mode = mode
- if link:
- tarinfo.linkname = link
- if content:
- tarinfo.size = len(content)
- self._addfile(tarinfo, StringIO(content))
- elif file_content:
- with open(file_content, 'rb') as f:
- tarinfo.size = os.fstat(f.fileno()).st_size
- self._addfile(tarinfo, f)
- else:
- if kind == tarfile.DIRTYPE:
- self.directories.add(name)
- self._addfile(tarinfo)
-
- def add_tar(self,
- tar,
- rootuid=None,
- rootgid=None,
- numeric=False,
- name_filter=None,
- root=None):
- """Merge a tar content into the current tar, stripping timestamp.
-
- Args:
- tar: the name of tar to extract and put content into the current tar.
- rootuid: user id that we will pretend is root (replaced by uid 0).
- rootgid: group id that we will pretend is root (replaced by gid 0).
- numeric: set to true to strip out name of owners (and just use the
- numeric values).
- name_filter: filter out file by names. If not none, this method will be
- called for each file to add, given the name and should return true if
- the file is to be added to the final tar and false otherwise.
- root: place all non-absolute content under given root direcory, if not
- None.
-
- Raises:
- TarFileWriter.Error: if an error happens when uncompressing the tar file.
- """
- if root and root[0] not in ['/', '.']:
- # Root prefix should start with a '/', adds it if missing
- root = '/' + root
- compression = os.path.splitext(tar)[-1][1:]
- if compression == 'tgz':
- compression = 'gz'
- elif compression == 'bzip2':
- compression = 'bz2'
- elif compression == 'lzma':
- compression = 'xz'
- elif compression not in ['gz', 'bz2', 'xz']:
- compression = ''
- if compression == 'xz':
- # Python 2 does not support lzma, our py3 support is terrible so let's
- # just hack around.
- # Note that we buffer the file in memory and it can have an important
- # memory footprint but it's probably fine as we don't use them for really
- # large files.
- # TODO(dmarting): once our py3 support gets better, compile this tools
- # with py3 for proper lzma support.
- if subprocess.call('which xzcat', shell=True, stdout=subprocess.PIPE):
- raise self.Error('Cannot handle .xz and .lzma compression: '
- 'xzcat not found.')
- p = subprocess.Popen('cat %s | xzcat' % tar,
- shell=True,
- stdout=subprocess.PIPE)
- f = StringIO(p.stdout.read())
- p.wait()
- intar = tarfile.open(fileobj=f, mode='r:')
- else:
- intar = tarfile.open(name=tar, mode='r:' + compression)
- for tarinfo in intar:
- if name_filter is None or name_filter(tarinfo.name):
- tarinfo.mtime = 0
- if rootuid is not None and tarinfo.uid == rootuid:
- tarinfo.uid = 0
- tarinfo.uname = 'root'
- if rootgid is not None and tarinfo.gid == rootgid:
- tarinfo.gid = 0
- tarinfo.gname = 'root'
- if numeric:
- tarinfo.uname = ''
- tarinfo.gname = ''
-
- name = tarinfo.name
- if not name.startswith('/') and not name.startswith('.'):
- name = './' + name
- if root is not None:
- if name.startswith('.'):
- name = '.' + root + name.lstrip('.')
- # Add root dir with same permissions if missing. Note that
- # add_file deduplicates directories and is safe to call here.
- self.add_file('.' + root,
- tarfile.DIRTYPE,
- uid=tarinfo.uid,
- gid=tarinfo.gid,
- uname=tarinfo.uname,
- gname=tarinfo.gname,
- mtime=tarinfo.mtime,
- mode=0o755)
- # Relocate internal hardlinks as well to avoid breaking them.
- link = tarinfo.linkname
- if link.startswith('.') and tarinfo.type == tarfile.LNKTYPE:
- tarinfo.linkname = '.' + root + link.lstrip('.')
- tarinfo.name = name
-
- if tarinfo.isfile():
- self._addfile(tarinfo, intar.extractfile(tarinfo.name))
- else:
- self._addfile(tarinfo)
- intar.close()
-
- def close(self):
- """Close the output tar file.
-
- This class should not be used anymore after calling that method.
-
- Raises:
- TarFileWriter.Error: if an error happens when compressing the output file.
- """
- self.tar.close()
- if self.xz:
- # Support xz compression through xz... until we can use Py3
- if subprocess.call('which xz', shell=True, stdout=subprocess.PIPE):
- raise self.Error('Cannot handle .xz and .lzma compression: '
- 'xz not found.')
- subprocess.call(
- 'mv {0} {0}.d && xz -z {0}.d && mv {0}.d.xz {0}'.format(self.name),
- shell=True,
- stdout=subprocess.PIPE)
-
-
-class TarFile(object):
- """A class to generates a Docker layer."""
-
- class DebError(Exception):
- pass
-
- def __init__(self, output, directory, compression):
- self.directory = directory
- self.output = output
- self.compression = compression
-
- def __enter__(self):
- self.tarfile = TarFileWriter(self.output, self.compression)
- return self
-
- def __exit__(self, t, v, traceback):
- self.tarfile.close()
-
- def add_file(self, f, destfile, mode=None):
- """Add a file to the tar file.
-
- Args:
- f: the file to add to the layer
- destfile: the name of the file in the layer
- mode: force to set the specified mode, by
- default the value from the source is taken.
- `f` will be copied to `self.directory/destfile` in the layer.
- """
- dest = destfile.lstrip('/') # Remove leading slashes
- if self.directory and self.directory != '/':
- dest = self.directory.lstrip('/') + '/' + dest
- # If mode is unspecified, derive the mode from the file's mode.
- if mode is None:
- mode = 0o755 if os.access(f, os.X_OK) else 0o644
- self.tarfile.add_file(dest, file_content=f, mode=mode)
-
- def add_tar(self, tar):
- """Merge a tar file into the destination tar file.
-
- All files presents in that tar will be added to the output file
- under self.directory/path. No user name nor group name will be
- added to the output.
-
- Args:
- tar: the tar file to add
- """
- root = None
- if self.directory and self.directory != '/':
- root = self.directory
- self.tarfile.add_tar(tar, numeric=True, root=root)
-
- def add_link(self, symlink, destination):
- """Add a symbolic link pointing to `destination`.
-
- Args:
- symlink: the name of the symbolic link to add.
- destination: where the symbolic link point to.
- """
- self.tarfile.add_file(symlink, tarfile.SYMTYPE, link=destination)
-
- def add_deb(self, deb):
- """Extract a debian package in the output tar.
-
- All files presents in that debian package will be added to the
- output tar under the same paths. No user name nor group names will
- be added to the output.
-
- Args:
- deb: the tar file to add
-
- Raises:
- DebError: if the format of the deb archive is incorrect.
- """
- with SimpleArFile(deb) as arfile:
- current = arfile.next()
- while current and not current.filename.startswith('data.'):
- current = arfile.next()
- if not current:
- raise self.DebError(deb + ' does not contains a data file!')
- tmpfile = tempfile.mkstemp(suffix=os.path.splitext(current.filename)[-1])
- with open(tmpfile[1], 'wb') as f:
- f.write(current.data)
- self.add_tar(tmpfile[1])
- os.remove(tmpfile[1])
-
-
-def main(args, unused_argv):
- # Parse modes arguments
- default_mode = None
- if args.mode:
- # Convert from octal
- default_mode = int(args.mode, 8)
-
- mode_map = {}
- if args.modes:
- for filemode in args.modes:
- (f, mode) = filemode.split('=', 1)
- if f[0] == '/':
- f = f[1:]
- mode_map[f] = int(mode, 8)
-
- # Add objects to the tar file
- with TarFile(args.output[0], args.directory, args.compression) as output:
- for f in args.file:
- (inf, tof) = f.split('=', 1)
- mode = default_mode
- if tof[0] == '/' and (tof[1:] in mode_map):
- mode = mode_map[tof[1:]]
- elif tof in mode_map:
- mode = mode_map[tof]
- output.add_file(inf, tof, mode)
- for tar in args.tar:
- output.add_tar(tar)
- for deb in args.deb:
- output.add_deb(deb)
- for link in args.link:
- l = link.split(':', 1)
- output.add_link(l[0], l[1])
-
-
-# Replacement for gflags that I can't figure out how to import.
-
-parser = argparse.ArgumentParser(description='Build a deterministic tar file.')
-
-parser.add_argument("--output", nargs = 1,
- help='The output file, mandatory')
-
-parser.add_argument("--file", nargs="+",
- help='A file to add to the layer')
-
-parser.add_argument("--tar", nargs="*", default = [],
- help='A tar file to add to the layer')
-
-parser.add_argument("--link", nargs="*", default = [],
- help='A tar file to add to the layer')
-
-parser.add_argument("--deb", nargs="*", default = [],
- help='A deb file to add to the layer')
-
-parser.add_argument("--directory", nargs = "?",
- help='Directory in which to store the file inside the layer')
-
-parser.add_argument("--compression", nargs = "?",
- help='Compression (`gz` or `bz2`), default is none.')
-
-parser.add_argument("--mode", nargs = "?",
- help='Force the mode on the added files (in octal).')
-
-parser.add_argument("--modes", nargs="*", default = [],
- help='Specific mode to apply to specific file (from the file argument), e.g., path/to/file=0455.')
-
-
-if __name__ == '__main__':
- main(parser.parse_args(), sys.argv)
diff --git a/node/tools/dson.py b/node/tools/dson.py
deleted file mode 100644
index 1c744d3..0000000
--- a/node/tools/dson.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import os
-import json
-import argparse
-
-def get_default_excludes():
- return [
- "_args",
- "_from",
- "_inCache",
- "_installable",
- "_nodeVersion",
- "_npmOperationalInternal",
- "_npmUser",
- "_npmVersion",
- "_phantomChildren",
- "_resolved",
- "_requested",
- "_requiredBy",
- "_where",
- ]
-
-class Rewriter:
- def __init__(self, verbose, filenames, excludes):
- self.verbose = verbose
- self.filenames = filenames
- self.excludes = excludes
- self._current_filename = ""
- if verbose > 1:
- print("rewriter filenames: %s" % self.filenames)
- print("rewriter excludes: %s" % self.excludes)
- if verbose > 2:
- self.union = {}
-
- def walk_path(self, path):
- for subdir, dirs, files in os.walk(path):
- for file in files:
- if file in self.filenames:
- if self.verbose > 2:
- print("hit: file %s" % file)
- filepath = os.path.join(subdir, file)
- self.process_json_file(filepath)
- else:
- if self.verbose > 2:
- print("miss: file %s not in %s" % (file, self.filenames))
-
-
- def process_json_file(self, file):
- self._current_filename = file
- if self.verbose > 1:
- print "File: " + file
- json_obj = None
- with open(file, "r") as f:
- obj = json.load(f)
- if isinstance(obj, dict):
- json_obj = obj
- self.strip_excludes(json_obj)
- if json_obj:
- with open(file, "w") as f:
- json.dump(json_obj, f, sort_keys=True, indent=2)
-
- def strip_excludes(self, obj):
- """Remove all top-level json entries having a key in EXCLUDES. The
- json argument will be modified in place."""
- if not isinstance(obj, dict):
- raise ValueError("json argument must be a dict")
- excludes = self.excludes
-
- for key in obj.keys():
- val = obj[key]
- if key in excludes:
- del obj[key]
- if self.verbose:
- print "excluding: %s=%s from %s" % (key, val, self._current_filename)
- if hasattr(self, "union"):
- if key in vals:
- self.union[key] += [val]
- else:
- self.union[key] = [val]
-
- return obj
-
- def report(self):
- """Show output of the union of all top-level json objects."""
- if hasattr(self, "union"):
- for k, v in self.union.items():
- print k + ' ****************************************************************'
- print v
-
-
-def main():
- parser = argparse.ArgumentParser(
- description='Rewrite all json files deterministically within a file tree.')
- parser.add_argument("--path", nargs="+", default = [],
- help='The root path to start file walk.')
- parser.add_argument("--exclude", nargs="*", action="append", default = [],
- help='Top-level key names to exclude from matching json files.')
- parser.add_argument("--filename", nargs=1, action="append", default = [],
- help='Json filenames to match (exact match) when traversing path, example "package.json" or "bower.json"')
- parser.add_argument("--verbose", action="count", default=0,
- help='Print more debug messages.')
- args = parser.parse_args()
-
- excludes = []
- for keys in args.exclude:
- excludes += keys
- if not excludes:
- excludes = get_default_excludes()
-
- filenames = []
- for files in args.filename:
- filenames += files
- if not filenames:
- filename = ["package.json"]
-
- rewriter = Rewriter(args.verbose, filenames, excludes)
-
- for path in args.path:
- print("walking " + path)
- rewriter.walk_path(path)
-
- rewriter.report
-
-if __name__ == '__main__':
- main()
diff --git a/node/tools/dson_test.py b/node/tools/dson_test.py
deleted file mode 100644
index 313a593..0000000
--- a/node/tools/dson_test.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import json
-import unittest
-from dson import Rewriter, get_default_excludes
-
-class RewriteTest(unittest.TestCase):
- def __init__(self, *args, **kwargs):
- super(RewriteTest, self).__init__(*args, **kwargs)
- self.rewriter = Rewriter(verbose=0,
- filenames = ["package.json"],
- excludes=get_default_excludes())
-
- def strip(self, s):
- return self.rewriter.strip_excludes(json.loads(s))
-
- def test_invalid_json_args(self):
- with self.assertRaises(ValueError) as context:
- self.strip("[]")
- with self.assertRaises(ValueError) as context:
- self.strip("''")
- with self.assertRaises(ValueError) as context:
- self.strip("1")
-
- def test_blacklist(self):
- self.assertEqual({}, self.strip('{"_where": "foo"}'))
- self.assertEqual({}, self.strip('{"_npmOperationalInternal": "foo"}'))
-
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/tests/express/BUILD b/tests/express/BUILD
new file mode 100644
index 0000000..50d7b2b
--- /dev/null
+++ b/tests/express/BUILD
@@ -0,0 +1,18 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary")
+
+node_binary(
+ name = "server",
+ main = "server.js",
+ deps = [
+ "@yarn_modules//:_all_",
+ ]
+)
+
+sh_test(
+ name = "server_test",
+ size = "small",
+ srcs = ["server_test.sh"],
+ data = [":server"],
+)
diff --git a/tests/express/README.md b/tests/express/README.md
new file mode 100644
index 0000000..72f704a
--- /dev/null
+++ b/tests/express/README.md
@@ -0,0 +1,8 @@
+# Express Server Example
+
+This folder demonstrates the use of `node_binary` with a node module
+dependency that has transitive dependencies of its own. In this case,
+we're using the `yarn_modules.package_json` attribute rather than the
+`deps` attribute to specify the dependency on express. We're also
+using the `@yarn_modules//:_all_` pseudo-module target to pull in all
+the module dependencies (see the sketch below).
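+
+A minimal sketch of the WORKSPACE stanza (mirroring this folder's
+WORKSPACE file):
+
+```python
+yarn_modules(
+    name = "yarn_modules",
+    package_json = "//:package.json",
+)
+```
+
+The `node_binary` target then depends on `@yarn_modules//:_all_` to
+pull in express and its transitive modules.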
diff --git a/tests/express/WORKSPACE b/tests/express/WORKSPACE
new file mode 100644
index 0000000..e70f372
--- /dev/null
+++ b/tests/express/WORKSPACE
@@ -0,0 +1,13 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories", "yarn_modules")
+
+node_repositories()
+
+yarn_modules(
+ name = "yarn_modules",
+ package_json = "//:package.json",
+)
diff --git a/tests/express/package.json b/tests/express/package.json
new file mode 100644
index 0000000..b714bfe
--- /dev/null
+++ b/tests/express/package.json
@@ -0,0 +1,7 @@
+{
+ "name": "server",
+ "version": "1.0.0",
+ "dependencies": {
+ "express": "4.15.4"
+ }
+}
diff --git a/tests/express/server.js b/tests/express/server.js
new file mode 100644
index 0000000..cacaf81
--- /dev/null
+++ b/tests/express/server.js
@@ -0,0 +1,11 @@
+const express = require("express");
+
+const app = express();
+
+app.get('/', (req, res) => {
+ res.send('Hello World!');
+});
+
+app.listen(3000, () => {
+ console.log('Server listening on port 3000!');
+});
diff --git a/tests/express/server_test.sh b/tests/express/server_test.sh
new file mode 100755
index 0000000..1530f44
--- /dev/null
+++ b/tests/express/server_test.sh
@@ -0,0 +1,7 @@
+set -e
+
+if (./server &) | grep -q 'Server listening on port 3000!'; then
+ echo "PASS"
+else
+ exit 1
+fi
diff --git a/tests/helloworld/BUILD b/tests/helloworld/BUILD
new file mode 100644
index 0000000..a6fc9a6
--- /dev/null
+++ b/tests/helloworld/BUILD
@@ -0,0 +1,15 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary", "node_module")
+
+node_binary(
+ name = "helloworld",
+ main = "helloworld.js",
+)
+
+sh_test(
+ name = "helloworld_test",
+ size = "small",
+ srcs = ["helloworld_test.sh"],
+ data = [":helloworld"],
+)
diff --git a/tests/helloworld/README.md b/tests/helloworld/README.md
new file mode 100644
index 0000000..eb341a9
--- /dev/null
+++ b/tests/helloworld/README.md
@@ -0,0 +1,21 @@
+# Helloworld Example
+
+This folder demonstrates the simplest use of node_binary. To run the
+example:
+
+```sh
+$ bazel run //:helloworld
+```
+
+Here are the available targets (with brief explanation):
+
+```sh
+$ bazel query //:*
+//:helloworld_deploy.tgz # compressed archive file with node, script, and node_modules/**/*
+//:helloworld_deploy # rule that builds a compressed archive from all the files
+//:helloworld_files # rule that exposes all the files from a node_binary target
+//:helloworld # node_binary rule that builds a node_modules tree and writes a
+ # bash script that executes 'node node_modules/helloworld_module'
+//:helloworld_module # rule that builds a node_module using 'helloworld.js' as package.json main
+//:helloworld.js # javascript source file
+```
diff --git a/tests/helloworld/WORKSPACE b/tests/helloworld/WORKSPACE
new file mode 100644
index 0000000..54f06ce
--- /dev/null
+++ b/tests/helloworld/WORKSPACE
@@ -0,0 +1,8 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories", "yarn_modules")
+
+node_repositories()
diff --git a/tests/helloworld/helloworld.js b/tests/helloworld/helloworld.js
new file mode 100644
index 0000000..019c0f4
--- /dev/null
+++ b/tests/helloworld/helloworld.js
@@ -0,0 +1 @@
+console.log("Hello World!");
diff --git a/tests/helloworld/helloworld_test.sh b/tests/helloworld/helloworld_test.sh
new file mode 100755
index 0000000..774821b
--- /dev/null
+++ b/tests/helloworld/helloworld_test.sh
@@ -0,0 +1,7 @@
+set -e
+
+if ./helloworld | grep -q 'Hello World!'; then
+ echo "PASS"
+else
+ exit 1
+fi
diff --git a/tests/lyrics/BUILD b/tests/lyrics/BUILD
new file mode 100644
index 0000000..58d7805
--- /dev/null
+++ b/tests/lyrics/BUILD
@@ -0,0 +1,18 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary")
+
+node_binary(
+ name = "lyrics",
+ main = "lyrics.js",
+ deps = [
+ "@yarn_modules//:underscore",
+ ]
+)
+
+sh_test(
+ name = "lyrics_test",
+ size = "small",
+ srcs = ["lyrics_test.sh"],
+ data = [":lyrics"],
+)
diff --git a/tests/lyrics/README.md b/tests/lyrics/README.md
new file mode 100644
index 0000000..6ed2145
--- /dev/null
+++ b/tests/lyrics/README.md
@@ -0,0 +1,6 @@
+# Lyrics Example
+
+This folder demonstrates the simplest use of `node_binary` with a
+single node module dependency. In this case, we're using the
+`yarn_modules.deps` attribute rather than a `package.json` file to
+specify the dependency on underscore (see the sketch below).
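+
+A minimal sketch of the WORKSPACE stanza (mirroring this folder's
+WORKSPACE file):
+
+```python
+yarn_modules(
+    name = "yarn_modules",
+    deps = {
+        "underscore": "1.8.3",
+    },
+)
+```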
diff --git a/tests/lyrics/WORKSPACE b/tests/lyrics/WORKSPACE
new file mode 100644
index 0000000..b8fe8a6
--- /dev/null
+++ b/tests/lyrics/WORKSPACE
@@ -0,0 +1,15 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories", "yarn_modules")
+
+node_repositories()
+
+yarn_modules(
+ name = "yarn_modules",
+ deps = {
+ "underscore": "1.8.3",
+ }
+)
diff --git a/tests/lyrics/lyrics.js b/tests/lyrics/lyrics.js
new file mode 100644
index 0000000..c18d007
--- /dev/null
+++ b/tests/lyrics/lyrics.js
@@ -0,0 +1,20 @@
+const _ = require("underscore");
+
+var lyrics = [
+ 'I\'m a lumberjack and I\'m okay',
+ 'I sleep all night and I work all day',
+ 'He\'s a lumberjack and he\'s okay',
+ 'He sleeps all night and he works all day'
+];
+
+var counts = _(lyrics).chain()
+ .map(line => line.split(''))
+ .flatten()
+ .reduce((hash, l) => {
+ hash[l] = hash[l] || 0;
+ hash[l]++;
+ return hash;
+ }, {})
+ .value();
+
+console.log(`Count (letter a): ${counts.a}`);
diff --git a/tests/lyrics/lyrics_test.sh b/tests/lyrics/lyrics_test.sh
new file mode 100755
index 0000000..9f31de2
--- /dev/null
+++ b/tests/lyrics/lyrics_test.sh
@@ -0,0 +1,9 @@
+set -e
+
+if ./lyrics | grep -q 'Count (letter a): 16'; then
+ echo "PASS"
+else
+ exit 1
+fi
diff --git a/tests/mocha/BUILD b/tests/mocha/BUILD
new file mode 100644
index 0000000..99a8809
--- /dev/null
+++ b/tests/mocha/BUILD
@@ -0,0 +1,16 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary", "node_module", "mocha_test")
+
+mocha_test(
+ name = "test",
+ main = "test.js",
+)
+
+# Can one test a bazel test? I'm getting 'fork: Resource temporarily unavailable' when I try to run this.
+# sh_test(
+# name = "test_test",
+# size = "small",
+# srcs = ["test.sh"],
+# data = [":test"],
+# )
diff --git a/tests/mocha/WORKSPACE b/tests/mocha/WORKSPACE
new file mode 100644
index 0000000..831fdb4
--- /dev/null
+++ b/tests/mocha/WORKSPACE
@@ -0,0 +1,15 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories", "yarn_modules")
+
+node_repositories()
+
+yarn_modules(
+ name = "mocha_modules",
+ deps = {
+ "mocha": "3.5.3",
+ }
+)
diff --git a/tests/mocha/test.js b/tests/mocha/test.js
new file mode 100644
index 0000000..3c07044
--- /dev/null
+++ b/tests/mocha/test.js
@@ -0,0 +1,9 @@
+var assert = require('assert');
+
+describe('Array', function() {
+ describe('#indexOf()', function() {
+ it('should return -1 when the value is not present', function() {
+ assert.equal(-1, [1,2,3].indexOf(4));
+ });
+ });
+});
diff --git a/tests/mocha/test.sh b/tests/mocha/test.sh
new file mode 100755
index 0000000..49688ca
--- /dev/null
+++ b/tests/mocha/test.sh
@@ -0,0 +1,7 @@
+set -e
+
+if ./test | grep -q 'passing'; then
+ echo "PASS"
+else
+ exit 1
+fi
diff --git a/tests/namespace/BUILD b/tests/namespace/BUILD
new file mode 100644
index 0000000..7429b26
--- /dev/null
+++ b/tests/namespace/BUILD
@@ -0,0 +1,18 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary")
+
+node_binary(
+ name = "question",
+ main = "question.js",
+ deps = [
+ "//src/javascript/answer",
+ ],
+)
+
+sh_test(
+ name = "question_test",
+ size = "small",
+ srcs = ["question_test.sh"],
+ data = [":question"],
+)
diff --git a/tests/namespace/README.md b/tests/namespace/README.md
new file mode 100644
index 0000000..500f42d
--- /dev/null
+++ b/tests/namespace/README.md
@@ -0,0 +1,5 @@
+# Namespace Example
+
+This folder demonstrates the use of the `node_module.namespace` and
+`node_module.module_name` attributes to specify the name of the
+generated node module (see the sketch below).
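+
+A minimal sketch (matching `src/javascript/answer/BUILD` in this
+folder):
+
+```python
+node_module(
+    name = "answer",
+    index = "index.js",
+    namespace = "@pubref",
+    module_name = "meaning_of_life",
+)
+```
+
+The module can then be required as `@pubref/meaning_of_life`.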
diff --git a/tests/namespace/WORKSPACE b/tests/namespace/WORKSPACE
new file mode 100644
index 0000000..6df1f7e
--- /dev/null
+++ b/tests/namespace/WORKSPACE
@@ -0,0 +1,8 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories")
+
+node_repositories()
diff --git a/tests/namespace/question.js b/tests/namespace/question.js
new file mode 100644
index 0000000..f573362
--- /dev/null
+++ b/tests/namespace/question.js
@@ -0,0 +1,3 @@
+const answer = require("@pubref/meaning_of_life");
+
+console.log(`The meaning of life is ${answer}`);
diff --git a/tests/namespace/question_test.sh b/tests/namespace/question_test.sh
new file mode 100755
index 0000000..7abdcb6
--- /dev/null
+++ b/tests/namespace/question_test.sh
@@ -0,0 +1,7 @@
+set -e
+
+if ./question | grep -q 'The meaning of life is 42'; then
+ echo "PASS"
+else
+ exit 1
+fi
diff --git a/tests/namespace/src/javascript/answer/BUILD b/tests/namespace/src/javascript/answer/BUILD
new file mode 100644
index 0000000..7d1b7ff
--- /dev/null
+++ b/tests/namespace/src/javascript/answer/BUILD
@@ -0,0 +1,10 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_module")
+
+node_module(
+ name = "answer",
+ index = "index.js",
+ namespace = "@pubref",
+ module_name = "meaning_of_life",
+)
diff --git a/tests/namespace/src/javascript/answer/README.md b/tests/namespace/src/javascript/answer/README.md
new file mode 100644
index 0000000..e81efff
--- /dev/null
+++ b/tests/namespace/src/javascript/answer/README.md
@@ -0,0 +1,2 @@
+This folder demonstrates the use of `node_module.index`. In this case
+the generated `node_module` does not create a `package.json` file.
diff --git a/tests/namespace/src/javascript/answer/index.js b/tests/namespace/src/javascript/answer/index.js
new file mode 100644
index 0000000..888cae3
--- /dev/null
+++ b/tests/namespace/src/javascript/answer/index.js
@@ -0,0 +1 @@
+module.exports = 42;
diff --git a/tests/typescript/BUILD b/tests/typescript/BUILD
new file mode 100644
index 0000000..05dea52
--- /dev/null
+++ b/tests/typescript/BUILD
@@ -0,0 +1,69 @@
+package(default_visibility = ["//visibility:public"])
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_binary", "node_module")
+
+load("//:ts_module.bzl", "ts_module")
+
+# Demonstrates consuming compiled output from traditional JavaScript.
+node_binary(
+ name = "trad_report",
+ main = "trad_report.js",
+ deps = [
+ ":zoo",
+ "@yarn_modules//:typescript",
+ ]
+)
+
+# Shell script that runs 'node node_modules/report'. The report module
+# has a compiled index.js file, so it gets loaded by node's built-in
+# module resolution algorithm.
+node_binary(
+ name = "ts_report",
+ entrypoint = ":report",
+)
+
+# Build node_modules/report/{package.json,index.js,index.d.ts}
+# and a corresponding node_modules tree for the compilation with
+# node_modules/taxonomy and node_modules/zoo in it.
+ts_module(
+ name = "report",
+ srcs = ["index.ts"],
+ deps = [
+ ":zoo",
+ ]
+)
+
+# Build node_modules/zoo/{package.json,animal.js,animal.d.ts}
+# and a corresponding node_modules tree for the compilation with
+# node_modules/taxonomy in it.
+ts_module(
+ name = "zoo",
+ srcs = ["animal.ts"],
+ deps = [
+ ":taxonomy",
+ ]
+)
+
+# Build node_modules/taxonomy/{package.json,phyla.js,phyla.d.ts}
+ts_module(
+ name = "taxonomy",
+ srcs = ["phyla.ts"],
+)
+
+# Run the test
+sh_test(
+ name = "typescript_test",
+ size = "small",
+ srcs = ["typescript_test.sh"],
+ data = [":ts_report"],
+)
+
+
+# Note to self: genrule way to call typescript (too simple for real usage)
+# genrule(
+# name = "tsc",
+# srcs = ["hello.ts"],
+# outs = ["hello.js"],
+# cmd = "external/yarn_modules/node_modules/.bin/tsc --outDir $(@D) $(SRCS)",
+# tools = ["@yarn_modules//:tsc"],
+# )
diff --git a/tests/typescript/README.md b/tests/typescript/README.md
new file mode 100644
index 0000000..d9b3b25
--- /dev/null
+++ b/tests/typescript/README.md
@@ -0,0 +1,4 @@
+# TypeScript Example
+
+This folder demonstrates compiling TypeScript via a custom `ts_module`
+rule and consuming the output from traditional JavaScript (see the
+sketch below).
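+
+A minimal sketch of the rule chain (matching this folder's BUILD
+file):
+
+```python
+# Each ts_module compiles its srcs and wraps the output in a
+# node_module, so downstream code can require e.g. 'taxonomy/phyla'.
+ts_module(
+    name = "taxonomy",
+    srcs = ["phyla.ts"],
+)
+
+ts_module(
+    name = "zoo",
+    srcs = ["animal.ts"],
+    deps = [":taxonomy"],
+)
+```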
diff --git a/tests/typescript/WORKSPACE b/tests/typescript/WORKSPACE
new file mode 100644
index 0000000..a19a4f8
--- /dev/null
+++ b/tests/typescript/WORKSPACE
@@ -0,0 +1,16 @@
+local_repository(
+ name = "org_pubref_rules_node",
+ path = "../..",
+)
+
+load("@org_pubref_rules_node//node:rules.bzl", "node_repositories", "yarn_modules")
+
+node_repositories()
+
+yarn_modules(
+ name = "yarn_modules",
+ deps = {
+ "typescript": "2.5.2",
+ "@types/node": "8.0.28",
+ }
+)
diff --git a/tests/typescript/animal.ts b/tests/typescript/animal.ts
new file mode 100644
index 0000000..a230cae
--- /dev/null
+++ b/tests/typescript/animal.ts
@@ -0,0 +1,25 @@
+import { getKingdomByPhylum } from 'taxonomy/phyla';
+
+export abstract class Organism {
+ public abstract getKingdom(): string;
+}
+
+export class Animal extends Organism {
+
+ protected static readonly PHYLUM = "Chordata";
+
+ constructor(public readonly name: string) {
+ super();
+ }
+
+ getPhylum(): string {
+ return Animal.PHYLUM;
+ }
+
+ getKingdom(): string {
+ return getKingdomByPhylum(this.getPhylum());
+ }
+
+}
diff --git a/tests/typescript/index.ts b/tests/typescript/index.ts
new file mode 100644
index 0000000..5123be9
--- /dev/null
+++ b/tests/typescript/index.ts
@@ -0,0 +1,5 @@
+import { Animal } from "zoo/animal";
+
+const animal = new Animal("Bear");
+
+console.log(`animal "${animal.name}" has taxonomy ${animal.getKingdom()}/${animal.getPhylum()}`);
diff --git a/tests/typescript/phyla.ts b/tests/typescript/phyla.ts
new file mode 100644
index 0000000..866e2ee
--- /dev/null
+++ b/tests/typescript/phyla.ts
@@ -0,0 +1,18 @@
+/**
+ * Maps a phylum name to its kingdom.
+ */
+export function getKingdomByPhylum(name: string) {
+ switch (name) {
+ case "Rhizopoda":
+ case "Chlorophyta":
+ return "Protista";
+ case "Bryophyta":
+ case "Anthrophyta":
+ return "Plantae";
+ case "Porifera":
+ case "Chordata":
+ return "Animalia";
+ default:
+ throw new Error("Unknown phylum: " + name);
+ }
+}
diff --git a/tests/typescript/trad_report.js b/tests/typescript/trad_report.js
new file mode 100644
index 0000000..1c7c9d8
--- /dev/null
+++ b/tests/typescript/trad_report.js
@@ -0,0 +1,4 @@
+const Animal = require("zoo/animal").Animal;
+
+const animal = new Animal("Bear");
+console.log(`animal "${animal.name}" has taxonomy ${animal.getKingdom()}/${animal.getPhylum()}`);
diff --git a/tests/typescript/ts_module.bzl b/tests/typescript/ts_module.bzl
new file mode 100644
index 0000000..08b6eb4
--- /dev/null
+++ b/tests/typescript/ts_module.bzl
@@ -0,0 +1,150 @@
+load("@org_pubref_rules_node//node:rules.bzl", "node_module")
+
+
+def _get_d_ts_files(list):
+ files = []
+ for file in list:
+ if file.path.endswith(".d.ts"):
+ files.append(file)
+ return files
+
+
+def _build_node_module(ctx, compilation_dir, node_module):
+ outputs = []
+ for src in node_module.sources:
+ relpath = node_module.sourcemap[src.path]
+ dst = ctx.new_file("%s/node_modules/%s/%s" % (compilation_dir, node_module.name, relpath))
+ outputs.append(dst)
+
+ ctx.action(
+ mnemonic = "CopyNodeModuleForTs",
+ inputs = [src],
+ outputs = [dst],
+ command = "cp %s %s" % (src.path, dst.path),
+ )
+ return outputs
+
+
+def _ts_module_impl(ctx):
+ node = ctx.executable._node
+ tsc = ctx.executable._tsc
+ tsconfig = ctx.file.tsconfig
+ inputs = [node, tsc]
+ if tsconfig:
+ inputs.append(tsconfig)
+
+ compilation_dir = "package_" + ctx.label.name + ".tscompile"
+
+ node_modules = [] # list of output files (building a custom node_modules tree for the compilation)
+ for dep in ctx.attr.deps:
+ node_modules += _build_node_module(ctx, compilation_dir, dep.node_module)
+
+ output_js_files = []
+ output_js_map_files = []
+ output_d_ts_files = []
+
+ srcs = []
+ for src in ctx.files.srcs:
+ copied_src = ctx.new_file("%s/%s" % (compilation_dir, src.short_path))
+ ctx.action(
+ inputs = [src],
+ outputs = [copied_src],
+ command = "cp %s %s" % (src.path, copied_src.path),
+ )
+ srcs.append(copied_src)
+
+ for src in srcs:
+ inputs.append(src)
+ basefile = src.short_path[0:-len(src.extension) - 1]
+ if ctx.label.package:
+ basefile = ctx.label.package + "/" + basefile
+ js_out = ctx.new_file("%s.js" % basefile)
+ output_js_files.append(js_out)
+ d_ts_out = ctx.new_file("%s.d.ts" % basefile)
+ output_d_ts_files.append(d_ts_out)
+ if (ctx.attr.sourcemap):
+ js_map_out = ctx.new_file("%s.js.map" % basefile)
+ output_js_map_files.append(js_map_out)
+
+ arguments = [
+ tsc.path,
+ "--moduleResolution", "node",
+ "--declaration",
+ ] + ctx.attr.args
+
+ if ctx.attr.sourcemap:
+ arguments += ["--sourceMap"]
+
+ if tsconfig:
+ arguments += ["--project", tsconfig.path]
+
+ for src in srcs:
+ arguments.append(src.path)
+
+ outputs = output_js_files + output_d_ts_files + output_js_map_files
+
+ ctx.action(
+ mnemonic = "TypescriptCompile",
+ inputs = inputs + node_modules,
+ outputs = outputs,
+ executable = node,
+ arguments = arguments,
+ )
+
+ return struct(
+ files = depset(outputs),
+ ts_module = struct(
+ files = outputs,
+ tsconfig = tsconfig,
+ srcs = ctx.files.srcs,
+ )
+ )
+
+_ts_module = rule(
+ implementation = _ts_module_impl,
+ attrs = {
+ "srcs": attr.label_list(
+ allow_files = FileType([".ts", ".tsx"]),
+ mandatory = True,
+ ),
+ "deps": attr.label_list(
+ providers = ["node_module"],
+ ),
+ "tsconfig": attr.label(
+ allow_files = FileType(["tsconfig.json"]),
+ single_file = True,
+ mandatory = False,
+ ),
+ "sourcemap": attr.bool(
+ default = True,
+ ),
+ "args": attr.string_list(),
+ "_tsc": attr.label(
+ default = "@yarn_modules//:tsc_bin",
+ executable = True,
+ cfg = "host",
+ ),
+ "_node": attr.label(
+ default = Label("@node//:node"),
+ single_file = True,
+ allow_files = True,
+ executable = True,
+ cfg = "host",
+ ),
+ },
+)
+
+def ts_module(name = None, srcs = [], tsconfig = None, deps = [], sourcemap = True, **kwargs):
+ _ts_module(
+ name = name + ".tsc",
+ srcs = srcs,
+ tsconfig = tsconfig,
+ sourcemap = sourcemap,
+ deps = deps,
+ )
+ node_module(
+ name = name,
+ srcs = [name + ".tsc"],
+ deps = deps,
+ **kwargs
+ )
diff --git a/tests/typescript/typescript_test.sh b/tests/typescript/typescript_test.sh
new file mode 100755
index 0000000..a73a605
--- /dev/null
+++ b/tests/typescript/typescript_test.sh
@@ -0,0 +1,7 @@
+set -e
+
+if ./ts_report | grep -q 'animal "Bear" has taxonomy Animalia/Chordata'; then
+ echo "PASS"
+else
+ exit 1
+fi