tests: new typescript WPT runner (#9269)
lucacasonato committed Jan 27, 2021
1 parent ecfda65 commit 2638aa0
Showing 16 changed files with 1,623 additions and 666 deletions.
4 changes: 3 additions & 1 deletion .dprintrc.json
@@ -31,7 +31,9 @@
"std/node_modules",
"std/hash/_wasm",
"target",
"third_party"
"third_party",
"tools/wpt/expectation.json",
"tools/wpt/manifest.json"
],
"plugins": [
"https://plugins.dprint.dev/typescript-0.33.0.wasm",
24 changes: 23 additions & 1 deletion .github/workflows/ci.yml
@@ -90,7 +90,7 @@ jobs:
- name: Install Python
uses: actions/setup-python@v1
with:
python-version: "2.7"
python-version: "3.8"
architecture: x64

- name: Install Node
@@ -234,6 +234,28 @@ jobs:
cargo test --locked --doc
cargo test --locked --all-targets
- name: Configure hosts file for WPT (unix)
if: runner.os != 'Windows'
run: ./wpt make-hosts-file | sudo tee -a /etc/hosts
working-directory: test_util/wpt/

- name: Configure hosts file for WPT (windows)
if: runner.os == 'Windows'
working-directory: test_util/wpt/
run: python wpt make-hosts-file | Out-File $env:SystemRoot\System32\drivers\etc\hosts -Encoding ascii -Append

- name: Run web platform tests (release)
if: matrix.kind == 'test_release'
run: |
deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts setup
deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts run --quiet --release
- name: Run web platform tests (debug)
if: matrix.kind == 'test_debug'
run: |
deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts setup
deno run --unstable --allow-write --allow-read --allow-net --allow-env --allow-run ./tools/wpt.ts run --quiet
- name: Run Benchmarks
if: matrix.kind == 'bench'
run: cargo bench
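
The WPT steps added to the workflow above are the whole CI interface to the new TypeScript runner: a `setup` subcommand followed by a `run` subcommand, each launched with `deno run --unstable` and explicit permissions. A local helper that replays the same sequence could look like the sketch below; the command lines are copied from the CI steps, while the helper itself (its file name, structure, and error handling) is an assumption rather than anything this commit ships.

// Hypothetical local helper mirroring the WPT CI steps above; the file name,
// structure, and error handling are assumptions, not part of this commit.
const wptCmd = (subcommand: string, extra: string[] = []): string[] => [
  "deno", "run", "--unstable",
  "--allow-write", "--allow-read", "--allow-net", "--allow-env", "--allow-run",
  "./tools/wpt.ts", subcommand, ...extra,
];

async function run(cmd: string[]): Promise<void> {
  // Spawn the command and fail loudly if it exits non-zero.
  const proc = Deno.run({ cmd });
  const status = await proc.status();
  proc.close();
  if (!status.success) {
    throw new Error(`command failed: ${cmd.join(" ")}`);
  }
}

// Same sequence the CI jobs use: prepare the WPT checkout, then run the suites.
await run(wptCmd("setup"));
await run(wptCmd("run", ["--quiet"]));

Invoked as, say, `deno run --unstable -A ./run_wpt_local.ts` from the repository root, this mirrors the debug CI job; the release job differs only by passing `--release` to the `run` subcommand.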
1 change: 1 addition & 0 deletions .gitignore
@@ -9,6 +9,7 @@ gclient_config.py_entries
/gh-pages/
/target/
/std/hash/_wasm/target
/tools/wpt/manifest.json

# Files that help ensure VSCode can work but we don't want checked into the
# repo
2 changes: 1 addition & 1 deletion .gitmodules
@@ -8,5 +8,5 @@
shallow = true
[submodule "test_util/wpt"]
path = test_util/wpt
url = https://github.com/web-platform-tests/wpt.git
url = https://github.com/denoland/wpt.git
shallow = true
35 changes: 0 additions & 35 deletions cli/tests/WPT.md

This file was deleted.

246 changes: 0 additions & 246 deletions cli/tests/integration_tests.rs
@@ -6,12 +6,9 @@ use deno_core::url;
use deno_runtime::deno_fetch::reqwest;
use deno_runtime::deno_websocket::tokio_tungstenite;
use std::io::{BufRead, Write};
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
use tempfile::TempDir;
use test_util as util;
use walkdir::WalkDir;

macro_rules! itest(
($name:ident {$( $key:ident: $value:expr,)*}) => {
@@ -5193,249 +5190,6 @@ fn denort_direct_use_error() {
assert!(!status.success());
}

fn concat_bundle(
files: Vec<(PathBuf, String)>,
bundle_path: &Path,
init: String,
) -> String {
let bundle_url = url::Url::from_file_path(bundle_path).unwrap().to_string();

let mut bundle = init.clone();
let mut bundle_line_count = init.lines().count() as u32;
let mut source_map = sourcemap::SourceMapBuilder::new(Some(&bundle_url));

// In classic workers, `importScripts()` performs an actual import.
// However, we don't implement that function in Deno as we want to enforce
// the use of ES6 modules.
// To work around this, we:
// 1. Define `importScripts()` as a no-op (code below)
// 2. Capture its parameter from the source code and add it to the list of
// files to concatenate. (see `web_platform_tests()`)
bundle.push_str("function importScripts() {}\n");
bundle_line_count += 1;

for (path, text) in files {
let path = std::fs::canonicalize(path).unwrap();
let url = url::Url::from_file_path(path).unwrap().to_string();
let src_id = source_map.add_source(&url);
source_map.set_source_contents(src_id, Some(&text));

for (line_index, line) in text.lines().enumerate() {
bundle.push_str(line);
bundle.push('\n');
source_map.add_raw(
bundle_line_count,
0,
line_index as u32,
0,
Some(src_id),
None,
);

bundle_line_count += 1;
}
bundle.push('\n');
bundle_line_count += 1;
}

let mut source_map_buf: Vec<u8> = vec![];
source_map
.into_sourcemap()
.to_writer(&mut source_map_buf)
.unwrap();

bundle.push_str("//# sourceMappingURL=data:application/json;base64,");
let encoded_map = base64::encode(source_map_buf);
bundle.push_str(&encoded_map);

bundle
}

// TODO(lucacasonato): DRY with tsc_config.rs
/// Convert a jsonc library's `JsonValue` to a serde `Value`.
fn jsonc_to_serde(j: jsonc_parser::JsonValue) -> serde_json::Value {
use jsonc_parser::JsonValue;
use serde_json::Value;
use std::str::FromStr;
match j {
JsonValue::Array(arr) => {
let vec = arr.into_iter().map(jsonc_to_serde).collect();
Value::Array(vec)
}
JsonValue::Boolean(bool) => Value::Bool(bool),
JsonValue::Null => Value::Null,
JsonValue::Number(num) => {
let number =
serde_json::Number::from_str(&num).expect("could not parse number");
Value::Number(number)
}
JsonValue::Object(obj) => {
let mut map = serde_json::map::Map::new();
for (key, json_value) in obj.into_iter() {
map.insert(key, jsonc_to_serde(json_value));
}
Value::Object(map)
}
JsonValue::String(str) => Value::String(str),
}
}

#[test]
fn web_platform_tests() {
use deno_core::serde::Deserialize;

#[derive(Deserialize)]
#[serde(untagged)]
enum WptConfig {
Simple(String),
#[serde(rename_all = "camelCase")]
Options {
name: String,
expect_fail: Vec<String>,
},
}

let text =
std::fs::read_to_string(util::tests_path().join("wpt.jsonc")).unwrap();
let jsonc = jsonc_parser::parse_to_value(&text).unwrap().unwrap();
let config: std::collections::HashMap<String, Vec<WptConfig>> =
deno_core::serde_json::from_value(jsonc_to_serde(jsonc)).unwrap();

for (suite_name, includes) in config.into_iter() {
let suite_path = util::wpt_path().join(suite_name);
let dir = WalkDir::new(&suite_path)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
.filter(|f| {
let filename = f.file_name().to_str().unwrap();
filename.ends_with(".any.js")
|| filename.ends_with(".window.js")
|| filename.ends_with(".worker.js")
})
.filter_map(|f| {
let path = f
.path()
.strip_prefix(&suite_path)
.unwrap()
.to_str()
.unwrap();
for cfg in &includes {
match cfg {
WptConfig::Simple(name) if path.starts_with(name) => {
return Some((f.path().to_owned(), vec![]))
}
WptConfig::Options { name, expect_fail }
if path.starts_with(name) =>
{
return Some((f.path().to_owned(), expect_fail.to_vec()))
}
_ => {}
}
}
None
});

let testharness_path = util::wpt_path().join("resources/testharness.js");
let testharness_text = std::fs::read_to_string(&testharness_path)
.unwrap()
.replace("output:true", "output:false");
let testharnessreporter_path =
util::tests_path().join("wpt_testharnessconsolereporter.js");
let testharnessreporter_text =
std::fs::read_to_string(&testharnessreporter_path).unwrap();

for (test_file_path, expect_fail) in dir {
let test_file_text = std::fs::read_to_string(&test_file_path).unwrap();
let imports: Vec<(PathBuf, String)> = test_file_text
.split('\n')
.into_iter()
.filter_map(|t| {
// Hack: we don't implement `importScripts()`, and instead capture the
// parameter in source code; see `concat_bundle()` for more details.
if let Some(rest_import_scripts) = t.strip_prefix("importScripts(\"")
{
if let Some(import_path) = rest_import_scripts.strip_suffix("\");")
{
// The code in `testharness.js` silences the test outputs.
if import_path != "/resources/testharness.js" {
return Some(import_path);
}
}
}
t.strip_prefix("// META: script=")
})
.map(|s| {
let s = if s == "/resources/WebIDLParser.js" {
"/resources/webidl2/lib/webidl2.js"
} else {
s
};
if s.starts_with('/') {
util::wpt_path().join(format!(".{}", s))
} else {
test_file_path.parent().unwrap().join(s)
}
})
.map(|path| {
let text = std::fs::read_to_string(&path).unwrap();
(path, text)
})
.collect();

let mut variants: Vec<&str> = test_file_text
.split('\n')
.into_iter()
.filter_map(|t| t.strip_prefix("// META: variant="))
.collect();

if variants.is_empty() {
variants.push("");
}

for variant in variants {
let mut files = Vec::with_capacity(3 + imports.len());
files.push((testharness_path.clone(), testharness_text.clone()));
files.push((
testharnessreporter_path.clone(),
testharnessreporter_text.clone(),
));
files.extend(imports.clone());
files.push((test_file_path.clone(), test_file_text.clone()));

let mut file = tempfile::Builder::new()
.prefix("wpt-bundle-")
.suffix(".js")
.rand_bytes(5)
.tempfile()
.unwrap();

let bundle = concat_bundle(files, file.path(), "".to_string());
file.write_all(bundle.as_bytes()).unwrap();

let child = util::deno_cmd()
.current_dir(test_file_path.parent().unwrap())
.arg("run")
.arg("--location")
.arg(&format!("http://web-platform-tests/?{}", variant))
.arg("-A")
.arg(file.path())
.arg(deno_core::serde_json::to_string(&expect_fail).unwrap())
.arg("--quiet")
.stdin(std::process::Stdio::piped())
.spawn()
.unwrap();

let output = child.wait_with_output().unwrap();
if !output.status.success() {
file.keep().unwrap();
}
assert!(output.status.success());
}
}
}
}

#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn test_resolve_dns() {
use std::collections::BTreeMap;
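
The deleted `web_platform_tests` test above shows everything the old Rust runner had to do by hand: scan each test file for `// META: script=` and `// META: variant=` directives, capture `importScripts("...")` calls (since `importScripts()` itself was stubbed out as a no-op), remap the legacy `/resources/WebIDLParser.js` path to the bundled webidl2 build, and resolve absolute paths against the WPT checkout. For orientation, that directive scanning can be sketched in TypeScript roughly as follows; the names and function shape are illustrative assumptions, not the code in tools/wpt.ts, and the importScripts capture is omitted for brevity.

// Illustrative sketch of the META-directive scanning performed by the deleted
// Rust test. Names are assumptions; this is not the code in tools/wpt.ts.
interface TestPlan {
  scripts: string[]; // extra scripts to load before the test file itself
  variants: string[]; // query-string variants; [""] when none are declared
}

function scanMetaDirectives(
  source: string,
  testDir: string,
  wptRoot: string,
): TestPlan {
  const scripts: string[] = [];
  const variants: string[] = [];

  for (const line of source.split("\n")) {
    if (line.startsWith("// META: script=")) {
      let script = line.slice("// META: script=".length);
      // The old runner remapped this legacy alias onto the bundled webidl2 build.
      if (script === "/resources/WebIDLParser.js") {
        script = "/resources/webidl2/lib/webidl2.js";
      }
      // Absolute paths resolve against the WPT checkout; relative ones against
      // the directory containing the test file.
      scripts.push(
        script.startsWith("/") ? `${wptRoot}${script}` : `${testDir}/${script}`,
      );
    } else if (line.startsWith("// META: variant=")) {
      variants.push(line.slice("// META: variant=".length));
    }
  }

  // Every test runs at least once, with an empty variant when none are declared.
  if (variants.length === 0) variants.push("");
  return { scripts, variants };
}

Each variant was then handed to the test through `--location http://web-platform-tests/?{variant}`, as in the deleted code above, so an empty variant still yields one run per test file.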
