chore: update WPT
Uzlopak authored and github-actions[bot] committed Aug 17, 2024
1 parent 3544311 commit a5720c9
Showing 291 changed files with 17,437 additions and 766 deletions.
26 changes: 0 additions & 26 deletions test/fixtures/wpt/README.md

This file was deleted.

6 changes: 6 additions & 0 deletions test/fixtures/wpt/common/dispatcher/dispatcher.js
@@ -16,6 +16,12 @@ function findLocation() {
if (location.href == 'about:srcdoc') {
return findLocationFromAncestors(window.parent);
}
if (location.protocol == 'blob:' || location.protocol == 'data:') {
// Work around blob: and data: URLs by falling back to the document's base URI.
if (self.document && self.document.baseURI) {
return self.document.baseURI;
}
}
return location;
}

Empty file modified test/fixtures/wpt/common/security-features/tools/generate.py
100644 → 100755
Empty file.
Empty file.
3 changes: 1 addition & 2 deletions test/fixtures/wpt/fetch/api/basic/keepalive.any.js
@@ -18,7 +18,7 @@ const {
* document event.
*/
function keepaliveSimpleRequestTest(method) {
for (const evt of ['load', 'pagehide', 'unload']) {
for (const evt of ['load', 'unload', 'pagehide']) {
const desc =
`[keepalive] simple ${method} request on '${evt}' [no payload]`;
promise_test(async (test) => {
@@ -30,7 +30,6 @@ function keepaliveSimpleRequestTest(method) {
if (evt != 'load') {
iframe.remove();
}
assert_equals(await getTokenFromMessage(), token1);

assertStashedTokenAsync(desc, token1);
}, `${desc}; setting up`);
2 changes: 1 addition & 1 deletion test/fixtures/wpt/fetch/api/basic/request-headers.any.js
@@ -54,7 +54,7 @@ requestHeaders("Fetch with POST with Blob body", url, "POST", new Blob(["Test"]), location.origin, "4");
requestHeaders("Fetch with POST with ArrayBuffer body", url, "POST", new ArrayBuffer(4), location.origin, "4");
requestHeaders("Fetch with POST with Uint8Array body", url, "POST", new Uint8Array(4), location.origin, "4");
requestHeaders("Fetch with POST with Int8Array body", url, "POST", new Int8Array(4), location.origin, "4");
requestHeaders("Fetch with POST with Float16Array body", url, "POST", new Float16Array(1), location.origin, "2");
requestHeaders("Fetch with POST with Float16Array body", url, "POST", () => new Float16Array(1), location.origin, "2");
requestHeaders("Fetch with POST with Float32Array body", url, "POST", new Float32Array(1), location.origin, "4");
requestHeaders("Fetch with POST with Float64Array body", url, "POST", new Float64Array(1), location.origin, "8");
requestHeaders("Fetch with POST with DataView body", url, "POST", new DataView(new ArrayBuffer(8), 0, 4), location.origin, "4");
23 changes: 23 additions & 0 deletions test/fixtures/wpt/fetch/api/resources/keepalive-helper.js
@@ -174,3 +174,26 @@ function keepaliveRedirectInUnloadTest(desc, {
desc, token, {expectTokenExist: expectFetchSucceed});
}, `${desc}; setting up`);
}

/**
 * Utility to create a pending keepalive fetch request.
 * The pending state is achieved by having the server (trickle.py) delay its
 * response to the fetch request; the delay is passed as a URL parameter.
 */
function createPendingKeepAliveRequest(delay, remote = false) {
// trickle.py delays its response to the client by the number of milliseconds appended to the URL.
const trickleRemoteURL = get_host_info().HTTPS_REMOTE_ORIGIN + '/fetch/api/resources/trickle.py?count=1&ms=';
const trickleLocalURL = get_host_info().HTTP_ORIGIN + '/fetch/api/resources/trickle.py?count=1&ms=';
const url = remote ? trickleRemoteURL : trickleLocalURL;

const body = '*'.repeat(10);
return fetch(url + delay, { keepalive: true, body, method: 'POST' }).then(res => {
return res.text();
}).then(() => {
return new Promise(resolve => step_timeout(resolve, 1));
}).catch((error) => {
return Promise.reject(error);
});
}
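
A minimal usage sketch for the helper above (illustrative only, not part of this commit; it assumes the usual testharness.js globals such as promise_test are loaded):

// Hypothetical usage sketch -- not part of this commit.
promise_test(async (t) => {
  // Keep the request pending on the same-origin server for 3 seconds.
  const pending = createPendingKeepAliveRequest(/*delay=*/ 3000);
  // ... tear down the document or iframe here while the request is in flight ...
  await pending;  // resolves once trickle.py finally responds
}, 'sketch: a keepalive request stays pending until trickle.py responds');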
@@ -1,3 +1,4 @@
// META: global=window
// META: title=realm of Response bytes()

"use strict";
@@ -20,7 +20,7 @@

// Check if the data compressed using Brotli with the dictionary can be
// decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Decompression using Brotli with the dictionary works as expected');

@@ -34,7 +34,7 @@

// Check if the data compressed using Zstandard with the dictionary can be
// decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=zstd-d`;
const data_url = `${kCompressedDataPath}?content_encoding=dcz`;
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Decompression using Zstandard with the dictionary works as expected');

@@ -50,7 +50,7 @@
// Check if the data compressed using Brotli with the dictionary can be
// decompressed.
const data_url =
getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`);
getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=dcb`);
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Decompression of a cross origin resource works as expected');

@@ -11,9 +11,9 @@
<body>
<script>

function addLinkRelDictionaryElement(url, crossOrigin) {
function addLinkRelCompressionDictionaryElement(url, crossOrigin) {
const link = document.createElement('link');
link.rel = 'dictionary';
link.rel = 'compression-dictionary';
link.href = url;
if (crossOrigin) {
link.crossOrigin = crossOrigin;
@@ -22,13 +22,14 @@
}

test(t => {
assert_true(document.createElement('link').relList.supports('dictionary'));
}, "Browser supports link element with dictionary rel.");
const link_element = document.createElement('link');
assert_true(link_element.relList.supports('compression-dictionary'));
}, "Browser supports link element with compression-dictionary rel.");

compression_dictionary_promise_test(async (t) => {
const dict_token = token();
const url = `${kRegisterDictionaryPath}?save_header=${dict_token}`;
addLinkRelDictionaryElement(url);
addLinkRelCompressionDictionaryElement(url);
// Wait for a while to ensure that the dictionary is fetched.
await new Promise(resolve => window.requestIdleCallback(resolve));
const headers = await waitUntilPreviousRequestHeaders(t, dict_token);
@@ -40,15 +41,15 @@
kDefaultDictionaryHashBase64);
// Check if the data compressed using Brotli with the dictionary can be
// decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Fetch same origin dictionary using link element');

compression_dictionary_promise_test(async (t) => {
const dict_token = token();
const url =
getRemoteHostUrl(`${kRegisterDictionaryPath}?save_header=${dict_token}`);
addLinkRelDictionaryElement(url, 'anonymous');
addLinkRelCompressionDictionaryElement(url, 'anonymous');
// Wait for a while to ensure that the dictionary is fetched.
await new Promise(resolve => window.requestIdleCallback(resolve));
const headers = await waitUntilPreviousRequestHeaders(
@@ -63,7 +64,7 @@
// Check if the data compressed using Brotli with the dictionary can be
// decompressed.
const data_url =
getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=br-d`);
getRemoteHostUrl(`${kCompressedDataPath}?content_encoding=dcb`);
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Fetch cross origin dictionary using link element');

@@ -11,11 +11,12 @@
<body>
<script>

async function addIframeWithLinkRelDictionaryHeader(dict_url) {
async function addIframeWithLinkRelCompressionDictionaryHeader(dict_url) {
return new Promise((resolve) => {
const base_page_url = './resources/empty.html';
const page_url =
base_page_url + `?pipe=header(link,<${dict_url}>; rel="dictionary")`;
base_page_url +
`?pipe=header(link,<${dict_url}>; rel="compression-dictionary")`;
const iframe = document.createElement('iframe');
iframe.src = page_url;
iframe.addEventListener('load', () => {
@@ -29,7 +30,8 @@
const dict_token = token();
const url = new URL(
`${kRegisterDictionaryPath}?save_header=${dict_token}`, location.href);
const iframe = await addIframeWithLinkRelDictionaryHeader(url.href);
const iframe =
await addIframeWithLinkRelCompressionDictionaryHeader(url.href);
t.add_cleanup(() => {
iframe.remove();
});
@@ -44,7 +46,7 @@
kDefaultDictionaryHashBase64);
// Check if the data compressed using Brotli with the dictionary can be
// decompressed.
const data_url = `${kCompressedDataPath}?content_encoding=br-d`;
const data_url = `${kCompressedDataPath}?content_encoding=dcb`;
assert_equals(await (await fetch(data_url)).text(), kExpectedCompressedData);
}, 'Fetch same origin dictionary using link header');

@@ -1,28 +1,31 @@
@@ -1,28 +1,31 @@
def main(request, response):
response.headers.set(b"Access-Control-Allow-Origin", b"*")
response.headers.set(b"Content-Type", b"text/plain")
response.headers.set(
b"Content-Dictionary",
b":U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:")

# `br_d_data` and `zstd_d_data` are generated using the following commands:
# `dcb_data` and `dcz_data` are generated using the following commands:
#
# $ echo "This is a test dictionary." > /tmp/dict
# $ echo -n "This is compressed test data using a test dictionary" \
# > /tmp/data
# $ brotli -o /tmp/out.brd -D /tmp/dict /tmp/data
# $ xxd -p /tmp/out.brd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
br_d_data = b"\xa1\x98\x01\x80\x62\xa4\x4c\x1d\xdf\x12\x84\x8c\xae\xc2\xca\x60\x22\x07\x6e\x81\x05\x14\xc9\xb7\xc3\x44\x8e\xbc\x16\xe0\x15\x0e\xec\xc1\xee\x34\x33\x3e\x0d"
# $ zstd -o /tmp/out.zstdd -D /tmp/dict /tmp/data
# $ xxd -p /tmp/out.zstdd | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
zstd_d_data = b"\x28\xb5\x2f\xfd\x24\x34\xf5\x00\x00\x98\x63\x6f\x6d\x70\x72\x65\x73\x73\x65\x64\x61\x74\x61\x20\x75\x73\x69\x6e\x67\x03\x00\x59\xf9\x73\x54\x46\x27\x26\x10\x9e\x99\xf2\xbc"
#
# $ echo -en '\xffDCB' > /tmp/out.dcb
# $ openssl dgst -sha256 -binary /tmp/dict >> /tmp/out.dcb
# $ brotli --stdout -D /tmp/dict /tmp/data >> /tmp/out.dcb
# $ xxd -p /tmp/out.dcb | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
dcb_data = b"\xff\x44\x43\x42\x53\x96\x9b\xcf\x5e\x96\x0e\x0e\xdb\xf0\xa4\xbd\xde\x6b\x0b\x3e\x93\x81\xe1\x56\xde\x7f\x5b\x91\xce\x83\x91\x62\x42\x70\xf4\x16\xa1\x98\x01\x80\x62\xa4\x4c\x1d\xdf\x12\x84\x8c\xae\xc2\xca\x60\x22\x07\x6e\x81\x05\x14\xc9\xb7\xc3\x44\x8e\xbc\x16\xe0\x15\x0e\xec\xc1\xee\x34\x33\x3e\x0d"
# $ echo -en '\x5e\x2a\x4d\x18\x20\x00\x00\x00' > /tmp/out.dcz
# $ openssl dgst -sha256 -binary /tmp/dict >> /tmp/out.dcz
# $ zstd -D /tmp/dict -f -o /tmp/tmp.zstd /tmp/data
# $ cat /tmp/tmp.zstd >> /tmp/out.dcz
# $ xxd -p /tmp/out.dcz | tr -d '\n' | sed 's/\(..\)/\\x\1/g'
dcz_data = b"\x5e\x2a\x4d\x18\x20\x00\x00\x00\x53\x96\x9b\xcf\x5e\x96\x0e\x0e\xdb\xf0\xa4\xbd\xde\x6b\x0b\x3e\x93\x81\xe1\x56\xde\x7f\x5b\x91\xce\x83\x91\x62\x42\x70\xf4\x16\x28\xb5\x2f\xfd\x24\x34\xf5\x00\x00\x98\x63\x6f\x6d\x70\x72\x65\x73\x73\x65\x64\x61\x74\x61\x20\x75\x73\x69\x6e\x67\x03\x00\x59\xf9\x73\x54\x46\x27\x26\x10\x9e\x99\xf2\xbc"

if b'content_encoding' in request.GET:
content_encoding = request.GET.first(b"content_encoding")
response.headers.set(b"Content-Encoding", content_encoding)
if content_encoding == b"br-d":
if content_encoding == b"dcb":
# Send the pre-compressed file
response.content = br_d_data
if content_encoding == b"zstd-d":
response.content = dcb_data
if content_encoding == b"dcz":
# Send the pre-compressed file
response.content = zstd_d_data
response.content = dcz_data
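
As the shell commands above indicate, the DCB payload is the 4-byte magic \xffDCB, then the 32-byte SHA-256 of the dictionary file, then the Brotli stream; DCZ uses an 8-byte zstd skippable-frame header in place of the magic. A hypothetical sanity check (not part of this commit) that the Content-Dictionary value matches the digest of the dictionary text written by the echo command:

// Hypothetical sketch: recompute the dictionary digest in a page context.
async function dictionaryDigestMatchesHeader() {
  const dictText = 'This is a test dictionary.\n';  // contents written by `echo` above
  const digest = await crypto.subtle.digest('SHA-256', new TextEncoder().encode(dictText));
  const b64 = btoa(String.fromCharCode(...new Uint8Array(digest)));
  // Expected to equal the structured-header value set by this handler.
  return `:${b64}:` === ':U5abz16WDg7b8KS93msLPpOB4Vbef1uRzoORYkJw9BY=:';
}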
2 changes: 1 addition & 1 deletion test/fixtures/wpt/fetch/fetch-later/README.md
@@ -1,3 +1,3 @@
# FetchLater Tests

These tests cover [FetchLater method](https://whatpr.org/fetch/1647/094ea69...152d725.html#fetch-later-method) related behaviors.
These tests cover [FetchLater method](https://whatpr.org/fetch/1647.html#dom-window-fetchlater) related behaviors.
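
For orientation, a rough sketch of the API surface these tests exercise, based on the spec draft linked above (illustrative, not part of this commit; option names may change as the proposal evolves):

// fetchLater() queues a request that the browser sends later, e.g. on page
// unload or once `activateAfter` milliseconds have elapsed.
const result = fetchLater('/report', {
  method: 'POST',
  body: 'page-unload-beacon',  // hypothetical payload
  activateAfter: 60_000,       // send after at most one minute
});
// result.activated becomes true once the deferred request has been sent.
console.log(result.activated);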
@@ -51,13 +51,5 @@ parallelPromiseTest(async t => {
await expectBeacon(uuid, {count: 1});
}, 'A same-origin iframe can trigger fetchLater.');

parallelPromiseTest(async t => {
const uuid = token();
const url = generateSetBeaconURL(uuid);

// Loads a same-origin iframe that fires a fetchLater request.
await loadFetchLaterIframe(HTTPS_NOTSAMESITE_ORIGIN, url);

// The iframe should have sent the request.
await expectBeacon(uuid, {count: 1});
}, 'A cross-origin iframe can trigger fetchLater.');
// The test to load a cross-origin iframe that fires a fetchLater request is in
// /fetch/fetch-later/permissions-policy/deferred-fetch-default-permissions-policy.tentative.https.window.js
@@ -0,0 +1,8 @@
# Permissions Policy: "deferred-fetch" Tests

This folder contains tests covering the "deferred-fetch" permissions policy,
which is used to gate the `fetchLater()` API.

The tests follow the patterns from permissions-policy/README.md to cover the
standard permissions-policy use cases for a new feature.
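
For context, a sketch of the pattern these tests check (illustrative, not from this commit): the feature must be delegated to a cross-origin iframe through the allow attribute, otherwise fetchLater() there is expected to be disallowed.

// Hypothetical parent-page snippet delegating the feature to a child frame.
const iframe = document.createElement('iframe');
iframe.src = 'https://not-same-site.example/child.html';  // hypothetical child page
iframe.allow = "deferred-fetch 'src'";                     // delegate "deferred-fetch"
document.body.appendChild(iframe);
// Inside child.html, calls to fetchLater() should now be permitted.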
@@ -0,0 +1,31 @@
// META: title=Permissions Policy "deferred-fetch" is allowed to redirect by allow attribute
// META: script=/permissions-policy/resources/permissions-policy.js
// META: script=/common/utils.js
// META: script=/common/get-host-info.sub.js
// META: script=/fetch/fetch-later/resources/fetch-later-helper.js
// META: script=/fetch/fetch-later/permissions-policy/resources/helper.js
// META: timeout=long
'use strict';

const {
HTTPS_ORIGIN,
HTTPS_NOTSAMESITE_ORIGIN,
} = get_host_info();

const baseUrl = '/permissions-policy/resources/redirect-on-load.html#';
const description = 'Permissions policy allow="deferred-fetch"';

async_test(t => {
test_feature_availability(
'fetchLater()', t,
getDeferredFetchPolicyInIframeHelperUrl(`${baseUrl}${HTTPS_ORIGIN}`),
expect_feature_available_default, /*feature_name=*/ 'deferred-fetch');
}, `${description} allows same-origin navigation in an iframe.`);

async_test(t => {
test_feature_availability(
'fetchLater()', t,
getDeferredFetchPolicyInIframeHelperUrl(
`${baseUrl}${HTTPS_NOTSAMESITE_ORIGIN}`),
expect_feature_unavailable_default, /*feature_name=*/ 'deferred-fetch');
}, `${description} disallows cross-origin navigation in an iframe.`);
@@ -0,0 +1,36 @@
// META: title=Permissions Policy "deferred-fetch" is allowed by allow attribute
// META: script=/permissions-policy/resources/permissions-policy.js
// META: script=/common/utils.js
// META: script=/common/get-host-info.sub.js
// META: script=/fetch/fetch-later/resources/fetch-later-helper.js
// META: script=/fetch/fetch-later/permissions-policy/resources/helper.js
// META: timeout=long
'use strict';

const {
HTTPS_ORIGIN,
HTTPS_NOTSAMESITE_ORIGIN,
} = get_host_info();

const description = 'Permissions policy "deferred-fetch"';
const attribute = 'allow="deferred-fetch" attribute';

async_test(
t => {
test_feature_availability(
'fetchLater()', t,
getDeferredFetchPolicyInIframeHelperUrl(HTTPS_ORIGIN),
expect_feature_available_default, /*feature_name=*/ 'deferred-fetch');
},
`${description} can be enabled in the same-origin iframe using ${
attribute}.`);

async_test(
t => {
test_feature_availability(
'fetchLater()', t,
getDeferredFetchPolicyInIframeHelperUrl(HTTPS_NOTSAMESITE_ORIGIN),
expect_feature_available_default, /*feature_name=*/ 'deferred-fetch');
},
`${description} can be enabled in the cross-origin iframe using ${
attribute}.`);
