Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1867354 [wpt PR 43427] - Storage buckets: move internal WPT to external WPT, a=testonly
Automatic update from web-platform-tests
Storage buckets: move internal WPT to external WPT

Bug: 1099413
Change-Id: Ib88c79c1b22f7f1cceffc709fbd525910f1c8870
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/5072535
Commit-Queue: Evan Stade <estade@chromium.org>
Reviewed-by: Ayu Ishii <ayui@chromium.org>
Cr-Commit-Position: refs/heads/main@{#1232791}
--
wpt-commits: dd377f7d26fb9d43a5142f84a3b0a99ae96af309
wpt-pr: 43427
This commit is contained in:
Parent: be08e82b8f
Commit: 31eb11d9cc
@@ -2,6 +2,7 @@
// META: script=/storage/buckets/resources/util.js

promise_test(async t => {
  prepareForBucketTest(t);
  const arraySize = 1e6;
  const objectStoreName = "storageManager";
  const dbname =
@@ -0,0 +1,93 @@
// META: title=Buckets API: Basic tests for bucket names.
// META: script=resources/util.js
// META: global=window,worker

const kGoodBucketNameTests = [
  ['abcdefghijklmnopqrstuvwxyz0123456789-_', 'with allowed characters'],
  ['2021-01-01', 'with `-` in the middle'],
  ['2021_01_01', 'with `_` in the middle'],
  ['2021_01_01_', 'ending with `_`'],
  ['2021-01-01-', 'ending with `-`'],
];

const kBadBucketNameTests = [
  ['_bucket', 'start with `_`'],
  ['-bucket', 'start with `-`'],
  ['bucket name', 'have a space'],
  ['bUcKet123', 'are not all lower case'],
  ['bucket♦♥♠♣', 'are not in ASCII'],
  ['2021/01/01', 'include an invalid special character'],
  [' ', 'have no characters'],
  ['', 'are an empty string'],
  ['mjnkhtwsiyjsrxvrzzqafldfvomqopdjfiuxqelfkllcugrhvvblkvmiqlguhhqepoggyu',
   'exceed 64 chars']
];

// Test valid bucket names on open().
kGoodBucketNameTests.forEach(test_data => {
  const bucket_name = test_data[0];
  const test_description = test_data[1];

  promise_test(async testCase => {
    await prepareForBucketTest(testCase);
    const bucket = await navigator.storageBuckets.open(bucket_name);
    assert_equals(bucket.name, bucket_name);

    const buckets = await navigator.storageBuckets.keys();
    assert_array_equals(buckets, [bucket_name]);
  }, `open() allows bucket names ${test_description}`);
});

// Test invalid bucket names on open().
kBadBucketNameTests.forEach(test_data => {
  const bucket_name = test_data[0];
  const test_description = test_data[1];

  promise_test(async testCase => {
    await prepareForBucketTest(testCase);
    return promise_rejects_js(
        testCase, TypeError,
        navigator.storageBuckets.open(bucket_name));
  }, `open() throws an error if bucket names ${test_description}`);
});

// Test valid bucket names on delete().
kGoodBucketNameTests.forEach(test_data => {
  const bucket_name = test_data[0];
  const test_description = test_data[1];

  promise_test(async testCase => {
    await prepareForBucketTest(testCase);
    await navigator.storageBuckets.open(bucket_name);
    let buckets = await navigator.storageBuckets.keys();
    assert_equals(buckets.length, 1);

    await navigator.storageBuckets.delete(bucket_name);

    buckets = await navigator.storageBuckets.keys();
    assert_equals(buckets.length, 0);
  }, `delete() allows bucket names ${test_description}`);
});

// Test invalid bucket names on delete().
kBadBucketNameTests.forEach(test_data => {
  const bucket_name = test_data[0];
  const test_description = test_data[1];

  promise_test(async testCase => {
    await prepareForBucketTest(testCase);
    return promise_rejects_js(
        testCase, TypeError,
        navigator.storageBuckets.delete(bucket_name));
  }, `delete() throws an error if bucket names ${test_description}`);
});

promise_test(async testCase => {
  await prepareForBucketTest(testCase);

  await navigator.storageBuckets.open('bucket_name');
  await navigator.storageBuckets.open('bucket_name');

  const buckets = await navigator.storageBuckets.keys();
  assert_array_equals(buckets, ['bucket_name']);
}, 'open() does not store duplicate bucket names');
@@ -0,0 +1,50 @@
// META: title=Buckets API: Basic tests for open(), keys(), delete().
// META: script=resources/util.js
// META: global=window,worker

'use strict';

// This test is for the initial IDL version, optimized for debugging.
// Split and add extensive testing once implementations for the endpoints are
// added and method definitions are more defined.
promise_test(async testCase => {
  await prepareForBucketTest(testCase);

  await navigator.storageBuckets.open('bucket_name3');
  await navigator.storageBuckets.open('bucket_name1');
  await navigator.storageBuckets.open('bucket_name2');

  const buckets = await navigator.storageBuckets.keys();
  assert_array_equals(
      buckets, ['bucket_name1', 'bucket_name2', 'bucket_name3']);
}, 'keys() lists all stored bucket names alphabetically');

promise_test(async testCase => {
  await prepareForBucketTest(testCase);

  await navigator.storageBuckets.open('bucket_name1');
  await navigator.storageBuckets.open('bucket_name2');

  let buckets = await navigator.storageBuckets.keys();
  assert_array_equals(buckets, ['bucket_name1', 'bucket_name2']);

  await navigator.storageBuckets.delete('bucket_name1');

  buckets = await navigator.storageBuckets.keys();
  assert_array_equals(buckets, ['bucket_name2']);
}, 'delete() removes stored bucket name');

promise_test(async testCase => {
  await prepareForBucketTest(testCase);

  await navigator.storageBuckets.open('bucket_name');

  let buckets = await navigator.storageBuckets.keys();
  assert_array_equals(buckets, ['bucket_name']);

  await navigator.storageBuckets.delete('does-not-exist');

  buckets = await navigator.storageBuckets.keys();
  assert_equals(buckets.length, 1);
  assert_equals(buckets[0], 'bucket_name');
}, 'delete() does nothing if bucket name does not exist');
@@ -0,0 +1,32 @@
// META: title=Buckets API: Tests for bucket storage policies.
// META: script=resources/util.js
// META: global=window,worker

'use strict';

function sanitizeQuota(quota) {
  return Math.max(1, Math.min(Number.MAX_SAFE_INTEGER, Math.floor(quota)));
}

async function testQuota(storageKeyQuota, quota, name) {
  const safeQuota = sanitizeQuota(quota);
  const bucket = await navigator.storageBuckets.open(name, { quota: safeQuota });
  const estimateQuota = (await bucket.estimate()).quota;
  assert_equals(estimateQuota, Math.min(safeQuota, storageKeyQuota));
}

promise_test(async testCase => {
  await prepareForBucketTest(testCase);

  const storageKeyQuota = (await navigator.storage.estimate()).quota;

  await testQuota(storageKeyQuota, 1, 'one');
  await testQuota(storageKeyQuota, storageKeyQuota / 4, 'quarter');
  await testQuota(storageKeyQuota, storageKeyQuota / 2, 'half');
  await testQuota(storageKeyQuota, storageKeyQuota - 1, 'one_less');
  await testQuota(storageKeyQuota, storageKeyQuota, 'origin_quota');
  await testQuota(storageKeyQuota, storageKeyQuota + 1, 'one_more');
  await testQuota(storageKeyQuota, storageKeyQuota * 2, 'twice');
  await testQuota(storageKeyQuota, storageKeyQuota * 4, 'four_times');
  await testQuota(storageKeyQuota, Number.MAX_SAFE_INTEGER, 'max_safe_int');
}, 'For an individual bucket, the quota is the minimum of the requested quota and the StorageKey quota.');
@@ -0,0 +1,62 @@
<!doctype html>
<meta charset=utf-8>
<title>Storage Buckets API on detached iframe</title>
<link rel='help' href=''>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>

<body>
<script>
'use strict';

promise_test(async testCase => {
  const iframe = document.createElement('iframe');
  document.body.appendChild(iframe);
  const bucketManager = iframe.contentWindow.navigator.storageBuckets;
  await bucketManager.open('iframe-bucket');

  let bucketKeys = await bucketManager.keys();
  assert_equals(bucketKeys.length, 1);

  // Note that these tests cannot make use of `prepareForBucketTest` because the
  // iframe is detached before the test ends, so the test's cleanup step comes
  // too late to delete buckets.
  await bucketManager.delete('iframe-bucket');

  iframe.remove();

  // Calling open() from a detached iframe should fail but not crash.
  assert_equals(bucketManager.open('iframe-bucket'), undefined);
}, 'Verify open() on detached iframe returns an error');

promise_test(async testCase => {
  const iframe = document.createElement('iframe');
  document.body.appendChild(iframe);
  const bucketManager = iframe.contentWindow.navigator.storageBuckets;
  await bucketManager.open('iframe-bucket');

  let bucketKeys = await bucketManager.keys();
  assert_equals(bucketKeys.length, 1);
  await bucketManager.delete('iframe-bucket');

  iframe.remove();

  // Calling keys() from a detached iframe should fail but not crash.
  assert_equals(bucketManager.keys(), undefined);
}, 'Verify keys() on detached iframe returns an error');

promise_test(async testCase => {
  const iframe = document.createElement('iframe');
  document.body.appendChild(iframe);
  const bucketManager = iframe.contentWindow.navigator.storageBuckets;
  await bucketManager.open('iframe-bucket');
  await bucketManager.delete('iframe-bucket');

  iframe.remove();

  // Calling delete() from a detached iframe should fail but not crash.
  assert_equals(bucketManager.delete('foo-bucket'), undefined);
}, 'Verify delete() on detached iframe returns an error');

</script>
</body>
@@ -0,0 +1,22 @@
// META: global=window,worker
// META: script=/resources/WebIDLParser.js
// META: script=/resources/idlharness.js

'use strict';

idl_test(
    ['storage-buckets'],
    ['html'],
    async (idl_array, t) => {
      idl_array.add_objects({
        StorageBucketManager: ['navigator.storageBuckets'],
        StorageBucket: []
      });

      if (self.Window) {
        idl_array.add_objects({ Navigator: ['navigator'] });
      } else {
        idl_array.add_objects({ WorkerNavigator: ['navigator'] });
      }
    }
);
@@ -0,0 +1,58 @@
// META: title=Storage Buckets API: Interface is not exposed in opaque origins.
// META: script=resources/util.js
// META: global=window

const kSandboxWindowUrl = 'resources/opaque-origin-sandbox.html';

function add_iframe(test, src, sandbox) {
  const iframe = document.createElement('iframe');
  iframe.src = src;
  if (sandbox !== undefined) {
    iframe.sandbox = sandbox;
  }
  document.body.appendChild(iframe);
  test.add_cleanup(() => {
    iframe.remove();
  });
}

// |kSandboxWindowUrl| sends the result of methods on StorageBucketManager.
// For windows using sandbox="allow-scripts", it must produce a rejected
// promise.
async function verify_results_from_sandboxed_child_window(test) {
  const event_watcher = new EventWatcher(test, self, 'message');

  const first_message_event = await event_watcher.wait_for('message');
  assert_equals(
      first_message_event.data,
      'navigator.storageBuckets.open(): REJECTED: SecurityError');

  const second_message_event = await event_watcher.wait_for('message');
  assert_equals(
      second_message_event.data,
      'navigator.storageBuckets.keys(): REJECTED: SecurityError');

  const third_message_event = await event_watcher.wait_for('message');
  assert_equals(
      third_message_event.data,
      'navigator.storageBuckets.delete(): REJECTED: SecurityError');
}

promise_test(async testCase => {
  prepareForBucketTest(testCase);
  add_iframe(testCase, kSandboxWindowUrl, /*sandbox=*/ 'allow-scripts');
  await verify_results_from_sandboxed_child_window(testCase);
}, 'StorageBucketManager methods must reject in a sandboxed iframe.');

promise_test(async testCase => {
  prepareForBucketTest(testCase);
  const child_window_url = kSandboxWindowUrl +
      '?pipe=header(Content-Security-Policy, sandbox allow-scripts)';

  const child_window = window.open(child_window_url);
  testCase.add_cleanup(() => {
    child_window.close();
  });

  await verify_results_from_sandboxed_child_window(testCase);
}, 'StorageBucketManager methods must reject in a sandboxed opened window.');
@@ -0,0 +1,52 @@
<!DOCTYPE html>
<script>
'use strict'

// Sends the result of navigator.storageBuckets.open('bucket').

function post_message(data) {
  if (window.parent !== null) {
    window.parent.postMessage(data, { targetOrigin: '*' });
  }
  if (window.opener !== null) {
    window.opener.postMessage(data, { targetOrigin: '*' });
  }
}

try {
  navigator.storageBuckets.open('opaque-origin-bucket')
      .then(() => {
        post_message('navigator.storageBuckets.open(): FULFILLED');
      }).catch(error => {
        post_message(
            `navigator.storageBuckets.open(): REJECTED: ${error.name}`);
      });
} catch (error) {
  post_message(`navigator.storageBuckets.open(): EXCEPTION: ${error.name}`);
}

try {
  navigator.storageBuckets.keys()
      .then(() => {
        post_message('navigator.storageBuckets.keys(): FULFILLED');
      }).catch(error => {
        post_message(
            `navigator.storageBuckets.keys(): REJECTED: ${error.name}`);
      });
} catch (error) {
  post_message(`navigator.storageBuckets.keys(): EXCEPTION: ${error.name}`);
}

try {
  navigator.storageBuckets.delete('opaque-origin-bucket')
      .then(() => {
        post_message('navigator.storageBuckets.delete(): FULFILLED');
      }).catch(error => {
        post_message(
            `navigator.storageBuckets.delete(): REJECTED: ${error.name}`);
      });
} catch (error) {
  post_message(`navigator.storageBuckets.delete(): EXCEPTION: ${error.name}`);
}
</script>
@@ -4,7 +4,7 @@
// is over (whether it passes or fails).
async function prepareForBucketTest(test) {
  // Verify initial state.
-  assert_equals('', (await navigator.storageBuckets.keys()).join());
+  assert_equals((await navigator.storageBuckets.keys()).join(), '');
  // Clean up after test.
  test.add_cleanup(async function() {
    const keys = await navigator.storageBuckets.keys();
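The hunk above cuts off inside prepareForBucketTest. For orientation only, here is a minimal sketch of how such a helper plausibly continues; the cleanup loop is an assumption for illustration, not the verbatim contents of resources/util.js:

// Sketch (assumed continuation of the helper), not the file's verbatim content.
async function prepareForBucketTest(test) {
  // Verify initial state: no buckets should exist before the test runs.
  assert_equals((await navigator.storageBuckets.keys()).join(), '');
  // Clean up after the test: delete every bucket the test left behind.
  test.add_cleanup(async function() {
    const keys = await navigator.storageBuckets.keys();
    for (const key of keys) {
      await navigator.storageBuckets.delete(key);
    }
  });
}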
@@ -0,0 +1,143 @@
// META: title=Buckets API: Tests for the StorageBucket object.
// META: global=window,worker

'use strict';

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open('bucket_name');
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });
  const persisted = await bucket.persisted();
  assert_false(persisted);

  // Also verify that the promise is rejected after the bucket is deleted.
  await navigator.storageBuckets.delete('bucket_name');
  await promise_rejects_dom(testCase, 'UnknownError', bucket.persisted());
}, 'persisted() should default to false');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open('bucket_name');
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });
  const estimate = await bucket.estimate();
  assert_greater_than(estimate.quota, 0);
  assert_equals(estimate.usage, 0);

  const cacheName = 'attachments';
  const cacheKey = 'receipt1.txt';
  var inboxCache = await bucket.caches.open(cacheName);
  await inboxCache.put(cacheKey, new Response('bread x 2'))

  const estimate2 = await bucket.estimate();
  assert_equals(estimate.quota, estimate2.quota);
  assert_less_than(estimate.usage, estimate2.usage);
}, 'estimate() should retrieve quota usage');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open(
      'bucket_name', { durability: 'strict' });
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const durability = await bucket.durability();
  assert_equals('strict', durability);

  await navigator.storageBuckets.delete('bucket_name');
  await promise_rejects_dom(testCase, 'UnknownError', bucket.durability());
}, 'durability() should retrieve bucket durability specified during creation');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open('bucket_name');
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const durability = await bucket.durability();
  assert_equals('relaxed', durability);
}, 'Bucket durability defaults to relaxed');

promise_test(async testCase => {
  const oneYear = 365 * 24 * 60 * 60 * 1000;
  const expiresDate = Date.now() + oneYear;
  const bucket = await navigator.storageBuckets.open(
      'bucket_name', { expires: expiresDate });
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const expires = await bucket.expires();
  assert_equals(expires, expiresDate);
}, 'expires() should retrieve expires date');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open('bucket_name');
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const expires = await bucket.expires();
  assert_equals(expires, null);

  await navigator.storageBuckets.delete('bucket_name');
  await promise_rejects_dom(testCase, 'UnknownError', bucket.expires());
}, 'expires() should be defaulted to null');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open('bucket_name');
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const oneYear = 365 * 24 * 60 * 60 * 1000;
  const expiresDate = Date.now() + oneYear;
  await bucket.setExpires(expiresDate);

  const expires = await bucket.expires();
  assert_equals(expires, expiresDate);

  await navigator.storageBuckets.delete('bucket_name');
  await promise_rejects_dom(testCase, 'UnknownError', bucket.setExpires(expiresDate));
}, 'setExpires() should set bucket expires date');

promise_test(async testCase => {
  const oneDay = 24 * 60 * 60 * 1000;
  const expiresDate = Date.now() + oneDay;
  const bucket = await navigator.storageBuckets.open('bucket_name', {
    expires: expiresDate
  });
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });
  let expires = await bucket.expires();
  assert_equals(expires, expiresDate);

  const oneYear = 365 * oneDay;
  const newExpiresDate = Date.now() + oneYear;
  await bucket.setExpires(newExpiresDate);

  expires = await bucket.expires();
  assert_equals(expires, newExpiresDate);
}, 'setExpires() should update expires date');

promise_test(async testCase => {
  const bucket = await navigator.storageBuckets.open(
      'bucket_name', { durability: 'strict' });
  testCase.add_cleanup(async () => {
    await navigator.storageBuckets.delete('bucket_name');
  });

  const same_bucket = await navigator.storageBuckets.open('bucket_name');
  const durability = await bucket.durability();
  const other_durability = await same_bucket.durability();
  assert_equals(durability, other_durability);

  // Delete the bucket and remake it.
  await navigator.storageBuckets.delete('bucket_name');
  const remade_bucket = await navigator.storageBuckets.open('bucket_name');
  await promise_rejects_dom(testCase, 'UnknownError', bucket.durability());
  const remade_durability = await remade_bucket.durability();
  assert_not_equals(remade_durability, durability);
}, 'two handles can refer to the same bucket, and a bucket name can be reused after deletion');