Bug 1864020: test(webgpu): bump CTS to 41f89e77b67e6b66cb017be4e00235a0a9429ca7 r=webgpu-reviewers,nical
Differential Revision: https://phabricator.services.mozilla.com/D193225
@@ -54,7 +54,8 @@
"@typescript-eslint/no-unnecessary-type-constraint": "warn",
"@typescript-eslint/no-unused-vars": [
"warn",
{ "vars": "all", "args": "none", "varsIgnorePattern": "^_" }
// MAINTENANCE_TODO: Enable warnings for args
{ "vars": "all", "args": "none", "varsIgnorePattern": "^_", "argsIgnorePattern": "^_" }
],
"@typescript-eslint/prefer-as-const": "warn",
"@typescript-eslint/prefer-for-of": "warn",

@@ -8,21 +8,18 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2.3.1
- uses: actions/checkout@v3
with:
persist-credentials: false
- run: |
git fetch origin ${{ github.event.pull_request.head.sha }}
git checkout ${{ github.event.pull_request.head.sha }}
- uses: actions/setup-node@v2-beta
- uses: actions/setup-node@v3
with:
node-version: "16.x"
- run: npm ci
- run: npm test
- run: |
mkdir deploy-build/
cp -r README.md src standalone out docs deploy-build/
- uses: actions/upload-artifact@v2
with:
name: pr-artifact
path: deploy-build/
- name: copy out-wpt to wpt tree
run: |
git clone --depth 2 https://github.com/web-platform-tests/wpt.git
rsync -av out-wpt/ wpt/webgpu
- name: test wpt lint
run: ./wpt lint
working-directory: ./wpt

@@ -1,80 +0,0 @@
name: Workflow CI

on:
workflow_run:
workflows:
- "Pull Request CI"
types:
- completed

jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2.3.1
with:
persist-credentials: false
- run: |
PR=$(curl https://api.github.com/search/issues?q=${{ github.event.workflow_run.head_sha }} |
grep -Po "(?<=${{ github.event.workflow_run.repository.full_name }}\/pulls\/)\d*" | head -1)
echo "PR=$PR" >> $GITHUB_ENV
- uses: actions/github-script@v3
id: pr-artifact
with:
github-token: ${{secrets.GITHUB_TOKEN}}
result-encoding: string
script: |
const artifacts_url = context.payload.workflow_run.artifacts_url
const artifacts_req = await github.request(artifacts_url)
const artifact = artifacts_req.data.artifacts[0]
const download = await github.actions.downloadArtifact({
owner: context.repo.owner,
repo: context.repo.repo,
artifact_id: artifact.id,
archive_format: "zip"
})
return download.url
- run: |
rm -rf *
curl -L -o "pr-artifact.zip" "${{ steps.pr-artifact.outputs.result }}"
unzip -o pr-artifact.zip
rm pr-artifact.zip
- run: |
cat << EOF >> firebase.json
{
"hosting": {
"public": ".",
"ignore": [
"firebase.json",
"**/.*",
"**/node_modules/**"
]
}
}
EOF
cat << EOF >> .firebaserc
{
"projects": {
"default": "gpuweb-cts"
}
}
EOF
- id: deployment
continue-on-error: true
uses: FirebaseExtended/action-hosting-deploy@v0
with:
firebaseServiceAccount: ${{ secrets.FIREBASE_SERVICE_ACCOUNT_CTS }}
expires: 10d
channelId: cts-prs-${{ env.PR }}-${{ github.event.workflow_run.head_sha }}
- uses: peter-evans/create-or-update-comment@v1
continue-on-error: true
if: ${{ steps.deployment.outcome == 'success' }}
with:
issue-number: ${{ env.PR }}
body: |
Previews, as seen when this [build job](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) started (${{ github.event.workflow_run.head_sha }}):
[**Run tests**](${{ steps.deployment.outputs.details_url }}/standalone/) | [**View tsdoc**](${{ steps.deployment.outputs.details_url }}/docs/tsdoc/)
<!--
pr;head;sha
${{ env.PR }};${{ github.event.workflow_run.head_repository.full_name }};${{ github.event.workflow_run.head_sha }}
-->
@@ -1,26 +0,0 @@
name: WPT Lint Test

on:
pull_request:
branches: [main]
workflow_dispatch:

jobs:
test-wpt-lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
persist-credentials: false
- uses: actions/setup-node@v3
with:
node-version: "16.x"
- run: npm ci
- run: npm run wpt
- name: copy out-wpt to wpt tree
run: |
git clone --depth 2 https://github.com/web-platform-tests/wpt.git
rsync -av out-wpt/ wpt/webgpu
- name: test wpt lint
run: ./wpt lint
working-directory: ./wpt
@@ -2,7 +2,11 @@
/* eslint-disable prettier/prettier */
/* eslint-disable no-console */

const timer = require('grunt-timer');

module.exports = function (grunt) {
timer.init(grunt);

// Project configuration.
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
@@ -26,6 +30,10 @@ module.exports = function (grunt) {
cmd: 'node',
args: ['tools/validate', 'src/webgpu', 'src/stress', 'src/manual', 'src/unittests', 'src/demo'],
},
'validate-cache': {
cmd: 'node',
args: ['tools/gen_cache', 'out', 'src/webgpu', '--validate'],
},
'generate-wpt-cts-html': {
cmd: 'node',
args: ['tools/gen_wpt_cts_html', 'tools/gen_wpt_cfg_unchunked.json'],
@@ -106,17 +114,14 @@ module.exports = function (grunt) {
cmd: 'node',
args: ['node_modules/eslint/bin/eslint', 'src/**/*.ts', '--max-warnings=0'],
},
presubmit: {
cmd: 'node',
args: ['tools/presubmit'],
},
fix: {
cmd: 'node',
args: ['node_modules/eslint/bin/eslint', 'src/**/*.ts', '--fix'],
},
'autoformat-out-wpt': {
cmd: 'node',
args: ['node_modules/prettier/bin-prettier', '--loglevel=warn', '--write', 'out-wpt/**/*.js'],
// MAINTENANCE_TODO(gpuweb/cts#3128): This autoformat step is broken after a dependencies upgrade.
args: ['node_modules/prettier/bin/prettier.cjs', '--log-level=warn', '--write', 'out-wpt/**/*.js'],
},
tsdoc: {
cmd: 'node',
@@ -194,14 +199,13 @@ module.exports = function (grunt) {
registerTaskAndAddToHelp('pre', 'Run all presubmit checks: standalone+wpt+typecheck+unittest+lint', [
'clean',
'run:validate',
'run:validate-cache',
'build-standalone',
'run:generate-listings',
'build-wpt',
'run:build-out-node',
'run:generate-cache',
'build-done-message',
'ts:check',
'run:presubmit',
'run:unittest',
'run:lint',
'run:tsdoc-treatWarningsAsErrors',

@@ -1,7 +1,32 @@
# Adding Timing Metadata

## listing_meta.json files

`listing_meta.json` files are SEMI AUTO-GENERATED.

The raw data may be edited manually, to add entries or change timing values.

The **list** of tests must stay up to date, so it can be used by external
tools. This is verified by presubmit checks.

The `subcaseMS` values are estimates. They can be set to 0 if for some reason
you can't estimate the time (or there's an existing test with a long name and
slow subcases that would result in query strings that are too long), but this
will produce a non-fatal warning. Avoid creating new warnings whenever
possible. Any existing failures should be fixed (eventually).

### Performance

Note this data is typically captured by developers using higher-end
computers, so typical test machines might execute more slowly. For this
reason, the WPT chunking should be configured to generate chunks much shorter
than 5 seconds (a typical default time limit in WPT test executors) so they
should still execute in under 5 seconds on lower-end computers.

## Problem

When adding new tests to the CTS you may occasionally see an error like this
when running `npm test` or `npm run standalone`
when running `npm test` or `npm run standalone`:

```
ERROR: Tests missing from listing_meta.json. Please add the new tests (set subcaseMS to 0 if you cannot estimate it):
@@ -25,9 +50,10 @@ What this error message is trying to tell us, is that there is no entry for

These entries are estimates for the amount of time that subcases take to run,
and are used as inputs into the WPT tooling to attempt to portion out tests into
approximately same sized chunks.
approximately same-sized chunks.

If a value has been defaulted to 0 by someone, you will see warnings like this:

If a value has been defaulted to 0 by someone, you will see warnings like this
```
...
WARNING: subcaseMS≤0 found in listing_meta.json (allowed, but try to avoid):
@@ -38,71 +64,98 @@ WARNING: subcaseMS≤0 found in listing_meta.json (allowed, but try to avoid):
These messages should be resolved by adding appropriate entries to the JSON
file.

## Solution
## Solution 1 (manual, best for simple tests)

If you're developing new tests and need to update this file, it is sometimes
easiest to do so manually. Run your tests under your usual development workflow
and see how long they take. In the standalone web runner `npm start`, the total
time for a test case is reported on the right-hand side when the case logs are
expanded.

Record the average time per *subcase* across all cases of the test (you may need
to compute this) into the `listing_meta.json` file.

## Solution 2 (semi-automated)

There exists tooling in the CTS repo for generating appropriate estimates for
these values, though they do require some manual intervention. The rest of this
doc will be a walkthrough of running these tools.

### Default Value
Timing data can be captured in bulk and "merged" into this file using
the `merge_listing_times` tool. This is useful when a large number of tests
change or otherwise a lot of tests need to be updated, but it also automates the
manual steps above.

The first step is to add a default value for entry to
`src/webgpu/listing_meta.json`, since there is a chicken-and-egg problem for
updating these values.
The tool can also be used without any inputs to reformat `listing_meta.json`.
Please read the help message of `merge_listing_times` for more information.

### Placeholder Value

If your development workflow requires a clean build, the first step is to add a
placeholder value for entry to `src/webgpu/listing_meta.json`, since there is a
chicken-and-egg problem for updating these values.

```
"webgpu:shader,execution,expression,binary,af_matrix_addition:matrix:*": { "subcaseMS": 0 },
```

(It should have a value of 0, since later tooling updates the value if the newer
value is higher)
value is higher.)

### Websocket Logger

The first tool that needs to be run is `websocket-logger`, which uses a side
channel from WPT to report timing data when CTS is run via a websocket. This
The first tool that needs to be run is `websocket-logger`, which receives data
on a WebSocket channel to capture timing data when CTS is run. This
should be run in a separate process/terminal, since it needs to stay running
throughout the following steps.

At `tools/websocket-logger/`
In the `tools/websocket-logger/` directory:

```
npm ci
npm start
```

The output from this command will indicate where the results are being logged,
which will be needed later
which will be needed later. For example:

```
...
Writing to wslog-2023-09-11T18-57-34.txt
Writing to wslog-2023-09-12T18-57-34.txt
...
```

### Running CTS

Now we need to run the specific cases in CTS, which requires serving the CTS
locally.
Now we need to run the specific cases in CTS that we need to time.
This should be possible under any development workflow (as long as its runtime environment, like Node, supports WebSockets), but the most well-tested way is using the standalone web runner.

This requires serving the CTS locally. In the project root:

At project root
```
npm run standalone
npm start
```

Once this is started you can then direct a WebGPU enabled browser to the
specific CTS entry and run the tests, for example
specific CTS entry and run the tests, for example:

```
http://127.0.0.1:8080/standalone/q?webgpu:shader,execution,expression,binary,af_matrix_addition:matrix:*
http://localhost:8080/standalone/?q=webgpu:shader,execution,expression,binary,af_matrix_addition:matrix:*
```

If the tests have a high variance in runtime, you can run them multiple times.
The longest recorded time will be used.

### Merging metadata

The final step is to merge the new data that has been captured into the JSON
file.

This can be done using the following command
This can be done using the following command:

```
tools/merge_listing_times webgpu -- tools/websocket-logger/wslog-2023-09-11T18-57-34.txt
tools/merge_listing_times webgpu -- tools/websocket-logger/wslog-2023-09-12T18-57-34.txt
```

where the text file is the result file from websocket-logger.

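As an aside, the averaging step in Solution 1 above is plain arithmetic; a minimal TypeScript sketch (the numbers and the helper name are hypothetical, not part of this patch):

```
// Hypothetical example: one test's cases took 920 ms total in the standalone
// runner, and those cases contain 46 subcases altogether.
function estimateSubcaseMS(totalCaseTimeMS: number, subcaseCount: number): number {
  // listing_meta.json stores the *average* time per subcase.
  return totalCaseTimeMS / subcaseCount;
}

console.log(estimateSubcaseMS(920, 46)); // 20, i.e. { "subcaseMS": 20 }
```
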
@@ -690,6 +690,42 @@ library, or if this turns out to be a significant issue in the future, this
decision can be revisited.

## Abstract Float

### Accuracy

For the concrete floating point types (f32 & f16) the accuracy of operations are
defined in terms of their own type. Specifically for f32, correctly rounded
refers to the nearest f32 values, and ULP is in terms of the distance between
f32 values.

AbstractFloat internally is defined as a f64, and this applies for exact and
correctly rounded accuracies. Thus, correctly rounded refers to the nearest f64
values. However, AbstractFloat differs for ULP and absolute errors. Reading
the spec strictly, these all have unbounded accuracies, but it is recommended
that their accuracies be at least as good as the f32 equivalent.

The difference between f32 and f64 ULP at a specific value X are significant, so
at least as good as f32 requirement is always less strict than if it was
calculated in terms of f64. Similarly, for absolute accuracies the interval
`[x - epsilon, x + epsilon]` is always equal or wider if calculated as f32s
vs f64s.

If an inherited accuracy is only defined in terms of correctly rounded
accuracies, then the interval is calculated in terms of f64s. If any of the
defining accuracies are ULP or absolute errors, then the result falls into the
unbounded accuracy, but recommended to be at least as good as f32 bucket.

What this means from a CTS implementation is that for these "at least as good as
f32" error intervals, if the infinitely accurate result is finite for f32, then
the error interval for f64 is just the f32 interval. If the result is not finite
for f32, then the accuracy interval is just the unbounded interval.

How this is implemented in the CTS is by having the FPTraits for AbstractFloat
forward to the f32 implementation for the operations that are tested to be as
good as f32.

### Implementation

AbstractFloats are a compile time construct that exist in WGSL. They are
expressible as literal values or the result of operations that return them, but
a variable cannot be typed as an AbstractFloat. Instead, the variable needs be a
@@ -703,15 +739,18 @@ operations that return AbstractFloats.
As of the writing of this doc, this second option for testing AbstractFloats
is the one being pursued in the CTS.

### const_assert
#### const_assert

The first proposal is to lean on the `const_assert` statement that exists in
WGSL. For each test case a snippet of code would be written out that has a form
something like this

```
// foo(x) is the operation under test
const_assert lower < foo(x) // Result was below the acceptance interval
const_assert upper > foo(x) // Result was above the acceptance interval
```

where lower and upper would actually be string replaced with literals for the
bounds of the acceptance interval when generating the shader text.

@@ -733,7 +772,8 @@ indicate something is working, we would be depending on a signal that it isn't
working, and assuming if we don't receive that signal everything is good, not
that our signal mechanism was broken.

### Extracting Bits
#### Extracting Bits

The other proposal that was developed depends on the fact that AbstractFloat is
spec'd to be a f64 internally. So the CTS could store the result of an operation
as two 32-bit unsigned integers (or broken up into sign, exponent, and
@@ -827,6 +867,5 @@ shader being run.
- [binary16 on Wikipedia](https://en.wikipedia.org/wiki/Half-precision_floating-point_format)
- [IEEE-754 Floating Point Converter](https://www.h-schmidt.net/FloatConverter/IEEE754.html)
- [IEEE 754 Calculator](http://weitz.de/ieee/)
- [Keisan High Precision Calculator](https://keisan.casio.com/calculator)
- [On the definition of ulp(x)](https://hal.inria.fr/inria-00070503/document)
- [Float Exposed](https://float.exposed/)

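As an aside, the "Extracting Bits" approach above hinges on bit-level access to an f64; in JavaScript that is typically done with a DataView. An illustrative sketch (not code from this patch):

```
// Split a JS number (an IEEE-754 binary64, like AbstractFloat) into two
// 32-bit unsigned integers that a shader could plausibly write out.
function f64ToU32Pair(value: number): [lo: number, hi: number] {
  const view = new DataView(new ArrayBuffer(8));
  view.setFloat64(0, value, /* littleEndian */ true);
  return [view.getUint32(0, true), view.getUint32(4, true)];
}

const [lo, hi] = f64ToU32Pair(1.5); // 1.5 is 0x3FF8_0000_0000_0000
console.log(hi.toString(16), lo.toString(16)); // "3ff80000" "0"
```
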
dom/webgpu/tests/cts/checkout/package-lock.json (generated, 6650 lines changed): diff suppressed because it is too large.
@@ -31,48 +31,50 @@
},
"homepage": "https://github.com/gpuweb/cts#readme",
"devDependencies": {
"@babel/cli": "^7.19.3",
"@babel/core": "^7.20.5",
"@babel/preset-typescript": "^7.18.6",
"@types/babel__core": "^7.1.20",
"@types/dom-mediacapture-transform": "^0.1.4",
"@types/dom-webcodecs": "^0.1.5",
"@types/express": "^4.17.14",
"@types/jquery": "^3.5.14",
"@types/morgan": "^1.9.3",
"@types/node": "^14.18.12",
"@types/offscreencanvas": "^2019.7.0",
"@types/pngjs": "^6.0.1",
"@types/serve-index": "^1.9.1",
"@typescript-eslint/parser": "^4.33.0",
"@webgpu/types": "gpuweb/types#ca1a548178567e6021fd194380b97be1bf6b07b7",
"ansi-colors": "4.1.1",
"@babel/cli": "^7.23.0",
"@babel/core": "^7.23.2",
"@babel/preset-typescript": "^7.23.2",
"@types/babel__core": "^7.20.3",
"@types/dom-mediacapture-transform": "^0.1.8",
"@types/dom-webcodecs": "^0.1.9",
"@types/express": "^4.17.20",
"@types/jquery": "^3.5.25",
"@types/morgan": "^1.9.7",
"@types/node": "^20.8.10",
"@types/offscreencanvas": "^2019.7.2",
"@types/pngjs": "^6.0.3",
"@types/serve-index": "^1.9.3",
"@typescript-eslint/eslint-plugin": "^6.9.1",
"@typescript-eslint/parser": "^6.9.1",
"@webgpu/types": "^0.1.38",
"ansi-colors": "4.1.3",
"babel-plugin-add-header-comment": "^1.0.3",
"babel-plugin-const-enum": "^1.2.0",
"chokidar": "^3.5.3",
"eslint": "^7.11.0",
"eslint": "^8.52.0",
"eslint-plugin-ban": "^1.6.0",
"eslint-plugin-deprecation": "^1.3.3",
"eslint-plugin-deprecation": "^2.0.0",
"eslint-plugin-gpuweb-cts": "file:./tools/eslint-plugin-gpuweb-cts",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-import": "^2.29.0",
"express": "^4.18.2",
"grunt": "^1.5.3",
"grunt": "^1.6.1",
"grunt-cli": "^1.4.3",
"grunt-contrib-clean": "^2.0.1",
"grunt-contrib-copy": "^1.0.0",
"grunt-run": "^0.8.1",
"grunt-timer": "^0.6.0",
"grunt-ts": "^6.0.0-beta.22",
"gts": "^3.1.1",
"gts": "^5.2.0",
"http-server": "^14.1.1",
"morgan": "^1.10.0",
"playwright-core": "^1.29.2",
"pngjs": "^6.0.0",
"playwright-core": "^1.39.0",
"pngjs": "^7.0.0",
"portfinder": "^1.0.32",
"prettier": "~2.1.2",
"prettier": "~3.0.3",
"screenshot-ftw": "^1.0.5",
"serve-index": "^1.9.1",
"ts-node": "^9.0.0",
"typedoc": "^0.23.21",
"typescript": "~4.7.4"
"ts-node": "^10.9.1",
"typedoc": "^0.25.3",
"typescript": "~5.2.2"
}
}

@@ -3,15 +3,64 @@
* expensive to build using a two-level cache (in-memory, pre-computed file).
*/

import { assert } from '../util/util.js';

interface DataStore {
load(path: string): Promise<string>;
load(path: string): Promise<Uint8Array>;
}

/** Logger is a basic debug logger function */
export type Logger = (s: string) => void;

/** DataCache is an interface to a data store used to hold cached data */
/**
* DataCacheNode represents a single cache entry in the LRU DataCache.
* DataCacheNode is a doubly linked list, so that least-recently-used entries can be removed, and
* cache hits can move the node to the front of the list.
*/
class DataCacheNode {
public constructor(path: string, data: unknown) {
this.path = path;
this.data = data;
}

/** insertAfter() re-inserts this node in the doubly-linked list after `prev` */
public insertAfter(prev: DataCacheNode) {
this.unlink();
this.next = prev.next;
this.prev = prev;
prev.next = this;
if (this.next) {
this.next.prev = this;
}
}

/** unlink() removes this node from the doubly-linked list */
public unlink() {
const prev = this.prev;
const next = this.next;
if (prev) {
prev.next = next;
}
if (next) {
next.prev = prev;
}
this.prev = null;
this.next = null;
}

public readonly path: string; // The file path this node represents
public readonly data: unknown; // The deserialized data for this node
public prev: DataCacheNode | null = null; // The previous node in the doubly-linked list
public next: DataCacheNode | null = null; // The next node in the doubly-linked list
}

/** DataCache is an interface to a LRU-cached data store used to hold data cached by path */
export class DataCache {
public constructor() {
this.lruHeadNode.next = this.lruTailNode;
this.lruTailNode.prev = this.lruHeadNode;
}

/** setDataStore() sets the backing data store used by the data cache */
public setStore(dataStore: DataStore) {
this.dataStore = dataStore;
@@ -28,17 +77,20 @@ export class DataCache {
* building the data and storing it in the cache.
*/
public async fetch<Data>(cacheable: Cacheable<Data>): Promise<Data> {
// First check the in-memory cache
let data = this.cache.get(cacheable.path);
if (data !== undefined) {
this.log('in-memory cache hit');
return Promise.resolve(data as Data);
{
// First check the in-memory cache
const node = this.cache.get(cacheable.path);
if (node !== undefined) {
this.log('in-memory cache hit');
node.insertAfter(this.lruHeadNode);
return Promise.resolve(node.data as Data);
}
}
this.log('in-memory cache miss');
// In in-memory cache miss.
// Next, try the data store.
if (this.dataStore !== null && !this.unavailableFiles.has(cacheable.path)) {
let serialized: string | undefined;
let serialized: Uint8Array | undefined;
try {
serialized = await this.dataStore.load(cacheable.path);
this.log('loaded serialized');
@@ -49,16 +101,37 @@ export class DataCache {
}
if (serialized !== undefined) {
this.log(`deserializing`);
data = cacheable.deserialize(serialized);
this.cache.set(cacheable.path, data);
return data as Data;
const data = cacheable.deserialize(serialized);
this.addToCache(cacheable.path, data);
return data;
}
}
// Not found anywhere. Build the data, and cache for future lookup.
this.log(`cache: building (${cacheable.path})`);
data = await cacheable.build();
this.cache.set(cacheable.path, data);
return data as Data;
const data = await cacheable.build();
this.addToCache(cacheable.path, data);
return data;
}

/**
* addToCache() creates a new node for `path` and `data`, inserting the new node at the front of
* the doubly-linked list. If the number of entries in the cache exceeds this.maxCount, then the
* least recently used entry is evicted
* @param path the file path for the data
* @param data the deserialized data
*/
private addToCache(path: string, data: unknown) {
if (this.cache.size >= this.maxCount) {
const toEvict = this.lruTailNode.prev;
assert(toEvict !== null);
toEvict.unlink();
this.cache.delete(toEvict.path);
this.log(`evicting ${toEvict.path}`);
}
const node = new DataCacheNode(path, data);
node.insertAfter(this.lruHeadNode);
this.cache.set(path, node);
this.log(`added ${path}. new count: ${this.cache.size}`);
}

private log(msg: string) {
@@ -67,7 +140,12 @@ export class DataCache {
}
}

private cache = new Map<string, unknown>();
// Max number of entries in the cache before LRU entries are evicted.
private readonly maxCount = 4;

private cache = new Map<string, DataCacheNode>();
private lruHeadNode = new DataCacheNode('', null); // placeholder node (no path or data)
private lruTailNode = new DataCacheNode('', null); // placeholder node (no path or data)
private unavailableFiles = new Set<string>();
private dataStore: DataStore | null = null;
private debugLogger: Logger | null = null;
@@ -107,14 +185,13 @@ export interface Cacheable<Data> {
build(): Promise<Data>;

/**
* serialize() transforms `data` to a string (usually JSON encoded) so that it
* can be stored in a text cache file.
* serialize() encodes `data` to a binary representation so that it can be stored in a cache file.
*/
serialize(data: Data): string;
serialize(data: Data): Uint8Array;

/**
* deserialize() is the inverse of serialize(), transforming the string back
* to the Data object.
* deserialize() is the inverse of serialize(), decoding the binary representation back to a Data
* object.
*/
deserialize(serialized: string): Data;
deserialize(binary: Uint8Array): Data;
}

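As an aside, the node juggling above is a standard LRU policy. For comparison only (this is not the patch's code), the same eviction behavior can be sketched with a Map, which iterates in insertion order:

```
// Minimal LRU sketch; assumes maxCount >= 1.
class LruCache<V> {
  private map = new Map<string, V>();
  constructor(private readonly maxCount: number) {}

  get(key: string): V | undefined {
    const value = this.map.get(key);
    if (value !== undefined) {
      this.map.delete(key); // re-insert to mark as most recently used
      this.map.set(key, value);
    }
    return value;
  }

  set(key: string, value: V): void {
    this.map.delete(key); // updating an existing key must not trigger eviction
    if (this.map.size >= this.maxCount) {
      // The first key in iteration order is the least recently used.
      this.map.delete(this.map.keys().next().value!);
    }
    this.map.set(key, value);
  }
}
```

The linked-list version in the patch achieves the same effect while keeping each node's identity stable, which avoids re-hashing on every hit.
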
@@ -1,6 +1,6 @@
import { TestCaseRecorder } from '../internal/logging/test_case_recorder.js';
import { JSONWithUndefined } from '../internal/params_utils.js';
import { assert, unreachable } from '../util/util.js';
import { assert, ExceptionCheckOptions, unreachable } from '../util/util.js';

export class SkipTestCase extends Error {}
export class UnexpectedPassError extends Error {}
@@ -150,7 +150,7 @@ export class Fixture<S extends SubcaseBatchState = SubcaseBatchState> {
o instanceof WebGLRenderingContext ||
o instanceof WebGL2RenderingContext
) {
this.objectsToCleanUp.push((o as unknown) as DestroyableObject);
this.objectsToCleanUp.push(o as unknown as DestroyableObject);
}
}
return o;
@@ -166,6 +166,13 @@ export class Fixture<S extends SubcaseBatchState = SubcaseBatchState> {
throw new SkipTestCase(msg);
}

/** Throws an exception marking the subcase as skipped if condition is true */
skipIf(cond: boolean, msg: string | (() => string) = '') {
if (cond) {
this.skip(typeof msg === 'function' ? msg() : msg);
}
}

/** Log a warning and increase the result status to "Warn". */
warn(msg?: string): void {
this.rec.warn(new Error(msg));
@@ -230,16 +237,26 @@ export class Fixture<S extends SubcaseBatchState = SubcaseBatchState> {
}

/** Expect that the provided promise rejects, with the provided exception name. */
shouldReject(expectedName: string, p: Promise<unknown>, msg?: string): void {
shouldReject(
expectedName: string,
p: Promise<unknown>,
{ allowMissingStack = false, message }: ExceptionCheckOptions = {}
): void {
this.eventualAsyncExpectation(async niceStack => {
const m = msg ? ': ' + msg : '';
const m = message ? ': ' + message : '';
try {
await p;
niceStack.message = 'DID NOT REJECT' + m;
this.rec.expectationFailed(niceStack);
} catch (ex) {
niceStack.message = 'rejected as expected' + m;
this.expectErrorValue(expectedName, ex, niceStack);
if (!allowMissingStack) {
if (!(ex instanceof Error && typeof ex.stack === 'string')) {
const exMessage = ex instanceof Error ? ex.message : '?';
niceStack.message = `rejected as expected, but missing stack (${exMessage})${m}`;
this.rec.expectationFailed(niceStack);
}
}
}
});
}
@@ -250,8 +267,12 @@ export class Fixture<S extends SubcaseBatchState = SubcaseBatchState> {
*
* MAINTENANCE_TODO: Change to `string | false` so the exception name is always checked.
*/
shouldThrow(expectedError: string | boolean, fn: () => void, msg?: string): void {
const m = msg ? ': ' + msg : '';
shouldThrow(
expectedError: string | boolean,
fn: () => void,
{ allowMissingStack = false, message }: ExceptionCheckOptions = {}
) {
const m = message ? ': ' + message : '';
try {
fn();
if (expectedError === false) {
@@ -264,6 +285,11 @@ export class Fixture<S extends SubcaseBatchState = SubcaseBatchState> {
this.rec.expectationFailed(new Error('threw unexpectedly' + m));
} else {
this.expectErrorValue(expectedError, ex, new Error(m));
if (!allowMissingStack) {
if (!(ex instanceof Error && typeof ex.stack === 'string')) {
this.rec.expectationFailed(new Error('threw as expected, but missing stack' + m));
}
}
}
}
}

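As an aside, the old positional `msg` argument becomes an options bag under the new signatures. A hypothetical call site (the fixture `t` and the promise are illustrative, not from this patch):

```
// Before: t.shouldReject('OperationError', somePromise, 'optional context');
// After:
t.shouldReject('OperationError', somePromise, {
  message: 'optional context',
  allowMissingStack: true, // tolerate host-synthesized exceptions with no .stack
});
```
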
@@ -1,6 +1,7 @@
import { Merged, mergeParams, mergeParamsChecked } from '../internal/params_utils.js';
import { comparePublicParamsPaths, Ordering } from '../internal/query/compare.js';
import { stringifyPublicParams } from '../internal/query/stringify_params.js';
import { DeepReadonly } from '../util/types.js';
import { assert, mapLazy, objectEquals } from '../util/util.js';

import { TestParams } from './fixture.js';
@@ -81,7 +82,7 @@ export interface ParamsBuilder {
*/
export type ParamTypeOf<
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
T extends ParamsBuilder
T extends ParamsBuilder,
> = T extends SubcaseParamsBuilder<infer CaseP, infer SubcaseP>
? Merged<CaseP, SubcaseP>
: T extends CaseParamsBuilder<infer CaseP>
@@ -98,7 +99,7 @@ export type ParamTypeOf<
* - `[case params, undefined]` if not.
*/
export type CaseSubcaseIterable<CaseP, SubcaseP> = Iterable<
readonly [CaseP, Iterable<SubcaseP> | undefined]
readonly [DeepReadonly<CaseP>, Iterable<DeepReadonly<SubcaseP>> | undefined]
>;

/**
@@ -130,7 +131,7 @@ export function builderIterateCasesWithSubcases(
iterateCasesWithSubcases(caseFilter: TestParams | null): CaseSubcaseIterable<{}, {}>;
}

return ((builder as unknown) as IterableParamsBuilder).iterateCasesWithSubcases(caseFilter);
return (builder as unknown as IterableParamsBuilder).iterateCasesWithSubcases(caseFilter);
}

/**
@@ -143,7 +144,8 @@ export function builderIterateCasesWithSubcases(
*/
export class CaseParamsBuilder<CaseP extends {}>
extends ParamsBuilderBase<CaseP, {}>
implements Iterable<CaseP>, ParamsBuilder {
implements Iterable<DeepReadonly<CaseP>>, ParamsBuilder
{
*iterateCasesWithSubcases(caseFilter: TestParams | null): CaseSubcaseIterable<CaseP, {}> {
for (const caseP of this.cases(caseFilter)) {
if (caseFilter) {
@@ -155,12 +157,12 @@ export class CaseParamsBuilder<CaseP extends {}>
}
}

yield [caseP, undefined];
yield [caseP as DeepReadonly<typeof caseP>, undefined];
}
}

[Symbol.iterator](): Iterator<CaseP> {
return this.cases(null);
[Symbol.iterator](): Iterator<DeepReadonly<CaseP>> {
return this.cases(null) as Iterator<DeepReadonly<CaseP>>;
}

/** @inheritDoc */
@@ -229,7 +231,7 @@ export class CaseParamsBuilder<CaseP extends {}>
values: Iterable<NewPValue>
): CaseParamsBuilder<Merged<CaseP, { [name in NewPKey]: NewPValue }>> {
assertNotGenerator(values);
const mapped = mapLazy(values, v => ({ [key]: v } as { [name in NewPKey]: NewPValue }));
const mapped = mapLazy(values, v => ({ [key]: v }) as { [name in NewPKey]: NewPValue });
return this.combineWithParams(mapped);
}

@@ -278,7 +280,8 @@ export const kUnitCaseParamsBuilder = new CaseParamsBuilder(function* () {
*/
export class SubcaseParamsBuilder<CaseP extends {}, SubcaseP extends {}>
extends ParamsBuilderBase<CaseP, SubcaseP>
implements ParamsBuilder {
implements ParamsBuilder
{
protected readonly subcases: (_: CaseP) => Generator<SubcaseP>;

constructor(
@@ -302,7 +305,10 @@ export class SubcaseParamsBuilder<CaseP extends {}, SubcaseP extends {}>

const subcases = Array.from(this.subcases(caseP));
if (subcases.length) {
yield [caseP, subcases];
yield [
caseP as DeepReadonly<typeof caseP>,
subcases as DeepReadonly<(typeof subcases)[number]>[],
];
}
}
}

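As an aside, `DeepReadonly` (imported above from `../util/types.js`) is a recursive mapped type; one common formulation, which may differ in detail from the CTS version, is:

```
// One common recursive-readonly formulation (an assumption, not the CTS source):
type DeepReadonly<T> = T extends (infer E)[]
  ? readonly DeepReadonly<E>[]
  : T extends object
    ? { readonly [K in keyof T]: DeepReadonly<T[K]> }
    : T;

// A DeepReadonly<{ a: { b: number[] } }> then rejects writes at every depth.
```
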
@@ -29,6 +29,9 @@ interface TestFileLoaderEventMap {
finish: MessageEvent<void>;
}

// Override the types for addEventListener/removeEventListener so the callbacks can be used as
// strongly-typed.
/* eslint-disable-next-line @typescript-eslint/no-unsafe-declaration-merging */
export interface TestFileLoader extends EventTarget {
addEventListener<K extends keyof TestFileLoaderEventMap>(
type: K,
@@ -53,19 +56,16 @@ export interface TestFileLoader extends EventTarget {
}

// Base class for DefaultTestFileLoader and FakeTestFileLoader.
/* eslint-disable-next-line @typescript-eslint/no-unsafe-declaration-merging */
export abstract class TestFileLoader extends EventTarget {
abstract listing(suite: string): Promise<TestSuiteListing>;
protected abstract import(path: string): Promise<SpecFile>;

async importSpecFile(suite: string, path: string[]): Promise<SpecFile> {
const url = `${suite}/${path.join('/')}.spec.js`;
this.dispatchEvent(
new MessageEvent<ImportInfo>('import', { data: { url } })
);
this.dispatchEvent(new MessageEvent<ImportInfo>('import', { data: { url } }));
const ret = await this.import(url);
this.dispatchEvent(
new MessageEvent<ImportInfo>('imported', { data: { url } })
);
this.dispatchEvent(new MessageEvent<ImportInfo>('imported', { data: { url } }));
return ret;
}


@@ -3,7 +3,7 @@ import { LogMessageWithStack } from './log_message.js';
// MAINTENANCE_TODO: Add warn expectations
export type Expectation = 'pass' | 'skip' | 'fail';

export type Status = 'running' | 'warn' | Expectation;
export type Status = 'notrun' | 'running' | 'warn' | Expectation;

export interface TestCaseResult {
status: Status;

@@ -3,27 +3,43 @@ import { globalTestConfig } from '../../framework/test_config.js';
import { now, assert } from '../../util/util.js';

import { LogMessageWithStack } from './log_message.js';
import { Expectation, LiveTestCaseResult } from './result.js';
import { Expectation, LiveTestCaseResult, Status } from './result.js';

enum LogSeverity {
Pass = 0,
NotRun = 0,
Skip = 1,
Warn = 2,
ExpectFailed = 3,
ValidationFailed = 4,
ThrewException = 5,
Pass = 2,
Warn = 3,
ExpectFailed = 4,
ValidationFailed = 5,
ThrewException = 6,
}

const kMaxLogStacks = 2;
const kMinSeverityForStack = LogSeverity.Warn;

function logSeverityToString(status: LogSeverity): Status {
switch (status) {
case LogSeverity.NotRun:
return 'notrun';
case LogSeverity.Pass:
return 'pass';
case LogSeverity.Skip:
return 'skip';
case LogSeverity.Warn:
return 'warn';
default:
return 'fail'; // Everything else is an error
}
}

/** Holds onto a LiveTestCaseResult owned by the Logger, and writes the results into it. */
export class TestCaseRecorder {
readonly result: LiveTestCaseResult;
public nonskippedSubcaseCount: number = 0;
private inSubCase: boolean = false;
private subCaseStatus = LogSeverity.Pass;
private finalCaseStatus = LogSeverity.Pass;
private subCaseStatus = LogSeverity.NotRun;
private finalCaseStatus = LogSeverity.NotRun;
private hideStacksBelowSeverity = kMinSeverityForStack;
private startTime = -1;
private logs: LogMessageWithStack[] = [];
@@ -56,20 +72,13 @@ export class TestCaseRecorder {
}

// Convert numeric enum back to string (but expose 'exception' as 'fail')
this.result.status =
this.finalCaseStatus === LogSeverity.Pass
? 'pass'
: this.finalCaseStatus === LogSeverity.Skip
? 'skip'
: this.finalCaseStatus === LogSeverity.Warn
? 'warn'
: 'fail'; // Everything else is an error
this.result.status = logSeverityToString(this.finalCaseStatus);

this.result.logs = this.logs;
}

beginSubCase() {
this.subCaseStatus = LogSeverity.Pass;
this.subCaseStatus = LogSeverity.NotRun;
this.inSubCase = true;
}

@@ -87,9 +96,7 @@ export class TestCaseRecorder {
}
} finally {
this.inSubCase = false;
if (this.subCaseStatus > this.finalCaseStatus) {
this.finalCaseStatus = this.subCaseStatus;
}
this.finalCaseStatus = Math.max(this.finalCaseStatus, this.subCaseStatus);
}
}

@@ -103,7 +110,8 @@ export class TestCaseRecorder {
}

info(ex: Error): void {
this.logImpl(LogSeverity.Pass, 'INFO', ex);
// We need this to use the lowest LogSeverity so it doesn't override the current severity for this test case.
this.logImpl(LogSeverity.NotRun, 'INFO', ex);
}

skipped(ex: SkipTestCase): void {
@@ -122,6 +130,14 @@ export class TestCaseRecorder {
this.logImpl(LogSeverity.ValidationFailed, 'VALIDATION FAILED', ex);
}

passed(): void {
if (this.inSubCase) {
this.subCaseStatus = Math.max(this.subCaseStatus, LogSeverity.Pass);
} else {
this.finalCaseStatus = Math.max(this.finalCaseStatus, LogSeverity.Pass);
}
}

threw(ex: unknown): void {
if (ex instanceof SkipTestCase) {
this.skipped(ex);
@@ -137,9 +153,9 @@ export class TestCaseRecorder {

// Final case status should be the "worst" of all log entries.
if (this.inSubCase) {
if (level > this.subCaseStatus) this.subCaseStatus = level;
this.subCaseStatus = Math.max(this.subCaseStatus, level);
} else {
if (level > this.finalCaseStatus) this.finalCaseStatus = level;
this.finalCaseStatus = Math.max(this.finalCaseStatus, level);
}

// setFirstLineOnly for all logs except `kMaxLogStacks` stacks at the highest severity

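As an aside, with statuses encoded as ordered numeric severities, "worst result wins" reduces to `Math.max`. A small illustration using the values from the enum above:

```
// Severity lattice from the new enum:
// NotRun(0) < Skip(1) < Pass(2) < Warn(3) < ExpectFailed(4) < ValidationFailed(5) < ThrewException(6)
let finalCaseStatus = 0; // NotRun
finalCaseStatus = Math.max(finalCaseStatus, 2); // one subcase passed -> Pass
finalCaseStatus = Math.max(finalCaseStatus, 3); // another warned     -> Warn
console.log(finalCaseStatus); // 3, reported as 'warn' by logSeverityToString
```
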
@@ -69,7 +69,7 @@ export type FlattenUnionOfInterfaces<T> = {
};

/* eslint-disable-next-line @typescript-eslint/no-unused-vars */
function typeAssert<T extends 'pass'>() {}
function typeAssert<_ extends 'pass'>() {}
{
type Test<T, U> = [T] extends [U]
? [U] extends [T]

@@ -80,7 +80,8 @@ export function comparePublicParamsPaths(a: TestParams, b: TestParams): Ordering
const commonKeys = new Set(aKeys.filter(k => k in b));

for (const k of commonKeys) {
if (!objectEquals(a[k], b[k])) {
// Treat +/-0.0 as different query by distinguishing them in objectEquals
if (!objectEquals(a[k], b[k], true)) {
return Ordering.Unordered;
}
}

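As an aside, JavaScript's `===` cannot distinguish the two zeros, which is why the new `objectEquals` flag is needed; for illustration:

```
console.log(0 === -0);         // true:  === conflates the two zeros
console.log(Object.is(0, -0)); // false: Object.is tells them apart
// So treating +0.0 and -0.0 as different query params requires an
// Object.is-style comparison, which the added objectEquals flag opts into.
```
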
@@ -33,7 +33,7 @@ const fromStringMagicValue = new Map<string, unknown>([
[jsNegativeZeroMagicValue, -0],
]);

function stringifyFilter(k: string, v: unknown): unknown {
function stringifyFilter(_k: string, v: unknown): unknown {
// Make sure no one actually uses a magic value as a parameter.
if (typeof v === 'string') {
assert(
@@ -93,7 +93,7 @@ export function stringifyParamValueUniquely(value: JSONWithUndefined): string {

// 'any' is part of the JSON.parse reviver interface, so cannot be avoided.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function parseParamValueReviver(k: string, v: any): any {
function parseParamValueReviver(_k: string, v: any): any {
if (fromStringMagicValue.has(v)) {
return fromStringMagicValue.get(v);
}

@@ -68,8 +68,8 @@ export class TestQueryMultiFile {
* Immutable (makes copies of constructor args).
*/
export class TestQueryMultiTest extends TestQueryMultiFile {
readonly level: TestQueryLevel = 2;
readonly isMultiFile: false = false;
override readonly level: TestQueryLevel = 2;
override readonly isMultiFile = false as const;
readonly isMultiTest: boolean = true;
readonly testPathParts: readonly string[];

@@ -79,11 +79,11 @@ export class TestQueryMultiTest extends TestQueryMultiFile {
this.testPathParts = [...test];
}

get depthInLevel() {
override get depthInLevel() {
return this.testPathParts.length;
}

protected toStringHelper(): string[] {
protected override toStringHelper(): string[] {
return [
this.suite,
this.filePathParts.join(kPathSeparator),
@@ -99,8 +99,8 @@ export class TestQueryMultiTest extends TestQueryMultiFile {
* (which aren't normally supposed to change; they're marked readonly in TestParams).
*/
export class TestQueryMultiCase extends TestQueryMultiTest {
readonly level: TestQueryLevel = 3;
readonly isMultiTest: false = false;
override readonly level: TestQueryLevel = 3;
override readonly isMultiTest = false as const;
readonly isMultiCase: boolean = true;
readonly params: TestParams;

@@ -110,11 +110,11 @@ export class TestQueryMultiCase extends TestQueryMultiTest {
this.params = { ...params };
}

get depthInLevel() {
override get depthInLevel() {
return Object.keys(this.params).length;
}

protected toStringHelper(): string[] {
protected override toStringHelper(): string[] {
return [
this.suite,
this.filePathParts.join(kPathSeparator),
@@ -130,14 +130,14 @@ export class TestQueryMultiCase extends TestQueryMultiTest {
* Immutable (makes copies of constructor args).
*/
export class TestQuerySingleCase extends TestQueryMultiCase {
readonly level: TestQueryLevel = 4;
readonly isMultiCase: false = false;
override readonly level: TestQueryLevel = 4;
override readonly isMultiCase = false as const;

get depthInLevel() {
override get depthInLevel() {
return 0;
}

protected toStringHelper(): string[] {
protected override toStringHelper(): string[] {
return [
this.suite,
this.filePathParts.join(kPathSeparator),

@@ -19,13 +19,19 @@ import { Expectation } from '../internal/logging/result.js';
import { TestCaseRecorder } from '../internal/logging/test_case_recorder.js';
import { extractPublicParams, Merged, mergeParams } from '../internal/params_utils.js';
import { compareQueries, Ordering } from '../internal/query/compare.js';
import { TestQuerySingleCase, TestQueryWithExpectation } from '../internal/query/query.js';
import {
TestQueryMultiFile,
TestQueryMultiTest,
TestQuerySingleCase,
TestQueryWithExpectation,
} from '../internal/query/query.js';
import { kPathSeparator } from '../internal/query/separators.js';
import {
stringifyPublicParams,
stringifyPublicParamsUniquely,
} from '../internal/query/stringify_params.js';
import { validQueryPart } from '../internal/query/validQueryPart.js';
import { DeepReadonly } from '../util/types.js';
import { assert, unreachable } from '../util/util.js';

import { logToWebsocket } from './websocket_logger.js';
@@ -56,13 +62,13 @@ export interface TestGroupBuilder<F extends Fixture> {
test(name: string): TestBuilderWithName<F>;
}
export function makeTestGroup<F extends Fixture>(fixture: FixtureClass<F>): TestGroupBuilder<F> {
return new TestGroup((fixture as unknown) as FixtureClass);
return new TestGroup(fixture as unknown as FixtureClass);
}

// Interfaces for running tests
export interface IterableTestGroup {
iterate(): Iterable<IterableTest>;
validate(): void;
validate(fileQuery: TestQueryMultiFile): void;
/** Returns the file-relative test paths of tests which have >0 cases. */
collectNonEmptyTests(): { testPath: string[] }[];
}
@@ -79,12 +85,17 @@ export function makeTestGroupForUnitTesting<F extends Fixture>(
return new TestGroup(fixture);
}

/** The maximum allowed length of a test query string. Checked by tools/validate. */
export const kQueryMaxLength = 375;

/** Parameter name for batch number (see also TestBuilder.batch). */
const kBatchParamName = 'batch__';

type TestFn<F extends Fixture, P extends {}> = (t: F & { params: P }) => Promise<void> | void;
type TestFn<F extends Fixture, P extends {}> = (
t: F & { params: DeepReadonly<P> }
) => Promise<void> | void;
type BeforeAllSubcasesFn<S extends SubcaseBatchState, P extends {}> = (
s: S & { params: P }
s: S & { params: DeepReadonly<P> }
) => Promise<void> | void;

export class TestGroup<F extends Fixture> implements TestGroupBuilder<F> {
@@ -124,12 +135,17 @@ export class TestGroup<F extends Fixture> implements TestGroupBuilder<F> {

const test = new TestBuilder(parts, this.fixture, testCreationStack);
this.tests.push(test);
return (test as unknown) as TestBuilderWithName<F>;
return test as unknown as TestBuilderWithName<F>;
}

validate(): void {
validate(fileQuery: TestQueryMultiFile): void {
for (const test of this.tests) {
test.validate();
const testQuery = new TestQueryMultiTest(
fileQuery.suite,
fileQuery.filePathParts,
test.testPath
);
test.validate(testQuery);
}
}

@@ -247,7 +263,7 @@ class TestBuilder<S extends SubcaseBatchState, F extends Fixture> {
return this;
}

specURL(url: string): this {
specURL(_url: string): this {
return this;
}

@@ -284,7 +300,7 @@ class TestBuilder<S extends SubcaseBatchState, F extends Fixture> {
}

/** Perform various validation/"lint" checks. */
validate(): void {
validate(testQuery: TestQueryMultiTest): void {
const testPathString = this.testPath.join(kPathSeparator);
assert(this.testFn !== undefined, () => {
let s = `Test is missing .fn(): ${testPathString}`;
@@ -294,12 +310,30 @@ class TestBuilder<S extends SubcaseBatchState, F extends Fixture> {
return s;
});

assert(
testQuery.toString().length <= kQueryMaxLength,
() =>
`Test query ${testQuery} is too long. Max length is ${kQueryMaxLength} characters. Please shorten names or reduce parameters.`
);

if (this.testCases === undefined) {
return;
}

const seen = new Set<string>();
for (const [caseParams, subcases] of builderIterateCasesWithSubcases(this.testCases, null)) {
const caseQuery = new TestQuerySingleCase(
testQuery.suite,
testQuery.filePathParts,
testQuery.testPathParts,
caseParams
).toString();
assert(
caseQuery.length <= kQueryMaxLength,
() =>
`Case query ${caseQuery} is too long. Max length is ${kQueryMaxLength} characters. Please shorten names or reduce parameters.`
);

for (const subcaseParams of subcases ?? [{}]) {
const params = mergeParams(caseParams, subcaseParams);
assert(this.batchSize === 0 || !(kBatchParamName in params));
@@ -316,7 +350,7 @@ class TestBuilder<S extends SubcaseBatchState, F extends Fixture> {
const testcaseStringUnique = stringifyPublicParamsUniquely(params);
assert(
!seen.has(testcaseStringUnique),
`Duplicate public test case params for test ${testPathString}: ${testcaseString}`
`Duplicate public test case+subcase params for test ${testPathString}: ${testcaseString}`
);
seen.add(testcaseStringUnique);
}
@@ -491,6 +525,7 @@ class RunCaseSpecific implements RunCase {
try {
await inst.init();
await this.fn(inst as Fixture & { params: {} });
rec.passed();
} finally {
// Runs as long as constructor succeeded, even if initialization or the test failed.
await inst.finalize();

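As an aside, the added asserts compare each full query string against `kQueryMaxLength` (375). A stripped-down sketch of the length check (a simplification, not the exact code):

```
const kQueryMaxLength = 375;

// Hypothetical distillation of the added asserts:
function checkQueryLength(query: string): void {
  if (query.length > kQueryMaxLength) {
    throw new Error(
      `Test query ${query} is too long. Max length is ${kQueryMaxLength} characters. ` +
        `Please shorten names or reduce parameters.`
    );
  }
}

checkQueryLength('webgpu:shader,execution,expression,binary,af_matrix_addition:matrix:*'); // ok
```
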
@@ -11,7 +11,7 @@ import { LiveTestCaseResult } from '../internal/logging/result.js';
import { parseQuery } from '../internal/query/parseQuery.js';
import { parseExpectationsForTestQuery } from '../internal/query/query.js';
import { Colors } from '../util/colors.js';
import { setGPUProvider } from '../util/navigator_gpu.js';
import { setDefaultRequestAdapterOptions, setGPUProvider } from '../util/navigator_gpu.js';
import { assert, unreachable } from '../util/util.js';

import sys from './helper/sys.js';
@@ -22,6 +22,7 @@ function usage(rc: number): never {
tools/run_${sys.type} 'unittests:*' 'webgpu:buffers,*'
Options:
--colors Enable ANSI colors in output.
--compat Runs tests in compatibility mode.
--coverage Emit coverage data.
--verbose Print result/log of every test as it runs.
--list Print all testcase names that match the given query and exit.
@@ -99,6 +100,8 @@ for (let i = 0; i < sys.args.length; ++i) {
quiet = true;
} else if (a === '--unroll-const-eval-loops') {
globalTestConfig.unrollConstEvalLoops = true;
} else if (a === '--compat') {
globalTestConfig.compatibility = true;
} else {
console.log('unrecognized flag: ', a);
usage(1);
@@ -110,6 +113,11 @@ for (let i = 0; i < sys.args.length; ++i) {

let codeCoverage: CodeCoverageProvider | undefined = undefined;

if (globalTestConfig.compatibility) {
// MAINTENANCE_TODO: remove the cast once compatibilityMode is officially added
setDefaultRequestAdapterOptions({ compatibilityMode: true } as GPURequestAdapterOptions);
}

if (gpuProviderModule) {
setGPUProvider(() => gpuProviderModule!.create(gpuProviderFlags));
if (emitCoverage) {
@@ -127,8 +135,8 @@ Did you remember to build with code coverage instrumentation enabled?`
if (dataPath !== undefined) {
dataCache.setStore({
load: (path: string) => {
return new Promise<string>((resolve, reject) => {
fs.readFile(`${dataPath}/${path}`, 'utf8', (err, data) => {
return new Promise<Uint8Array>((resolve, reject) => {
fs.readFile(`${dataPath}/${path}`, (err, data) => {
if (err !== null) {
reject(err.message);
} else {

@@ -100,7 +100,7 @@ function getOptionsInfoFromSearchString<Type extends CTSOptions>(
const parser = info.parser || optionEnabled;
optionValues[optionName] = parser(camelCaseToSnakeCase(optionName), searchParams);
}
return (optionValues as unknown) as Type;
return optionValues as unknown as Type;
}

/**

@@ -14,7 +14,7 @@ import { parseQuery } from '../internal/query/parseQuery.js';
|
||||
import { TestQueryWithExpectation } from '../internal/query/query.js';
|
||||
import { TestTreeLeaf } from '../internal/tree.js';
|
||||
import { Colors } from '../util/colors.js';
|
||||
import { setGPUProvider } from '../util/navigator_gpu.js';
|
||||
import { setDefaultRequestAdapterOptions, setGPUProvider } from '../util/navigator_gpu.js';
|
||||
|
||||
import sys from './helper/sys.js';
|
||||
|
||||
@@ -23,6 +23,7 @@ function usage(rc: number): never {
|
||||
tools/run_${sys.type} [OPTIONS...]
|
||||
Options:
|
||||
--colors Enable ANSI colors in output.
|
||||
--compat Run tests in compatibility mode.
|
||||
--coverage Add coverage data to each result.
|
||||
--data Path to the data cache directory.
|
||||
--verbose Print result/log of every test as it runs.
|
||||
@@ -84,6 +85,8 @@ for (let i = 0; i < sys.args.length; ++i) {
|
||||
if (a.startsWith('-')) {
|
||||
if (a === '--colors') {
|
||||
Colors.enabled = true;
|
||||
} else if (a === '--compat') {
|
||||
globalTestConfig.compatibility = true;
|
||||
} else if (a === '--coverage') {
|
||||
emitCoverage = true;
|
||||
} else if (a === '--data') {
|
||||
@@ -107,6 +110,11 @@ for (let i = 0; i < sys.args.length; ++i) {
|
||||
|
||||
let codeCoverage: CodeCoverageProvider | undefined = undefined;
|
||||
|
||||
if (globalTestConfig.compatibility) {
|
||||
// MAINTENANCE_TODO: remove the cast once compatibilityMode is officially added
|
||||
setDefaultRequestAdapterOptions({ compatibilityMode: true } as GPURequestAdapterOptions);
|
||||
}
|
||||
|
||||
if (gpuProviderModule) {
|
||||
setGPUProvider(() => gpuProviderModule!.create(gpuProviderFlags));
|
||||
|
||||
@@ -125,8 +133,8 @@ Did you remember to build with code coverage instrumentation enabled?`
|
||||
if (dataPath !== undefined) {
|
||||
dataCache.setStore({
|
||||
load: (path: string) => {
|
||||
return new Promise<string>((resolve, reject) => {
|
||||
fs.readFile(`${dataPath}/${path}`, 'utf8', (err, data) => {
|
||||
return new Promise<Uint8Array>((resolve, reject) => {
|
||||
fs.readFile(`${dataPath}/${path}`, (err, data) => {
|
||||
if (err !== null) {
|
||||
reject(err.message);
|
||||
} else {
|
||||
|
||||
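The hunks above switch each runtime's data cache store from string loads to binary Uint8Array loads. A minimal sketch of the same store shape, using Node's promise-based fs API instead of the callback form shown in the diff (the `dataCache` and `dataPath` names come from the diff; the fs/promises spelling is an assumption for brevity, not what the tree uses):

import * as fsp from 'fs/promises';

dataCache.setStore({
  // Resolve each cache path to raw bytes; a rejected promise carries the
  // error, mirroring the reject(err.message) path in the hunks above.
  load: async (path: string) => new Uint8Array(await fsp.readFile(`${dataPath}/${path}`)),
});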
@@ -11,7 +11,7 @@ import { parseQuery } from '../internal/query/parseQuery.js';
import { TestQueryLevel } from '../internal/query/query.js';
import { TestTreeNode, TestSubtree, TestTreeLeaf, TestTree } from '../internal/tree.js';
import { setDefaultRequestAdapterOptions } from '../util/navigator_gpu.js';
import { assert, ErrorWithExtra, unreachable } from '../util/util.js';
import { ErrorWithExtra, unreachable } from '../util/util.js';

import {
kCTSOptionsInfo,
@@ -84,7 +84,7 @@ dataCache.setStore({
if (!response.ok) {
return Promise.reject(response.statusText);
}
return await response.text();
return new Uint8Array(await response.arrayBuffer());
},
});

@@ -427,11 +427,20 @@ function makeTreeNodeHeaderHTML(
.attr('alt', runtext)
.attr('title', runtext)
.on('click', async () => {
if (runDepth > 0) {
showInfo('tests are already running');
return;
}
showInfo('');
console.log(`Starting run for ${n.query}`);
// turn off all run buttons
$('#resultsVis').addClass('disable-run');
const startTime = performance.now();
await runSubtree();
const dt = performance.now() - startTime;
const dtMinutes = dt / 1000 / 60;
// turn on all run buttons
$('#resultsVis').removeClass('disable-run');
console.log(`Finished run: ${dt.toFixed(1)} ms = ${dtMinutes.toFixed(1)} min`);
})
.appendTo(header);
@@ -528,7 +537,7 @@ function prepareParams(params: Record<string, ParamValue>): string {

// This is just a cast in one place.
export function optionsToRecord(options: CTSOptions) {
return (options as unknown) as Record<string, boolean | string>;
return options as unknown as Record<string, boolean | string>;
}

/**
@@ -543,6 +552,14 @@ function createSearchQuery(queries: string[], params?: string) {
return `?${params}${params ? '&' : ''}${queries.map(q => 'q=' + q).join('&')}`;
}

/**
* Show an info message on the page.
* @param msg Message to show
*/
function showInfo(msg: string) {
$('#info')[0].textContent = msg;
}

void (async () => {
const loader = new DefaultTestFileLoader();

@@ -609,26 +626,37 @@ void (async () => {
};
addOptionsToPage(options, kStandaloneOptionsInfos);

assert(qs.length === 1, 'currently, there must be exactly one ?q=');
const rootQuery = parseQuery(qs[0]);
if (qs.length !== 1) {
showInfo('currently, there must be exactly one ?q=');
return;
}

let rootQuery;
try {
rootQuery = parseQuery(qs[0]);
} catch (e) {
showInfo((e as Error).toString());
return;
}

if (rootQuery.level > lastQueryLevelToExpand) {
lastQueryLevelToExpand = rootQuery.level;
}
loader.addEventListener('import', ev => {
$('#info')[0].textContent = `loading: ${ev.data.url}`;
showInfo(`loading: ${ev.data.url}`);
});
loader.addEventListener('imported', ev => {
$('#info')[0].textContent = `imported: ${ev.data.url}`;
showInfo(`imported: ${ev.data.url}`);
});
loader.addEventListener('finish', () => {
$('#info')[0].textContent = '';
showInfo('');
});

let tree;
try {
tree = await loader.loadTree(rootQuery);
} catch (err) {
$('#info')[0].textContent = (err as Error).toString();
showInfo((err as Error).toString());
return;
}

@@ -1,4 +1,6 @@
{
"parser": "@typescript-eslint/parser",
"parserOptions": { "project": "./tsconfig.json" },
"rules": {
"no-console": "off",
"no-process-exit": "off",

@@ -7,7 +7,7 @@ import * as path from 'path';

import { loadMetadataForSuite } from '../framework/metadata.js';
import { SpecFile } from '../internal/file_loader.js';
import { TestQueryMultiCase } from '../internal/query/query.js';
import { TestQueryMultiCase, TestQueryMultiFile } from '../internal/query/query.js';
import { validQueryPart } from '../internal/query/validQueryPart.js';
import { TestSuiteListingEntry, TestSuiteListing } from '../internal/test_suite_listing.js';
import { assert, unreachable } from '../util/util.js';
@@ -83,6 +83,8 @@ export async function crawl(suiteDir: string, validate: boolean): Promise<TestSu
assert(mod.description !== undefined, 'Test spec file missing description: ' + filename);
assert(mod.g !== undefined, 'Test spec file missing TestGroup definition: ' + filename);

mod.g.validate(new TestQueryMultiFile(suite, pathSegments));

for (const { testPath } of mod.g.collectNonEmptyTests()) {
const testQuery = new TestQueryMultiCase(suite, pathSegments, testPath, {}).toString();
if (validateTimingsEntries) {

@@ -14,6 +14,19 @@ import { makeListing } from './crawl.js';
// Make sure that makeListing doesn't cache imported spec files. See crawl().
process.env.STANDALONE_DEV_SERVER = '1';

function usage(rc: number): void {
console.error(`\
Usage:
tools/dev_server
tools/dev_server 0.0.0.0
npm start
npm start 0.0.0.0

By default, serves on localhost only. If the argument 0.0.0.0 is passed, serves on all interfaces.
`);
process.exit(rc);
}

const srcDir = path.resolve(__dirname, '../../');

// Import the project's babel.config.js. We'll use the same config for the runtime compiler.
@@ -92,7 +105,7 @@ watcher.on('change', dirtyCompileCache);
const app = express();

// Send Chrome Origin Trial tokens
app.use((req, res, next) => {
app.use((_req, res, next) => {
res.header('Origin-Trial', [
// Token for http://localhost:8080
'AvyDIV+RJoYs8fn3W6kIrBhWw0te0klraoz04mw/nPb8VTus3w5HCdy+vXqsSzomIH745CT6B5j1naHgWqt/tw8AAABJeyJvcmlnaW4iOiJodHRwOi8vbG9jYWxob3N0OjgwODAiLCJmZWF0dXJlIjoiV2ViR1BVIiwiZXhwaXJ5IjoxNjYzNzE4Mzk5fQ==',
@@ -110,7 +123,7 @@ app.use('/out-wpt', express.static(path.resolve(srcDir, '../out-wpt')));
app.use('/docs/tsdoc', express.static(path.resolve(srcDir, '../docs/tsdoc')));

// Serve a suite's listing.js file by crawling the filesystem for all tests.
app.get('/out/:suite/listing.js', async (req, res, next) => {
app.get('/out/:suite([a-zA-Z0-9_-]+)/listing.js', async (req, res, next) => {
const suite = req.params['suite'];

if (listingCache.has(suite)) {
@@ -162,28 +175,40 @@ app.get('/out/**/*.js', async (req, res, next) => {
}
});

const host = '0.0.0.0';
const port = 8080;
// Find an available port, starting at 8080.
portfinder.getPort({ host, port }, (err, port) => {
if (err) {
throw err;
// Serve everything else (not .js) as static, and directories as directory listings.
app.use('/out', serveIndex(path.resolve(srcDir, '../src')));
app.use('/out', express.static(path.resolve(srcDir, '../src')));

void (async () => {
let host = '127.0.0.1';
if (process.argv.length >= 3) {
if (process.argv.length !== 3) usage(1);
if (process.argv[2] === '0.0.0.0') {
host = '0.0.0.0';
} else {
usage(1);
}
}

console.log(`Finding an available port on ${host}...`);
const kPortFinderStart = 8080;
const port = await portfinder.getPortPromise({ host, port: kPortFinderStart });

watcher.on('ready', () => {
// Listen on the available port.
app.listen(port, host, () => {
console.log('Standalone test runner running at:');
for (const iface of Object.values(os.networkInterfaces())) {
for (const details of iface || []) {
if (details.family === 'IPv4') {
console.log(` http://${details.address}:${port}/standalone/`);
if (host === '0.0.0.0') {
for (const iface of Object.values(os.networkInterfaces())) {
for (const details of iface || []) {
if (details.family === 'IPv4') {
console.log(` http://${details.address}:${port}/standalone/`);
}
}
}
} else {
console.log(` http://${host}:${port}/standalone/`);
}
});
});
});

// Serve everything else (not .js) as static, and directories as directory listings.
app.use('/out', serveIndex(path.resolve(srcDir, '../src')));
app.use('/out', express.static(path.resolve(srcDir, '../src')));
})();

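The dev server now binds localhost by default and scans for a free port with portfinder's promise API, as the hunk above shows. A minimal sketch of that pattern in isolation (host and starting port taken from the diff; the log line is illustrative):

import * as portfinder from 'portfinder';

void (async () => {
  // Scan upward from 8080 for the first free port on this host.
  const port = await portfinder.getPortPromise({ host: '127.0.0.1', port: 8080 });
  console.log(`would serve on http://127.0.0.1:${port}/standalone/`);
})();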
@@ -14,33 +14,68 @@ DataCache will load this instead of building the expensive data at CTS runtime.
Options:
--help Print this message and exit.
--list Print the list of output files without writing them.
--nth i/n Only process every file where (file_index % n == i)
--validate Check that cache should build (Tests for collisions).
--verbose Print each action taken.
`);
process.exit(rc);
}

let mode: 'emit' | 'list' = 'emit';
let mode: 'emit' | 'list' | 'validate' = 'emit';
let nth = { i: 0, n: 1 };
let verbose = false;

const nonFlagsArgs: string[] = [];
for (const a of process.argv) {
if (a.startsWith('-')) {
switch (a) {
case '--list':

for (let i = 0; i < process.argv.length; i++) {
const arg = process.argv[i];
if (arg.startsWith('-')) {
switch (arg) {
case '--list': {
mode = 'list';
break;
case '--help':
}
case '--help': {
usage(0);
break;
case '--verbose':
}
case '--verbose': {
verbose = true;
break;
default:
console.log('unrecognized flag: ', a);
}
case '--validate': {
mode = 'validate';
break;
}
case '--nth': {
const err = () => {
console.error(
`--nth requires a value of the form 'i/n', where i and n are positive integers and i < n`
);
process.exit(1);
};
i++;
if (i >= process.argv.length) {
err();
}
const value = process.argv[i];
const parts = value.split('/');
if (parts.length !== 2) {
err();
}
nth = { i: parseInt(parts[0]), n: parseInt(parts[1]) };
if (nth.i < 0 || nth.n < 1 || nth.i > nth.n) {
err();
}
break;
}
default: {
console.log('unrecognized flag: ', arg);
usage(1);
}
}
} else {
nonFlagsArgs.push(a);
nonFlagsArgs.push(arg);
}
}

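The `--nth i/n` flag shards the cacheable files across jobs by index, exactly as the option help above describes (file_index % n == i). A minimal sketch of the selection rule, with a hypothetical file list:

const nth = { i: 1, n: 4 }; // as parsed from `--nth 1/4`
const files = ['a.spec.ts', 'b.spec.ts', 'c.spec.ts', 'd.spec.ts', 'e.spec.ts']; // hypothetical
let fileIndex = 0;
for (const file of files) {
  // Process only every n-th file, offset by i; here that selects b.spec.ts.
  if (fileIndex++ % nth.n === nth.i) {
    console.log('would emit cache for', file);
  }
}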
@@ -52,8 +87,8 @@ const outRootDir = nonFlagsArgs[2];

dataCache.setStore({
load: (path: string) => {
return new Promise<string>((resolve, reject) => {
fs.readFile(`data/${path}`, 'utf8', (err, data) => {
return new Promise<Uint8Array>((resolve, reject) => {
fs.readFile(`data/${path}`, (err, data) => {
if (err !== null) {
reject(err.message);
} else {
@@ -105,34 +140,38 @@ async function build(suiteDir: string) {
}

// Crawl files and convert paths to be POSIX-style, relative to suiteDir.
const filesToEnumerate = (await crawlFilesRecursively(suiteDir)).sort();
let filesToEnumerate = (await crawlFilesRecursively(suiteDir)).sort();

// Filter out non-spec files
filesToEnumerate = filesToEnumerate.filter(f => f.endsWith(specFileSuffix));

const cacheablePathToTS = new Map<string, string>();

let fileIndex = 0;
for (const file of filesToEnumerate) {
if (file.endsWith(specFileSuffix)) {
const pathWithoutExtension = file.substring(0, file.length - specFileSuffix.length);
const mod = await import(`../../../${pathWithoutExtension}.spec.js`);
if (mod.d?.serialize !== undefined) {
const cacheable = mod.d as Cacheable<unknown>;
const pathWithoutExtension = file.substring(0, file.length - specFileSuffix.length);
const mod = await import(`../../../${pathWithoutExtension}.spec.js`);
if (mod.d?.serialize !== undefined) {
const cacheable = mod.d as Cacheable<unknown>;

{
// Check for collisions
const existing = cacheablePathToTS.get(cacheable.path);
if (existing !== undefined) {
console.error(
`error: Cacheable '${cacheable.path}' is emitted by both:
{
// Check for collisions
const existing = cacheablePathToTS.get(cacheable.path);
if (existing !== undefined) {
console.error(
`error: Cacheable '${cacheable.path}' is emitted by both:
'${existing}'
and
'${file}'`
);
process.exit(1);
}
cacheablePathToTS.set(cacheable.path, file);
);
process.exit(1);
}
cacheablePathToTS.set(cacheable.path, file);
}

const outPath = `${outRootDir}/data/${cacheable.path}`;
const outPath = `${outRootDir}/data/${cacheable.path}`;

if (fileIndex++ % nth.n === nth.i) {
switch (mode) {
case 'emit': {
if (verbose) {
@@ -141,13 +180,17 @@ and
const data = await cacheable.build();
const serialized = cacheable.serialize(data);
fs.mkdirSync(path.dirname(outPath), { recursive: true });
fs.writeFileSync(outPath, serialized);
fs.writeFileSync(outPath, serialized, 'binary');
break;
}
case 'list': {
console.log(outPath);
break;
}
case 'validate': {
// Only check currently performed is the collision detection above
break;
}
}
}
}

@@ -50,7 +50,7 @@ How to generate TIMING_LOG_FILES files:
}

const kHeader = `{
"_comment": "SEMI AUTO-GENERATED: Please read tools/merge_listing_times.",
"_comment": "SEMI AUTO-GENERATED: Please read docs/adding_timing_metadata.md.",
`;
const kFooter = `\
"_end": ""

@@ -1,19 +0,0 @@
import { DefaultTestFileLoader } from '../internal/file_loader.js';
import { parseQuery } from '../internal/query/parseQuery.js';
import { assert } from '../util/util.js';

void (async () => {
for (const suite of ['unittests', 'webgpu']) {
const loader = new DefaultTestFileLoader();
const filterQuery = parseQuery(`${suite}:*`);
const testcases = await loader.loadCases(filterQuery);
for (const testcase of testcases) {
const name = testcase.query.toString();
const maxLength = 375;
assert(
name.length <= maxLength,
`Testcase ${name} is too long. Max length is ${maxLength} characters. Please shorten names or reduce parameters.`
);
}
}
})();
@@ -11,6 +11,7 @@ For each suite in SUITE_DIRS, validate some properties about the file:
- Has a test function (or is marked unimplemented)
- Has no duplicate cases
- Configures batching correctly, if used
- That each case query is not too long

Example:
tools/validate src/unittests/ src/webgpu/

@@ -3,15 +3,15 @@ import { ResolveType, ZipKeysWithValues } from './types.js';
export type valueof<K> = K[keyof K];

export function keysOf<T extends string>(obj: { [k in T]: unknown }): readonly T[] {
return (Object.keys(obj) as unknown[]) as T[];
return Object.keys(obj) as unknown[] as T[];
}

export function numericKeysOf<T>(obj: object): readonly T[] {
return (Object.keys(obj).map(n => Number(n)) as unknown[]) as T[];
return Object.keys(obj).map(n => Number(n)) as unknown[] as T[];
}

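For reference, a small hedged example of keysOf on a const-style table (the names here are hypothetical):

const kFilterModes = { nearest: 0, linear: 1 } as const;
const modes = keysOf(kFilterModes); // readonly ('nearest' | 'linear')[]
console.log(modes); // ['nearest', 'linear']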
/**
* @returns a new Record from @p objects, using the string returned by Object.toString() as the keys
* @returns a new Record from `objects`, using the string returned by Object.toString() as the keys
* and the objects as the values.
*/
export function objectsToRecord<T extends Object>(objects: readonly T[]): Record<string, T> {
@@ -32,7 +32,7 @@ export function objectsToRecord<T extends Object>(objects: readonly T[]): Record
export function makeTable<
Members extends readonly string[],
Defaults extends readonly unknown[],
Table extends { readonly [k: string]: readonly unknown[] }
Table extends { readonly [k: string]: readonly unknown[] },
>(
members: Members,
defaults: Defaults,
@@ -51,3 +51,79 @@ export function makeTable<
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
return result as any;
}

/**
* Creates an info lookup object from a more nicely-formatted table.
*
* Note: Using `as const` on the arguments to this function is necessary to infer the correct type.
*
* Example:
*
* ```
* const t = makeTableWithDefaults(
* { c: 'default' }, // columnRenames
* ['a', 'default', 'd'], // columnsKept
* ['a', 'b', 'c', 'd'], // columns
* [123, 456, 789, 1011], // defaults
* { // table
* foo: [1, 2, 3, 4],
* bar: [5, , , 8],
* moo: [ , 9,10, ],
* }
* );
*
* // t = {
* // foo: { a: 1, default: 3, d: 4 },
* // bar: { a: 5, default: 789, d: 8 },
* // moo: { a: 123, default: 10, d: 1011 },
* // };
* ```
*
* MAINTENANCE_TODO: `ZipKeysWithValues<Members, Table[k], Defaults>` is incorrect
* because Members no longer maps to Table[k]. It's not clear if this is even possible to fix
* because it requires mapping, not zipping. Maybe passing in a index mapping
* would fix it (which is gross) but if you have columnsKept as [0, 2, 3] then maybe it would
* be possible to generate the correct type? I don't think we can generate the map at compile time
* so we'd have to hand code it. Other ideas, don't generate kLimitsInfoCore and kLimitsInfoCompat
* where they are keys of infos. Instead, generate kLimitsInfoCoreDefaults, kLimitsInfoCoreMaximums,
* kLimitsInfoCoreClasses where each is just a `{[k: string]: type}`. Could zip those after or,
* maybe that suggests passing in the hard coded indices would work.
*
* @param columnRenames the name of the column in the table that will be assigned to the 'default' property of each entry.
* @param columnsKept the names of properties you want in the generated lookup table. This must be a subset of the columns of the tables except for the name 'default' which is looked from the previous argument.
* @param columns the names of the columns of the name
* @param defaults the default value by column for any element in a row of the table that is undefined
* @param table named table rows.
*/
export function makeTableRenameAndFilter<
Members extends readonly string[],
DataMembers extends readonly string[],
Defaults extends readonly unknown[],
Table extends { readonly [k: string]: readonly unknown[] },
>(
columnRenames: { [key: string]: string },
columnsKept: Members,
columns: DataMembers,
defaults: Defaults,
table: Table
): {
readonly [k in keyof Table]: ResolveType<ZipKeysWithValues<Members, Table[k], Defaults>>;
} {
const result: { [k: string]: { [m: string]: unknown } } = {};
const keyToIndex = new Map<string, number>(
columnsKept.map(name => {
const remappedName = columnRenames[name] === undefined ? name : columnRenames[name];
return [name, columns.indexOf(remappedName)];
})
);
for (const [k, v] of Object.entries<readonly unknown[]>(table)) {
const item: { [m: string]: unknown } = {};
for (const member of columnsKept) {
const ndx = keyToIndex.get(member)!;
item[member] = v[ndx] ?? defaults[ndx];
}
result[k] = item;
}
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
return result as any;
}

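A hedged reading of the doc comment's example, written against the implementation as it appears here: the rename lookup maps a kept name to its source column, so the comment's `{ c: 'default' }` is spelled `{ default: 'c' }` when calling the export directly (and the comment names the function makeTableWithDefaults, while the export is makeTableRenameAndFilter):

const t = makeTableRenameAndFilter(
  { default: 'c' },               // kept name 'default' reads from source column 'c'
  ['a', 'default', 'd'] as const, // columnsKept
  ['a', 'b', 'c', 'd'] as const,  // columns
  [123, 456, 789, 1011] as const, // defaults
  { foo: [1, 2, 3, 4], bar: [5, , , 8], moo: [, 9, 10, ] } as const
);
// t.foo => { a: 1, default: 3, d: 4 }
// t.bar => { a: 5, default: 789, d: 8 }   (missing cells fall back to defaults)
// t.moo => { a: 123, default: 10, d: 1011 }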
@@ -54,7 +54,7 @@ class If extends Directive {
}

class ElseIf extends If {
applyTo(stack: StateStack) {
override applyTo(stack: StateStack) {
assert(stack.length >= 1);
const { allowsFollowingElse, state: siblingState } = stack.pop()!;
this.checkDepth(stack);

@@ -11,7 +11,46 @@ export type TypeEqual<X, Y> = (<T>() => T extends X ? 1 : 2) extends <T>() => T
: false;

/* eslint-disable-next-line @typescript-eslint/no-unused-vars */
export function assertTypeTrue<T extends true>() {}
export function assertTypeTrue<_ extends true>() {}

/** `ReadonlyArray` of `ReadonlyArray`s. */
export type ROArrayArray<T> = ReadonlyArray<ReadonlyArray<T>>;
/** `ReadonlyArray` of `ReadonlyArray`s of `ReadonlyArray`s. */
export type ROArrayArrayArray<T> = ReadonlyArray<ReadonlyArray<ReadonlyArray<T>>>;

/**
* Deep version of the Readonly<> type, with support for tuples (up to length 7).
* <https://gist.github.com/masterkidan/7322752f569b1bba53e0426266768623>
*/
export type DeepReadonly<T> = T extends [infer A]
? DeepReadonlyObject<[A]>
: T extends [infer A, infer B]
? DeepReadonlyObject<[A, B]>
: T extends [infer A, infer B, infer C]
? DeepReadonlyObject<[A, B, C]>
: T extends [infer A, infer B, infer C, infer D]
? DeepReadonlyObject<[A, B, C, D]>
: T extends [infer A, infer B, infer C, infer D, infer E]
? DeepReadonlyObject<[A, B, C, D, E]>
: T extends [infer A, infer B, infer C, infer D, infer E, infer F]
? DeepReadonlyObject<[A, B, C, D, E, F]>
: T extends [infer A, infer B, infer C, infer D, infer E, infer F, infer G]
? DeepReadonlyObject<[A, B, C, D, E, F, G]>
: T extends Map<infer U, infer V>
? ReadonlyMap<DeepReadonlyObject<U>, DeepReadonlyObject<V>>
: T extends Set<infer U>
? ReadonlySet<DeepReadonlyObject<U>>
: T extends Promise<infer U>
? Promise<DeepReadonlyObject<U>>
: T extends Primitive
? T
: T extends (infer A)[]
? DeepReadonlyArray<A>
: DeepReadonlyObject<T>;

type Primitive = string | number | boolean | undefined | null | Function | symbol;
type DeepReadonlyArray<T> = ReadonlyArray<DeepReadonly<T>>;
type DeepReadonlyObject<T> = { readonly [P in keyof T]: DeepReadonly<T[P]> };

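A short hypothetical illustration of DeepReadonly from the hunk above (the Config shape is invented):

type Config = { name: string; size: [number, number]; tags: string[] };
type FrozenConfig = DeepReadonly<Config>;
declare const cfg: FrozenConfig;
// cfg.size is a readonly 2-tuple; cfg.tags is a ReadonlyArray<string>.
// cfg.tags.push('x'); // would not compile: push does not exist on ReadonlyArray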
/**
* Computes the intersection of a set of types, given the union of those types.
@@ -41,7 +80,7 @@ type TypeOr<T, Default> = T extends undefined ? Default : T;
export type ZipKeysWithValues<
Keys extends readonly string[],
Values extends readonly unknown[],
Defaults extends readonly unknown[]
Defaults extends readonly unknown[],
> =
//
Keys extends readonly [infer KHead, ...infer KTail]
@@ -50,10 +89,9 @@ export type ZipKeysWithValues<
TupleHeadOr<Values, undefined>,
TupleHeadOr<Defaults, undefined>
>;
} &
ZipKeysWithValues<
EnsureSubtype<KTail, readonly string[]>,
TupleTailOr<Values, []>,
TupleTailOr<Defaults, []>
>
} & ZipKeysWithValues<
EnsureSubtype<KTail, readonly string[]>,
TupleTailOr<Values, []>,
TupleTailOr<Defaults, []>
>
: {}; // K exhausted

@@ -47,15 +47,29 @@ export function assertOK<T>(value: Error | T): T {
return value;
}

/** Options for assertReject, shouldReject, and friends. */
export type ExceptionCheckOptions = { allowMissingStack?: boolean; message?: string };

/**
* Resolves if the provided promise rejects; rejects if it does not.
*/
export async function assertReject(p: Promise<unknown>, msg?: string): Promise<void> {
export async function assertReject(
expectedName: string,
p: Promise<unknown>,
{ allowMissingStack = false, message }: ExceptionCheckOptions = {}
): Promise<void> {
try {
await p;
unreachable(msg);
unreachable(message);
} catch (ex) {
// Assertion OK
// Asserted as expected
if (!allowMissingStack) {
const m = message ? ` (${message})` : '';
assert(
ex instanceof Error && typeof ex.stack === 'string',
'threw as expected, but missing stack' + m
);
}
}
}

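A hedged usage sketch for the new assertReject signature (the promise and message here are hypothetical; the tree's shouldReject takes the same options object, as the testIterateCollapsed hunk further below shows):

await assertReject(
  'TypeError',                                // expected Error name
  Promise.reject(new TypeError('bad input')), // hypothetical rejecting promise
  { allowMissingStack: false, message: 'should reject with TypeError' }
);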
@@ -146,7 +160,7 @@ export function assertNotSettledWithinTime(
const handle = timeout(() => {
resolve(undefined);
}, ms);
p.finally(() => clearTimeout(handle));
void p.finally(() => clearTimeout(handle));
});
return Promise.race([rejectWhenSettled, timeoutPromise]);
}
@@ -182,14 +196,24 @@ export function sortObjectByKey(v: { [k: string]: unknown }): { [k: string]: unk

/**
* Determines whether two JS values are equal, recursing into objects and arrays.
* NaN is treated specially, such that `objectEquals(NaN, NaN)`.
* NaN is treated specially, such that `objectEquals(NaN, NaN)`. +/-0.0 are treated as equal
* by default, but can be opted to be distinguished.
* @param x the first JS values that get compared
* @param y the second JS values that get compared
* @param distinguishSignedZero if set to true, treat 0.0 and -0.0 as unequal. Default to false.
*/
export function objectEquals(x: unknown, y: unknown): boolean {
export function objectEquals(
x: unknown,
y: unknown,
distinguishSignedZero: boolean = false
): boolean {
if (typeof x !== 'object' || typeof y !== 'object') {
if (typeof x === 'number' && typeof y === 'number' && Number.isNaN(x) && Number.isNaN(y)) {
return true;
}
return x === y;
// Object.is(0.0, -0.0) is false while (0.0 === -0.0) is true. Other than +/-0.0 and NaN cases,
// Object.is works in the same way as ===.
return distinguishSignedZero ? Object.is(x, y) : x === y;
}
if (x === null || y === null) return x === y;
if (x.constructor !== y.constructor) return false;
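Hedged examples of the signed-zero option, following the doc comment and the Object.is branch above:

objectEquals(NaN, NaN);           // true: NaN is special-cased
objectEquals(0.0, -0.0);          // true: === semantics by default
objectEquals(0.0, -0.0, true);    // false: Object.is distinguishes signed zeroes
objectEquals([1, [2]], [1, [2]]); // true: recurses into arrays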
@@ -282,28 +306,27 @@ const TypedArrayBufferViewInstances = [
new Float64Array(),
] as const;

export type TypedArrayBufferView = typeof TypedArrayBufferViewInstances[number];
export type TypedArrayBufferView = (typeof TypedArrayBufferViewInstances)[number];

export type TypedArrayBufferViewConstructor<
A extends TypedArrayBufferView = TypedArrayBufferView
> = {
// Interface copied from Uint8Array, and made generic.
readonly prototype: A;
readonly BYTES_PER_ELEMENT: number;
export type TypedArrayBufferViewConstructor<A extends TypedArrayBufferView = TypedArrayBufferView> =
{
// Interface copied from Uint8Array, and made generic.
readonly prototype: A;
readonly BYTES_PER_ELEMENT: number;

new (): A;
new (elements: Iterable<number>): A;
new (array: ArrayLike<number> | ArrayBufferLike): A;
new (buffer: ArrayBufferLike, byteOffset?: number, length?: number): A;
new (length: number): A;
new (): A;
new (elements: Iterable<number>): A;
new (array: ArrayLike<number> | ArrayBufferLike): A;
new (buffer: ArrayBufferLike, byteOffset?: number, length?: number): A;
new (length: number): A;

from(arrayLike: ArrayLike<number>): A;
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
from(arrayLike: Iterable<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): A;
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): A;
of(...items: number[]): A;
};
from(arrayLike: ArrayLike<number>): A;
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
from(arrayLike: Iterable<number>, mapfn?: (v: number, k: number) => number, thisArg?: any): A;
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
from<T>(arrayLike: ArrayLike<T>, mapfn: (v: T, k: number) => number, thisArg?: any): A;
of(...items: number[]): A;
};

export const kTypedArrayBufferViews: {
readonly [k: string]: TypedArrayBufferViewConstructor;
@@ -336,7 +359,7 @@ interface TypedArrayMap {

type TypedArrayParam<K extends keyof TypedArrayMap> = {
type: K;
data: number[];
data: readonly number[];
};

/**
@@ -377,7 +400,7 @@ export function typedArrayParam<K extends keyof TypedArrayMap>(

export function createTypedArray<K extends keyof TypedArrayMap>(
type: K,
data: number[]
data: readonly number[]
): TypedArrayMap[K] {
return new kTypedArrayBufferViews[type](data) as TypedArrayMap[K];
}
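A small hypothetical use of the helpers above with the now-readonly data parameter:

const data = [1, 2, 3] as const;                  // readonly number[] is now accepted
const arr = createTypedArray('Uint8Array', data); // Uint8Array [1, 2, 3]
const param = typedArrayParam('Float32Array', [0.5, 1.5]); // { type, data }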
@@ -423,3 +446,31 @@ export function memcpy(
): void {
subarrayAsU8(dst.dst, dst).set(subarrayAsU8(src.src, src));
}

/**
* Used to create a value that is specified by multiplying some runtime value
* by a constant and then adding a constant to it.
*/
export interface ValueTestVariant {
mult: number;
add: number;
}

/**
* Filters out SpecValues that are the same.
*/
export function filterUniqueValueTestVariants(valueTestVariants: ValueTestVariant[]) {
return new Map<string, ValueTestVariant>(
valueTestVariants.map(v => [`m:${v.mult},a:${v.add}`, v])
).values();
}

/**
* Used to create a value that is specified by multiplied some runtime value
* by a constant and then adding a constant to it. This happens often in test
* with limits that can only be known at runtime and yet we need a way to
* add parameters to a test and those parameters must be constants.
*/
export function makeValueTestVariant(base: number, variant: ValueTestVariant) {
return base * variant.mult + variant.add;
}

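A hedged worked example of the variant helpers above (the runtime limit value of 256 is hypothetical):

const variants: ValueTestVariant[] = [
  { mult: 1, add: 0 },
  { mult: 1, add: 0 },    // duplicate key 'm:1,a:0', filtered out below
  { mult: 0.5, add: -1 },
];
for (const v of filterUniqueValueTestVariants(variants)) {
  // With a runtime limit of 256: 256*1+0 = 256, then 256*0.5-1 = 127.
  console.log(makeValueTestVariant(256, v));
}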
@@ -8,14 +8,15 @@ import { attemptGarbageCollection } from '../../common/util/collect_garbage.js';
import { keysOf } from '../../common/util/data_tables.js';
import { getGPU } from '../../common/util/navigator_gpu.js';
import { assert, iterRange } from '../../common/util/util.js';
import { kLimitInfo } from '../../webgpu/capability_info.js';
import { getDefaultLimitsForAdapter } from '../../webgpu/capability_info.js';

export const g = makeTestGroup(Fixture);

/** Adapter preference identifier to option. */
const kAdapterTypeOptions: {
readonly [k in GPUPowerPreference | 'fallback']: GPURequestAdapterOptions;
} = /* prettier-ignore */ {
} =
/* prettier-ignore */ {
'low-power': { powerPreference: 'low-power', forceFallbackAdapter: false },
'high-performance': { powerPreference: 'high-performance', forceFallbackAdapter: false },
'fallback': { powerPreference: undefined, forceFallbackAdapter: true },
@@ -33,10 +34,11 @@ async function createDeviceAndComputeCommands(adapter: GPUAdapter) {
// Constants are computed such that per run, this function should allocate roughly 2G
// worth of data. This should be sufficient as we run these creation functions many
// times. If the data backing the created objects is not recycled we should OOM.
const limitInfo = getDefaultLimitsForAdapter(adapter);
const kNumPipelines = 64;
const kNumBindgroups = 128;
const kNumBufferElements =
kLimitInfo.maxComputeWorkgroupSizeX.default * kLimitInfo.maxComputeWorkgroupSizeY.default;
limitInfo.maxComputeWorkgroupSizeX.default * limitInfo.maxComputeWorkgroupSizeY.default;
const kBufferSize = kNumBufferElements * 4;
const kBufferData = new Uint32Array([...iterRange(kNumBufferElements, x => x)]);

@@ -54,8 +56,8 @@ async function createDeviceAndComputeCommands(adapter: GPUAdapter) {
@group(0) @binding(0) var<storage, read_write> buffer: Buffer;
@compute @workgroup_size(1) fn main(
@builtin(global_invocation_id) id: vec3<u32>) {
buffer.data[id.x * ${kLimitInfo.maxComputeWorkgroupSizeX.default}u + id.y] =
buffer.data[id.x * ${kLimitInfo.maxComputeWorkgroupSizeX.default}u + id.y] +
buffer.data[id.x * ${limitInfo.maxComputeWorkgroupSizeX.default}u + id.y] =
buffer.data[id.x * ${limitInfo.maxComputeWorkgroupSizeX.default}u + id.y] +
${pipelineIndex}u;
}
`,
@@ -79,8 +81,8 @@ async function createDeviceAndComputeCommands(adapter: GPUAdapter) {
pass.setPipeline(pipeline);
pass.setBindGroup(0, bindgroup);
pass.dispatchWorkgroups(
kLimitInfo.maxComputeWorkgroupSizeX.default,
kLimitInfo.maxComputeWorkgroupSizeY.default
limitInfo.maxComputeWorkgroupSizeX.default,
limitInfo.maxComputeWorkgroupSizeY.default
);
pass.end();
commands.push(encoder.finish());

@@ -1,38 +0,0 @@
export const description = `
Stress tests for pipeline statistics queries.

TODO: pipeline statistics queries are removed from core; consider moving tests to another suite.
`;

import { makeTestGroup } from '../../common/framework/test_group.js';
import { GPUTest } from '../../webgpu/gpu_test.js';

export const g = makeTestGroup(GPUTest);

g.test('render_pass_one_query_set')
.desc(
`Tests a huge number of pipeline statistics queries over a single query set in a
single render pass.`
)
.unimplemented();

g.test('render_pass_many_query_sets')
.desc(
`Tests a huge number of pipeline statistics queries over a huge number of query
sets in a single render pass.`
)
.unimplemented();

g.test('compute_pass_one_query_set')
.desc(
`Tests a huge number of pipeline statistics queries over a single query set in a
single compute pass.`
)
.unimplemented();

g.test('compute_pass_many_query_sets')
.desc(
`Tests a huge number of pipeline statistics queries over a huge number of query
sets in a single compute pass.`
)
.unimplemented();
@@ -11,10 +11,10 @@ import { TestGroupTest } from './test_group_test.js';
import { UnitTest } from './unit_test.js';

class FixtureToTest extends UnitTest {
public immediateAsyncExpectation<T>(fn: () => Promise<T>): Promise<T> {
public override immediateAsyncExpectation<T>(fn: () => Promise<T>): Promise<T> {
return super.immediateAsyncExpectation(fn);
}
public eventualAsyncExpectation<T>(fn: (niceStack: Error) => Promise<T>): void {
public override eventualAsyncExpectation<T>(fn: (niceStack: Error) => Promise<T>): void {
super.eventualAsyncExpectation(fn);
}
}

@@ -8,9 +8,9 @@ import { UnitTest } from './unit_test.js';

export const g = makeTestGroup(UnitTest);

g.test('test,sync').fn(t => {});
g.test('test,sync').fn(_t => {});

g.test('test,async').fn(async t => {});
g.test('test,async').fn(async _t => {});

g.test('test_with_params,sync')
.paramsSimple([{}])

@@ -428,15 +428,15 @@ g.test('pack2x16float')

// f32 subnormals
// prettier-ignore
{ inputs: [kValue.f32.subnormal.positive.max, 1], result: [0x3c000000, 0x3c008000, 0x3c000001] },
{ inputs: [kValue.f32.positive.subnormal.max, 1], result: [0x3c000000, 0x3c008000, 0x3c000001] },
// prettier-ignore
{ inputs: [kValue.f32.subnormal.negative.min, 1], result: [0x3c008001, 0x3c000000, 0x3c008000] },
{ inputs: [kValue.f32.negative.subnormal.min, 1], result: [0x3c008001, 0x3c000000, 0x3c008000] },

// f16 subnormals
// prettier-ignore
{ inputs: [kValue.f16.subnormal.positive.max, 1], result: [0x3c0003ff, 0x3c000000, 0x3c008000] },
{ inputs: [kValue.f16.positive.subnormal.max, 1], result: [0x3c0003ff, 0x3c000000, 0x3c008000] },
// prettier-ignore
{ inputs: [kValue.f16.subnormal.negative.min, 1], result: [0x03c0083ff, 0x3c000000, 0x3c008000] },
{ inputs: [kValue.f16.negative.subnormal.min, 1], result: [0x03c0083ff, 0x3c000000, 0x3c008000] },

// f16 out of bounds
{ inputs: [kValue.f16.positive.max + 1, 1], result: [undefined] },
@@ -481,8 +481,8 @@ g.test('pack2x16snorm')
{ inputs: [-0.1, -0.5], result: 0xc001f333 },

// Subnormals
{ inputs: [kValue.f32.subnormal.positive.max, 1], result: 0x7fff0000 },
{ inputs: [kValue.f32.subnormal.negative.min, 1], result: 0x7fff0000 },
{ inputs: [kValue.f32.positive.subnormal.max, 1], result: 0x7fff0000 },
{ inputs: [kValue.f32.negative.subnormal.min, 1], result: 0x7fff0000 },
] as const)
.fn(test => {
const inputs = test.params.inputs;
@@ -506,7 +506,7 @@ g.test('pack2x16unorm')
{ inputs: [10, 10], result: 0xffffffff },

// Subnormals
{ inputs: [kValue.f32.subnormal.positive.max, 1], result: 0xffff0000 },
{ inputs: [kValue.f32.positive.subnormal.max, 1], result: 0xffff0000 },
] as const)
.fn(test => {
const inputs = test.params.inputs;
@@ -542,8 +542,8 @@ g.test('pack4x8snorm')
{ inputs: [-0.1, -0.5, -0.1, -0.5], result: 0xc1f3c1f3 },

// Subnormals
{ inputs: [kValue.f32.subnormal.positive.max, 1, 1, 1], result: 0x7f7f7f00 },
{ inputs: [kValue.f32.subnormal.negative.min, 1, 1, 1], result: 0x7f7f7f00 },
{ inputs: [kValue.f32.positive.subnormal.max, 1, 1, 1], result: 0x7f7f7f00 },
{ inputs: [kValue.f32.negative.subnormal.min, 1, 1, 1], result: 0x7f7f7f00 },
] as const)
.fn(test => {
const inputs = test.params.inputs;
@@ -570,7 +570,7 @@ g.test('pack4x8unorm')
{ inputs: [0.1, 0.5, 0.1, 0.5], result: 0x801a801a },

// Subnormals
{ inputs: [kValue.f32.subnormal.positive.max, 1, 1, 1], result: 0xffffff00 },
{ inputs: [kValue.f32.positive.subnormal.max, 1, 1, 1], result: 0xffffff00 },
] as const)
.fn(test => {
const inputs = test.params.inputs;

File diff suppressed because it is too large
@@ -124,7 +124,7 @@ class LoadingTest extends UnitTest {
if (!this.isListenersAdded) {
this.isListenersAdded = true;
this.loader.addEventListener('import', ev => this.events.push(ev.data.url));
this.loader.addEventListener('finish', ev => this.events.push(null));
this.loader.addEventListener('finish', _ev => this.events.push(null));
}
}

@@ -703,7 +703,10 @@ async function testIterateCollapsed(
subqueriesToExpand: expectations,
});
if (expectedResult === 'throws') {
t.shouldReject('Error', treePromise, 'loadTree should have thrown Error');
t.shouldReject('Error', treePromise, {
// Some errors here use StacklessError to print nicer command line outputs.
allowMissingStack: true,
});
return;
}
const tree = await treePromise;

@@ -36,6 +36,18 @@ g.test('empty').fn(t => {
t.expect(res.status === 'running');
rec.finish();

t.expect(res.status === 'notrun');
t.expect(res.timems >= 0);
});

g.test('passed').fn(t => {
const mylog = new Logger({ overrideDebugMode: true });
const [rec, res] = mylog.record('one');

rec.start();
rec.passed();
rec.finish();

t.expect(res.status === 'pass');
t.expect(res.timems >= 0);
});
@@ -59,13 +71,27 @@ g.test('skip').fn(t => {

rec.start();
rec.skipped(new SkipTestCase());
rec.debug(new Error('hello'));
rec.finish();

t.expect(res.status === 'skip');
t.expect(res.timems >= 0);
});

// Tests if there's some skips and at least one pass it's pass.
g.test('skip_pass').fn(t => {
const mylog = new Logger({ overrideDebugMode: true });
const [rec, res] = mylog.record('one');

rec.start();
rec.skipped(new SkipTestCase());
rec.debug(new Error('hello'));
rec.skipped(new SkipTestCase());
rec.finish();

t.expect(res.status === 'pass');
t.expect(res.timems >= 0);
});

g.test('warn').fn(t => {
const mylog = new Logger({ overrideDebugMode: true });
const [rec, res] = mylog.record('one');

@@ -25,9 +25,6 @@ import {
|
||||
fullF16Range,
|
||||
fullF32Range,
|
||||
fullI32Range,
|
||||
reinterpretU16AsF16,
|
||||
reinterpretU32AsF32,
|
||||
reinterpretU64AsF64,
|
||||
lerp,
|
||||
linearRange,
|
||||
nextAfterF16,
|
||||
@@ -40,6 +37,11 @@ import {
|
||||
lerpBigInt,
|
||||
linearRangeBigInt,
|
||||
} from '../webgpu/util/math.js';
|
||||
import {
|
||||
reinterpretU16AsF16,
|
||||
reinterpretU32AsF32,
|
||||
reinterpretU64AsF64,
|
||||
} from '../webgpu/util/reinterpret.js';
|
||||
|
||||
import { UnitTest } from './unit_test.js';
|
||||
|
||||
@@ -70,8 +72,8 @@ function withinOneULPF32(got: number, expected: number, mode: FlushMode): boolea
|
||||
* FTZ occur during comparison
|
||||
**/
|
||||
function compareArrayOfNumbersF32(
|
||||
got: Array<number>,
|
||||
expect: Array<number>,
|
||||
got: readonly number[],
|
||||
expect: readonly number[],
|
||||
mode: FlushMode = 'flush'
|
||||
): boolean {
|
||||
return (
|
||||
@@ -108,10 +110,10 @@ g.test('nextAfterF64FlushToZero')
|
||||
// Edge Cases
|
||||
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
|
||||
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f64.infinity.positive },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f64.infinity.positive },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f64.infinity.negative },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f64.infinity.negative },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f64.positive.infinity },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f64.positive.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f64.negative.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f64.negative.infinity },
|
||||
|
||||
// Zeroes
|
||||
{ val: +0, dir: 'positive', result: kValue.f64.positive.min },
|
||||
@@ -120,24 +122,24 @@ g.test('nextAfterF64FlushToZero')
|
||||
{ val: -0, dir: 'negative', result: kValue.f64.negative.max },
|
||||
|
||||
// Subnormals
|
||||
{ val: kValue.f64.subnormal.positive.min, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.subnormal.positive.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.subnormal.positive.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.subnormal.positive.max, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.subnormal.negative.min, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.subnormal.negative.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.subnormal.negative.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.subnormal.negative.max, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.positive.subnormal.min, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.positive.subnormal.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.positive.subnormal.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.positive.subnormal.max, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.negative.subnormal.min, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.negative.subnormal.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.negative.subnormal.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.negative.subnormal.max, dir: 'negative', result: kValue.f64.negative.max },
|
||||
|
||||
// Normals
|
||||
{ val: kValue.f64.positive.max, dir: 'positive', result: kValue.f64.infinity.positive },
|
||||
{ val: kValue.f64.positive.max, dir: 'positive', result: kValue.f64.positive.infinity },
|
||||
{ val: kValue.f64.positive.max, dir: 'negative', result: kValue.f64.positive.nearest_max },
|
||||
{ val: kValue.f64.positive.min, dir: 'positive', result: reinterpretU64AsF64(0x0010_0000_0000_0001n ) },
|
||||
{ val: kValue.f64.positive.min, dir: 'negative', result: 0 },
|
||||
{ val: kValue.f64.negative.max, dir: 'positive', result: 0 },
|
||||
{ val: kValue.f64.negative.max, dir: 'negative', result: reinterpretU64AsF64(0x8010_0000_0000_0001n) },
|
||||
{ val: kValue.f64.negative.min, dir: 'positive', result: kValue.f64.negative.nearest_min },
|
||||
{ val: kValue.f64.negative.min, dir: 'negative', result: kValue.f64.infinity.negative },
|
||||
{ val: kValue.f64.negative.min, dir: 'negative', result: kValue.f64.negative.infinity },
|
||||
{ val: reinterpretU64AsF64(0x0380_0000_0000_0000n), dir: 'positive', result: reinterpretU64AsF64(0x0380_0000_0000_0001n) },
|
||||
{ val: reinterpretU64AsF64(0x0380_0000_0000_0000n), dir: 'negative', result: reinterpretU64AsF64(0x037f_ffff_ffff_ffffn) },
|
||||
{ val: reinterpretU64AsF64(0x8380_0000_0000_0000n), dir: 'positive', result: reinterpretU64AsF64(0x837f_ffff_ffff_ffffn) },
|
||||
@@ -162,36 +164,36 @@ g.test('nextAfterF64NoFlush')
|
||||
// Edge Cases
|
||||
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
|
||||
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f64.infinity.positive },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f64.infinity.positive },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f64.infinity.negative },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f64.infinity.negative },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f64.positive.infinity },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f64.positive.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f64.negative.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f64.negative.infinity },
|
||||
|
||||
// Zeroes
|
||||
{ val: +0, dir: 'positive', result: kValue.f64.subnormal.positive.min },
|
||||
{ val: +0, dir: 'negative', result: kValue.f64.subnormal.negative.max },
|
||||
{ val: -0, dir: 'positive', result: kValue.f64.subnormal.positive.min },
|
||||
{ val: -0, dir: 'negative', result: kValue.f64.subnormal.negative.max },
|
||||
{ val: +0, dir: 'positive', result: kValue.f64.positive.subnormal.min },
|
||||
{ val: +0, dir: 'negative', result: kValue.f64.negative.subnormal.max },
|
||||
{ val: -0, dir: 'positive', result: kValue.f64.positive.subnormal.min },
|
||||
{ val: -0, dir: 'negative', result: kValue.f64.negative.subnormal.max },
|
||||
|
||||
// Subnormals
|
||||
{ val: kValue.f64.subnormal.positive.min, dir: 'positive', result: reinterpretU64AsF64(0x0000_0000_0000_0002n) },
|
||||
{ val: kValue.f64.subnormal.positive.min, dir: 'negative', result: 0 },
|
||||
{ val: kValue.f64.subnormal.positive.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.subnormal.positive.max, dir: 'negative', result: reinterpretU64AsF64(0x000f_ffff_ffff_fffen) },
|
||||
{ val: kValue.f64.subnormal.negative.min, dir: 'positive', result: reinterpretU64AsF64(0x800f_ffff_ffff_fffen) },
|
||||
{ val: kValue.f64.subnormal.negative.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.subnormal.negative.max, dir: 'positive', result: 0 },
|
||||
{ val: kValue.f64.subnormal.negative.max, dir: 'negative', result: reinterpretU64AsF64(0x8000_0000_0000_0002n) },
|
||||
{ val: kValue.f64.positive.subnormal.min, dir: 'positive', result: reinterpretU64AsF64(0x0000_0000_0000_0002n) },
|
||||
{ val: kValue.f64.positive.subnormal.min, dir: 'negative', result: 0 },
|
||||
{ val: kValue.f64.positive.subnormal.max, dir: 'positive', result: kValue.f64.positive.min },
|
||||
{ val: kValue.f64.positive.subnormal.max, dir: 'negative', result: reinterpretU64AsF64(0x000f_ffff_ffff_fffen) },
|
||||
{ val: kValue.f64.negative.subnormal.min, dir: 'positive', result: reinterpretU64AsF64(0x800f_ffff_ffff_fffen) },
|
||||
{ val: kValue.f64.negative.subnormal.min, dir: 'negative', result: kValue.f64.negative.max },
|
||||
{ val: kValue.f64.negative.subnormal.max, dir: 'positive', result: 0 },
|
||||
{ val: kValue.f64.negative.subnormal.max, dir: 'negative', result: reinterpretU64AsF64(0x8000_0000_0000_0002n) },
|
||||
|
||||
// Normals
|
||||
{ val: kValue.f64.positive.max, dir: 'positive', result: kValue.f64.infinity.positive },
|
||||
{ val: kValue.f64.positive.max, dir: 'positive', result: kValue.f64.positive.infinity },
|
||||
{ val: kValue.f64.positive.max, dir: 'negative', result: kValue.f64.positive.nearest_max },
|
||||
{ val: kValue.f64.positive.min, dir: 'positive', result: reinterpretU64AsF64(0x0010_0000_0000_0001n ) },
|
||||
{ val: kValue.f64.positive.min, dir: 'negative', result: reinterpretU64AsF64(0x000f_ffff_ffff_ffffn) },
|
||||
{ val: kValue.f64.negative.max, dir: 'positive', result: reinterpretU64AsF64(0x800f_ffff_ffff_ffffn) },
|
||||
{ val: kValue.f64.negative.max, dir: 'negative', result: reinterpretU64AsF64(0x8010_0000_0000_0001n) },
|
||||
{ val: kValue.f64.negative.min, dir: 'positive', result: kValue.f64.negative.nearest_min },
|
||||
{ val: kValue.f64.negative.min, dir: 'negative', result: kValue.f64.infinity.negative },
|
||||
{ val: kValue.f64.negative.min, dir: 'negative', result: kValue.f64.negative.infinity },
|
||||
{ val: reinterpretU64AsF64(0x0380_0000_0000_0000n), dir: 'positive', result: reinterpretU64AsF64(0x0380_0000_0000_0001n) },
|
||||
{ val: reinterpretU64AsF64(0x0380_0000_0000_0000n), dir: 'negative', result: reinterpretU64AsF64(0x037f_ffff_ffff_ffffn) },
|
||||
{ val: reinterpretU64AsF64(0x8380_0000_0000_0000n), dir: 'positive', result: reinterpretU64AsF64(0x837f_ffff_ffff_ffffn) },
|
||||
@@ -218,10 +220,10 @@ g.test('nextAfterF32FlushToZero')
|
||||
// Edge Cases
|
||||
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
|
||||
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f32.infinity.positive },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f32.infinity.positive },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f32.infinity.negative },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f32.infinity.negative },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f32.positive.infinity },
|
||||
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f32.positive.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f32.negative.infinity },
|
||||
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f32.negative.infinity },
|
||||
|
||||
// Zeroes
|
||||
{ val: +0, dir: 'positive', result: kValue.f32.positive.min },
|
||||
@@ -230,24 +232,24 @@ g.test('nextAfterF32FlushToZero')
|
||||
{ val: -0, dir: 'negative', result: kValue.f32.negative.max },
|
||||
|
||||
// Subnormals
|
||||
{ val: kValue.f32.subnormal.positive.min, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.subnormal.positive.min, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.subnormal.positive.max, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.subnormal.positive.max, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.subnormal.negative.min, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.subnormal.negative.min, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.subnormal.negative.max, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.subnormal.negative.max, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.positive.subnormal.min, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.positive.subnormal.min, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.positive.subnormal.max, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.positive.subnormal.max, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.negative.subnormal.min, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.negative.subnormal.min, dir: 'negative', result: kValue.f32.negative.max },
|
||||
{ val: kValue.f32.negative.subnormal.max, dir: 'positive', result: kValue.f32.positive.min },
|
||||
{ val: kValue.f32.negative.subnormal.max, dir: 'negative', result: kValue.f32.negative.max },
|
||||
|
||||
// Normals
|
||||
{ val: kValue.f32.positive.max, dir: 'positive', result: kValue.f32.infinity.positive },
|
||||
{ val: kValue.f32.positive.max, dir: 'positive', result: kValue.f32.positive.infinity },
|
||||
{ val: kValue.f32.positive.max, dir: 'negative', result: kValue.f32.positive.nearest_max },
|
||||
{ val: kValue.f32.positive.min, dir: 'positive', result: reinterpretU32AsF32(0x00800001) },
|
||||
{ val: kValue.f32.positive.min, dir: 'negative', result: 0 },
|
||||
{ val: kValue.f32.negative.max, dir: 'positive', result: 0 },
|
||||
{ val: kValue.f32.negative.max, dir: 'negative', result: reinterpretU32AsF32(0x80800001) },
|
||||
{ val: kValue.f32.negative.min, dir: 'positive', result: reinterpretU32AsF32(0xff7ffffe) },
|
||||
{ val: kValue.f32.negative.min, dir: 'negative', result: kValue.f32.infinity.negative },
|
||||
{ val: kValue.f32.negative.min, dir: 'negative', result: kValue.f32.negative.infinity },
|
||||
{ val: reinterpretU32AsF32(0x03800000), dir: 'positive', result: reinterpretU32AsF32(0x03800001) },
|
||||
{ val: reinterpretU32AsF32(0x03800000), dir: 'negative', result: reinterpretU32AsF32(0x037fffff) },
|
||||
{ val: reinterpretU32AsF32(0x83800000), dir: 'positive', result: reinterpretU32AsF32(0x837fffff) },
|
||||
@@ -282,36 +284,36 @@ g.test('nextAfterF32NoFlush')
// Edge Cases
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f32.infinity.positive },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f32.infinity.positive },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f32.infinity.negative },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f32.infinity.negative },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f32.positive.infinity },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f32.positive.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f32.negative.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f32.negative.infinity },

// Zeroes
{ val: +0, dir: 'positive', result: kValue.f32.subnormal.positive.min },
{ val: +0, dir: 'negative', result: kValue.f32.subnormal.negative.max },
{ val: -0, dir: 'positive', result: kValue.f32.subnormal.positive.min },
{ val: -0, dir: 'negative', result: kValue.f32.subnormal.negative.max },
{ val: +0, dir: 'positive', result: kValue.f32.positive.subnormal.min },
{ val: +0, dir: 'negative', result: kValue.f32.negative.subnormal.max },
{ val: -0, dir: 'positive', result: kValue.f32.positive.subnormal.min },
{ val: -0, dir: 'negative', result: kValue.f32.negative.subnormal.max },

// Subnormals
{ val: kValue.f32.subnormal.positive.min, dir: 'positive', result: reinterpretU32AsF32(0x00000002) },
{ val: kValue.f32.subnormal.positive.min, dir: 'negative', result: 0 },
{ val: kValue.f32.subnormal.positive.max, dir: 'positive', result: kValue.f32.positive.min },
{ val: kValue.f32.subnormal.positive.max, dir: 'negative', result: reinterpretU32AsF32(0x007ffffe) },
{ val: kValue.f32.subnormal.negative.min, dir: 'positive', result: reinterpretU32AsF32(0x807ffffe) },
{ val: kValue.f32.subnormal.negative.min, dir: 'negative', result: kValue.f32.negative.max },
{ val: kValue.f32.subnormal.negative.max, dir: 'positive', result: 0 },
{ val: kValue.f32.subnormal.negative.max, dir: 'negative', result: reinterpretU32AsF32(0x80000002) },
{ val: kValue.f32.positive.subnormal.min, dir: 'positive', result: reinterpretU32AsF32(0x00000002) },
{ val: kValue.f32.positive.subnormal.min, dir: 'negative', result: 0 },
{ val: kValue.f32.positive.subnormal.max, dir: 'positive', result: kValue.f32.positive.min },
{ val: kValue.f32.positive.subnormal.max, dir: 'negative', result: reinterpretU32AsF32(0x007ffffe) },
{ val: kValue.f32.negative.subnormal.min, dir: 'positive', result: reinterpretU32AsF32(0x807ffffe) },
{ val: kValue.f32.negative.subnormal.min, dir: 'negative', result: kValue.f32.negative.max },
{ val: kValue.f32.negative.subnormal.max, dir: 'positive', result: 0 },
{ val: kValue.f32.negative.subnormal.max, dir: 'negative', result: reinterpretU32AsF32(0x80000002) },

// Normals
{ val: kValue.f32.positive.max, dir: 'positive', result: kValue.f32.infinity.positive },
{ val: kValue.f32.positive.max, dir: 'positive', result: kValue.f32.positive.infinity },
{ val: kValue.f32.positive.max, dir: 'negative', result: kValue.f32.positive.nearest_max },
{ val: kValue.f32.positive.min, dir: 'positive', result: reinterpretU32AsF32(0x00800001) },
{ val: kValue.f32.positive.min, dir: 'negative', result: kValue.f32.subnormal.positive.max },
{ val: kValue.f32.negative.max, dir: 'positive', result: kValue.f32.subnormal.negative.min },
{ val: kValue.f32.positive.min, dir: 'negative', result: kValue.f32.positive.subnormal.max },
{ val: kValue.f32.negative.max, dir: 'positive', result: kValue.f32.negative.subnormal.min },
{ val: kValue.f32.negative.max, dir: 'negative', result: reinterpretU32AsF32(0x80800001) },
{ val: kValue.f32.negative.min, dir: 'positive', result: kValue.f32.negative.nearest_min },
{ val: kValue.f32.negative.min, dir: 'negative', result: kValue.f32.infinity.negative },
{ val: kValue.f32.negative.min, dir: 'negative', result: kValue.f32.negative.infinity },
{ val: reinterpretU32AsF32(0x03800000), dir: 'positive', result: reinterpretU32AsF32(0x03800001) },
{ val: reinterpretU32AsF32(0x03800000), dir: 'negative', result: reinterpretU32AsF32(0x037fffff) },
{ val: reinterpretU32AsF32(0x83800000), dir: 'positive', result: reinterpretU32AsF32(0x837fffff) },
@@ -348,10 +350,10 @@ g.test('nextAfterF16FlushToZero')
// Edge Cases
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f16.infinity.positive },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f16.infinity.positive },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f16.infinity.negative },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f16.infinity.negative },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f16.positive.infinity },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f16.positive.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f16.negative.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f16.negative.infinity },

// Zeroes
{ val: +0, dir: 'positive', result: kValue.f16.positive.min },
@@ -360,24 +362,24 @@ g.test('nextAfterF16FlushToZero')
{ val: -0, dir: 'negative', result: kValue.f16.negative.max },

// Subnormals
{ val: kValue.f16.subnormal.positive.min, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.subnormal.positive.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.subnormal.positive.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.subnormal.positive.max, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.subnormal.negative.min, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.subnormal.negative.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.subnormal.negative.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.subnormal.negative.max, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.positive.subnormal.min, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.positive.subnormal.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.positive.subnormal.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.positive.subnormal.max, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.negative.subnormal.min, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.negative.subnormal.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.negative.subnormal.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.negative.subnormal.max, dir: 'negative', result: kValue.f16.negative.max },

// Normals
{ val: kValue.f16.positive.max, dir: 'positive', result: kValue.f16.infinity.positive },
{ val: kValue.f16.positive.max, dir: 'positive', result: kValue.f16.positive.infinity },
{ val: kValue.f16.positive.max, dir: 'negative', result: reinterpretU16AsF16(0x7bfe) },
{ val: kValue.f16.positive.min, dir: 'positive', result: reinterpretU16AsF16(0x0401) },
{ val: kValue.f16.positive.min, dir: 'negative', result: 0 },
{ val: kValue.f16.negative.max, dir: 'positive', result: 0 },
{ val: kValue.f16.negative.max, dir: 'negative', result: reinterpretU16AsF16(0x8401) },
{ val: kValue.f16.negative.min, dir: 'positive', result: reinterpretU16AsF16(0xfbfe) },
{ val: kValue.f16.negative.min, dir: 'negative', result: kValue.f16.infinity.negative },
{ val: kValue.f16.negative.min, dir: 'negative', result: kValue.f16.negative.infinity },
{ val: reinterpretU16AsF16(0x1380), dir: 'positive', result: reinterpretU16AsF16(0x1381) },
{ val: reinterpretU16AsF16(0x1380), dir: 'negative', result: reinterpretU16AsF16(0x137f) },
{ val: reinterpretU16AsF16(0x9380), dir: 'positive', result: reinterpretU16AsF16(0x937f) },
@@ -412,36 +414,36 @@ g.test('nextAfterF16NoFlush')
// Edge Cases
{ val: Number.NaN, dir: 'positive', result: Number.NaN },
{ val: Number.NaN, dir: 'negative', result: Number.NaN },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f16.infinity.positive },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f16.infinity.positive },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f16.infinity.negative },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f16.infinity.negative },
{ val: Number.POSITIVE_INFINITY, dir: 'positive', result: kValue.f16.positive.infinity },
{ val: Number.POSITIVE_INFINITY, dir: 'negative', result: kValue.f16.positive.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'positive', result: kValue.f16.negative.infinity },
{ val: Number.NEGATIVE_INFINITY, dir: 'negative', result: kValue.f16.negative.infinity },

// Zeroes
{ val: +0, dir: 'positive', result: kValue.f16.subnormal.positive.min },
{ val: +0, dir: 'negative', result: kValue.f16.subnormal.negative.max },
{ val: -0, dir: 'positive', result: kValue.f16.subnormal.positive.min },
{ val: -0, dir: 'negative', result: kValue.f16.subnormal.negative.max },
{ val: +0, dir: 'positive', result: kValue.f16.positive.subnormal.min },
{ val: +0, dir: 'negative', result: kValue.f16.negative.subnormal.max },
{ val: -0, dir: 'positive', result: kValue.f16.positive.subnormal.min },
{ val: -0, dir: 'negative', result: kValue.f16.negative.subnormal.max },

// Subnormals
{ val: kValue.f16.subnormal.positive.min, dir: 'positive', result: reinterpretU16AsF16(0x0002) },
{ val: kValue.f16.subnormal.positive.min, dir: 'negative', result: 0 },
{ val: kValue.f16.subnormal.positive.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.subnormal.positive.max, dir: 'negative', result: reinterpretU16AsF16(0x03fe) },
{ val: kValue.f16.subnormal.negative.min, dir: 'positive', result: reinterpretU16AsF16(0x83fe) },
{ val: kValue.f16.subnormal.negative.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.subnormal.negative.max, dir: 'positive', result: 0 },
{ val: kValue.f16.subnormal.negative.max, dir: 'negative', result: reinterpretU16AsF16(0x8002) },
{ val: kValue.f16.positive.subnormal.min, dir: 'positive', result: reinterpretU16AsF16(0x0002) },
{ val: kValue.f16.positive.subnormal.min, dir: 'negative', result: 0 },
{ val: kValue.f16.positive.subnormal.max, dir: 'positive', result: kValue.f16.positive.min },
{ val: kValue.f16.positive.subnormal.max, dir: 'negative', result: reinterpretU16AsF16(0x03fe) },
{ val: kValue.f16.negative.subnormal.min, dir: 'positive', result: reinterpretU16AsF16(0x83fe) },
{ val: kValue.f16.negative.subnormal.min, dir: 'negative', result: kValue.f16.negative.max },
{ val: kValue.f16.negative.subnormal.max, dir: 'positive', result: 0 },
{ val: kValue.f16.negative.subnormal.max, dir: 'negative', result: reinterpretU16AsF16(0x8002) },

// Normals
{ val: kValue.f16.positive.max, dir: 'positive', result: kValue.f16.infinity.positive },
{ val: kValue.f16.positive.max, dir: 'positive', result: kValue.f16.positive.infinity },
{ val: kValue.f16.positive.max, dir: 'negative', result: reinterpretU16AsF16(0x7bfe) },
{ val: kValue.f16.positive.min, dir: 'positive', result: reinterpretU16AsF16(0x0401) },
{ val: kValue.f16.positive.min, dir: 'negative', result: kValue.f16.subnormal.positive.max },
{ val: kValue.f16.negative.max, dir: 'positive', result: kValue.f16.subnormal.negative.min },
{ val: kValue.f16.positive.min, dir: 'negative', result: kValue.f16.positive.subnormal.max },
{ val: kValue.f16.negative.max, dir: 'positive', result: kValue.f16.negative.subnormal.min },
{ val: kValue.f16.negative.max, dir: 'negative', result: reinterpretU16AsF16(0x8401) },
{ val: kValue.f16.negative.min, dir: 'positive', result: reinterpretU16AsF16(0xfbfe) },
{ val: kValue.f16.negative.min, dir: 'negative', result: kValue.f16.infinity.negative },
{ val: kValue.f16.negative.min, dir: 'negative', result: kValue.f16.negative.infinity },
{ val: reinterpretU16AsF16(0x1380), dir: 'positive', result: reinterpretU16AsF16(0x1381) },
{ val: reinterpretU16AsF16(0x1380), dir: 'negative', result: reinterpretU16AsF16(0x137f) },
{ val: reinterpretU16AsF16(0x9380), dir: 'positive', result: reinterpretU16AsF16(0x937f) },
@@ -452,10 +454,10 @@ g.test('nextAfterF16NoFlush')
{ val: 0.01, dir: 'negative', result: reinterpretU16AsF16(0x211e) }, // positive normal
{ val: -0.01, dir: 'positive', result: reinterpretU16AsF16(0xa11e) }, // negative normal
{ val: -0.01, dir: 'negative', result: reinterpretU16AsF16(0xa11f) }, // negative normal
{ val: 2.82E-40, dir: 'positive', result: kValue.f16.subnormal.positive.min }, // positive subnormal
{ val: 2.82E-40, dir: 'positive', result: kValue.f16.positive.subnormal.min }, // positive subnormal
{ val: 2.82E-40, dir: 'negative', result: 0 }, // positive subnormal
{ val: -2.82E-40, dir: 'positive', result: 0 }, // negative subnormal
{ val: -2.82E-40, dir: 'negative', result: kValue.f16.subnormal.negative.max }, // negative subnormal
{ val: -2.82E-40, dir: 'negative', result: kValue.f16.negative.subnormal.max }, // negative subnormal
]
)
.fn(t => {
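For context: the tables above drive a nextAfter helper that steps one representable f32/f16 value in the requested direction; the FlushToZero variants additionally treat subnormal inputs and results as zero. A minimal illustrative sketch of the no-flush f32 step via bit manipulation (not the CTS implementation; NaN and infinity clamping are omitted):

function nextAfterF32NoFlushSketch(val: number, dir: 'positive' | 'negative'): number {
  const u32 = new Uint32Array(1);
  const f32 = new Float32Array(u32.buffer);
  f32[0] = val; // quantize the f64 input to f32 first
  if (f32[0] === 0) {
    // +/-0 steps to the smallest subnormal of the requested sign.
    u32[0] = dir === 'positive' ? 0x00000001 : 0x80000001;
  } else if (((u32[0] >>> 31) === 1) === (dir === 'positive')) {
    u32[0] -= 1; // moving toward zero shrinks the stored magnitude
  } else {
    u32[0] += 1; // moving away from zero grows it
  }
  return f32[0];
}

For example, nextAfterF32NoFlushSketch(kValue.f32.positive.min, 'negative') lands on the largest positive subnormal, matching the positive.subnormal.max expectation above.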
@@ -489,19 +491,19 @@ g.test('oneULPF64FlushToZero')

// Subnormals
{
target: kValue.f64.subnormal.positive.min,
target: kValue.f64.positive.subnormal.min,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.positive.max,
target: kValue.f64.positive.subnormal.max,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.negative.min,
target: kValue.f64.negative.subnormal.min,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.negative.max,
target: kValue.f64.negative.subnormal.max,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},

@@ -542,19 +544,19 @@ g.test('oneULPF64NoFlush')

// Subnormals
{
target: kValue.f64.subnormal.positive.min,
target: kValue.f64.positive.subnormal.min,
expect: reinterpretU64AsF64(0x0000_0000_0000_0001n),
},
{
target: kValue.f64.subnormal.positive.max,
target: kValue.f64.positive.subnormal.max,
expect: reinterpretU64AsF64(0x0000_0000_0000_0001n),
},
{
target: kValue.f64.subnormal.negative.min,
target: kValue.f64.negative.subnormal.min,
expect: reinterpretU64AsF64(0x0000_0000_0000_0001n),
},
{
target: kValue.f64.subnormal.negative.max,
target: kValue.f64.negative.subnormal.max,
expect: reinterpretU64AsF64(0x0000_0000_0000_0001n),
},

@@ -595,19 +597,19 @@ g.test('oneULPF64')

// Subnormals
{
target: kValue.f64.subnormal.positive.min,
target: kValue.f64.positive.subnormal.min,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.positive.max,
target: kValue.f64.positive.subnormal.max,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.negative.min,
target: kValue.f64.negative.subnormal.min,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},
{
target: kValue.f64.subnormal.negative.max,
target: kValue.f64.negative.subnormal.max,
expect: reinterpretU64AsF64(0x0010_0000_0000_0000n),
},

@@ -647,12 +649,12 @@ g.test('oneULPF32FlushToZero')
{ target: -0, expect: reinterpretU32AsF32(0x00800000) },

// Subnormals
{ target: kValue.f32.subnormal.positive.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.positive.subnormal.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: 2.82e-40, expect: reinterpretU32AsF32(0x00800000) }, // positive subnormal
{ target: kValue.f32.subnormal.positive.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.subnormal.negative.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.positive.subnormal.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.negative.subnormal.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: -2.82e-40, expect: reinterpretU32AsF32(0x00800000) }, // negative subnormal
{ target: kValue.f32.subnormal.negative.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.negative.subnormal.max, expect: reinterpretU32AsF32(0x00800000) },

// Normals
{ target: kValue.f32.positive.min, expect: reinterpretU32AsF32(0x00000001) },
@@ -696,12 +698,12 @@ g.test('oneULPF32NoFlush')
{ target: -0, expect: reinterpretU32AsF32(0x00000001) },

// Subnormals
{ target: kValue.f32.subnormal.positive.min, expect: reinterpretU32AsF32(0x00000001) },
{ target: kValue.f32.positive.subnormal.min, expect: reinterpretU32AsF32(0x00000001) },
{ target: -2.82e-40, expect: reinterpretU32AsF32(0x00000001) }, // negative subnormal
{ target: kValue.f32.subnormal.positive.max, expect: reinterpretU32AsF32(0x00000001) },
{ target: kValue.f32.subnormal.negative.min, expect: reinterpretU32AsF32(0x00000001) },
{ target: kValue.f32.positive.subnormal.max, expect: reinterpretU32AsF32(0x00000001) },
{ target: kValue.f32.negative.subnormal.min, expect: reinterpretU32AsF32(0x00000001) },
{ target: 2.82e-40, expect: reinterpretU32AsF32(0x00000001) }, // positive subnormal
{ target: kValue.f32.subnormal.negative.max, expect: reinterpretU32AsF32(0x00000001) },
{ target: kValue.f32.negative.subnormal.max, expect: reinterpretU32AsF32(0x00000001) },

// Normals
{ target: kValue.f32.positive.min, expect: reinterpretU32AsF32(0x00000001) },
@@ -745,12 +747,12 @@ g.test('oneULPF32')
{ target: -0, expect: reinterpretU32AsF32(0x00800000) },

// Subnormals
{ target: kValue.f32.subnormal.negative.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.negative.subnormal.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: -2.82e-40, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.subnormal.negative.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.subnormal.positive.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.negative.subnormal.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.positive.subnormal.max, expect: reinterpretU32AsF32(0x00800000) },
{ target: 2.82e-40, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.subnormal.positive.min, expect: reinterpretU32AsF32(0x00800000) },
{ target: kValue.f32.positive.subnormal.min, expect: reinterpretU32AsF32(0x00800000) },

// Normals
{ target: kValue.f32.positive.min, expect: reinterpretU32AsF32(0x00000001) },
@@ -794,12 +796,12 @@ g.test('oneULPF16FlushToZero')
{ target: -0, expect: reinterpretU16AsF16(0x0400) },

// Subnormals
{ target: kValue.f16.subnormal.positive.min, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.positive.subnormal.min, expect: reinterpretU16AsF16(0x0400) },
{ target: 1.91e-6, expect: reinterpretU16AsF16(0x0400) }, // positive subnormal
{ target: kValue.f16.subnormal.positive.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.subnormal.negative.min, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.positive.subnormal.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.negative.subnormal.min, expect: reinterpretU16AsF16(0x0400) },
{ target: -1.91e-6, expect: reinterpretU16AsF16(0x0400) }, // negative subnormal
{ target: kValue.f16.subnormal.negative.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.negative.subnormal.max, expect: reinterpretU16AsF16(0x0400) },

// Normals
{ target: kValue.f16.positive.min, expect: reinterpretU16AsF16(0x0001) },
@@ -843,12 +845,12 @@ g.test('oneULPF16NoFlush')
{ target: -0, expect: reinterpretU16AsF16(0x0001) },

// Subnormals
{ target: kValue.f16.subnormal.positive.min, expect: reinterpretU16AsF16(0x0001) },
{ target: kValue.f16.positive.subnormal.min, expect: reinterpretU16AsF16(0x0001) },
{ target: 1.91e-6, expect: reinterpretU16AsF16(0x0001) }, // positive subnormal
{ target: kValue.f16.subnormal.positive.max, expect: reinterpretU16AsF16(0x0001) },
{ target: kValue.f16.subnormal.negative.min, expect: reinterpretU16AsF16(0x0001) },
{ target: kValue.f16.positive.subnormal.max, expect: reinterpretU16AsF16(0x0001) },
{ target: kValue.f16.negative.subnormal.min, expect: reinterpretU16AsF16(0x0001) },
{ target: -1.91e-6, expect: reinterpretU16AsF16(0x0001) }, // negative subnormal
{ target: kValue.f16.subnormal.negative.max, expect: reinterpretU16AsF16(0x0001) },
{ target: kValue.f16.negative.subnormal.max, expect: reinterpretU16AsF16(0x0001) },

// Normals
{ target: kValue.f16.positive.min, expect: reinterpretU16AsF16(0x0001) },
@@ -892,12 +894,12 @@ g.test('oneULPF16')
{ target: -0, expect: reinterpretU16AsF16(0x0400) },

// Subnormals
{ target: kValue.f16.subnormal.positive.min, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.positive.subnormal.min, expect: reinterpretU16AsF16(0x0400) },
{ target: 1.91e-6, expect: reinterpretU16AsF16(0x0400) }, // positive subnormal
{ target: kValue.f16.subnormal.positive.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.subnormal.negative.min, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.positive.subnormal.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.negative.subnormal.min, expect: reinterpretU16AsF16(0x0400) },
{ target: -1.91e-6, expect: reinterpretU16AsF16(0x0400) }, // negative subnormal
{ target: kValue.f16.subnormal.negative.max, expect: reinterpretU16AsF16(0x0400) },
{ target: kValue.f16.negative.subnormal.max, expect: reinterpretU16AsF16(0x0400) },

// Normals
{ target: kValue.f16.positive.min, expect: reinterpretU16AsF16(0x0001) },
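For context: oneULP(target) is the spacing between adjacent representable values around target; the FlushToZero variants report kValue.f32.positive.min (or the f16 equivalent, 0x0400) for zeroes and subnormals, because those inputs may be flushed to zero. A hedged sketch of the no-flush f32 spacing (finite inputs only; the CTS helper also special-cases infinities):

function oneULPF32NoFlushSketch(target: number): number {
  const u32 = new Uint32Array(1);
  const f32 = new Float32Array(u32.buffer);
  f32[0] = Math.abs(target); // ULP is symmetric in sign
  const here = f32[0];
  u32[0] += 1; // advance to the next representable magnitude
  return f32[0] - here;
}

e.g. oneULPF32NoFlushSketch(0) === reinterpretU32AsF32(0x00000001), agreeing with the zero and subnormal rows above.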
@@ -947,20 +949,20 @@ g.test('correctlyRoundedF32')
{ value: -(2 ** (kValue.f32.emax + 1)) + oneULPF64(kValue.f32.positive.max), expected: [Number.NEGATIVE_INFINITY, kValue.f32.negative.min] },
{ value: 2 ** (kValue.f32.emax + 1), expected: [Number.POSITIVE_INFINITY] },
{ value: -(2 ** (kValue.f32.emax + 1)), expected: [Number.NEGATIVE_INFINITY] },
{ value: kValue.f32.infinity.positive, expected: [Number.POSITIVE_INFINITY] },
{ value: kValue.f32.infinity.negative, expected: [Number.NEGATIVE_INFINITY] },
{ value: kValue.f32.positive.infinity, expected: [Number.POSITIVE_INFINITY] },
{ value: kValue.f32.negative.infinity, expected: [Number.NEGATIVE_INFINITY] },

// 32-bit subnormals
{ value: kValue.f32.subnormal.positive.min, expected: [kValue.f32.subnormal.positive.min] },
{ value: kValue.f32.subnormal.positive.max, expected: [kValue.f32.subnormal.positive.max] },
{ value: kValue.f32.subnormal.negative.min, expected: [kValue.f32.subnormal.negative.min] },
{ value: kValue.f32.subnormal.negative.max, expected: [kValue.f32.subnormal.negative.max] },
{ value: kValue.f32.positive.subnormal.min, expected: [kValue.f32.positive.subnormal.min] },
{ value: kValue.f32.positive.subnormal.max, expected: [kValue.f32.positive.subnormal.max] },
{ value: kValue.f32.negative.subnormal.min, expected: [kValue.f32.negative.subnormal.min] },
{ value: kValue.f32.negative.subnormal.max, expected: [kValue.f32.negative.subnormal.max] },

// 64-bit subnormals
{ value: reinterpretU64AsF64(0x0000_0000_0000_0001n), expected: [0, kValue.f32.subnormal.positive.min] },
{ value: reinterpretU64AsF64(0x0000_0000_0000_0002n), expected: [0, kValue.f32.subnormal.positive.min] },
{ value: reinterpretU64AsF64(0x800f_ffff_ffff_ffffn), expected: [kValue.f32.subnormal.negative.max, 0] },
{ value: reinterpretU64AsF64(0x800f_ffff_ffff_fffen), expected: [kValue.f32.subnormal.negative.max, 0] },
{ value: reinterpretU64AsF64(0x0000_0000_0000_0001n), expected: [0, kValue.f32.positive.subnormal.min] },
{ value: reinterpretU64AsF64(0x0000_0000_0000_0002n), expected: [0, kValue.f32.positive.subnormal.min] },
{ value: reinterpretU64AsF64(0x800f_ffff_ffff_ffffn), expected: [kValue.f32.negative.subnormal.max, 0] },
{ value: reinterpretU64AsF64(0x800f_ffff_ffff_fffen), expected: [kValue.f32.negative.subnormal.max, 0] },

// 32-bit normals
{ value: 0, expected: [0] },
@@ -1008,20 +1010,20 @@ g.test('correctlyRoundedF16')
{ value: -(2 ** (kValue.f16.emax + 1)) + oneULPF64(kValue.f16.positive.max), expected: [Number.NEGATIVE_INFINITY, kValue.f16.negative.min] },
{ value: 2 ** (kValue.f16.emax + 1), expected: [Number.POSITIVE_INFINITY] },
{ value: -(2 ** (kValue.f16.emax + 1)), expected: [Number.NEGATIVE_INFINITY] },
{ value: kValue.f16.infinity.positive, expected: [Number.POSITIVE_INFINITY] },
{ value: kValue.f16.infinity.negative, expected: [Number.NEGATIVE_INFINITY] },
{ value: kValue.f16.positive.infinity, expected: [Number.POSITIVE_INFINITY] },
{ value: kValue.f16.negative.infinity, expected: [Number.NEGATIVE_INFINITY] },

// 16-bit subnormals
{ value: kValue.f16.subnormal.positive.min, expected: [kValue.f16.subnormal.positive.min] },
{ value: kValue.f16.subnormal.positive.max, expected: [kValue.f16.subnormal.positive.max] },
{ value: kValue.f16.subnormal.negative.min, expected: [kValue.f16.subnormal.negative.min] },
{ value: kValue.f16.subnormal.negative.max, expected: [kValue.f16.subnormal.negative.max] },
{ value: kValue.f16.positive.subnormal.min, expected: [kValue.f16.positive.subnormal.min] },
{ value: kValue.f16.positive.subnormal.max, expected: [kValue.f16.positive.subnormal.max] },
{ value: kValue.f16.negative.subnormal.min, expected: [kValue.f16.negative.subnormal.min] },
{ value: kValue.f16.negative.subnormal.max, expected: [kValue.f16.negative.subnormal.max] },

// 32-bit subnormals
{ value: kValue.f32.subnormal.positive.min, expected: [0, kValue.f16.subnormal.positive.min] },
{ value: kValue.f32.subnormal.positive.max, expected: [0, kValue.f16.subnormal.positive.min] },
{ value: kValue.f32.subnormal.negative.max, expected: [kValue.f16.subnormal.negative.max, 0] },
{ value: kValue.f32.subnormal.negative.min, expected: [kValue.f16.subnormal.negative.max, 0] },
{ value: kValue.f32.positive.subnormal.min, expected: [0, kValue.f16.positive.subnormal.min] },
{ value: kValue.f32.positive.subnormal.max, expected: [0, kValue.f16.positive.subnormal.min] },
{ value: kValue.f32.negative.subnormal.max, expected: [kValue.f16.negative.subnormal.max, 0] },
{ value: kValue.f32.negative.subnormal.min, expected: [kValue.f16.negative.subnormal.max, 0] },

// 16-bit normals
{ value: 0, expected: [0] },
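For context: correctlyRoundedF32(value) yields the set of acceptable f32 results for an f64 input: just the value itself when it is exactly representable, otherwise both enclosing f32 neighbors. A self-contained illustrative sketch of that enumeration (NaN handling omitted; it also over-approximates at exactly the overflow threshold, where the table above allows only infinity; not the CTS code):

function f32RoundingCandidates(value: number): readonly number[] {
  const rounded = Math.fround(value); // round-to-nearest f32
  if (rounded === value) return [value]; // exactly representable
  const u32 = new Uint32Array(1);
  const f32 = new Float32Array(u32.buffer);
  // Work on magnitudes and reapply the sign, which sidesteps -0 pitfalls.
  f32[0] = Math.abs(rounded);
  u32[0] += Math.abs(value) > Math.abs(rounded) ? 1 : -1;
  const other = f32[0] * Math.sign(value);
  return rounded < other ? [rounded, other] : [other, rounded];
}

e.g. a tiny positive f64 subnormal like reinterpretU64AsF64(0x0000_0000_0000_0001n) produces [0, 2 ** -149], matching the 64-bit subnormal rows above.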
@@ -1065,30 +1067,30 @@ const kFrexpCases = {
{ input: kValue.f32.positive.min, fract: 0.5, exp: -125 },
{ input: kValue.f32.negative.max, fract: -0.5, exp: -125 },
{ input: kValue.f32.negative.min, fract: -0.9999999403953552, exp: 128 },
{ input: kValue.f32.subnormal.positive.max, fract: 0.9999998807907104, exp: -126 },
{ input: kValue.f32.subnormal.positive.min, fract: 0.5, exp: -148 },
{ input: kValue.f32.subnormal.negative.max, fract: -0.5, exp: -148 },
{ input: kValue.f32.subnormal.negative.min, fract: -0.9999998807907104, exp: -126 },
{ input: kValue.f32.positive.subnormal.max, fract: 0.9999998807907104, exp: -126 },
{ input: kValue.f32.positive.subnormal.min, fract: 0.5, exp: -148 },
{ input: kValue.f32.negative.subnormal.max, fract: -0.5, exp: -148 },
{ input: kValue.f32.negative.subnormal.min, fract: -0.9999998807907104, exp: -126 },
] as frexpCase[],
f16: [
{ input: kValue.f16.positive.max, fract: 0.99951171875, exp: 16 },
{ input: kValue.f16.positive.min, fract: 0.5, exp: -13 },
{ input: kValue.f16.negative.max, fract: -0.5, exp: -13 },
{ input: kValue.f16.negative.min, fract: -0.99951171875, exp: 16 },
{ input: kValue.f16.subnormal.positive.max, fract: 0.9990234375, exp: -14 },
{ input: kValue.f16.subnormal.positive.min, fract: 0.5, exp: -23 },
{ input: kValue.f16.subnormal.negative.max, fract: -0.5, exp: -23 },
{ input: kValue.f16.subnormal.negative.min, fract: -0.9990234375, exp: -14 },
{ input: kValue.f16.positive.subnormal.max, fract: 0.9990234375, exp: -14 },
{ input: kValue.f16.positive.subnormal.min, fract: 0.5, exp: -23 },
{ input: kValue.f16.negative.subnormal.max, fract: -0.5, exp: -23 },
{ input: kValue.f16.negative.subnormal.min, fract: -0.9990234375, exp: -14 },
] as frexpCase[],
f64: [
{ input: kValue.f64.positive.max, fract: reinterpretU64AsF64(0x3fef_ffff_ffff_ffffn) /* ~0.9999999999999999 */, exp: 1024 },
{ input: kValue.f64.positive.min, fract: 0.5, exp: -1021 },
{ input: kValue.f64.negative.max, fract: -0.5, exp: -1021 },
{ input: kValue.f64.negative.min, fract: reinterpretU64AsF64(0xbfef_ffff_ffff_ffffn) /* ~-0.9999999999999999 */, exp: 1024 },
{ input: kValue.f64.subnormal.positive.max, fract: reinterpretU64AsF64(0x3fef_ffff_ffff_fffen) /* ~0.9999999999999998 */, exp: -1022 },
{ input: kValue.f64.subnormal.positive.min, fract: 0.5, exp: -1073 },
{ input: kValue.f64.subnormal.negative.max, fract: -0.5, exp: -1073 },
{ input: kValue.f64.subnormal.negative.min, fract: reinterpretU64AsF64(0xbfef_ffff_ffff_fffen) /* ~-0.9999999999999998 */, exp: -1022 },
{ input: kValue.f64.positive.subnormal.max, fract: reinterpretU64AsF64(0x3fef_ffff_ffff_fffen) /* ~0.9999999999999998 */, exp: -1022 },
{ input: kValue.f64.positive.subnormal.min, fract: 0.5, exp: -1073 },
{ input: kValue.f64.negative.subnormal.max, fract: -0.5, exp: -1073 },
{ input: kValue.f64.negative.subnormal.min, fract: reinterpretU64AsF64(0xbfef_ffff_ffff_fffen) /* ~-0.9999999999999998 */, exp: -1022 },
] as frexpCase[],
} as const;

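For context: kFrexpCases pins down frexp's contract, input === fract * (2 ** exp) with |fract| in [0.5, 1.0). A hedged f64 reference implementation that reads the exponent straight out of the bit pattern (illustrative, not the CTS helper):

function frexpSketch(input: number): { fract: number; exp: number } {
  if (input === 0 || !Number.isFinite(input)) return { fract: input, exp: 0 };
  const view = new DataView(new ArrayBuffer(8));
  view.setFloat64(0, input); // big-endian: sign and exponent sit in the top 16 bits
  const biased = (view.getUint16(0) & 0x7ff0) >> 4;
  if (biased === 0) {
    // Subnormal: scale into the normal range, then undo the scaling.
    const r = frexpSketch(input * 2 ** 64);
    return { fract: r.fract, exp: r.exp - 64 };
  }
  const exp = biased - 1022; // chosen so fract lands in [0.5, 1.0)
  return { fract: input * 2 ** -exp, exp }; // power-of-two scaling is exact
}

e.g. frexpSketch(kValue.f64.positive.min) returns { fract: 0.5, exp: -1021 }, matching the f64 row above.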
@@ -1535,19 +1537,19 @@ g.test('fullF32Range')
.paramsSimple<fullF32RangeCase>(
// prettier-ignore
[
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ 0.0 ] },
{ neg_norm: 1, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, 0.0] },
{ neg_norm: 2, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, kValue.f32.negative.max, 0.0 ] },
{ neg_norm: 3, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, -1.9999998807907104, kValue.f32.negative.max, 0.0 ] },
{ neg_norm: 0, neg_sub: 1, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.subnormal.negative.min, 0.0 ] },
{ neg_norm: 0, neg_sub: 2, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.subnormal.negative.min, kValue.f32.subnormal.negative.max, 0.0 ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 1, pos_norm: 0, expect: [ 0.0, kValue.f32.subnormal.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 2, pos_norm: 0, expect: [ 0.0, kValue.f32.subnormal.positive.min, kValue.f32.subnormal.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 1, expect: [ 0.0, kValue.f32.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 2, expect: [ 0.0, kValue.f32.positive.min, kValue.f32.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 3, expect: [ 0.0, kValue.f32.positive.min, 1.9999998807907104, kValue.f32.positive.max ] },
{ neg_norm: 1, neg_sub: 1, pos_sub: 1, pos_norm: 1, expect: [ kValue.f32.negative.min, kValue.f32.subnormal.negative.min, 0.0, kValue.f32.subnormal.positive.min, kValue.f32.positive.min ] },
{ neg_norm: 2, neg_sub: 2, pos_sub: 2, pos_norm: 2, expect: [ kValue.f32.negative.min, kValue.f32.negative.max, kValue.f32.subnormal.negative.min, kValue.f32.subnormal.negative.max, 0.0, kValue.f32.subnormal.positive.min, kValue.f32.subnormal.positive.max, kValue.f32.positive.min, kValue.f32.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ -0.0, 0.0 ] },
{ neg_norm: 1, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, -0.0, 0.0] },
{ neg_norm: 2, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, kValue.f32.negative.max, -0.0, 0.0 ] },
{ neg_norm: 3, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.min, -1.9999998807907104, kValue.f32.negative.max, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 1, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.subnormal.min, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 2, pos_sub: 0, pos_norm: 0, expect: [ kValue.f32.negative.subnormal.min, kValue.f32.negative.subnormal.max, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 1, pos_norm: 0, expect: [ -0.0, 0.0, kValue.f32.positive.subnormal.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 2, pos_norm: 0, expect: [ -0.0, 0.0, kValue.f32.positive.subnormal.min, kValue.f32.positive.subnormal.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 1, expect: [ -0.0, 0.0, kValue.f32.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 2, expect: [ -0.0, 0.0, kValue.f32.positive.min, kValue.f32.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 3, expect: [ -0.0, 0.0, kValue.f32.positive.min, 1.9999998807907104, kValue.f32.positive.max ] },
{ neg_norm: 1, neg_sub: 1, pos_sub: 1, pos_norm: 1, expect: [ kValue.f32.negative.min, kValue.f32.negative.subnormal.min, -0.0, 0.0, kValue.f32.positive.subnormal.min, kValue.f32.positive.min ] },
{ neg_norm: 2, neg_sub: 2, pos_sub: 2, pos_norm: 2, expect: [ kValue.f32.negative.min, kValue.f32.negative.max, kValue.f32.negative.subnormal.min, kValue.f32.negative.subnormal.max, -0.0, 0.0, kValue.f32.positive.subnormal.min, kValue.f32.positive.subnormal.max, kValue.f32.positive.min, kValue.f32.positive.max ] },
]
)
.fn(test => {
@@ -1576,20 +1578,20 @@ g.test('fullF16Range')
.paramsSimple<fullF16RangeCase>(
// prettier-ignore
[
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ 0.0 ] },
{ neg_norm: 1, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, 0.0] },
{ neg_norm: 2, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, kValue.f16.negative.max, 0.0 ] },
{ neg_norm: 3, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, -1.9990234375, kValue.f16.negative.max, 0.0 ] },
{ neg_norm: 0, neg_sub: 1, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.subnormal.negative.min, 0.0 ] },
{ neg_norm: 0, neg_sub: 2, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.subnormal.negative.min, kValue.f16.subnormal.negative.max, 0.0 ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 1, pos_norm: 0, expect: [ 0.0, kValue.f16.subnormal.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 2, pos_norm: 0, expect: [ 0.0, kValue.f16.subnormal.positive.min, kValue.f16.subnormal.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 1, expect: [ 0.0, kValue.f16.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 2, expect: [ 0.0, kValue.f16.positive.min, kValue.f16.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 3, expect: [ 0.0, kValue.f16.positive.min, 1.9990234375, kValue.f16.positive.max ] },
{ neg_norm: 1, neg_sub: 1, pos_sub: 1, pos_norm: 1, expect: [ kValue.f16.negative.min, kValue.f16.subnormal.negative.min, 0.0, kValue.f16.subnormal.positive.min, kValue.f16.positive.min ] },
{ neg_norm: 2, neg_sub: 2, pos_sub: 2, pos_norm: 2, expect: [ kValue.f16.negative.min, kValue.f16.negative.max, kValue.f16.subnormal.negative.min, kValue.f16.subnormal.negative.max, 0.0, kValue.f16.subnormal.positive.min, kValue.f16.subnormal.positive.max, kValue.f16.positive.min, kValue.f16.positive.max ] },
]
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ -0.0, 0.0 ] },
{ neg_norm: 1, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, -0.0, 0.0] },
{ neg_norm: 2, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, kValue.f16.negative.max, -0.0, 0.0 ] },
{ neg_norm: 3, neg_sub: 0, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.min, -1.9990234375, kValue.f16.negative.max, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 1, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.subnormal.min, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 2, pos_sub: 0, pos_norm: 0, expect: [ kValue.f16.negative.subnormal.min, kValue.f16.negative.subnormal.max, -0.0, 0.0 ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 1, pos_norm: 0, expect: [ -0.0, 0.0, kValue.f16.positive.subnormal.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 2, pos_norm: 0, expect: [ -0.0, 0.0, kValue.f16.positive.subnormal.min, kValue.f16.positive.subnormal.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 1, expect: [ -0.0, 0.0, kValue.f16.positive.min ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 2, expect: [ -0.0, 0.0, kValue.f16.positive.min, kValue.f16.positive.max ] },
{ neg_norm: 0, neg_sub: 0, pos_sub: 0, pos_norm: 3, expect: [ -0.0, 0.0, kValue.f16.positive.min, 1.9990234375, kValue.f16.positive.max ] },
{ neg_norm: 1, neg_sub: 1, pos_sub: 1, pos_norm: 1, expect: [ kValue.f16.negative.min, kValue.f16.negative.subnormal.min, -0.0, 0.0, kValue.f16.positive.subnormal.min, kValue.f16.positive.min ] },
{ neg_norm: 2, neg_sub: 2, pos_sub: 2, pos_norm: 2, expect: [ kValue.f16.negative.min, kValue.f16.negative.max, kValue.f16.negative.subnormal.min, kValue.f16.negative.subnormal.max, -0.0, 0.0, kValue.f16.positive.subnormal.min, kValue.f16.positive.subnormal.max, kValue.f16.positive.min, kValue.f16.positive.max ] },
]
)
.fn(test => {
const neg_norm = test.params.neg_norm;
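For context: the updated expectations show that fullF32Range and fullF16Range now always emit -0.0 immediately before 0.0, in addition to the requested number of normals and subnormals from each sign, in ascending order. Hypothetical usage mirroring the one-of-each row above (the import path and option names are assumed from this test's params, not confirmed):

import { fullF32Range } from '../webgpu/util/math.js'; // assumed path

const got = fullF32Range({ neg_norm: 1, neg_sub: 1, pos_sub: 1, pos_norm: 1 });
// Per the table above, in ascending order:
// [ kValue.f32.negative.min, kValue.f32.negative.subnormal.min,
//   -0.0, 0.0,
//   kValue.f32.positive.subnormal.min, kValue.f32.positive.min ]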
@@ -1667,12 +1669,12 @@ g.test('f64LimitsEquivalency')
{ bits: kBit.f64.negative.pi.third, value: kValue.f64.negative.pi.third },
{ bits: kBit.f64.negative.pi.quarter, value: kValue.f64.negative.pi.quarter },
{ bits: kBit.f64.negative.pi.sixth, value: kValue.f64.negative.pi.sixth },
{ bits: kBit.f64.subnormal.positive.max, value: kValue.f64.subnormal.positive.max },
{ bits: kBit.f64.subnormal.positive.min, value: kValue.f64.subnormal.positive.min },
{ bits: kBit.f64.subnormal.negative.max, value: kValue.f64.subnormal.negative.max },
{ bits: kBit.f64.subnormal.negative.min, value: kValue.f64.subnormal.negative.min },
{ bits: kBit.f64.infinity.positive, value: kValue.f64.infinity.positive },
{ bits: kBit.f64.infinity.negative, value: kValue.f64.infinity.negative },
{ bits: kBit.f64.positive.subnormal.max, value: kValue.f64.positive.subnormal.max },
{ bits: kBit.f64.positive.subnormal.min, value: kValue.f64.positive.subnormal.min },
{ bits: kBit.f64.negative.subnormal.max, value: kValue.f64.negative.subnormal.max },
{ bits: kBit.f64.negative.subnormal.min, value: kValue.f64.negative.subnormal.min },
{ bits: kBit.f64.positive.infinity, value: kValue.f64.positive.infinity },
{ bits: kBit.f64.negative.infinity, value: kValue.f64.negative.infinity },
])
.fn(test => {
const bits = test.params.bits;
@@ -1715,12 +1717,12 @@ g.test('f32LimitsEquivalency')
{ bits: kBit.f32.negative.pi.third, value: kValue.f32.negative.pi.third },
{ bits: kBit.f32.negative.pi.quarter, value: kValue.f32.negative.pi.quarter },
{ bits: kBit.f32.negative.pi.sixth, value: kValue.f32.negative.pi.sixth },
{ bits: kBit.f32.subnormal.positive.max, value: kValue.f32.subnormal.positive.max },
{ bits: kBit.f32.subnormal.positive.min, value: kValue.f32.subnormal.positive.min },
{ bits: kBit.f32.subnormal.negative.max, value: kValue.f32.subnormal.negative.max },
{ bits: kBit.f32.subnormal.negative.min, value: kValue.f32.subnormal.negative.min },
{ bits: kBit.f32.infinity.positive, value: kValue.f32.infinity.positive },
{ bits: kBit.f32.infinity.negative, value: kValue.f32.infinity.negative },
{ bits: kBit.f32.positive.subnormal.max, value: kValue.f32.positive.subnormal.max },
{ bits: kBit.f32.positive.subnormal.min, value: kValue.f32.positive.subnormal.min },
{ bits: kBit.f32.negative.subnormal.max, value: kValue.f32.negative.subnormal.max },
{ bits: kBit.f32.negative.subnormal.min, value: kValue.f32.negative.subnormal.min },
{ bits: kBit.f32.positive.infinity, value: kValue.f32.positive.infinity },
{ bits: kBit.f32.negative.infinity, value: kValue.f32.negative.infinity },
])
.fn(test => {
const bits = test.params.bits;
@@ -1758,12 +1760,12 @@ g.test('f16LimitsEquivalency')
{ bits: kBit.f16.negative.pi.third, value: kValue.f16.negative.pi.third },
{ bits: kBit.f16.negative.pi.quarter, value: kValue.f16.negative.pi.quarter },
{ bits: kBit.f16.negative.pi.sixth, value: kValue.f16.negative.pi.sixth },
{ bits: kBit.f16.subnormal.positive.max, value: kValue.f16.subnormal.positive.max },
{ bits: kBit.f16.subnormal.positive.min, value: kValue.f16.subnormal.positive.min },
{ bits: kBit.f16.subnormal.negative.max, value: kValue.f16.subnormal.negative.max },
{ bits: kBit.f16.subnormal.negative.min, value: kValue.f16.subnormal.negative.min },
{ bits: kBit.f16.infinity.positive, value: kValue.f16.infinity.positive },
{ bits: kBit.f16.infinity.negative, value: kValue.f16.infinity.negative },
{ bits: kBit.f16.positive.subnormal.max, value: kValue.f16.positive.subnormal.max },
{ bits: kBit.f16.positive.subnormal.min, value: kValue.f16.positive.subnormal.min },
{ bits: kBit.f16.negative.subnormal.max, value: kValue.f16.negative.subnormal.max },
{ bits: kBit.f16.negative.subnormal.min, value: kValue.f16.negative.subnormal.min },
{ bits: kBit.f16.positive.infinity, value: kValue.f16.positive.infinity },
{ bits: kBit.f16.negative.infinity, value: kValue.f16.negative.infinity },
])
.fn(test => {
const bits = test.params.bits;

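For context: the three equivalency tests assert that every named constant in kBit (a raw bit pattern) reinterprets to the matching kValue number, including the renamed positive.subnormal/negative.subnormal and positive.infinity/negative.infinity groups. The same property can be spot-checked standalone:

// Self-contained check of the bit/value pairing for f32 +infinity.
const bits = new Uint32Array([0x7f800000]); // the kBit.f32.positive.infinity pattern
const value = new Float32Array(bits.buffer)[0];
console.assert(value === Number.POSITIVE_INFINITY, 'bit pattern and value must agree');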
@@ -20,14 +20,14 @@ import { assert, objectEquals } from '../common/util/util.js';
import { UnitTest } from './unit_test.js';

class ParamsTest extends UnitTest {
expectParams<CaseP, SubcaseP>(
expectParams<CaseP extends {}, SubcaseP extends {}>(
act: ParamsBuilderBase<CaseP, SubcaseP>,
exp: CaseSubcaseIterable<{}, {}>,
caseFilter: TestParams | null = null
): void {
const a = Array.from(
builderIterateCasesWithSubcases(act, caseFilter)
).map(([caseP, subcases]) => [caseP, subcases ? Array.from(subcases) : undefined]);
const a = Array.from(builderIterateCasesWithSubcases(act, caseFilter)).map(
([caseP, subcases]) => [caseP, subcases ? Array.from(subcases) : undefined]
);
const e = Array.from(exp);
this.expect(
objectEquals(a, e),

@@ -130,4 +130,15 @@ g.test('unordered').fn(t => {
new TestQuerySingleCase('suite1', ['bar', 'buzz', 'buzz'], ['zap'], {}),
new TestQueryMultiTest('suite1', ['bar'], [])
);
// Expect that 0.0 and -0.0 are treated as different queries
t.expectUnordered(
new TestQueryMultiCase('suite', ['a', 'b'], ['c', 'd'], { x: 0.0 }),
new TestQueryMultiCase('suite', ['a', 'b'], ['c', 'd'], { x: -0.0 })
);
t.expectUnordered(
new TestQuerySingleCase('suite', ['a', 'b'], ['c', 'd'], { x: 0.0, y: 0.0 }),
new TestQuerySingleCase('suite', ['a', 'b'], ['c', 'd'], { x: 0.0, y: -0.0 }),
new TestQuerySingleCase('suite', ['a', 'b'], ['c', 'd'], { x: -0.0, y: 0.0 }),
new TestQuerySingleCase('suite', ['a', 'b'], ['c', 'd'], { x: -0.0, y: -0.0 })
);
});

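For context: the new expectUnordered cases require query params to distinguish 0.0 from -0.0. Plain === cannot tell them apart, so any comparison or stringification in the query machinery has to be sign-aware:

console.assert(0.0 === -0.0);          // indistinguishable under ===
console.assert(!Object.is(0.0, -0.0)); // distinguishable via Object.is
console.assert(1 / 0.0 !== 1 / -0.0);  // or via the sign of the reciprocal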
@@ -7,6 +7,7 @@ import {
deserializeExpectation,
serializeExpectation,
} from '../webgpu/shader/execution/expression/case_cache.js';
import BinaryStream from '../webgpu/util/binary_stream.js';
import {
anyOf,
deserializeComparator,
@@ -104,12 +105,12 @@ g.test('value').fn(t => {
f32(-0.5),
f32(kValue.f32.positive.max),
f32(kValue.f32.positive.min),
f32(kValue.f32.subnormal.positive.max),
f32(kValue.f32.subnormal.positive.min),
f32(kValue.f32.subnormal.negative.max),
f32(kValue.f32.subnormal.negative.min),
f32(kValue.f32.infinity.positive),
f32(kValue.f32.infinity.negative),
f32(kValue.f32.positive.subnormal.max),
f32(kValue.f32.positive.subnormal.min),
f32(kValue.f32.negative.subnormal.max),
f32(kValue.f32.negative.subnormal.min),
f32(kValue.f32.positive.infinity),
f32(kValue.f32.negative.infinity),

f16(0),
f16(-0),
@@ -117,14 +118,14 @@ g.test('value').fn(t => {
f16(-1),
f16(0.5),
f16(-0.5),
f16(kValue.f32.positive.max),
f16(kValue.f32.positive.min),
f16(kValue.f32.subnormal.positive.max),
f16(kValue.f32.subnormal.positive.min),
f16(kValue.f32.subnormal.negative.max),
f16(kValue.f32.subnormal.negative.min),
f16(kValue.f32.infinity.positive),
f16(kValue.f32.infinity.negative),
f16(kValue.f16.positive.max),
f16(kValue.f16.positive.min),
f16(kValue.f16.positive.subnormal.max),
f16(kValue.f16.positive.subnormal.min),
f16(kValue.f16.negative.subnormal.max),
f16(kValue.f16.negative.subnormal.min),
f16(kValue.f16.positive.infinity),
f16(kValue.f16.negative.infinity),

bool(true),
bool(false),
@@ -145,7 +146,7 @@ g.test('value').fn(t => {
[0.0, 1.0, 2.0],
[3.0, 4.0, 5.0],
],
f32
f16
),
toMatrix(
[
@@ -160,7 +161,7 @@ g.test('value').fn(t => {
[2.0, 3.0],
[4.0, 5.0],
],
f32
f16
),
toMatrix(
[
@@ -176,7 +177,7 @@ g.test('value').fn(t => {
[4.0, 5.0, 6.0, 7.0],
[8.0, 9.0, 10.0, 11.0],
],
f32
f16
),
toMatrix(
[
@@ -194,7 +195,7 @@ g.test('value').fn(t => {
[6.0, 7.0, 8.0],
[9.0, 10.0, 11.0],
],
f32
f16
),
toMatrix(
[
@@ -206,11 +207,14 @@ g.test('value').fn(t => {
f32
),
]) {
const serialized = serializeValue(value);
const deserialized = deserializeValue(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeValue(s, value);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeValue(d);
t.expect(
objectEquals(value, deserialized),
`value ${value} -> serialize -> deserialize -> ${deserialized}`
`${value.type} ${value} -> serialize -> deserialize -> ${deserialized}
buffer: ${s.buffer()}`
);
}
});
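For context: each serialization test now follows one round-trip shape: write through a BinaryStream over a scratch buffer, then read back through a second stream wrapped around the written bytes. A generic helper capturing the pattern (a sketch assuming only the BinaryStream surface visible in this test: a constructor taking an ArrayBuffer and a buffer() method returning a Uint8Array):

import BinaryStream from '../webgpu/util/binary_stream.js';

function roundTrip<T>(
  value: T,
  write: (s: BinaryStream, v: T) => void,
  read: (s: BinaryStream) => T
): T {
  const s = new BinaryStream(new Uint8Array(1024).buffer); // scratch space
  write(s, value);
  // buffer() exposes the bytes written so far; wrap them to read back.
  return read(new BinaryStream(s.buffer().buffer));
}

// e.g. const deserialized = roundTrip(value, serializeValue, deserializeValue);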
@@ -225,23 +229,61 @@ g.test('fpinterval_f32').fn(t => {
FP.f32.toInterval(-0.5),
FP.f32.toInterval(kValue.f32.positive.max),
FP.f32.toInterval(kValue.f32.positive.min),
FP.f32.toInterval(kValue.f32.subnormal.positive.max),
FP.f32.toInterval(kValue.f32.subnormal.positive.min),
FP.f32.toInterval(kValue.f32.subnormal.negative.max),
FP.f32.toInterval(kValue.f32.subnormal.negative.min),
FP.f32.toInterval(kValue.f32.infinity.positive),
FP.f32.toInterval(kValue.f32.infinity.negative),
FP.f32.toInterval(kValue.f32.positive.subnormal.max),
FP.f32.toInterval(kValue.f32.positive.subnormal.min),
FP.f32.toInterval(kValue.f32.negative.subnormal.max),
FP.f32.toInterval(kValue.f32.negative.subnormal.min),
FP.f32.toInterval(kValue.f32.positive.infinity),
FP.f32.toInterval(kValue.f32.negative.infinity),

FP.f32.toInterval([-0, 0]),
FP.f32.toInterval([-1, 1]),
FP.f32.toInterval([-0.5, 0.5]),
FP.f32.toInterval([kValue.f32.positive.min, kValue.f32.positive.max]),
FP.f32.toInterval([kValue.f32.subnormal.positive.min, kValue.f32.subnormal.positive.max]),
FP.f32.toInterval([kValue.f32.subnormal.negative.min, kValue.f32.subnormal.negative.max]),
FP.f32.toInterval([kValue.f32.infinity.negative, kValue.f32.infinity.positive]),
FP.f32.toInterval([kValue.f32.positive.subnormal.min, kValue.f32.positive.subnormal.max]),
FP.f32.toInterval([kValue.f32.negative.subnormal.min, kValue.f32.negative.subnormal.max]),
FP.f32.toInterval([kValue.f32.negative.infinity, kValue.f32.positive.infinity]),
]) {
const serialized = serializeFPInterval(interval);
const deserialized = deserializeFPInterval(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeFPInterval(s, interval);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeFPInterval(d);
t.expect(
objectEquals(interval, deserialized),
`interval ${interval} -> serialize -> deserialize -> ${deserialized}`
);
}
});

g.test('fpinterval_f16').fn(t => {
for (const interval of [
FP.f16.toInterval(0),
FP.f16.toInterval(-0),
FP.f16.toInterval(1),
FP.f16.toInterval(-1),
FP.f16.toInterval(0.5),
FP.f16.toInterval(-0.5),
FP.f16.toInterval(kValue.f16.positive.max),
FP.f16.toInterval(kValue.f16.positive.min),
FP.f16.toInterval(kValue.f16.positive.subnormal.max),
FP.f16.toInterval(kValue.f16.positive.subnormal.min),
FP.f16.toInterval(kValue.f16.negative.subnormal.max),
FP.f16.toInterval(kValue.f16.negative.subnormal.min),
FP.f16.toInterval(kValue.f16.positive.infinity),
FP.f16.toInterval(kValue.f16.negative.infinity),

FP.f16.toInterval([-0, 0]),
FP.f16.toInterval([-1, 1]),
FP.f16.toInterval([-0.5, 0.5]),
FP.f16.toInterval([kValue.f16.positive.min, kValue.f16.positive.max]),
FP.f16.toInterval([kValue.f16.positive.subnormal.min, kValue.f16.positive.subnormal.max]),
FP.f16.toInterval([kValue.f16.negative.subnormal.min, kValue.f16.negative.subnormal.max]),
FP.f16.toInterval([kValue.f16.negative.infinity, kValue.f16.positive.infinity]),
]) {
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeFPInterval(s, interval);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeFPInterval(d);
t.expect(
objectEquals(interval, deserialized),
`interval ${interval} -> serialize -> deserialize -> ${deserialized}`
@@ -259,23 +301,25 @@ g.test('fpinterval_abstract').fn(t => {
FP.abstract.toInterval(-0.5),
FP.abstract.toInterval(kValue.f64.positive.max),
FP.abstract.toInterval(kValue.f64.positive.min),
FP.abstract.toInterval(kValue.f64.subnormal.positive.max),
FP.abstract.toInterval(kValue.f64.subnormal.positive.min),
FP.abstract.toInterval(kValue.f64.subnormal.negative.max),
FP.abstract.toInterval(kValue.f64.subnormal.negative.min),
FP.abstract.toInterval(kValue.f64.infinity.positive),
FP.abstract.toInterval(kValue.f64.infinity.negative),
FP.abstract.toInterval(kValue.f64.positive.subnormal.max),
FP.abstract.toInterval(kValue.f64.positive.subnormal.min),
FP.abstract.toInterval(kValue.f64.negative.subnormal.max),
FP.abstract.toInterval(kValue.f64.negative.subnormal.min),
FP.abstract.toInterval(kValue.f64.positive.infinity),
FP.abstract.toInterval(kValue.f64.negative.infinity),

FP.abstract.toInterval([-0, 0]),
FP.abstract.toInterval([-1, 1]),
FP.abstract.toInterval([-0.5, 0.5]),
FP.abstract.toInterval([kValue.f64.positive.min, kValue.f64.positive.max]),
FP.abstract.toInterval([kValue.f64.subnormal.positive.min, kValue.f64.subnormal.positive.max]),
FP.abstract.toInterval([kValue.f64.subnormal.negative.min, kValue.f64.subnormal.negative.max]),
FP.abstract.toInterval([kValue.f64.infinity.negative, kValue.f64.infinity.positive]),
FP.abstract.toInterval([kValue.f64.positive.subnormal.min, kValue.f64.positive.subnormal.max]),
FP.abstract.toInterval([kValue.f64.negative.subnormal.min, kValue.f64.negative.subnormal.max]),
FP.abstract.toInterval([kValue.f64.negative.infinity, kValue.f64.positive.infinity]),
]) {
const serialized = serializeFPInterval(interval);
const deserialized = deserializeFPInterval(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeFPInterval(s, interval);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeFPInterval(d);
t.expect(
objectEquals(interval, deserialized),
`interval ${interval} -> serialize -> deserialize -> ${deserialized}`
@@ -294,8 +338,10 @@ g.test('expression_expectation').fn(t => {
// Intervals
[FP.f32.toInterval([-8.0, 0.5]), FP.f32.toInterval([2.0, 4.0])],
]) {
const serialized = serializeExpectation(expectation);
const deserialized = deserializeExpectation(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeExpectation(s, expectation);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeExpectation(d);
t.expect(
objectEquals(expectation, deserialized),
`expectation ${expectation} -> serialize -> deserialize -> ${deserialized}`
@@ -322,8 +368,10 @@ g.test('anyOf').fn(t => {
testCases: [f32(0), f32(10), f32(122), f32(123), f32(124), f32(200)],
},
]) {
const serialized = serializeComparator(c.comparator);
const deserialized = deserializeComparator(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeComparator(s, c.comparator);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeComparator(d);
for (const val of c.testCases) {
const got = deserialized.compare(val);
const expect = c.comparator.compare(val);
@@ -348,8 +396,10 @@ g.test('skipUndefined').fn(t => {
testCases: [f32(0), f32(10), f32(122), f32(123), f32(124), f32(200)],
},
]) {
const serialized = serializeComparator(c.comparator);
const deserialized = deserializeComparator(serialized);
const s = new BinaryStream(new Uint8Array(1024).buffer);
serializeComparator(s, c.comparator);
const d = new BinaryStream(s.buffer().buffer);
const deserialized = deserializeComparator(d);
for (const val of c.testCases) {
const got = deserialized.compare(val);
const expect = c.comparator.compare(val);

@@ -5,7 +5,8 @@ Unit tests for TestGroup.

import { Fixture } from '../common/framework/fixture.js';
import { makeTestGroup } from '../common/framework/test_group.js';
import { makeTestGroupForUnitTesting } from '../common/internal/test_group.js';
import { TestQueryMultiFile } from '../common/internal/query/query.js';
import { kQueryMaxLength, makeTestGroupForUnitTesting } from '../common/internal/test_group.js';
import { assert } from '../common/util/util.js';

import { TestGroupTest } from './test_group_test.js';
@@ -15,7 +16,7 @@ export const g = makeTestGroup(TestGroupTest);

g.test('UnitTest_fixture').fn(async t0 => {
let seen = 0;
function count(t: Fixture): void {
function count(_t: Fixture): void {
seen++;
}

@@ -65,10 +66,10 @@ g.test('stack').fn(async t0 => {
g.test('fail').fn(t => {
t.fail();
});
g.test('throw').fn(t => {
g.test('throw').fn(_t => {
throw new Error('hello');
});
g.test('throw_nested').fn(t => {
g.test('throw_nested').fn(_t => {
doNestedThrow2();
});

@@ -89,7 +90,7 @@ g.test('no_fn').fn(t => {
g.test('missing');

t.shouldThrow('Error', () => {
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
});
});

@@ -108,13 +109,13 @@ g.test('duplicate_test_params,none').fn(() => {
g.test('abc')
.paramsSimple([])
.fn(() => {});
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
}

{
const g = makeTestGroupForUnitTesting(UnitTest);
g.test('abc').fn(() => {});
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
}

{
@@ -124,7 +125,7 @@ g.test('duplicate_test_params,none').fn(() => {
{ a: 1 }, //
])
.fn(() => {});
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
}
});

@@ -137,7 +138,7 @@ g.test('duplicate_test_params,basic').fn(t => {
{ a: 1 }, //
{ a: 1 },
]);
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
});
}
{
@@ -151,7 +152,7 @@ g.test('duplicate_test_params,basic').fn(t => {
)
.fn(() => {});
t.shouldThrow('Error', () => {
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
});
}
{
@@ -163,7 +164,7 @@ g.test('duplicate_test_params,basic').fn(t => {
])
.fn(() => {});
t.shouldThrow('Error', () => {
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
});
}
});
@@ -190,7 +191,7 @@ g.test('duplicate_test_params,with_different_private_params').fn(t => {
)
.fn(() => {});
t.shouldThrow('Error', () => {
g.validate();
g.validate(new TestQueryMultiFile('s', ['f']));
});
}
});
@@ -206,11 +207,72 @@ g.test('invalid_test_name').fn(t => {
() => {
g.test(name).fn(() => {});
},
name
{ message: name }
);
}
});

g.test('long_test_query,long_test_name').fn(t => {
const g = makeTestGroupForUnitTesting(UnitTest);

const long = Array(kQueryMaxLength - 5).join('a');

const fileQuery = new TestQueryMultiFile('s', ['f']);
g.test(long).unimplemented();
g.validate(fileQuery);

g.test(long + 'a').unimplemented();
t.shouldThrow(
'Error',
() => {
g.validate(fileQuery);
},
{ message: long }
);
});

g.test('long_case_query,long_test_name').fn(t => {
const g = makeTestGroupForUnitTesting(UnitTest);

const long = Array(kQueryMaxLength - 5).join('a');

const fileQuery = new TestQueryMultiFile('s', ['f']);
g.test(long).fn(() => {});
g.validate(fileQuery);

g.test(long + 'a').fn(() => {});
t.shouldThrow(
'Error',
() => {
g.validate(fileQuery);
},
{ message: long }
);
});

g.test('long_case_query,long_case_name').fn(t => {
const g = makeTestGroupForUnitTesting(UnitTest);

const long = Array(kQueryMaxLength - 9).join('a');

const fileQuery = new TestQueryMultiFile('s', ['f']);
g.test('t')
.paramsSimple([{ x: long }])
.fn(() => {});
g.validate(fileQuery);

g.test('u')
.paramsSimple([{ x: long + 'a' }])
.fn(() => {});
t.shouldThrow(
'Error',
() => {
g.validate(fileQuery);
},
{ message: long }
);
});

g.test('param_value,valid').fn(() => {
const g = makeTestGroup(UnitTest);
g.test('a').paramsSimple([{ x: JSON.stringify({ a: 1, b: 2 }) }]);
@@ -262,6 +324,29 @@ g.test('subcases').fn(async t0 => {
t0.expect(Array.from(result.values()).every(v => v.status === 'pass'));
});

g.test('subcases,skip')
.desc(
'If all tests are skipped then status is "skip". If at least one test passed, status is "pass"'
)
.params(u => u.combine('allSkip', [false, true]))
.fn(async t0 => {
const { allSkip } = t0.params;
const g = makeTestGroupForUnitTesting(UnitTest);
g.test('a')
.params(u => u.beginSubcases().combine('do', ['pass', 'skip', 'pass']))
.fn(t => {
t.skipIf(allSkip || t.params.do === 'skip');
});
const result = await t0.run(g);
const values = Array.from(result.values());
t0.expect(values.length === 1);
const expectedStatus = allSkip ? 'skip' : 'pass';
t0.expect(
values[0].status === expectedStatus,
`expect: ${values[0].status} === ${expectedStatus}}, allSkip: ${allSkip}`
);
});

g.test('exceptions')
.params(u =>
u
@@ -279,7 +364,7 @@ g.test('exceptions')
} else {
b2 = b1.params(u => u);
}
b2.fn(t => {
b2.fn(_t => {
if (useDOMException) {
throw new DOMException('Message!', 'Name!');
} else {
@@ -296,7 +381,7 @@ g.test('exceptions')
g.test('throws').fn(async t0 => {
const g = makeTestGroupForUnitTesting(UnitTest);

g.test('a').fn(t => {
g.test('a').fn(_t => {
throw new Error();
});

@@ -9,7 +9,12 @@ import { Fixture } from '../../../../common/framework/fixture.js';
import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { getGPU } from '../../../../common/util/navigator_gpu.js';
import { assert, assertReject, raceWithRejectOnTimeout } from '../../../../common/util/util.js';
import { kFeatureNames, kLimitInfo, kLimits } from '../../../capability_info.js';
import {
getDefaultLimitsForAdapter,
kFeatureNames,
kLimits,
kLimitClasses,
} from '../../../capability_info.js';
import { clamp, isPowerOfTwo } from '../../../util/math.js';

export const g = makeTestGroup(Fixture);
@@ -40,10 +45,11 @@ g.test('default')
// Default device should have no features.
t.expect(device.features.size === 0, 'Default device should not have any features');
// All limits should be defaults.
const limitInfo = getDefaultLimitsForAdapter(adapter);
for (const limit of kLimits) {
t.expect(
device.limits[limit] === kLimitInfo[limit].default,
`Expected ${limit} == default: ${device.limits[limit]} != ${kLimitInfo[limit].default}`
device.limits[limit] === limitInfo[limit].default,
`Expected ${limit} == default: ${device.limits[limit]} != ${limitInfo[limit].default}`
);
}

@@ -112,6 +118,7 @@ g.test('stale')
// Cause a type error by requesting with an unknown feature.
if (awaitInitialError) {
await assertReject(
'TypeError',
adapter.requestDevice({ requiredFeatures: ['unknown-feature' as GPUFeatureName] })
);
} else {
@@ -125,6 +132,7 @@ g.test('stale')
// Cause an operation error by requesting with an alignment limit that is not a power of 2.
if (awaitInitialError) {
await assertReject(
'OperationError',
adapter.requestDevice({ requiredLimits: { minUniformBufferOffsetAlignment: 255 } })
);
} else {
@@ -239,10 +247,11 @@ g.test('limits,supported')
const adapter = await gpu.requestAdapter();
assert(adapter !== null);

const limitInfo = getDefaultLimitsForAdapter(adapter);
let value: number = -1;
switch (limitValue) {
case 'default':
value = kLimitInfo[limit].default;
value = limitInfo[limit].default;
break;
case 'adapter':
value = adapter.limits[limit];
@@ -271,7 +280,7 @@ g.test('limit,better_than_supported')
.combine('limit', kLimits)
.beginSubcases()
.expandWithParams(p => {
switch (kLimitInfo[p.limit].class) {
switch (kLimitClasses[p.limit]) {
case 'maximum':
return [
{ mul: 1, add: 1 },
@@ -293,9 +302,10 @@ g.test('limit,better_than_supported')
const adapter = await gpu.requestAdapter();
assert(adapter !== null);

const limitInfo = getDefaultLimitsForAdapter(adapter);
const value = adapter.limits[limit] * mul + add;
const requiredLimits = {
[limit]: clamp(value, { min: 0, max: kLimitInfo[limit].maximumValue }),
[limit]: clamp(value, { min: 0, max: limitInfo[limit].maximumValue }),
};

t.shouldReject('OperationError', adapter.requestDevice({ requiredLimits }));
@@ -314,7 +324,7 @@ g.test('limit,worse_than_default')
.combine('limit', kLimits)
.beginSubcases()
.expandWithParams(p => {
switch (kLimitInfo[p.limit].class) {
switch (kLimitClasses[p.limit]) {
case 'maximum':
return [
{ mul: 1, add: -1 },
@@ -336,13 +346,14 @@ g.test('limit,worse_than_default')
const adapter = await gpu.requestAdapter();
assert(adapter !== null);

const value = kLimitInfo[limit].default * mul + add;
const limitInfo = getDefaultLimitsForAdapter(adapter);
const value = limitInfo[limit].default * mul + add;
const requiredLimits = {
[limit]: clamp(value, { min: 0, max: kLimitInfo[limit].maximumValue }),
[limit]: clamp(value, { min: 0, max: limitInfo[limit].maximumValue }),
};

let success;
switch (kLimitInfo[limit].class) {
switch (limitInfo[limit].class) {
case 'alignment':
success = isPowerOfTwo(value);
break;
@@ -355,7 +366,7 @@ g.test('limit,worse_than_default')
const device = await adapter.requestDevice({ requiredLimits });
assert(device !== null);
t.expect(
device.limits[limit] === kLimitInfo[limit].default,
device.limits[limit] === limitInfo[limit].default,
'Devices reported limit should match the default limit'
);
device.destroy();

@@ -48,7 +48,7 @@ function reifyMapRange(bufferSize: number, range: readonly [number?, number?]):
}

const mapRegionBoundModes = ['default-expand', 'explicit-expand', 'minimal'] as const;
type MapRegionBoundMode = typeof mapRegionBoundModes[number];
type MapRegionBoundMode = (typeof mapRegionBoundModes)[number];

function getRegionForMap(
bufferSize: number,
@@ -422,14 +422,8 @@ g.test('mapAsync,mapState')
.combine('afterDestroy', [false, true])
)
.fn(async t => {
const {
usageType,
mapModeType,
beforeUnmap,
beforeDestroy,
afterUnmap,
afterDestroy,
} = t.params;
const { usageType, mapModeType, beforeUnmap, beforeDestroy, afterUnmap, afterDestroy } =
t.params;
const size = 8;
const range = [0, 8];
const usage =

@@ -1088,16 +1088,19 @@ class ImageCopyTest extends TextureTestMixin(GPUTest) {

// Check the valid data in outputStagingBuffer once per row.
for (let y = 0; y < copyFromOutputTextureLayout.mipSize[1]; ++y) {
const dataStart =
expectedStencilTextureDataOffset +
expectedStencilTextureDataBytesPerRow *
expectedStencilTextureDataRowsPerImage *
stencilTextureLayer +
expectedStencilTextureDataBytesPerRow * y;
this.expectGPUBufferValuesEqual(
outputStagingBuffer,
expectedStencilTextureData.slice(
expectedStencilTextureDataOffset +
expectedStencilTextureDataBytesPerRow *
expectedStencilTextureDataRowsPerImage *
stencilTextureLayer +
expectedStencilTextureDataBytesPerRow * y,
copyFromOutputTextureLayout.mipSize[0]
)
dataStart,
dataStart + copyFromOutputTextureLayout.mipSize[0]
),
copyFromOutputTextureLayout.bytesPerRow * y
);
}
}
@@ -2048,15 +2051,8 @@ copyTextureToBuffer() with depth aspect.
t.selectDeviceOrSkipTestCase(info.feature);
})
.fn(t => {
const {
format,
copyMethod,
aspect,
offsetInBlocks,
dataPaddingInBytes,
copyDepth,
mipLevel,
} = t.params;
const { format, copyMethod, aspect, offsetInBlocks, dataPaddingInBytes, copyDepth, mipLevel } =
t.params;
const bytesPerBlock = depthStencilFormatAspectSize(format, aspect);
const initialDataOffset = offsetInBlocks * bytesPerBlock;
const copySize = [3, 3, copyDepth] as const;

@@ -1,7 +1,5 @@
TODO: test the behavior of creating/using/resolving queries.
- pipeline statistics
TODO: pipeline statistics queries are removed from core; consider moving tests to another suite.
- timestamp
- nested (e.g. timestamp or PS query inside occlusion query), if any such cases are valid. Try
- nested (e.g. timestamp inside occlusion query), if any such cases are valid. Try
writing to the same query set (at same or different indices), if valid. Check results make sense.
- start a query (all types) with no draw calls

@@ -35,10 +35,10 @@ const kBytesPerQuery = 8;
const kTextureSize = [4, 4];

const kRenderModes = ['direct', 'render-bundle'] as const;
type RenderMode = typeof kRenderModes[number];
type RenderMode = (typeof kRenderModes)[number];

const kBufferOffsets = ['zero', 'non-zero'] as const;
type BufferOffset = typeof kBufferOffsets[number];
type BufferOffset = (typeof kBufferOffsets)[number];

type SetupParams = {
numQueries: number;
@@ -212,7 +212,9 @@ class QueryStarterRenderBundle implements QueryStarter {
) {
this._device = device;
this._pass = pass;
const colorAttachment = (renderPassDescriptor.colorAttachments as GPURenderPassColorAttachment[])[0];
const colorAttachment = (
renderPassDescriptor.colorAttachments as GPURenderPassColorAttachment[]
)[0];
this._renderBundleEncoderDescriptor = {
colorFormats: ['rgba8unorm'],
depthStencilFormat: renderPassDescriptor.depthStencilAttachment?.depthLoadOp

@@ -3,18 +3,11 @@ Basic command buffer compute tests.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { kLimitInfo } from '../../../capability_info.js';
import { GPUTest } from '../../../gpu_test.js';
import { checkElementsEqualGenerated } from '../../../util/check_contents.js';

export const g = makeTestGroup(GPUTest);

const kMaxComputeWorkgroupSize = [
kLimitInfo.maxComputeWorkgroupSizeX.default,
kLimitInfo.maxComputeWorkgroupSizeY.default,
kLimitInfo.maxComputeWorkgroupSizeZ.default,
];

g.test('memcpy').fn(t => {
const data = new Uint32Array([0x01020304]);

@@ -71,27 +64,33 @@ g.test('large_dispatch')
.params(u =>
u
// Reasonably-sized powers of two, and some stranger larger sizes.
.combine('dispatchSize', [
256,
2048,
315,
628,
2179,
kLimitInfo.maxComputeWorkgroupsPerDimension.default,
])
.combine('dispatchSize', [256, 2048, 315, 628, 2179, 'maximum'] as const)
// Test some reasonable workgroup sizes.
.beginSubcases()
// 0 == x axis; 1 == y axis; 2 == z axis.
.combine('largeDimension', [0, 1, 2] as const)
.expand('workgroupSize', p => [1, 2, 8, 32, kMaxComputeWorkgroupSize[p.largeDimension]])
.expand('workgroupSize', () => [1, 2, 8, 32, 'maximum'] as const)
)
.fn(t => {
// The output storage buffer is filled with this value.
const val = 0x01020304;
const badVal = 0xbaadf00d;

const wgSize = t.params.workgroupSize;
const bufferLength = t.params.dispatchSize * wgSize;
const kMaxComputeWorkgroupSize = [
t.device.limits.maxComputeWorkgroupSizeX,
t.device.limits.maxComputeWorkgroupSizeY,
t.device.limits.maxComputeWorkgroupSizeZ,
];

const wgSize =
t.params.workgroupSize === 'maximum'
? kMaxComputeWorkgroupSize[t.params.largeDimension]
: t.params.workgroupSize;
const dispatchSize =
t.params.dispatchSize === 'maximum'
? t.device.limits.maxComputeWorkgroupsPerDimension
: t.params.dispatchSize;
const bufferLength = dispatchSize * wgSize;
const bufferByteSize = Uint32Array.BYTES_PER_ELEMENT * bufferLength;
const dst = t.device.createBuffer({
size: bufferByteSize,
@@ -101,9 +100,9 @@ g.test('large_dispatch')
// Only use one large dimension and workgroup size in the dispatch
// call to keep the size of the test reasonable.
const dims = [1, 1, 1];
dims[t.params.largeDimension] = t.params.dispatchSize;
dims[t.params.largeDimension] = dispatchSize;
const wgSizes = [1, 1, 1];
wgSizes[t.params.largeDimension] = t.params.workgroupSize;
wgSizes[t.params.largeDimension] = wgSize;
const pipeline = t.device.createComputePipeline({
layout: 'auto',
compute: {
@@ -154,7 +153,7 @@ g.test('large_dispatch')
pass.end();
t.device.queue.submit([encoder.finish()]);

t.expectGPUBufferValuesPassCheck(dst, a => checkElementsEqualGenerated(a, i => val), {
t.expectGPUBufferValuesPassCheck(dst, a => checkElementsEqualGenerated(a, _i => val), {
type: Uint32Array,
typedLength: bufferLength,
});

@@ -153,7 +153,7 @@ g.test('precision')
.params(u => u.combine('isAsync', [true, false]))
.fn(async t => {
const c1 = 3.14159;
const c2 = 3.141592653589793238;
const c2 = 3.141592653589793;
await t.ExpectShaderOutputWithConstants(
t.params.isAsync,
// These values will get rounded to f32 and createComputePipeline, so the values coming out from the shader won't be the exact same one as shown here.

@@ -20,8 +20,8 @@ export const kAllReadOps = [
'b2t-copy',
] as const;

export type ReadOp = typeof kAllReadOps[number];
export type WriteOp = typeof kAllWriteOps[number];
export type ReadOp = (typeof kAllReadOps)[number];
export type WriteOp = (typeof kAllWriteOps)[number];

export type Op = ReadOp | WriteOp;

@@ -31,42 +31,42 @@ interface OpInfo {

const kOpInfo: {
readonly [k in Op]: OpInfo;
} = /* prettier-ignore */ {
} = {
'write-buffer': {
contexts: [ 'queue' ],
contexts: ['queue'],
},
'b2t-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
},
'b2b-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
},
't2b-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
},
'storage': {
contexts: [ 'compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder' ],
storage: {
contexts: ['compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder'],
},
'storage-read': {
contexts: [ 'compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder'],
},
'input-vertex': {
contexts: [ 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['render-pass-encoder', 'render-bundle-encoder'],
},
'input-index': {
contexts: [ 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['render-pass-encoder', 'render-bundle-encoder'],
},
'input-indirect': {
contexts: [ 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['render-pass-encoder', 'render-bundle-encoder'],
},
'input-indirect-index': {
contexts: [ 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['render-pass-encoder', 'render-bundle-encoder'],
},
'input-indirect-dispatch': {
contexts: [ 'compute-pass-encoder' ],
contexts: ['compute-pass-encoder'],
},
'constant-uniform': {
contexts: [ 'render-pass-encoder', 'render-bundle-encoder' ],
contexts: ['render-pass-encoder', 'render-bundle-encoder'],
},
};

@@ -285,6 +285,7 @@ export class BufferSyncTest extends GPUTest {

// Create a 1x1 texture, and initialize it to a specified value for all elements.
async createTextureWithValue(initValue: number): Promise<GPUTexture> {
// This is not hot in profiles; optimize if this gets used more heavily.
const data = new Uint32Array(1).fill(initValue);
const texture = this.trackForCleanup(
this.device.createTexture({
@@ -446,6 +447,7 @@ export class BufferSyncTest extends GPUTest {

// Write buffer via writeBuffer API on queue
writeByWriteBuffer(buffer: GPUBuffer, value: number) {
// This is not hot in profiles; optimize if this gets used more heavily.
const data = new Uint32Array(1).fill(value);
this.device.queue.writeBuffer(buffer, 0, data);
}
@@ -919,12 +921,14 @@ export class BufferSyncTest extends GPUTest {
}

verifyData(buffer: GPUBuffer, expectedValue: number) {
// This is not hot in profiles; optimize if this gets used more heavily.
const bufferData = new Uint32Array(1);
bufferData[0] = expectedValue;
this.expectGPUBufferValuesEqual(buffer, bufferData);
}

verifyDataTwoValidValues(buffer: GPUBuffer, expectedValue1: number, expectedValue2: number) {
// This is not hot in profiles; optimize if this gets used more heavily.
const bufferData1 = new Uint32Array(1);
bufferData1[0] = expectedValue1;
const bufferData2 = new Uint32Array(1);

@@ -14,7 +14,7 @@ export const kOperationBoundaries = [
'dispatch', // Operations are in different dispatches.
'draw', // Operations are in different draws.
] as const;
export type OperationBoundary = typeof kOperationBoundaries[number];
export type OperationBoundary = (typeof kOperationBoundaries)[number];

/**
* Context a particular operation is permitted in.
@@ -28,7 +28,7 @@ export const kOperationContexts = [
'render-pass-encoder', // Operation may be encoded in a GPURenderPassEncoder.
'render-bundle-encoder', // Operation may be encoded in a GPURenderBundleEncoder.
] as const;
export type OperationContext = typeof kOperationContexts[number];
export type OperationContext = (typeof kOperationContexts)[number];

interface BoundaryInfo {
readonly contexts: [OperationContext, OperationContext][];
@@ -60,14 +60,14 @@ const commandBufferContexts = combineContexts(
*/
export const kBoundaryInfo: {
readonly [k in OperationBoundary]: BoundaryInfo;
} = /* prettier-ignore */ {
} = {
'queue-op': {
contexts: queueContexts,
},
'command-buffer': {
contexts: commandBufferContexts,
},
'pass': {
pass: {
contexts: [
['compute-pass-encoder', 'compute-pass-encoder'],
['compute-pass-encoder', 'render-pass-encoder'],
@@ -79,9 +79,7 @@ export const kBoundaryInfo: {
],
},
'execute-bundles': {
contexts: [
['render-bundle-encoder', 'render-bundle-encoder'],
]
contexts: [['render-bundle-encoder', 'render-bundle-encoder']],
},
'render-bundle': {
contexts: [
@@ -90,12 +88,10 @@ export const kBoundaryInfo: {
['render-bundle-encoder', 'render-bundle-encoder'],
],
},
'dispatch': {
contexts: [
['compute-pass-encoder', 'compute-pass-encoder'],
],
dispatch: {
contexts: [['compute-pass-encoder', 'compute-pass-encoder']],
},
'draw': {
draw: {
contexts: [
['render-pass-encoder', 'render-pass-encoder'],
['render-bundle-encoder', 'render-pass-encoder'],

@@ -56,8 +56,8 @@ const fullscreenQuadWGSL = `
class TextureSyncTestHelper extends OperationContextHelper {
private texture: GPUTexture;

public readonly kTextureSize = [4, 4] as const;
public readonly kTextureFormat: EncodableTextureFormat = 'rgba8unorm';
public override readonly kTextureSize = [4, 4] as const;
public override readonly kTextureFormat: EncodableTextureFormat = 'rgba8unorm';

constructor(
t: GPUTest,

@@ -9,10 +9,10 @@ export const kAllWriteOps = [
'attachment-store',
'attachment-resolve',
] as const;
export type WriteOp = typeof kAllWriteOps[number];
export type WriteOp = (typeof kAllWriteOps)[number];

export const kAllReadOps = ['t2b-copy', 't2t-copy', 'sample'] as const;
export type ReadOp = typeof kAllReadOps[number];
export type ReadOp = (typeof kAllReadOps)[number];

export type Op = ReadOp | WriteOp;

@@ -28,44 +28,44 @@ interface OpInfo {
*/
export const kOpInfo: {
readonly [k in Op]: OpInfo;
} = /* prettier-ignore */ {
} = {
'write-texture': {
contexts: [ 'queue' ],
contexts: ['queue'],
readUsage: 0,
writeUsage: GPUConst.TextureUsage.COPY_DST,
},
'b2t-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
readUsage: 0,
writeUsage: GPUConst.TextureUsage.COPY_DST,
},
't2t-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
readUsage: GPUConst.TextureUsage.COPY_SRC,
writeUsage: GPUConst.TextureUsage.COPY_DST,
},
't2b-copy': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
readUsage: GPUConst.TextureUsage.COPY_SRC,
writeUsage: 0,
},
'storage': {
contexts: [ 'compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder' ],
storage: {
contexts: ['compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder'],
readUsage: 0,
writeUsage: GPUConst.TextureUsage.STORAGE,
},
'sample': {
contexts: [ 'compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder' ],
sample: {
contexts: ['compute-pass-encoder', 'render-pass-encoder', 'render-bundle-encoder'],
readUsage: GPUConst.TextureUsage.SAMPLED,
writeUsage: 0,
},
'attachment-store': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
readUsage: 0,
writeUsage: GPUConst.TextureUsage.RENDER_ATTACHMENT,
},
'attachment-resolve': {
contexts: [ 'command-encoder' ],
contexts: ['command-encoder'],
readUsage: 0,
writeUsage: GPUConst.TextureUsage.RENDER_ATTACHMENT,
},

@@ -19,7 +19,7 @@ const kTypedArrays = [
type WriteBufferSignature = {
bufferOffset: number;
data: readonly number[];
arrayType: typeof kTypedArrays[number];
arrayType: (typeof kTypedArrays)[number];
useArrayBuffer: boolean;
dataOffset?: number; // In elements when useArrayBuffer === false, bytes otherwise
dataSize?: number; // In elements when useArrayBuffer === false, bytes otherwise

@@ -59,11 +59,8 @@ g.test('stencil_clear_value')
t.selectDeviceOrSkipTestCase(info.feature);
})
.fn(t => {
const {
stencilFormat,
stencilClearValue,
applyStencilClearValueAsStencilReferenceValue,
} = t.params;
const { stencilFormat, stencilClearValue, applyStencilClearValueAsStencilReferenceValue } =
t.params;

const kSize = [1, 1, 1] as const;
const colorFormat = 'rgba8unorm';

@@ -184,12 +184,9 @@ g.test('precision')
fragmentConstants: { R: 3.14159 } as Record<string, GPUPipelineConstantValue>,
},
{
expected: { R: 3.141592653589793238, G: 1.0, B: 1.0, A: 1.0 },
expected: { R: 3.141592653589793, G: 1.0, B: 1.0, A: 1.0 },
vertexConstants: {},
fragmentConstants: { R: 3.141592653589793238 } as Record<
string,
GPUPipelineConstantValue
>,
fragmentConstants: { R: 3.141592653589793 } as Record<string, GPUPipelineConstantValue>,
},
])
)

@@ -4,8 +4,11 @@ export const description = `

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { range } from '../../../../common/util/util.js';
import { kLimitInfo } from '../../../capability_info.js';
import { kRenderableColorTextureFormats, kTextureFormatInfo } from '../../../format_info.js';
import {
computeBytesPerSampleFromFormats,
kRenderableColorTextureFormats,
kTextureFormatInfo,
} from '../../../format_info.js';
import { GPUTest, TextureTestMixin } from '../../../gpu_test.js';
import { getFragmentShaderCodeWithOutput, getPlainTypeInfo } from '../../../util/shader.js';
import { kTexelRepresentationInfo } from '../../../util/texture/texel_data.js';
@@ -27,11 +30,14 @@ export const g = makeTestGroup(TextureTestMixin(GPUTest));
// Values to write into each attachment
// We make values different for each attachment index and each channel
// to make sure they didn't get mixed up

// Clamp alpha to 3 to avoid comparing a large expected value with a max 3 value for rgb10a2uint
// MAINTENANCE_TODO: Make TexelRepresentation.numericRange per-component and use that.
const attachmentsIntWriteValues = [
{ R: 1, G: 2, B: 3, A: 4 },
{ R: 5, G: 6, B: 7, A: 8 },
{ R: 9, G: 10, B: 11, A: 12 },
{ R: 13, G: 14, B: 15, A: 16 },
{ R: 1, G: 2, B: 3, A: 1 },
{ R: 5, G: 6, B: 7, A: 2 },
{ R: 9, G: 10, B: 11, A: 3 },
{ R: 13, G: 14, B: 15, A: 0 },
];
const attachmentsFloatWriteValues = [
{ R: 0.12, G: 0.34, B: 0.56, A: 0 },
@@ -47,14 +53,6 @@ g.test('color,attachments')
.combine('format', kRenderableColorTextureFormats)
.beginSubcases()
.combine('attachmentCount', [2, 3, 4])
.filter(t => {
// We only need to test formats that have a valid color attachment bytes per sample.
const pixelByteCost = kTextureFormatInfo[t.format].colorRender?.byteCost;
return (
pixelByteCost !== undefined &&
pixelByteCost * t.attachmentCount <= kLimitInfo.maxColorAttachmentBytesPerSample.default
);
})
.expand('emptyAttachmentId', p => range(p.attachmentCount, i => i))
)
.beforeAllSubcases(t => {
@@ -67,6 +65,14 @@ g.test('color,attachments')
const componentCount = kTexelRepresentationInfo[format].componentOrder.length;
const info = kTextureFormatInfo[format];

// We only need to test formats that have a valid color attachment bytes per sample.
const pixelByteCost = kTextureFormatInfo[format].colorRender?.byteCost;
t.skipIf(
pixelByteCost === undefined ||
computeBytesPerSampleFromFormats(range(attachmentCount, () => format)) >
t.device.limits.maxColorAttachmentBytesPerSample
);

const writeValues =
info.color.type === 'sint' || info.color.type === 'uint'
? attachmentsIntWriteValues

@@ -265,7 +265,7 @@ class F extends TextureTestMixin(GPUTest) {
private sampleTexture: GPUTexture | undefined;
private sampler: GPUSampler | undefined;

async init() {
override async init() {
await super.init();
if (this.isCompatibility) {
this.skip('WGSL sample_mask is not supported in compatibility mode');
@@ -520,26 +520,10 @@ textureLoad each sample index from the texture and write to a storage buffer to
})
.beginSubcases()
.combine('sampleMask', [
0,
0b0001,
0b0010,
0b0111,
0b1011,
0b1101,
0b1110,
0b1111,
0b11110,
0, 0b0001, 0b0010, 0b0111, 0b1011, 0b1101, 0b1110, 0b1111, 0b11110,
] as const)
.combine('fragmentShaderOutputMask', [
0,
0b0001,
0b0010,
0b0111,
0b1011,
0b1101,
0b1110,
0b1111,
0b11110,
0, 0b0001, 0b0010, 0b0111, 0b1011, 0b1101, 0b1110, 0b1111, 0b11110,
] as const)
)
.fn(t => {

@@ -397,7 +397,7 @@ g.test('blending,formats')
t.device.queue.submit([commandEncoder.finish()]);

const expColor = { R: 0.6, G: 0.6, B: 0.6, A: 0.6 };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);
t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [1, 1, 1]);
});

@@ -445,7 +445,7 @@ g.test('blend_constant,initial')
// Check that the initial blend constant is black(0,0,0,0) after setting testPipeline which has
// a white color buffer data.
const expColor = { R: 0, G: 0, B: 0, A: 0 };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);
t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [
kSize,
kSize,
@@ -503,7 +503,7 @@ g.test('blend_constant,setting')
// Check that the blend constant is the same as the given constant after setting the constant
// via setBlendConstant.
const expColor = { R: r, G: g, B: b, A: a };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);

t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [
kSize,
@@ -576,7 +576,7 @@ g.test('blend_constant,not_inherited')

// Check that the blend constant is not inherited from the first render pass.
const expColor = { R: 0, G: 0, B: 0, A: 0 };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);

t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [
kSize,
@@ -585,22 +585,7 @@ g.test('blend_constant,not_inherited')
});

const kColorWriteCombinations: readonly GPUColorWriteFlags[] = [
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15,
];

g.test('color_write_mask,channel_work')
@@ -672,7 +657,7 @@ g.test('color_write_mask,channel_work')
t.device.queue.submit([commandEncoder.finish()]);

const expColor = { R: r, G: g, B: b, A: a };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);

t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [
kSize,
@@ -730,7 +715,7 @@ g.test('color_write_mask,blending_disabled')
t.device.queue.submit([commandEncoder.finish()]);

const expColor = { R: 1, G: 0, B: 0, A: 0 };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);

t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [
kSize,
@@ -827,7 +812,7 @@ g.test('blending,clamping')
}

const expColor = { R: expValue, G: expValue, B: expValue, A: expValue };
const expTexelView = TexelView.fromTexelsAsColors(format, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(format, _coords => expColor);

t.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [1, 1, 1]);
});

@@ -84,7 +84,7 @@ class DepthTest extends TextureTestMixin(GPUTest) {
B: expectedColor[2],
A: expectedColor[3],
};
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, _coords => expColor);

this.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [1, 1]);
}

@@ -175,7 +175,7 @@ class DepthBiasTest extends TextureTestMixin(GPUTest) {
});

const expColor = { Depth: _expectedDepth };
const expTexelView = TexelView.fromTexelsAsColors(depthFormat, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(depthFormat, _coords => expColor);
this.expectTexelViewComparisonIsOkInTexture({ texture: depthTexture }, expTexelView, [1, 1]);
}

@@ -210,7 +210,7 @@ class DepthBiasTest extends TextureTestMixin(GPUTest) {
B: _expectedColor[2],
A: _expectedColor[3],
};
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, _coords => expColor);
this.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [1, 1]);
}

@@ -314,7 +314,7 @@ have unexpected values then get drawn to the color buffer, which is later checke

const kCheckPassedValue = 0;
const predicatePrinter: CheckElementsSupplementalTableRows = [
{ leftHeader: 'expected ==', getValueForCell: index => kCheckPassedValue },
{ leftHeader: 'expected ==', getValueForCell: _index => kCheckPassedValue },
];
if (dsActual && dsExpected && format === 'depth32float') {
await Promise.all([dsActual.mapAsync(GPUMapMode.READ), dsExpected.mapAsync(GPUMapMode.READ)]);
@@ -328,7 +328,7 @@ have unexpected values then get drawn to the color buffer, which is later checke
t.expectGPUBufferValuesPassCheck(
checkBuffer,
a =>
checkElementsPassPredicate(a, (index, value) => value === kCheckPassedValue, {
checkElementsPassPredicate(a, (_index, value) => value === kCheckPassedValue, {
predicatePrinter,
}),
{ type: Uint8Array, typedLength: kNumTestPoints, method: 'map' }

@@ -54,7 +54,8 @@ class DrawTest extends TextureTestMixin(GPUTest) {
// | \
// |______\
// Unit triangle shaped like this. 0-1 Y-down.
const triangleVertices = /* prettier-ignore */ [
/* prettier-ignore */
const triangleVertices = [
0.0, 0.0,
0.0, 1.0,
1.0, 1.0,

@@ -17,10 +17,9 @@ const kRenderTargetFormat = 'rgba8unorm';
class F extends GPUTest {
MakeIndexBuffer(): GPUBuffer {
return this.makeBufferWithContents(
/* prettier-ignore */
new Uint32Array([
0, 1, 2, // The bottom left triangle
1, 2, 3, // The top right triangle
/* prettier-ignore */ new Uint32Array([
0, 1, 2, // The bottom left triangle
1, 2, 3, // The top right triangle
]),
GPUBufferUsage.INDEX
);

@@ -213,7 +213,7 @@ class StencilTest extends TextureTestMixin(GPUTest) {
B: expectedColor[2],
A: expectedColor[3],
};
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, coords => expColor);
const expTexelView = TexelView.fromTexelsAsColors(renderTargetFormat, _coords => expColor);
this.expectTexelViewComparisonIsOkInTexture({ texture: renderTarget }, expTexelView, [1, 1]);
}

@@ -53,7 +53,7 @@ class SamplerAnisotropicFilteringSlantedPlaneTest extends GPUTest {
}

private pipeline: GPURenderPipeline | undefined;
async init(): Promise<void> {
override async init(): Promise<void> {
await super.init();

this.pipeline = this.device.createRenderPipeline({
@@ -287,7 +287,7 @@ g.test('anisotropic_filter_mipmap_color')
])
.fn(t => {
const texture = t.createTextureFromTexelViewsMultipleMipmaps(
colors.map(value => TexelView.fromTexelsAsBytes(kTextureFormat, coords_ => value)),
colors.map(value => TexelView.fromTexelsAsBytes(kTextureFormat, _coords => value)),
{ size: [4, 4, 1], usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING }
);
const textureView = texture.createView();

@@ -480,6 +480,7 @@ g.test('magFilter,nearest')
.combine('addressModeV', kAddressModes)
)
.beforeAllSubcases(t => {
t.skipIfTextureFormatNotSupported(t.params.format);
if (kTextureFormatInfo[t.params.format].color.type === 'unfilterable-float') {
t.selectDeviceOrSkipTestCase('float32-filterable');
}
@@ -602,6 +603,7 @@ g.test('magFilter,linear')
.combine('addressModeV', kAddressModes)
)
.beforeAllSubcases(t => {
t.skipIfTextureFormatNotSupported(t.params.format);
if (kTextureFormatInfo[t.params.format].color.type === 'unfilterable-float') {
t.selectDeviceOrSkipTestCase('float32-filterable');
}
@@ -736,6 +738,7 @@ g.test('minFilter,nearest')
.combine('addressModeV', kAddressModes)
)
.beforeAllSubcases(t => {
t.skipIfTextureFormatNotSupported(t.params.format);
if (kTextureFormatInfo[t.params.format].color.type === 'unfilterable-float') {
t.selectDeviceOrSkipTestCase('float32-filterable');
}
@@ -868,6 +871,7 @@ g.test('minFilter,linear')
.combine('addressModeV', kAddressModes)
)
.beforeAllSubcases(t => {
t.skipIfTextureFormatNotSupported(t.params.format);
if (kTextureFormatInfo[t.params.format].color.type === 'unfilterable-float') {
t.selectDeviceOrSkipTestCase('float32-filterable');
}
@@ -963,6 +967,7 @@ g.test('mipmapFilter')
.combine('filterMode', kMipmapFilterModes)
)
.beforeAllSubcases(t => {
t.skipIfTextureFormatNotSupported(t.params.format);
if (kTextureFormatInfo[t.params.format].color.type === 'unfilterable-float') {
t.selectDeviceOrSkipTestCase('float32-filterable');
}
@@ -986,7 +991,7 @@ g.test('mipmapFilter')
TexelView.fromTexelsAsColors(format, () => {
return { R: 0.0, G: 0.0, B: 0.0, A: 1.0 };
}),
TexelView.fromTexelsAsColors(format, coord => {
TexelView.fromTexelsAsColors(format, _coords => {
return { R: 1.0, G: 1.0, B: 1.0, A: 1.0 };
}),
],

@@ -4,11 +4,14 @@ float tolerance.
`;

import { makeTestGroup } from '../../../../common/framework/test_group.js';
import { assert, memcpy, unreachable } from '../../../../common/util/util.js';
import {
kMaxVertexAttributes,
kMaxVertexBufferArrayStride,
kMaxVertexBuffers,
assert,
filterUniqueValueTestVariants,
makeValueTestVariant,
memcpy,
unreachable,
} from '../../../../common/util/util.js';
import {
kPerStageBindingLimits,
kVertexFormatInfo,
kVertexFormats,
@@ -58,6 +61,20 @@ function mapStateAttribs<V, A1, A2>(
return buffers.map(b => mapBufferAttribs(b, f));
}

function makeRgb10a2(rgba: Array<number>): number {
const [r, g, b, a] = rgba;
assert((r & 0x3ff) === r);
assert((g & 0x3ff) === g);
assert((b & 0x3ff) === b);
assert((a & 0x3) === a);
return r | (g << 10) | (b << 20) | (a << 30);
}

function normalizeRgb10a2(rgba: number, index: number): number {
const normalizationFactor = index % 4 === 3 ? 3 : 1023;
return rgba / normalizationFactor;
}

type TestData = {
shaderBaseType: string;
floatTolerance?: number;
@@ -94,8 +111,11 @@ class VertexStateTest extends GPUTest {
// than maxVertexAttributes = 16.
// However this might not work in the future for implementations that allow even more vertex
// attributes so there will need to be larger changes when that happens.
const maxUniformBuffers = kPerStageBindingLimits['uniformBuf'].max;
assert(maxUniformBuffers + kPerStageBindingLimits['storageBuf'].max >= kMaxVertexAttributes);
const maxUniformBuffers = this.getDefaultLimit(kPerStageBindingLimits['uniformBuf'].maxLimit);
assert(
maxUniformBuffers + this.getDefaultLimit(kPerStageBindingLimits['storageBuf'].maxLimit) >=
this.device.limits.maxVertexAttributes
);

let vsInputs = '';
let vsChecks = '';
@@ -302,7 +322,8 @@ struct VSOutputs {
// test value in a test is still meaningful.
generateTestData(format: GPUVertexFormat): TestData {
const formatInfo = kVertexFormatInfo[format];
const bitSize = formatInfo.bytesPerComponent * 8;
const bitSize =
formatInfo.bytesPerComponent === 'packed' ? 0 : formatInfo.bytesPerComponent * 8;

switch (formatInfo.type) {
case 'float': {
@@ -405,6 +426,28 @@ struct VSOutputs {
}

case 'unorm': {
if (formatInfo.bytesPerComponent === 'packed') {
assert(format === 'unorm10-10-10-2'); // This is the only packed format for now.
assert(bitSize === 0);

/* prettier-ignore */
const data = [
[ 0, 0, 0, 0],
[1023, 1023, 1023, 3],
[ 243, 567, 765, 2],
];
const vertexData = new Uint32Array(data.map(makeRgb10a2)).buffer;
const expectedData = new Float32Array(data.flat().map(normalizeRgb10a2)).buffer;

return {
shaderBaseType: 'f32',
testComponentCount: data.flat().length,
expectedData,
vertexData,
floatTolerance: 0.1 / 1023,
};
}

/* prettier-ignore */
const data = [
42,
@@ -555,7 +598,7 @@ struct VSOutputs {
this.interleaveVertexDataInto(vertexData, attrib.vertexData, {
targetStride: buffer.arrayStride,
offset: (buffer.vbOffset ?? 0) + attrib.offset,
size: formatInfo.componentCount * formatInfo.bytesPerComponent,
size: formatInfo.byteSize,
});
}

@@ -600,11 +643,21 @@ g.test('vertex_format_to_shader_format_conversion')
.combine('format', kVertexFormats)
.combine('shaderComponentCount', [1, 2, 3, 4])
.beginSubcases()
.combine('slot', [0, 1, kMaxVertexBuffers - 1])
.combine('shaderLocation', [0, 1, kMaxVertexAttributes - 1])
.combine('slotVariant', [
{ mult: 0, add: 0 },
{ mult: 0, add: 1 },
{ mult: 1, add: -1 },
])
.combine('shaderLocationVariant', [
{ mult: 0, add: 0 },
{ mult: 0, add: 1 },
{ mult: 1, add: -1 },
])
)
.fn(t => {
const { format, shaderComponentCount, slot, shaderLocation } = t.params;
const { format, shaderComponentCount, slotVariant, shaderLocationVariant } = t.params;
const slot = t.makeLimitVariant('maxVertexBuffers', slotVariant);
const shaderLocation = t.makeLimitVariant('maxVertexAttributes', shaderLocationVariant);
t.runTest([
{
slot,
@@ -637,7 +690,7 @@ g.test('setVertexBuffer_offset_and_attribute_offset')
.combine('arrayStride', [128])
.expand('offset', p => {
const formatInfo = kVertexFormatInfo[p.format];
const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
const formatSize = formatInfo.byteSize;
return new Set([
0,
4,
@@ -683,30 +736,38 @@ g.test('non_zero_array_stride_and_attribute_offset')
u //
.combine('format', kVertexFormats)
.beginSubcases()
.expand('arrayStride', p => {
.expand('arrayStrideVariant', p => {
const formatInfo = kVertexFormatInfo[p.format];
const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
const formatSize = formatInfo.byteSize;

return [align(formatSize, 4), align(formatSize, 4) + 4, kMaxVertexBufferArrayStride];
return [
{ mult: 0, add: align(formatSize, 4) },
{ mult: 0, add: align(formatSize, 4) + 4 },
{ mult: 1, add: 0 },
];
})
.expand('offset', p => {
.expand('offsetVariant', function* (p) {
const formatInfo = kVertexFormatInfo[p.format];
const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
return new Set(
[
0,
formatSize,
4,
p.arrayStride / 2,
p.arrayStride - formatSize * 2,
p.arrayStride - formatSize - 4,
p.arrayStride - formatSize,
].map(offset => clamp(offset, { min: 0, max: p.arrayStride - formatSize }))
);
const formatSize = formatInfo.byteSize;
yield { mult: 0, add: 0 };
yield { mult: 0, add: 4 };
if (formatSize !== 4) yield { mult: 0, add: formatSize };
yield { mult: 0.5, add: 0 };
yield { mult: 1, add: -formatSize * 2 };
if (formatSize !== 4) yield { mult: 1, add: -formatSize - 4 };
yield { mult: 1, add: -formatSize };
})
)
.fn(t => {
const { format, arrayStride, offset } = t.params;
const { format, arrayStrideVariant, offsetVariant } = t.params;
const arrayStride = t.makeLimitVariant('maxVertexBufferArrayStride', arrayStrideVariant);
const formatInfo = kVertexFormatInfo[format];
const formatSize = formatInfo.byteSize;
const offset = clamp(makeValueTestVariant(arrayStride, offsetVariant), {
min: 0,
max: arrayStride - formatSize,
});

t.runTest([
{
slot: 0,
@@ -764,15 +825,20 @@ g.test('vertex_buffer_used_multiple_times_overlapped')
u //
.combine('format', kVertexFormats)
.beginSubcases()
.combine('vbCount', [2, 3, kMaxVertexBuffers])
.combine('vbCountVariant', [
{ mult: 0, add: 2 },
{ mult: 0, add: 3 },
{ mult: 1, add: 0 },
])
.combine('additionalVBOffset', [0, 4, 120])
)
.fn(t => {
const { format, vbCount, additionalVBOffset } = t.params;
const { format, vbCountVariant, additionalVBOffset } = t.params;
const vbCount = t.makeLimitVariant('maxVertexBuffers', vbCountVariant);
const kVertexCount = 20;
const kInstanceCount = 1;
const formatInfo = kVertexFormatInfo[format];
const formatByteSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
const formatByteSize = formatInfo.byteSize;
// We need to align so the offset for non-0 setVertexBuffer don't fail validation.
const alignedFormatByteSize = align(formatByteSize, 4);

@@ -863,15 +929,20 @@ g.test('vertex_buffer_used_multiple_times_interleaved')
u //
.combine('format', kVertexFormats)
.beginSubcases()
.combine('vbCount', [2, 3, kMaxVertexBuffers])
.combine('vbCountVariant', [
{ mult: 0, add: 2 },
{ mult: 0, add: 3 },
{ mult: 1, add: 0 },
])
.combine('additionalVBOffset', [0, 4, 120])
)
.fn(t => {
const { format, vbCount, additionalVBOffset } = t.params;
const { format, vbCountVariant, additionalVBOffset } = t.params;
const vbCount = t.makeLimitVariant('maxVertexBuffers', vbCountVariant);
const kVertexCount = 20;
const kInstanceCount = 1;
const formatInfo = kVertexFormatInfo[format];
const formatByteSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
const formatByteSize = formatInfo.byteSize;
// We need to align so the offset for non-0 setVertexBuffer don't fail validation.
const alignedFormatByteSize = align(formatByteSize, 4);

@@ -942,12 +1013,14 @@ g.test('max_buffers_and_attribs')
.fn(t => {
const { format } = t.params;
// In compat mode, @builtin(vertex_index) and @builtin(instance_index) each take an attribute
const maxVertexAttributes = t.isCompatibility ? kMaxVertexAttributes - 2 : kMaxVertexAttributes;
const attributesPerBuffer = Math.ceil(maxVertexAttributes / kMaxVertexBuffers);
const maxVertexBuffers = t.device.limits.maxVertexBuffers;
const deviceMaxVertexAttributes = t.device.limits.maxVertexAttributes;
const maxVertexAttributes = deviceMaxVertexAttributes - (t.isCompatibility ? 2 : 0);
const attributesPerBuffer = Math.ceil(maxVertexAttributes / maxVertexBuffers);
let attributesEmitted = 0;

const state: VertexLayoutState<{}, {}> = [];
for (let i = 0; i < kMaxVertexBuffers; i++) {
for (let i = 0; i < maxVertexBuffers; i++) {
const attributes: GPUVertexAttribute[] = [];
for (let j = 0; j < attributesPerBuffer && attributesEmitted < maxVertexAttributes; j++) {
attributes.push({ format, offset: 0, shaderLocation: attributesEmitted });
@@ -974,25 +1047,26 @@ g.test('array_stride_zero')
.combine('format', kVertexFormats)
.beginSubcases()
.combine('stepMode', ['vertex', 'instance'] as const)
.expand('offset', p => {
.expand('offsetVariant', p => {
const formatInfo = kVertexFormatInfo[p.format];
const formatSize = formatInfo.bytesPerComponent * formatInfo.componentCount;
return new Set([
0,
4,
8,
formatSize,
formatSize * 2,
kMaxVertexBufferArrayStride / 2,
kMaxVertexBufferArrayStride - formatSize - 4,
kMaxVertexBufferArrayStride - formatSize - 8,
kMaxVertexBufferArrayStride - formatSize,
kMaxVertexBufferArrayStride - formatSize * 2,
const formatSize = formatInfo.byteSize;
return filterUniqueValueTestVariants([
{ mult: 0, add: 0 },
{ mult: 0, add: 4 },
{ mult: 0, add: 8 },
{ mult: 0, add: formatSize },
{ mult: 0, add: formatSize * 2 },
{ mult: 0.5, add: 0 },
{ mult: 1, add: -formatSize - 4 },
{ mult: 1, add: -formatSize - 8 },
{ mult: 1, add: -formatSize },
{ mult: 1, add: -formatSize * 2 },
]);
})
)
.fn(t => {
const { format, stepMode, offset } = t.params;
const { format, stepMode, offsetVariant } = t.params;
const offset = t.makeLimitVariant('maxVertexBufferArrayStride', offsetVariant);
const kCount = 10;

// Create the stride 0 part of the test, first by faking a single vertex being drawn and
@@ -1055,7 +1129,7 @@ g.test('discontiguous_location_and_attribs')
.fn(t => {
t.runTest([
{
slot: kMaxVertexBuffers - 1,
slot: t.device.limits.maxVertexBuffers - 1,
arrayStride: 4,
stepMode: 'vertex',
attributes: [

@@ -1068,7 +1142,13 @@ g.test('discontiguous_location_and_attribs')
arrayStride: 16,
stepMode: 'instance',
vbOffset: 1000,
attributes: [{ format: 'uint32x4', offset: 0, shaderLocation: kMaxVertexAttributes - 1 }],
attributes: [
{
format: 'uint32x4',
offset: 0,
shaderLocation: t.device.limits.maxVertexAttributes - 1,
},
],
},
]);
});

@@ -1083,7 +1163,7 @@ g.test('overlapping_attributes')
const { format } = t.params;

// In compat mode, @builtin(vertex_index) and @builtin(instance_index) each take an attribute
const maxVertexAttributes = t.isCompatibility ? kMaxVertexAttributes - 2 : kMaxVertexAttributes;
const maxVertexAttributes = t.device.limits.maxVertexAttributes - (t.isCompatibility ? 2 : 0);
const attributes: GPUVertexAttribute[] = [];
for (let i = 0; i < maxVertexAttributes; i++) {
attributes.push({ format, offset: 0, shaderLocation: i });

@@ -15,7 +15,7 @@ type Raster8x4 = readonly [
readonly [0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1],
readonly [0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1],
readonly [0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1],
readonly [0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1]
readonly [0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1, 0 | 1],
];

/** Expected 4x4 rasterization of a bottom-left triangle. */

@@ -8,7 +8,6 @@ import {
kAllBufferUsageBits,
kBufferSizeAlignment,
kBufferUsages,
kLimitInfo,
} from '../../../capability_info.js';
import { GPUConst } from '../../../constants.js';
import { kMaxSafeMultipleOf8 } from '../../../util/math.js';

@@ -46,18 +45,11 @@ g.test('size')

g.test('limit')
.desc('Test buffer size is validated against maxBufferSize.')
.params(u =>
u
.beginSubcases()
.combine('size', [
kLimitInfo.maxBufferSize.default - 1,
kLimitInfo.maxBufferSize.default,
kLimitInfo.maxBufferSize.default + 1,
])
)
.params(u => u.beginSubcases().combine('sizeAddition', [-1, 0, +1]))
.fn(t => {
const { size } = t.params;
const isValid = size <= kLimitInfo.maxBufferSize.default;
const { sizeAddition } = t.params;
const size = t.makeLimitVariant('maxBufferSize', { mult: 1, add: sizeAddition });
const isValid = size <= t.device.limits.maxBufferSize;
const usage = BufferUsage.COPY_SRC;
t.expectGPUError('validation', () => t.device.createBuffer({ size, usage }), !isValid);
});

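// With { mult: 1, add: sizeAddition }, the three subcases above resolve to
// maxBufferSize - 1, maxBufferSize, and maxBufferSize + 1 on whatever device
// runs the test. A hedged sketch of the resulting expectation, assuming a
// hypothetical adapter whose maxBufferSize is 256 MiB:
const kAssumedMaxBufferSize = 268435456; // hypothetical device limit
for (const sizeAddition of [-1, 0, +1]) {
  const size = kAssumedMaxBufferSize * 1 + sizeAddition;
  const isValid = size <= kAssumedMaxBufferSize; // only the +1 case fails validation
}
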
@@ -45,6 +45,7 @@ class F extends ValidationTest {
assert(expectation.rejectName === null, 'mapAsync unexpectedly passed');
} catch (ex) {
assert(ex instanceof Error, 'mapAsync rejected with non-error');
assert(typeof ex.stack === 'string', 'mapAsync rejected without a stack');
assert(expectation.rejectName === ex.name, `mapAsync rejected unexpectedly with: ${ex}`);
assert(
expectation.earlyRejection === rejectedEarly,

@@ -14,7 +14,7 @@ g.test('createQuerySet')
'timestamp-query'.
- createQuerySet
- type {occlusion, timestamp}
- x= {pipeline statistics, timestamp} query {enable, disable}
- x= timestamp query {enable, disable}
`
)
.params(u =>

@@ -3,6 +3,7 @@ Tests for capability checking for features enabling optional texture formats.
`;

import { makeTestGroup } from '../../../../../common/framework/test_group.js';
import { getGPU } from '../../../../../common/util/navigator_gpu.js';
import { assert } from '../../../../../common/util/util.js';
import { kAllTextureFormats, kTextureFormatInfo } from '../../../../format_info.js';
import { kAllCanvasTypes, createCanvas } from '../../../../util/create_elements.js';

@@ -273,6 +274,7 @@ g.test('color_target_state')
)
.params(u =>
u
.combine('isAsync', [false, true])
.combine('format', kOptionalTextureFormats)
.filter(t => !!kTextureFormatInfo[t.format].colorRender)
.combine('enable_required_feature', [true, false])

@@ -286,10 +288,12 @@ g.test('color_target_state')
}
})
.fn(t => {
const { format, enable_required_feature } = t.params;
const { isAsync, format, enable_required_feature } = t.params;

t.shouldThrow(enable_required_feature ? false : 'TypeError', () => {
t.device.createRenderPipeline({
t.doCreateRenderPipelineTest(
isAsync,
enable_required_feature,
{
layout: 'auto',
vertex: {
module: t.device.createShaderModule({

@@ -312,8 +316,9 @@ g.test('color_target_state')
entryPoint: 'main',
targets: [{ format }],
},
});
});
},
'TypeError'
);
});

g.test('depth_stencil_state')

@@ -325,6 +330,7 @@ g.test('depth_stencil_state')
)
.params(u =>
u
.combine('isAsync', [false, true])
.combine('format', kOptionalTextureFormats)
.filter(t => !!(kTextureFormatInfo[t.format].depth || kTextureFormatInfo[t.format].stencil))
.combine('enable_required_feature', [true, false])

@@ -338,10 +344,12 @@ g.test('depth_stencil_state')
}
})
.fn(t => {
const { format, enable_required_feature } = t.params;
const { isAsync, format, enable_required_feature } = t.params;

t.shouldThrow(enable_required_feature ? false : 'TypeError', () => {
t.device.createRenderPipeline({
t.doCreateRenderPipelineTest(
isAsync,
enable_required_feature,
{
layout: 'auto',
vertex: {
module: t.device.createShaderModule({

@@ -369,8 +377,9 @@ g.test('depth_stencil_state')
entryPoint: 'main',
targets: [{ format: 'rgba8unorm' }],
},
});
});
},
'TypeError'
);
});

g.test('render_bundle_encoder_descriptor_color_format')

@@ -437,3 +446,18 @@ g.test('render_bundle_encoder_descriptor_depth_stencil_format')
});
});
});

g.test('check_capability_guarantees')
.desc(
`check "texture-compression-bc" is supported or both "texture-compression-etc2" and "texture-compression-astc" are supported.`
)
.fn(async t => {
const adapter = await getGPU(t.rec).requestAdapter();
assert(adapter !== null);

const features = adapter.features;
t.expect(
features.has('texture-compression-bc') ||
(features.has('texture-compression-etc2') && features.has('texture-compression-astc'))
);
});

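// doCreateRenderPipelineTest folds the sync and async paths into one helper:
// the same descriptor is routed to either createRenderPipeline or
// createRenderPipelineAsync depending on isAsync. A minimal sketch of that
// shape (illustrative only; the real helper also checks the expected error):
async function createPipelineEitherWay(
  device: GPUDevice,
  isAsync: boolean,
  desc: GPURenderPipelineDescriptor
): Promise<GPURenderPipeline> {
  return isAsync ? await device.createRenderPipelineAsync(desc) : device.createRenderPipeline(desc);
}
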
@@ -2,10 +2,8 @@ import { kUnitCaseParamsBuilder } from '../../../../../common/framework/params_b
import { makeTestGroup } from '../../../../../common/framework/test_group.js';
import { getGPU } from '../../../../../common/util/navigator_gpu.js';
import { assert, range, reorder, ReorderOrder } from '../../../../../common/util/util.js';
import { kLimitInfo } from '../../../../capability_info.js';
import { kTextureFormatInfo } from '../../../../format_info.js';
import { getDefaultLimitsForAdapter } from '../../../../capability_info.js';
import { GPUTestBase } from '../../../../gpu_test.js';
import { align } from '../../../../util/math.js';

type GPUSupportedLimit = keyof GPUSupportedLimits;

@@ -14,16 +12,16 @@ export const kCreatePipelineTypes = [
'createRenderPipelineWithFragmentStage',
'createComputePipeline',
] as const;
export type CreatePipelineType = typeof kCreatePipelineTypes[number];
export type CreatePipelineType = (typeof kCreatePipelineTypes)[number];

export const kRenderEncoderTypes = ['render', 'renderBundle'] as const;
export type RenderEncoderType = typeof kRenderEncoderTypes[number];
export type RenderEncoderType = (typeof kRenderEncoderTypes)[number];

export const kEncoderTypes = ['compute', 'render', 'renderBundle'] as const;
export type EncoderType = typeof kEncoderTypes[number];
export type EncoderType = (typeof kEncoderTypes)[number];

export const kBindGroupTests = ['sameGroup', 'differentGroups'] as const;
export type BindGroupTest = typeof kBindGroupTests[number];
export type BindGroupTest = (typeof kBindGroupTests)[number];

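// The parenthesized form is equivalent here — `typeof kArr[number]` already
// parses as `(typeof kArr)[number]` because `typeof` in a type position only
// applies to an entity name — but the explicit parentheses make the indexed
// access unambiguous to readers. A self-contained example:
const kExampleModes = ['render', 'compute'] as const;
type ExampleMode = (typeof kExampleModes)[number]; // 'render' | 'compute'
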
export const kBindingCombinations = [
'vertex',

@@ -32,7 +30,7 @@ export const kBindingCombinations = [
'vertexAndFragmentWithPossibleFragmentStageOverflow',
'compute',
] as const;
export type BindingCombination = typeof kBindingCombinations[number];
export type BindingCombination = (typeof kBindingCombinations)[number];

export function getPipelineTypeForBindingCombination(bindingCombination: BindingCombination) {
switch (bindingCombination) {

@@ -76,19 +74,6 @@ function getWGSLBindings(
).join('\n ');
}

/**
* Given an array of GPUColorTargetState, return the number of bytes per sample
*/
export function computeBytesPerSample(targets: GPUColorTargetState[]) {
let bytesPerSample = 0;
for (const { format } of targets) {
const info = kTextureFormatInfo[format];
const alignedBytesPerSample = align(bytesPerSample, info.colorRender!.alignment);
bytesPerSample = alignedBytesPerSample + info.colorRender!.byteCost;
}
return bytesPerSample;
}

export function getPerStageWGSLForBindingCombinationImpl(
bindingCombination: BindingCombination,
order: ReorderOrder,

@@ -216,11 +201,11 @@ export function getPerStageWGSLForBindingCombinationStorageTextures(
}

export const kLimitModes = ['defaultLimit', 'adapterLimit'] as const;
export type LimitMode = typeof kLimitModes[number];
export type LimitMode = (typeof kLimitModes)[number];
export type LimitsRequest = Record<string, LimitMode>;

export const kMaximumTestValues = ['atLimit', 'overLimit'] as const;
export type MaximumTestValue = typeof kMaximumTestValues[number];
export type MaximumTestValue = (typeof kMaximumTestValues)[number];

export function getMaximumTestValue(limit: number, testValue: MaximumTestValue) {
switch (testValue) {

@@ -232,7 +217,7 @@ export function getMaximumTestValue(limit: number, testValue: MaximumTestValue)
}

export const kMinimumTestValues = ['atLimit', 'underLimit'] as const;
export type MinimumTestValue = typeof kMinimumTestValues[number];
export type MinimumTestValue = (typeof kMinimumTestValues)[number];

export const kMaximumLimitValueTests = [
'atDefault',

@@ -241,7 +226,7 @@ export const kMaximumLimitValueTests = [
'atMaximum',
'overMaximum',
] as const;
export type MaximumLimitValueTest = typeof kMaximumLimitValueTests[number];
export type MaximumLimitValueTest = (typeof kMaximumLimitValueTests)[number];

export function getLimitValue(
defaultLimit: number,

@@ -270,10 +255,11 @@ export const kMinimumLimitValueTests = [
'atMinimum',
'underMinimum',
] as const;
export type MinimumLimitValueTest = typeof kMinimumLimitValueTests[number];
export type MinimumLimitValueTest = (typeof kMinimumLimitValueTests)[number];

export function getDefaultLimit(limit: GPUSupportedLimit): number {
return (kLimitInfo as Record<string, { default: number }>)[limit].default;
export function getDefaultLimitForAdapter(adapter: GPUAdapter, limit: GPUSupportedLimit): number {
const limitInfo = getDefaultLimitsForAdapter(adapter);
return limitInfo[limit as keyof typeof limitInfo].default;
}

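// computeBytesPerSample (removed from this file above) aligns the running
// total to each target's alignment before adding its byte cost. A worked
// example with hypothetical colorRender info — format A at
// { byteCost: 8, alignment: 4 } followed by format B at { byteCost: 2, alignment: 2 }:
//   align(0, 4) = 0, + 8 -> 8    (format A)
//   align(8, 2) = 8, + 2 -> 10   (format B)
// so the pair costs 10 bytes per sample; reversing the order can change the
// total, which is why the computation walks the targets in declaration order.
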
export type DeviceAndLimits = {
export type DeviceAndLimits = {

@@ -316,12 +302,12 @@ export class LimitTestsImpl extends GPUTestBase {
defaultLimit = 0;
adapterLimit = 0;

async init() {
override async init() {
await super.init();
const gpu = getGPU(this.rec);
this._adapter = await gpu.requestAdapter();
const limit = this.limit;
this.defaultLimit = getDefaultLimit(limit);
this.defaultLimit = getDefaultLimitForAdapter(this.adapter, limit);
this.adapterLimit = this.adapter.limits[limit] as number;
assert(!Number.isNaN(this.defaultLimit));
assert(!Number.isNaN(this.adapterLimit));

@@ -332,7 +318,7 @@ export class LimitTestsImpl extends GPUTestBase {
return this._adapter!;
}

get device(): GPUDevice {
override get device(): GPUDevice {
assert(this._device !== undefined, 'device is only valid in _testThenDestroyDevice callback');
return this._device;
}

@@ -344,7 +330,9 @@ export class LimitTestsImpl extends GPUTestBase {
requiredFeatures?: GPUFeatureName[]
) {
if (shouldReject) {
this.shouldReject('OperationError', adapter.requestDevice({ requiredLimits }));
this.shouldReject('OperationError', adapter.requestDevice({ requiredLimits }), {
allowMissingStack: true,
});
return undefined;
} else {
return await adapter.requestDevice({ requiredLimits, requiredFeatures });

@@ -354,7 +342,7 @@ export class LimitTestsImpl extends GPUTestBase {
getDefaultOrAdapterLimit(limit: GPUSupportedLimit, limitMode: LimitMode) {
switch (limitMode) {
case 'defaultLimit':
return getDefaultLimit(limit);
return getDefaultLimitForAdapter(this.adapter, limit);
case 'adapterLimit':
return this.adapter.limits[limit];
}

@@ -380,7 +368,7 @@ export class LimitTestsImpl extends GPUTestBase {
const extraLimit = extraLimitStr as GPUSupportedLimit;
requiredLimits[extraLimit] =
limitMode === 'defaultLimit'
? getDefaultLimit(extraLimit)
? getDefaultLimitForAdapter(adapter, extraLimit)
: (adapter.limits[extraLimit] as number);
}
}

@@ -576,12 +564,12 @@ export class LimitTestsImpl extends GPUTestBase {
expectedName: string,
p: Promise<unknown>,
shouldReject: boolean,
msg?: string
message?: string
): Promise<void> {
if (shouldReject) {
this.shouldReject(expectedName, p, msg);
this.shouldReject(expectedName, p, { message });
} else {
this.shouldResolve(p, msg);
this.shouldResolve(p, message);
}

// We need to explicitly wait for the promise because the device may be

@@ -596,7 +584,11 @@ export class LimitTestsImpl extends GPUTestBase {
/**
* Calls a function that expects a validation error if shouldError is true
*/
async expectValidationError<R>(fn: () => R, shouldError: boolean = true, msg = ''): Promise<R> {
override async expectValidationError<R>(
fn: () => R,
shouldError: boolean = true,
msg = ''
): Promise<R> {
return this.expectGPUErrorAsync('validation', fn, shouldError, msg);
}

@@ -1079,7 +1071,7 @@ export class LimitTestsImpl extends GPUTestBase {
*/
function makeLimitTestFixture(limit: GPUSupportedLimit): typeof LimitTestsImpl {
class LimitTests extends LimitTestsImpl {
limit = limit;
override limit = limit;
}

return LimitTests;

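// The `override` keywords added through this class mark members that shadow a
// base-class member; they are required when a build enables TypeScript's
// noImplicitOverride option, which is presumably the case here. A minimal
// self-contained example:
class BaseFixture {
  init(): void {}
}
class DerivedFixture extends BaseFixture {
  override init(): void {} // omitting `override` errors under noImplicitOverride
}
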
@@ -19,7 +19,7 @@ g.test('createPipelineLayout,at_over')
limitTest,
testValueName,
async ({ device, testValue, shouldError }) => {
const bindGroupLayouts = range(testValue, (i: number) =>
const bindGroupLayouts = range(testValue, _i =>
device.createBindGroupLayout({
entries: [
{

@@ -85,3 +85,11 @@ g.test('setBindGroup,at_over')
}
);
});

g.test('validate,maxBindGroupsPlusVertexBuffers')
.desc(`Test that ${limit} <= maxBindGroupsPlusVertexBuffers`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= t.getDefaultLimit('maxBindGroupsPlusVertexBuffers'));
t.expect(adapterLimit <= adapter.limits.maxBindGroupsPlusVertexBuffers);
});

@@ -1,6 +1,7 @@
import { range } from '../../../../../common/util/util.js';
import { kMaxColorAttachmentsToTest } from '../../../../capability_info.js';

import { kMaximumLimitBaseParams, getDefaultLimit, makeLimitTestGroup } from './limit_utils.js';
import { kMaximumLimitBaseParams, makeLimitTestGroup } from './limit_utils.js';

function getPipelineDescriptor(device: GPUDevice, testValue: number): GPURenderPipelineDescriptor {
const code = `

@@ -105,9 +106,19 @@ g.test('validate,maxColorAttachmentBytesPerSample')
.desc(`Test ${limit} against maxColorAttachmentBytesPerSample`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit: maximumLimit } = t;
const minColorAttachmentBytesPerSample = getDefaultLimit('maxColorAttachmentBytesPerSample');
const minColorAttachmentBytesPerSample = t.getDefaultLimit('maxColorAttachmentBytesPerSample');
// The smallest attachment is 1 byte
// so make sure maxColorAttachments < maxColorAttachmentBytesPerSample
t.expect(defaultLimit <= minColorAttachmentBytesPerSample);
t.expect(maximumLimit <= adapter.limits.maxColorAttachmentBytesPerSample);
});

g.test('validate,kMaxColorAttachmentsToTest')
.desc(
`
Tests that kMaxColorAttachmentsToTest is large enough to test the limits of this device
`
)
.fn(t => {
t.expect(t.adapter.limits.maxColorAttachments <= kMaxColorAttachmentsToTest);
});

@@ -1,6 +1,7 @@
import { GPUTestBase } from '../../../../gpu_test.js';

import {
kMaximumLimitBaseParams,
getDefaultLimit,
MaximumLimitValueTest,
MaximumTestValue,
makeLimitTestGroup,

@@ -75,11 +76,15 @@ function getDeviceLimitToRequest(
}
}

function getTestWorkgroupSize(testValueName: MaximumTestValue, requestedLimit: number) {
function getTestWorkgroupSize(
t: GPUTestBase,
testValueName: MaximumTestValue,
requestedLimit: number
) {
const maxDimensions = [
getDefaultLimit('maxComputeWorkgroupSizeX'),
getDefaultLimit('maxComputeWorkgroupSizeY'),
getDefaultLimit('maxComputeWorkgroupSizeZ'),
t.getDefaultLimit('maxComputeWorkgroupSizeX'),
t.getDefaultLimit('maxComputeWorkgroupSizeY'),
t.getDefaultLimit('maxComputeWorkgroupSizeZ'),
];

switch (testValueName) {

@@ -91,13 +96,14 @@ function getTestWorkgroupSize(testValueName: MaximumTestValue, requestedLimit: n
}

function getDeviceLimitToRequestAndValueToTest(
t: GPUTestBase,
limitValueTest: MaximumLimitValueTest,
testValueName: MaximumTestValue,
defaultLimit: number,
maximumLimit: number
) {
const requestedLimit = getDeviceLimitToRequest(limitValueTest, defaultLimit, maximumLimit);
const workgroupSize = getTestWorkgroupSize(testValueName, requestedLimit);
const workgroupSize = getTestWorkgroupSize(t, testValueName, requestedLimit);
return {
requestedLimit,
workgroupSize,

@@ -115,6 +121,7 @@ g.test('createComputePipeline,at_over')
const { defaultLimit, adapterLimit: maximumLimit } = t;

const { requestedLimit, workgroupSize } = getDeviceLimitToRequestAndValueToTest(
t,
limitTest,
testValueName,
defaultLimit,

@@ -10,3 +10,11 @@ g.test('createComputePipeline,at_over')
const { limitTest, testValueName, async } = t.params;
await t.testMaxComputeWorkgroupSize(limitTest, testValueName, async, 'X');
});

g.test('validate,maxComputeInvocationsPerWorkgroup')
.desc(`Test that ${limit} <= maxComputeInvocationsPerWorkgroup`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= t.getDefaultLimit('maxComputeInvocationsPerWorkgroup'));
t.expect(adapterLimit <= adapter.limits.maxComputeInvocationsPerWorkgroup);
});

@@ -10,3 +10,11 @@ g.test('createComputePipeline,at_over')
const { limitTest, testValueName, async } = t.params;
await t.testMaxComputeWorkgroupSize(limitTest, testValueName, async, 'Y');
});

g.test('validate,maxComputeInvocationsPerWorkgroup')
.desc(`Test that ${limit} <= maxComputeInvocationsPerWorkgroup`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= t.getDefaultLimit('maxComputeInvocationsPerWorkgroup'));
t.expect(adapterLimit <= adapter.limits.maxComputeInvocationsPerWorkgroup);
});

@@ -10,3 +10,11 @@ g.test('createComputePipeline,at_over')
const { limitTest, testValueName, async } = t.params;
await t.testMaxComputeWorkgroupSize(limitTest, testValueName, async, 'Z');
});

g.test('validate,maxComputeInvocationsPerWorkgroup')
.desc(`Test that ${limit} <= maxComputeInvocationsPerWorkgroup`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= t.getDefaultLimit('maxComputeInvocationsPerWorkgroup'));
t.expect(adapterLimit <= adapter.limits.maxComputeInvocationsPerWorkgroup);
});

@@ -7,7 +7,7 @@ const kCreateComputePipelineTypes = [
'createComputePipeline',
'createComputePipelineAsync',
] as const;
type CreateComputePipelineType = typeof kCreateComputePipelineTypes[number];
type CreateComputePipelineType = (typeof kCreateComputePipelineTypes)[number];

async function createComputePipeline(
device: GPUDevice,

@@ -77,3 +77,21 @@ g.test('dispatchWorkgroups,at_over')
}
);
});

g.test('validate')
.desc(
`Test that ${limit} <= maxComputeWorkgroupSizeX x maxComputeWorkgroupSizeY x maxComputeWorkgroupSizeZ`
)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
const defaultMaxComputeWorkgroupSizeProduct =
t.getDefaultLimit('maxComputeWorkgroupSizeX') *
t.getDefaultLimit('maxComputeWorkgroupSizeY') *
t.getDefaultLimit('maxComputeWorkgroupSizeZ');
const maxComputeWorkgroupSizeProduct =
adapter.limits.maxComputeWorkgroupSizeX *
adapter.limits.maxComputeWorkgroupSizeY *
adapter.limits.maxComputeWorkgroupSizeZ;
t.expect(defaultLimit <= defaultMaxComputeWorkgroupSizeProduct);
t.expect(adapterLimit <= maxComputeWorkgroupSizeProduct);
});

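// This encodes a spec-level consistency guarantee: the invocation limit can
// never exceed what the per-axis size limits allow. With the spec's base
// values — 256, 256, and 64 for X, Y, and Z, and 256 invocations — the
// inequality holds with large slack: 256 <= 256 * 256 * 64 = 4194304.
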
@@ -21,6 +21,7 @@ g.test('createBindGroupLayout,at_over')
limitTest,
testValueName,
async ({ device, testValue, shouldError }) => {
shouldError ||= testValue > t.device.limits.maxStorageBuffersPerShaderStage;
await t.expectValidationError(() => {
device.createBindGroupLayout({
entries: range(testValue, i => ({

@@ -25,6 +25,7 @@ g.test('createBindGroupLayout,at_over')
limitTest,
testValueName,
async ({ device, testValue, shouldError }) => {
shouldError ||= testValue > t.device.limits.maxUniformBuffersPerShaderStage;
await t.expectValidationError(() => {
device.createBindGroupLayout({
entries: range(testValue, i => ({

@@ -1,6 +1,6 @@
import { assert, range } from '../../../../../common/util/util.js';
import { range } from '../../../../../common/util/util.js';

import { kMaximumLimitBaseParams, makeLimitTestGroup } from './limit_utils.js';
import { kMaximumLimitBaseParams, LimitsRequest, makeLimitTestGroup } from './limit_utils.js';

function getTypeForNumComponents(numComponents: number) {
return numComponents > 1 ? `vec${numComponents}f` : 'f32';

@@ -21,7 +21,6 @@ function getPipelineDescriptor(

const maxInterStageVariables = device.limits.maxInterStageShaderVariables;
const numComponents = Math.min(maxVertexShaderOutputComponents, maxFragmentShaderInputComponents);
assert(Math.ceil(numComponents / 4) <= maxInterStageVariables);

const num4ComponentVaryings = Math.floor(numComponents / 4);
const lastVaryingNumComponents = numComponents % 4;

@@ -42,8 +41,8 @@ function getPipelineDescriptor(
// maxInterStageShaderComponents : ${device.limits.maxInterStageShaderComponents}
// num components in vertex shader : ${numComponents}${pointList ? ' + point-list' : ''}
// num components in fragment shader : ${numComponents}${frontFacing ? ' + front-facing' : ''}${
sampleIndex ? ' + sample_index' : ''
}${sampleMaskIn ? ' + sample_mask' : ''}
// maxVertexShaderOutputComponents : ${maxVertexShaderOutputComponents}
// maxFragmentShaderInputComponents : ${maxFragmentShaderInputComponents}
// maxInterStageVariables: : ${maxInterStageVariables}

@@ -127,6 +126,10 @@ g.test('createRenderPipeline,at_over')
sampleMaskIn,
sampleMaskOut,
} = t.params;
// Request the largest value of maxInterStageShaderVariables to allow the test to use as many
// inter-stage shader components as possible without being limited by
// maxInterStageShaderVariables.
const extraLimits: LimitsRequest = { maxInterStageShaderVariables: 'adapterLimit' };
await t.testDeviceWithRequestedMaximumLimits(
limitTest,
testValueName,

@@ -142,6 +145,7 @@ g.test('createRenderPipeline,at_over')
);

await t.testCreateRenderPipeline(pipelineDescriptor, async, shouldError, code);
}
},
extraLimits
);
});

@@ -4,13 +4,12 @@ import {
kMaximumLimitBaseParams,
makeLimitTestGroup,
LimitMode,
getDefaultLimit,
MaximumLimitValueTest,
MaximumTestValue,
} from './limit_utils.js';

const kBufferParts = ['wholeBuffer', 'biggerBufferWithOffset'] as const;
type BufferPart = typeof kBufferParts[number];
type BufferPart = (typeof kBufferParts)[number];

function getSizeAndOffsetForBufferPart(device: GPUDevice, bufferPart: BufferPart, size: number) {
const align = device.limits.minUniformBufferOffsetAlignment;

@@ -145,10 +144,18 @@ g.test('createBindGroup,at_over')
);
});

g.test('validate')
.desc(`Test that ${limit} is a multiple of 4 bytes`)
.fn(t => {
const { defaultLimit, adapterLimit } = t;
t.expect(defaultLimit % 4 === 0);
t.expect(adapterLimit % 4 === 0);
});

g.test('validate,maxBufferSize')
.desc(`Test that ${limit} <= maxBufferSize`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= getDefaultLimit('maxBufferSize'));
t.expect(defaultLimit <= t.getDefaultLimit('maxBufferSize'));
t.expect(adapterLimit <= adapter.limits.maxBufferSize);
});

@@ -1,12 +1,7 @@
import {
LimitMode,
getDefaultLimit,
kMaximumLimitBaseParams,
makeLimitTestGroup,
} from './limit_utils.js';
import { LimitMode, kMaximumLimitBaseParams, makeLimitTestGroup } from './limit_utils.js';

const kBufferParts = ['wholeBuffer', 'biggerBufferWithOffset'] as const;
type BufferPart = typeof kBufferParts[number];
type BufferPart = (typeof kBufferParts)[number];

function getSizeAndOffsetForBufferPart(device: GPUDevice, bufferPart: BufferPart, size: number) {
const align = device.limits.minUniformBufferOffsetAlignment;

@@ -90,6 +85,6 @@ g.test('validate,maxBufferSize')
.desc(`Test that ${limit} <= maxBufferSize`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= getDefaultLimit('maxBufferSize'));
t.expect(defaultLimit <= t.getDefaultLimit('maxBufferSize'));
t.expect(adapterLimit <= adapter.limits.maxBufferSize);
});

@@ -111,3 +111,11 @@ g.test('createRenderPipeline,at_over')
}
);
});

g.test('validate')
.desc(`Test that ${limit} is a multiple of 4 bytes`)
.fn(t => {
const { defaultLimit, adapterLimit } = t;
t.expect(defaultLimit % 4 === 0);
t.expect(adapterLimit % 4 === 0);
});

@@ -3,7 +3,7 @@ import { range } from '../../../../../common/util/util.js';
import { kRenderEncoderTypes, kMaximumLimitBaseParams, makeLimitTestGroup } from './limit_utils.js';

const kPipelineTypes = ['withoutLocations', 'withLocations'] as const;
type PipelineType = typeof kPipelineTypes[number];
type PipelineType = (typeof kPipelineTypes)[number];

function getPipelineDescriptor(
device: GPUDevice,

@@ -90,3 +90,11 @@ g.test('setVertexBuffer,at_over')
}
);
});

g.test('validate,maxBindGroupsPlusVertexBuffers')
.desc(`Test that ${limit} <= maxBindGroupsPlusVertexBuffers`)
.fn(t => {
const { adapter, defaultLimit, adapterLimit } = t;
t.expect(defaultLimit <= t.getDefaultLimit('maxBindGroupsPlusVertexBuffers'));
t.expect(adapterLimit <= adapter.limits.maxBindGroupsPlusVertexBuffers);
});

@@ -233,7 +233,7 @@ Tests calling createComputePipeline(Async) validation for compute workgroup_size
[1, 1, 63],
[1, 1, 64],
[1, 1, 65],
])
] as const)
)
.fn(t => {
const { isAsync, size } = t.params;

@@ -251,13 +251,14 @@ Tests calling createComputePipeline(Async) validation for compute workgroup_size
},
};

size[1] = size[1] ?? 1;
size[2] = size[2] ?? 1;
const workgroupX = size[0];
const workgroupY = size[1] ?? 1;
const workgroupZ = size[2] ?? 1;

const _success =
size[0] <= t.device.limits.maxComputeWorkgroupSizeX &&
size[1] <= t.device.limits.maxComputeWorkgroupSizeY &&
size[2] <= t.device.limits.maxComputeWorkgroupSizeZ;
workgroupX <= t.device.limits.maxComputeWorkgroupSizeX &&
workgroupY <= t.device.limits.maxComputeWorkgroupSizeY &&
workgroupZ <= t.device.limits.maxComputeWorkgroupSizeZ;
t.doCreateComputePipelineTest(isAsync, _success, descriptor);
});

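// WGSL's @workgroup_size accepts one, two, or three dimensions, and omitted
// trailing dimensions default to 1 — which is what the `?? 1` normalization
// above mirrors before comparing each axis against its device limit. A sketch
// of just the per-axis predicate (the full validation also bounds the product
// by maxComputeInvocationsPerWorkgroup):
function workgroupSizeFitsAxes(size: number[], limits: GPUSupportedLimits): boolean {
  const [x, y = 1, z = 1] = size;
  return (
    x <= limits.maxComputeWorkgroupSizeX &&
    y <= limits.maxComputeWorkgroupSizeY &&
    z <= limits.maxComputeWorkgroupSizeZ
  );
}
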
@@ -5,7 +5,7 @@ export const description = `
`;

import { makeTestGroup } from '../../../common/framework/test_group.js';
import { assert, unreachable } from '../../../common/util/util.js';
import { assert, makeValueTestVariant, unreachable } from '../../../common/util/util.js';
import {
allBindingEntries,
bindingTypeInfo,

@@ -15,7 +15,6 @@ import {
kBufferBindingTypes,
kBufferUsages,
kCompareFunctions,
kLimitInfo,
kSamplerBindingTypes,
kTextureUsages,
kTextureViewDimensions,

@@ -467,19 +466,20 @@ g.test('minBindingSize')
usage: GPUBufferUsage.STORAGE,
});

t.expectValidationError(() => {
t.device.createBindGroup({
layout: bindGroupLayout,
entries: [
{
binding: 0,
resource: {
buffer: storageBuffer,
t.expectValidationError(
() => {
t.device.createBindGroup({
layout: bindGroupLayout,
entries: [
{
binding: 0,
resource: { buffer: storageBuffer },
},
},
],
});
}, minBindingSize !== undefined && size < minBindingSize);
],
});
},
minBindingSize !== undefined && size < minBindingSize
);
});

g.test('buffer,resource_state')

@@ -882,24 +882,20 @@ g.test('buffer,resource_offset')
u //
.combine('type', kBufferBindingTypes)
.beginSubcases()
.expand('offset', ({ type }) =>
type === 'uniform'
? [
kLimitInfo.minUniformBufferOffsetAlignment.default,
kLimitInfo.minUniformBufferOffsetAlignment.default * 0.5,
kLimitInfo.minUniformBufferOffsetAlignment.default * 1.5,
kLimitInfo.minUniformBufferOffsetAlignment.default + 2,
]
: [
kLimitInfo.minStorageBufferOffsetAlignment.default,
kLimitInfo.minStorageBufferOffsetAlignment.default * 0.5,
kLimitInfo.minStorageBufferOffsetAlignment.default * 1.5,
kLimitInfo.minStorageBufferOffsetAlignment.default + 2,
]
)
.combine('offsetAddMult', [
{ add: 0, mult: 0 },
{ add: 0, mult: 0.5 },
{ add: 0, mult: 1.5 },
{ add: 2, mult: 0 },
])
)
.fn(t => {
const { type, offset } = t.params;
const { type, offsetAddMult } = t.params;
const minAlignment =
t.device.limits[
type === 'uniform' ? 'minUniformBufferOffsetAlignment' : 'minStorageBufferOffsetAlignment'
];
const offset = makeValueTestVariant(minAlignment, offsetAddMult);

const bindGroupLayout = t.device.createBindGroupLayout({
entries: [

@@ -911,14 +907,8 @@ g.test('buffer,resource_offset')
],
});

let usage, isValid;
if (type === 'uniform') {
usage = GPUBufferUsage.UNIFORM;
isValid = offset % kLimitInfo.minUniformBufferOffsetAlignment.default === 0;
} else {
usage = GPUBufferUsage.STORAGE;
isValid = offset % kLimitInfo.minStorageBufferOffsetAlignment.default === 0;
}
const usage = type === 'uniform' ? GPUBufferUsage.UNIFORM : GPUBufferUsage.STORAGE;
const isValid = offset % minAlignment === 0;

const buffer = t.device.createBuffer({
size: 1024,

@@ -947,22 +937,23 @@ g.test('buffer,resource_binding_size')
.beginSubcases()
// Test a size of 1 (for uniform buffer) or 4 (for storage and read-only storage buffer)
// then values just within and just above the limit.
.expand('bindingSize', ({ type }) =>
type === 'uniform'
? [
1,
kLimitInfo.maxUniformBufferBindingSize.default,
kLimitInfo.maxUniformBufferBindingSize.default + 1,
]
: [
4,
kLimitInfo.maxStorageBufferBindingSize.default,
kLimitInfo.maxStorageBufferBindingSize.default + 4,
]
)
.combine('bindingSize', [
{ base: 1, limit: 0 },
{ base: 0, limit: 1 },
{ base: 1, limit: 1 },
])
)
.fn(t => {
const { type, bindingSize } = t.params;
const {
type,
bindingSize: { base, limit },
} = t.params;
const mult = type === 'uniform' ? 1 : 4;
const maxBindingSize =
t.device.limits[
type === 'uniform' ? 'maxUniformBufferBindingSize' : 'maxStorageBufferBindingSize'
];
const bindingSize = base * mult + maxBindingSize * limit;

const bindGroupLayout = t.device.createBindGroupLayout({
entries: [

@@ -974,17 +965,12 @@ g.test('buffer,resource_binding_size')
],
});

let usage, isValid;
if (type === 'uniform') {
usage = GPUBufferUsage.UNIFORM;
isValid = bindingSize <= kLimitInfo.maxUniformBufferBindingSize.default;
} else {
usage = GPUBufferUsage.STORAGE;
isValid = bindingSize <= kLimitInfo.maxStorageBufferBindingSize.default;
}
const usage = type === 'uniform' ? GPUBufferUsage.UNIFORM : GPUBufferUsage.STORAGE;
const isValid = bindingSize <= maxBindingSize;

// MAINTENANCE_TODO: Allocating the max size seems likely to fail. Refactor test.
const buffer = t.device.createBuffer({
size: kLimitInfo.maxStorageBufferBindingSize.default,
size: maxBindingSize,
usage,
});

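// The { base, limit } encoding composes each subcase's size out of the device
// limit itself: bindingSize = base * mult + maxBindingSize * limit, with mult
// being 1 for uniform and 4 for storage bindings. The three subcases therefore
// resolve to a minimal size, exactly the limit, and just past it:
//   { base: 1, limit: 0 } -> 1 (uniform) or 4 (storage)   valid
//   { base: 0, limit: 1 } -> maxBindingSize               valid
//   { base: 1, limit: 1 } -> maxBindingSize + 1 (or + 4)  invalid
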
@@ -1007,26 +993,18 @@ g.test('buffer,effective_buffer_binding_size')
u
.combine('type', kBufferBindingTypes)
.beginSubcases()
.expand('offset', ({ type }) =>
type === 'uniform'
? [0, kLimitInfo.minUniformBufferOffsetAlignment.default]
: [0, kLimitInfo.minStorageBufferOffsetAlignment.default]
)
.expand('bufferSize', ({ type }) =>
type === 'uniform'
? [
kLimitInfo.minUniformBufferOffsetAlignment.default + 8,
kLimitInfo.minUniformBufferOffsetAlignment.default + 10,
]
: [
kLimitInfo.minStorageBufferOffsetAlignment.default + 8,
kLimitInfo.minStorageBufferOffsetAlignment.default + 10,
]
)
.combine('offsetMult', [0, 1])
.combine('bufferSizeAddition', [8, 10])
.combine('bindingSize', [undefined, 2, 4, 6])
)
.fn(t => {
const { type, offset, bufferSize, bindingSize } = t.params;
const { type, offsetMult, bufferSizeAddition, bindingSize } = t.params;
const minAlignment =
t.device.limits[
type === 'uniform' ? 'minUniformBufferOffsetAlignment' : 'minStorageBufferOffsetAlignment'
];
const offset = minAlignment * offsetMult;
const bufferSize = minAlignment + bufferSizeAddition;

const bindGroupLayout = t.device.createBindGroupLayout({
entries: [

@@ -7,7 +7,6 @@ TODO: make sure tests are complete.
import { kUnitCaseParamsBuilder } from '../../../common/framework/params_builder.js';
import { makeTestGroup } from '../../../common/framework/test_group.js';
import {
kLimitInfo,
kShaderStages,
kShaderStageCombinations,
kStorageTextureAccessValues,

@@ -63,27 +62,26 @@ g.test('maximum_binding_limit')
`
)
.paramsSubcasesOnly(u =>
u //
.combine('binding', [
1,
4,
8,
256,
kLimitInfo.maxBindingsPerBindGroup.default - 1,
kLimitInfo.maxBindingsPerBindGroup.default,
])
u.combine('bindingVariant', [1, 4, 8, 256, 'default', 'default-minus-one'] as const)
)
.fn(t => {
const { binding } = t.params;
const { bindingVariant } = t.params;
const entries: Array<GPUBindGroupLayoutEntry> = [];

const binding =
bindingVariant === 'default'
? t.device.limits.maxBindingsPerBindGroup
: bindingVariant === 'default-minus-one'
? t.device.limits.maxBindingsPerBindGroup - 1
: bindingVariant;

entries.push({
binding,
visibility: GPUShaderStage.COMPUTE,
buffer: { type: 'storage' as const },
});

const success = binding < kLimitInfo.maxBindingsPerBindGroup.default;
const success = binding < t.device.limits.maxBindingsPerBindGroup;

t.expectValidationError(() => {
t.device.createBindGroupLayout({

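// Resolving 'default' and 'default-minus-one' against the live device keeps
// the boundary probe meaningful on implementations that raise
// maxBindingsPerBindGroup above the spec default. Assuming the spec's base
// value of 1000:
//   1, 4, 8, 256        -> used as-is (all valid, since binding < 1000)
//   'default-minus-one' -> 999  (the last valid binding index)
//   'default'           -> 1000 (invalid: valid indices are strictly below the limit)
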
@@ -234,7 +232,10 @@ g.test('max_dynamic_buffers')
const { type, extraDynamicBuffers, staticBuffers } = t.params;
const info = bufferBindingTypeInfo({ type });

const dynamicBufferCount = info.perPipelineLimitClass.maxDynamic + extraDynamicBuffers;
const limitName = info.perPipelineLimitClass.maxDynamicLimit;
const bufferCount = limitName ? t.getDefaultLimit(limitName) : 0;
const dynamicBufferCount = bufferCount + extraDynamicBuffers;
const perStageLimit = t.getDefaultLimit(info.perStageLimitClass.maxLimit);

const entries = [];
for (let i = 0; i < dynamicBufferCount; i++) {

@@ -257,9 +258,12 @@ g.test('max_dynamic_buffers')
entries,
};

t.expectValidationError(() => {
t.device.createBindGroupLayout(descriptor);
}, extraDynamicBuffers > 0);
t.expectValidationError(
() => {
t.device.createBindGroupLayout(descriptor);
},
extraDynamicBuffers > 0 || entries.length > perStageLimit
);
});

/**
@@ -297,7 +301,7 @@ const kMaxResourcesCases = kUnitCaseParamsBuilder
.combine('extraVisibility', kShaderStages)
.filter(p => (bindingTypeInfo(p.extraEntry).validStages & p.extraVisibility) !== 0);

// Should never fail unless kLimitInfo.maxBindingsPerBindGroup.default is exceeded, because the validation for
// Should never fail unless limitInfo.maxBindingsPerBindGroup.default is exceeded, because the validation for
// resources-of-type-per-stage is in pipeline layout creation.
g.test('max_resources_per_stage,in_bind_group_layout')
.desc(

@@ -313,7 +317,7 @@ g.test('max_resources_per_stage,in_bind_group_layout')
.fn(t => {
const { maxedEntry, extraEntry, maxedVisibility, extraVisibility } = t.params;
const maxedTypeInfo = bindingTypeInfo(maxedEntry);
const maxedCount = maxedTypeInfo.perStageLimitClass.max;
const maxedCount = t.getDefaultLimit(maxedTypeInfo.perStageLimitClass.maxLimit);
const extraTypeInfo = bindingTypeInfo(extraEntry);

const maxResourceBindings: GPUBindGroupLayoutEntry[] = [];

@@ -364,7 +368,7 @@ g.test('max_resources_per_stage,in_pipeline_layout')
.fn(t => {
const { maxedEntry, extraEntry, maxedVisibility, extraVisibility } = t.params;
const maxedTypeInfo = bindingTypeInfo(maxedEntry);
const maxedCount = maxedTypeInfo.perStageLimitClass.max;
const maxedCount = t.getDefaultLimit(maxedTypeInfo.perStageLimitClass.maxLimit);
const extraTypeInfo = bindingTypeInfo(extraEntry);

const maxResourceBindings: GPUBindGroupLayoutEntry[] = [];

@@ -443,15 +447,18 @@ g.test('storage_texture,formats')
const { format } = t.params;
const info = kTextureFormatInfo[format];

t.expectValidationError(() => {
t.device.createBindGroupLayout({
entries: [
{
binding: 0,
visibility: GPUShaderStage.COMPUTE,
storageTexture: { format },
},
],
});
}, !info.color?.storage);
t.expectValidationError(
() => {
t.device.createBindGroupLayout({
entries: [
{
binding: 0,
visibility: GPUShaderStage.COMPUTE,
storageTexture: { format },
},
],
});
},
!info.color?.storage
);
});

@@ -33,7 +33,13 @@ g.test('number_of_dynamic_buffers_exceeds_the_maximum_value')
)
.fn(t => {
const { type, visibility } = t.params;
const { maxDynamic } = bufferBindingTypeInfo({ type }).perPipelineLimitClass;
const info = bufferBindingTypeInfo({ type });
const { maxDynamicLimit } = info.perPipelineLimitClass;
const perStageLimit = t.getDefaultLimit(info.perStageLimitClass.maxLimit);
const maxDynamic = Math.min(
maxDynamicLimit ? t.getDefaultLimit(maxDynamicLimit) : 0,
perStageLimit
);

const maxDynamicBufferBindings: GPUBindGroupLayoutEntry[] = [];
for (let binding = 0; binding < maxDynamic; binding++) {

@@ -52,15 +58,17 @@ g.test('number_of_dynamic_buffers_exceeds_the_maximum_value')
entries: [{ binding: 0, visibility, buffer: { type, hasDynamicOffset: false } }],
};

const goodPipelineLayoutDescriptor = {
bindGroupLayouts: [
maxDynamicBufferBindGroupLayout,
t.device.createBindGroupLayout(goodDescriptor),
],
};
if (perStageLimit > maxDynamic) {
const goodPipelineLayoutDescriptor = {
bindGroupLayouts: [
maxDynamicBufferBindGroupLayout,
t.device.createBindGroupLayout(goodDescriptor),
],
};

// Control case
t.device.createPipelineLayout(goodPipelineLayoutDescriptor);
// Control case
t.device.createPipelineLayout(goodPipelineLayoutDescriptor);
}

// Check dynamic buffers exceed maximum in pipeline layout.
const badDescriptor = clone(goodDescriptor);

@@ -16,12 +16,16 @@ g.test('lodMinAndMaxClamp')
.combine('lodMaxClamp', [-4e-30, -1, 0, 0.5, 1, 10, 4e30])
)
.fn(t => {
const shouldError =
t.params.lodMinClamp > t.params.lodMaxClamp ||
t.params.lodMinClamp < 0 ||
t.params.lodMaxClamp < 0;
t.expectValidationError(() => {
t.device.createSampler({
lodMinClamp: t.params.lodMinClamp,
lodMaxClamp: t.params.lodMaxClamp,
});
}, t.params.lodMinClamp > t.params.lodMaxClamp || t.params.lodMinClamp < 0 || t.params.lodMaxClamp < 0);
}, shouldError);
});

g.test('maxAnisotropy')

@@ -48,6 +52,11 @@ g.test('maxAnisotropy')
magFilter?: GPUFilterMode;
mipmapFilter?: GPUFilterMode;
};

const shouldError =
maxAnisotropy < 1 ||
(maxAnisotropy > 1 &&
!(minFilter === 'linear' && magFilter === 'linear' && mipmapFilter === 'linear'));
t.expectValidationError(() => {
t.device.createSampler({
minFilter,

@@ -55,5 +64,5 @@ g.test('maxAnisotropy')
mipmapFilter,
maxAnisotropy,
});
}, maxAnisotropy < 1 || (maxAnisotropy > 1 && !(minFilter === 'linear' && magFilter === 'linear' && mipmapFilter === 'linear')));
}, shouldError);
});

@@ -2,8 +2,8 @@ export const description = `createTexture validation tests.`;

import { SkipTestCase } from '../../../common/framework/fixture.js';
import { makeTestGroup } from '../../../common/framework/test_group.js';
import { assert } from '../../../common/util/util.js';
import { kTextureDimensions, kTextureUsages, kLimitInfo } from '../../capability_info.js';
import { assert, makeValueTestVariant } from '../../../common/util/util.js';
import { kTextureDimensions, kTextureUsages } from '../../capability_info.js';
import { GPUConst } from '../../constants.js';
import {
kTextureFormats,

@@ -326,7 +326,7 @@ g.test('sampleCount,valid_sampleCount_with_other_parameter_varies')
arrayLayerCount === 2 && dimension !== '2d' && dimension !== undefined
)
.combine('mipLevelCount', [1, 2])
.expand('usage', p => {
.expand('usage', () => {
const usageSet = new Set<number>();
for (const usage0 of kTextureUsages) {
for (const usage1 of kTextureUsages) {

@@ -495,10 +495,10 @@ g.test('texture_size,1d_texture')
// Compressed and depth-stencil textures are invalid for 1D.
.combine('format', kRegularTextureFormats)
.beginSubcases()
.combine('width', [
kLimitInfo.maxTextureDimension1D.default - 1,
kLimitInfo.maxTextureDimension1D.default,
kLimitInfo.maxTextureDimension1D.default + 1,
.combine('widthVariant', [
{ mult: 1, add: -1 },
{ mult: 1, add: 0 },
{ mult: 1, add: 1 },
])
.combine('height', [1, 2])
.combine('depthOrArrayLayers', [1, 2])

@@ -510,7 +510,8 @@ g.test('texture_size,1d_texture')
t.selectDeviceOrSkipTestCase(info.feature);
})
.fn(t => {
const { format, width, height, depthOrArrayLayers } = t.params;
const { format, widthVariant, height, depthOrArrayLayers } = t.params;
const width = t.makeLimitVariant('maxTextureDimension1D', widthVariant);

const descriptor: GPUTextureDescriptor = {
size: [width, height, depthOrArrayLayers],

@@ -520,7 +521,7 @@ g.test('texture_size,1d_texture')
};

const success =
width <= kLimitInfo.maxTextureDimension1D.default && height === 1 && depthOrArrayLayers === 1;
width <= t.device.limits.maxTextureDimension1D && height === 1 && depthOrArrayLayers === 1;

t.expectValidationError(() => {
t.device.createTexture(descriptor);

@@ -533,20 +534,23 @@ g.test('texture_size,2d_texture,uncompressed_format')
u
.combine('dimension', [undefined, '2d'] as const)
.combine('format', kUncompressedTextureFormats)
.combine('size', [
// Test the bound of width
[kLimitInfo.maxTextureDimension2D.default - 1, 1, 1],
[kLimitInfo.maxTextureDimension2D.default, 1, 1],
[kLimitInfo.maxTextureDimension2D.default + 1, 1, 1],
// Test the bound of height
[1, kLimitInfo.maxTextureDimension2D.default - 1, 1],
[1, kLimitInfo.maxTextureDimension2D.default, 1],
[1, kLimitInfo.maxTextureDimension2D.default + 1, 1],
// Test the bound of array layers
[1, 1, kLimitInfo.maxTextureArrayLayers.default - 1],
[1, 1, kLimitInfo.maxTextureArrayLayers.default],
[1, 1, kLimitInfo.maxTextureArrayLayers.default + 1],
])
.combine(
'sizeVariant',
/* prettier-ignore */ [
// Test the bound of width
[{ mult: 1, add: -1 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
[{ mult: 1, add: 0 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
[{ mult: 1, add: 1 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
// Test the bound of height
[{ mult: 0, add: 1 }, { mult: 1, add: -1 }, { mult: 0, add: 1 }],
[{ mult: 0, add: 1 }, { mult: 1, add: 0 }, { mult: 0, add: 1 }],
[{ mult: 0, add: 1 }, { mult: 1, add: 1 }, { mult: 0, add: 1 }],
// Test the bound of array layers
[{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: -1 }],
[{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: 0 }],
[{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: 1 }],
]
)
)
.beforeAllSubcases(t => {
const { format } = t.params;

@@ -555,7 +559,12 @@ g.test('texture_size,2d_texture,uncompressed_format')
t.selectDeviceOrSkipTestCase(info.feature);
})
.fn(t => {
const { dimension, format, size } = t.params;
const { dimension, format, sizeVariant } = t.params;
const size = [
t.device.limits.maxTextureDimension2D,
t.device.limits.maxTextureDimension2D,
t.device.limits.maxTextureArrayLayers,
].map((limit, ndx) => makeValueTestVariant(limit, sizeVariant[ndx]));

const descriptor: GPUTextureDescriptor = {
size,

@@ -565,9 +574,9 @@ g.test('texture_size,2d_texture,uncompressed_format')
};

const success =
size[0] <= kLimitInfo.maxTextureDimension2D.default &&
size[1] <= kLimitInfo.maxTextureDimension2D.default &&
size[2] <= kLimitInfo.maxTextureArrayLayers.default;
size[0] <= t.device.limits.maxTextureDimension2D &&
size[1] <= t.device.limits.maxTextureDimension2D &&
size[2] <= t.device.limits.maxTextureArrayLayers;

t.expectValidationError(() => {
t.device.createTexture(descriptor);

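// Mapping each axis limit through its variant keeps every subcase
// one-dimensional: two axes stay pinned at 1 via { mult: 0, add: 1 } while the
// axis under test tracks its device limit. A standalone sketch with
// hypothetical limit values:
const kAssumedLimits = { maxTextureDimension2D: 8192, maxTextureArrayLayers: 256 };
const kWidthOverVariant = [{ mult: 1, add: 1 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }];
const exampleSize = [
  kAssumedLimits.maxTextureDimension2D,
  kAssumedLimits.maxTextureDimension2D,
  kAssumedLimits.maxTextureArrayLayers,
].map((limit, ndx) => limit * kWidthOverVariant[ndx].mult + kWidthOverVariant[ndx].add);
// -> [8193, 1, 1]: width just over the limit, so createTexture must fail validation.
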
@@ -580,40 +589,152 @@ g.test('texture_size,2d_texture,compressed_format')
|
||||
u
|
||||
.combine('dimension', [undefined, '2d'] as const)
|
||||
.combine('format', kCompressedTextureFormats)
|
||||
.expand('size', p => {
|
||||
.expand('sizeVariant', p => {
|
||||
const { blockWidth, blockHeight } = kTextureFormatInfo[p.format];
|
||||
return [
|
||||
// Test the bound of width
|
||||
-          [kLimitInfo.maxTextureDimension2D.default - 1, 1, 1],
-          [kLimitInfo.maxTextureDimension2D.default - blockWidth, 1, 1],
-          [kLimitInfo.maxTextureDimension2D.default - blockWidth, blockHeight, 1],
-          [kLimitInfo.maxTextureDimension2D.default, 1, 1],
-          [kLimitInfo.maxTextureDimension2D.default, blockHeight, 1],
-          [kLimitInfo.maxTextureDimension2D.default + 1, 1, 1],
-          [kLimitInfo.maxTextureDimension2D.default + blockWidth, 1, 1],
-          [kLimitInfo.maxTextureDimension2D.default + blockWidth, blockHeight, 1],
+          [
+            { mult: 1, add: -1 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: -blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: -blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: 0 },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
           // Test the bound of height
-          [1, kLimitInfo.maxTextureDimension2D.default - 1, 1],
-          [1, kLimitInfo.maxTextureDimension2D.default - blockHeight, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension2D.default - blockHeight, 1],
-          [1, kLimitInfo.maxTextureDimension2D.default, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension2D.default, 1],
-          [1, kLimitInfo.maxTextureDimension2D.default + 1, 1],
-          [1, kLimitInfo.maxTextureDimension2D.default + blockWidth, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension2D.default + blockHeight, 1],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: -blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: -blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: +blockWidth },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: +blockHeight },
+            { mult: 0, add: 1 },
+          ],
           // Test the bound of array layers
-          [1, 1, kLimitInfo.maxTextureArrayLayers.default - 1],
-          [blockWidth, 1, kLimitInfo.maxTextureArrayLayers.default - 1],
-          [1, blockHeight, kLimitInfo.maxTextureArrayLayers.default - 1],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureArrayLayers.default - 1],
-          [1, 1, kLimitInfo.maxTextureArrayLayers.default],
-          [blockWidth, 1, kLimitInfo.maxTextureArrayLayers.default],
-          [1, blockHeight, kLimitInfo.maxTextureArrayLayers.default],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureArrayLayers.default],
-          [1, 1, kLimitInfo.maxTextureArrayLayers.default + 1],
-          [blockWidth, 1, kLimitInfo.maxTextureArrayLayers.default + 1],
-          [1, blockHeight, kLimitInfo.maxTextureArrayLayers.default + 1],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureArrayLayers.default + 1],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: +1 },
+          ],
         ];
       })
   )
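The `{ mult, add }` triplets above express each texture extent relative to a device limit, replacing the old hard-coded `kLimitInfo.*.default` values. A minimal sketch of how such a variant presumably resolves to a concrete dimension (the helper name `makeValueTestVariant` appears in the next hunk; its body below is inferred from how each entry mirrors an old `default ± blockWidth` value, and is not taken from the CTS):

// Sketch only: resolve a size variant against a concrete device limit.
// A variant scales the limit (mult is 0 or 1 in these tables), then
// applies a signed offset such as -blockWidth or +1.
interface ValueTestVariant {
  mult: number;
  add: number;
}

function makeValueTestVariant(limit: number, variant: ValueTestVariant): number {
  return limit * variant.mult + variant.add;
}

// Example: with maxTextureDimension2D === 8192 and blockWidth === 4,
// { mult: 1, add: -4 } resolves to 8188 and { mult: 0, add: 1 } to 1.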
@@ -623,8 +744,13 @@ g.test('texture_size,2d_texture,compressed_format')
     t.selectDeviceOrSkipTestCase(info.feature);
   })
   .fn(t => {
-    const { dimension, format, size } = t.params;
+    const { dimension, format, sizeVariant } = t.params;
     const info = kTextureFormatInfo[format];
+    const size = [
+      t.device.limits.maxTextureDimension2D,
+      t.device.limits.maxTextureDimension2D,
+      t.device.limits.maxTextureArrayLayers,
+    ].map((limit, ndx) => makeValueTestVariant(limit, sizeVariant[ndx]));

     const descriptor: GPUTextureDescriptor = {
       size,
@@ -636,9 +762,9 @@ g.test('texture_size,2d_texture,compressed_format')
     const success =
       size[0] % info.blockWidth === 0 &&
       size[1] % info.blockHeight === 0 &&
-      size[0] <= kLimitInfo.maxTextureDimension2D.default &&
-      size[1] <= kLimitInfo.maxTextureDimension2D.default &&
-      size[2] <= kLimitInfo.maxTextureArrayLayers.default;
+      size[0] <= t.device.limits.maxTextureDimension2D &&
+      size[1] <= t.device.limits.maxTextureDimension2D &&
+      size[2] <= t.device.limits.maxTextureArrayLayers;

     t.expectValidationError(() => {
       t.device.createTexture(descriptor);
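The `success` expression above encodes two independent requirements for compressed formats: block alignment of width and height, and staying within the device limits. A small worked example under assumed values (the 4×4 block size and the 8192/256 limits are hypothetical here, chosen only for illustration):

// Assumed values for illustration only.
const blockWidth = 4, blockHeight = 4;
const maxTextureDimension2D = 8192, maxTextureArrayLayers = 256;

const size = [maxTextureDimension2D - blockWidth, blockHeight, 1]; // [8188, 4, 1]
const success =
  size[0] % blockWidth === 0 &&
  size[1] % blockHeight === 0 &&
  size[0] <= maxTextureDimension2D &&
  size[1] <= maxTextureDimension2D &&
  size[2] <= maxTextureArrayLayers;
// success === true; [8191, 4, 1] would fail block alignment, and
// [8196, 4, 1] would exceed the limit even though it is block-aligned.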
@@ -653,20 +779,23 @@ g.test('texture_size,3d_texture,uncompressed_format')
     u //
       .combine('format', kRegularTextureFormats)
       .beginSubcases()
-      .combine('size', [
-        // Test the bound of width
-        [kLimitInfo.maxTextureDimension3D.default - 1, 1, 1],
-        [kLimitInfo.maxTextureDimension3D.default, 1, 1],
-        [kLimitInfo.maxTextureDimension3D.default + 1, 1, 1],
-        // Test the bound of height
-        [1, kLimitInfo.maxTextureDimension3D.default - 1, 1],
-        [1, kLimitInfo.maxTextureDimension3D.default, 1],
-        [1, kLimitInfo.maxTextureDimension3D.default + 1, 1],
-        // Test the bound of depth
-        [1, 1, kLimitInfo.maxTextureDimension3D.default - 1],
-        [1, 1, kLimitInfo.maxTextureDimension3D.default],
-        [1, 1, kLimitInfo.maxTextureDimension3D.default + 1],
-      ])
+      .combine(
+        'sizeVariant',
+        /* prettier-ignore */ [
+          // Test the bound of width
+          [{ mult: 1, add: -1 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
+          [{ mult: 1, add: 0 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
+          [{ mult: 1, add: +1 }, { mult: 0, add: 1 }, { mult: 0, add: 1 }],
+          // Test the bound of height
+          [{ mult: 0, add: 1 }, { mult: 1, add: -1 }, { mult: 0, add: 1 }],
+          [{ mult: 0, add: 1 }, { mult: 1, add: 0 }, { mult: 0, add: 1 }],
+          [{ mult: 0, add: 1 }, { mult: 1, add: +1 }, { mult: 0, add: 1 }],
+          // Test the bound of depth
+          [{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: -1 }],
+          [{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: 0 }],
+          [{ mult: 0, add: 1 }, { mult: 0, add: 1 }, { mult: 1, add: +1 }],
+        ]
+      )
   )
   .beforeAllSubcases(t => {
     const { format } = t.params;
@@ -675,7 +804,9 @@ g.test('texture_size,3d_texture,uncompressed_format')
     t.selectDeviceOrSkipTestCase(info.feature);
   })
   .fn(t => {
-    const { format, size } = t.params;
+    const { format, sizeVariant } = t.params;
+    const maxTextureDimension3D = t.device.limits.maxTextureDimension3D;
+    const size = sizeVariant.map(variant => t.makeLimitVariant('maxTextureDimension3D', variant));

     const descriptor: GPUTextureDescriptor = {
       size,
@@ -685,9 +816,9 @@ g.test('texture_size,3d_texture,uncompressed_format')
     };

     const success =
-      size[0] <= kLimitInfo.maxTextureDimension3D.default &&
-      size[1] <= kLimitInfo.maxTextureDimension3D.default &&
-      size[2] <= kLimitInfo.maxTextureDimension3D.default;
+      size[0] <= maxTextureDimension3D &&
+      size[1] <= maxTextureDimension3D &&
+      size[2] <= maxTextureDimension3D;

     t.expectValidationError(() => {
       t.device.createTexture(descriptor);
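For 3D textures every extent is bounded by the same `maxTextureDimension3D` limit, so all three components map through one named limit here, rather than the per-axis `[maxTextureDimension2D, maxTextureDimension2D, maxTextureArrayLayers]` triple used for 2D textures above. `t.makeLimitVariant` is named by the diff; a hedged sketch of what it presumably does, written as a free function for clarity:

// Sketch only: like makeValueTestVariant, but the limit is looked up by
// name on the test's device instead of being passed in directly.
function makeLimitVariant(
  device: GPUDevice,
  limitName: 'maxTextureDimension3D',
  variant: { mult: number; add: number }
): number {
  return device.limits[limitName] * variant.mult + variant.add;
}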
@@ -700,40 +831,152 @@ g.test('texture_size,3d_texture,compressed_format')
     u //
       .combine('format', kCompressedTextureFormats)
       .beginSubcases()
-      .expand('size', p => {
+      .expand('sizeVariant', p => {
         const { blockWidth, blockHeight } = kTextureFormatInfo[p.format];
         return [
           // Test the bound of width
-          [kLimitInfo.maxTextureDimension3D.default - 1, 1, 1],
-          [kLimitInfo.maxTextureDimension3D.default - blockWidth, 1, 1],
-          [kLimitInfo.maxTextureDimension3D.default - blockWidth, blockHeight, 1],
-          [kLimitInfo.maxTextureDimension3D.default, 1, 1],
-          [kLimitInfo.maxTextureDimension3D.default, blockHeight, 1],
-          [kLimitInfo.maxTextureDimension3D.default + 1, 1, 1],
-          [kLimitInfo.maxTextureDimension3D.default + blockWidth, 1, 1],
-          [kLimitInfo.maxTextureDimension3D.default + blockWidth, blockHeight, 1],
+          [
+            { mult: 1, add: -1 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: -blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: -blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: 0 },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: +1 },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: +blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 1, add: +blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 0, add: 1 },
+          ],
           // Test the bound of height
-          [1, kLimitInfo.maxTextureDimension3D.default - 1, 1],
-          [1, kLimitInfo.maxTextureDimension3D.default - blockHeight, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension3D.default - blockHeight, 1],
-          [1, kLimitInfo.maxTextureDimension3D.default, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension3D.default, 1],
-          [1, kLimitInfo.maxTextureDimension3D.default + 1, 1],
-          [1, kLimitInfo.maxTextureDimension3D.default + blockWidth, 1],
-          [blockWidth, kLimitInfo.maxTextureDimension3D.default + blockHeight, 1],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: -blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: -blockHeight },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: 0 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 1, add: +blockWidth },
+            { mult: 0, add: 1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 1, add: +blockHeight },
+            { mult: 0, add: 1 },
+          ],
           // Test the bound of depth
-          [1, 1, kLimitInfo.maxTextureDimension3D.default - 1],
-          [blockWidth, 1, kLimitInfo.maxTextureDimension3D.default - 1],
-          [1, blockHeight, kLimitInfo.maxTextureDimension3D.default - 1],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureDimension3D.default - 1],
-          [1, 1, kLimitInfo.maxTextureDimension3D.default],
-          [blockWidth, 1, kLimitInfo.maxTextureDimension3D.default],
-          [1, blockHeight, kLimitInfo.maxTextureDimension3D.default],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureDimension3D.default],
-          [1, 1, kLimitInfo.maxTextureDimension3D.default + 1],
-          [blockWidth, 1, kLimitInfo.maxTextureDimension3D.default + 1],
-          [1, blockHeight, kLimitInfo.maxTextureDimension3D.default + 1],
-          [blockWidth, blockHeight, kLimitInfo.maxTextureDimension3D.default + 1],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: -1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: 0 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: 1 },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: 1 },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: +1 },
+          ],
+          [
+            { mult: 0, add: blockWidth },
+            { mult: 0, add: blockHeight },
+            { mult: 1, add: +1 },
+          ],
         ];
       })
   )
@@ -746,12 +989,15 @@ g.test('texture_size,3d_texture,compressed_format')
     t.selectDeviceOrSkipTestCase(info.feature);
   })
   .fn(t => {
-    const { format, size } = t.params;
+    const { format, sizeVariant } = t.params;
     const info = kTextureFormatInfo[format];

+    const maxTextureDimension3D = t.device.limits.maxTextureDimension3D;
+    const size = sizeVariant.map(variant => t.makeLimitVariant('maxTextureDimension3D', variant));
+
     assert(
-      kLimitInfo.maxTextureDimension3D.default % info.blockWidth === 0 &&
-        kLimitInfo.maxTextureDimension3D.default % info.blockHeight === 0
+      maxTextureDimension3D % info.blockWidth === 0 &&
+        maxTextureDimension3D % info.blockHeight === 0
     );

     const descriptor: GPUTextureDescriptor = {
@@ -764,9 +1010,9 @@ g.test('texture_size,3d_texture,compressed_format')
     const success =
       size[0] % info.blockWidth === 0 &&
       size[1] % info.blockHeight === 0 &&
-      size[0] <= kLimitInfo.maxTextureDimension3D.default &&
-      size[1] <= kLimitInfo.maxTextureDimension3D.default &&
-      size[2] <= kLimitInfo.maxTextureDimension3D.default;
+      size[0] <= maxTextureDimension3D &&
+      size[1] <= maxTextureDimension3D &&
+      size[2] <= maxTextureDimension3D;

     t.expectValidationError(() => {
       t.device.createTexture(descriptor);
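The diff is cut off by the viewer at this point, so the second argument of `expectValidationError` is not shown. The CTS idiom is to run the risky operation inside the callback and assert that a validation error occurs exactly when it should. A hedged sketch of the full call shape (the `!success` wiring is an assumption, consistent with the `success` expressions computed above):

// Assumed call shape; the closing of this call is truncated in the diff.
// Require a validation error from createTexture exactly when the
// computed `success` flag is false.
t.expectValidationError(() => {
  t.device.createTexture(descriptor);
}, !success);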