Import trybuild 1.0.80 upstream upstream/1.0.80
author DongHun Kwak <dh0128.kwak@samsung.com>
Mon, 17 Apr 2023 01:19:20 +0000 (10:19 +0900)
committer DongHun Kwak <dh0128.kwak@samsung.com>
Mon, 17 Apr 2023 01:19:20 +0000 (10:19 +0900)
66 files changed:
.cargo_vcs_info.json [new file with mode: 0644]
.clippy.toml [new file with mode: 0644]
.github/FUNDING.yml [new file with mode: 0644]
.github/workflows/ci.yml [new file with mode: 0644]
.gitignore [new file with mode: 0644]
Cargo.toml [new file with mode: 0644]
Cargo.toml.orig [new file with mode: 0644]
LICENSE-APACHE [new file with mode: 0644]
LICENSE-MIT [new file with mode: 0644]
README.md [new file with mode: 0644]
build.rs [new file with mode: 0644]
src/cargo.rs [new file with mode: 0644]
src/dependencies.rs [new file with mode: 0644]
src/diff.rs [new file with mode: 0644]
src/directory.rs [new file with mode: 0644]
src/env.rs [new file with mode: 0644]
src/error.rs [new file with mode: 0644]
src/expand.rs [new file with mode: 0644]
src/features.rs [new file with mode: 0644]
src/flock.rs [new file with mode: 0644]
src/inherit.rs [new file with mode: 0644]
src/lib.rs [new file with mode: 0644]
src/manifest.rs [new file with mode: 0644]
src/message.rs [new file with mode: 0644]
src/normalize.rs [new file with mode: 0644]
src/path.rs [new file with mode: 0644]
src/run.rs [new file with mode: 0644]
src/rustflags.rs [new file with mode: 0644]
src/term.rs [new file with mode: 0644]
src/tests.rs [new file with mode: 0644]
src/tests/and-n-others.rs [new file with mode: 0644]
src/tests/basic.rs [new file with mode: 0644]
src/tests/cargo-registry-sparse.rs [new file with mode: 0644]
src/tests/cargo-registry.rs [new file with mode: 0644]
src/tests/dir-backslash.rs [new file with mode: 0644]
src/tests/dropshot-required-by.rs [new file with mode: 0644]
src/tests/long-file-names.rs [new file with mode: 0644]
src/tests/proc-macro-panic.rs [new file with mode: 0644]
src/tests/py03-url.rs [new file with mode: 0644]
src/tests/rust-lib-with-githash.rs [new file with mode: 0644]
src/tests/rust-lib.rs [new file with mode: 0644]
src/tests/strip-path-dependencies.rs [new file with mode: 0644]
src/tests/traits-must-be-implemented.rs [new file with mode: 0644]
src/tests/type-dir-backslash.rs [new file with mode: 0644]
src/tests/uniffi-out-dir.rs [new file with mode: 0644]
tests/test.rs [new file with mode: 0644]
tests/ui/compile-fail-0.rs [new file with mode: 0644]
tests/ui/compile-fail-1.rs [new file with mode: 0644]
tests/ui/compile-fail-2.rs [new file with mode: 0644]
tests/ui/compile-fail-2.stderr [new file with mode: 0644]
tests/ui/compile-fail-3.rs [new file with mode: 0644]
tests/ui/compile-fail-3.stderr [new file with mode: 0644]
tests/ui/print-both.rs [new file with mode: 0644]
tests/ui/print-stderr.rs [new file with mode: 0644]
tests/ui/print-stdout.rs [new file with mode: 0644]
tests/ui/run-fail.rs [new file with mode: 0644]
tests/ui/run-pass-0.rs [new file with mode: 0644]
tests/ui/run-pass-1.rs [new file with mode: 0644]
tests/ui/run-pass-2.rs [new file with mode: 0644]
tests/ui/run-pass-3.rs [new file with mode: 0644]
tests/ui/run-pass-4.rs [new file with mode: 0644]
tests/ui/run-pass-5.rs [new file with mode: 0644]
tests/ui/run-pass-6.rs [new file with mode: 0644]
tests/ui/run-pass-7.rs [new file with mode: 0644]
tests/ui/run-pass-8.rs [new file with mode: 0644]
tests/ui/run-pass-9.rs [new file with mode: 0644]

diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
new file mode 100644 (file)
index 0000000..b61667f
--- /dev/null
+++ b/.cargo_vcs_info.json
@@ -0,0 +1,6 @@
+{
+  "git": {
+    "sha1": "a408546745c2bdc0a1fb6a81a814c3e87c9d5e9d"
+  },
+  "path_in_vcs": ""
+}
\ No newline at end of file
diff --git a/.clippy.toml b/.clippy.toml
new file mode 100644 (file)
index 0000000..90bfd5f
--- /dev/null
+++ b/.clippy.toml
@@ -0,0 +1 @@
+msrv = "1.45.0"
diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644 (file)
index 0000000..7507077
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1 @@
+github: dtolnay
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644 (file)
index 0000000..c6ab704
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,103 @@
+name: CI
+
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+  schedule: [cron: "40 1 * * *"]
+
+permissions:
+  contents: read
+
+env:
+  RUSTFLAGS: -Dwarnings
+
+jobs:
+  pre_ci:
+    uses: dtolnay/.github/.github/workflows/pre_ci.yml@master
+
+  test:
+    name: Rust ${{matrix.rust}}
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        rust: [nightly, beta, stable, 1.56.0]
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@master
+        with:
+          toolchain: ${{matrix.rust}}
+      - name: Enable type layout randomization
+        run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV
+        if: matrix.rust == 'nightly'
+      - run: cargo test
+
+  xplat:
+    name: ${{matrix.name}}
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ${{matrix.os}}-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - name: macOS
+            os: macos
+          - name: Windows
+            os: windows
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - name: Enable type layout randomization
+        run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV
+      - run: cargo test
+
+  minimal:
+    name: Minimal versions
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - run: cargo update -Z minimal-versions
+      - run: cargo check
+
+  clippy:
+    name: Clippy
+    runs-on: ubuntu-latest
+    if: github.event_name != 'pull_request'
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@clippy
+      - run: cargo clippy --tests -- -Dclippy::all -Dclippy::pedantic
+
+  outdated:
+    name: Outdated
+    runs-on: ubuntu-latest
+    if: github.event_name != 'pull_request'
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/install@cargo-outdated
+      - run: cargo outdated --workspace --exit-code 1
+      - run: cargo outdated --manifest-path fuzz/Cargo.toml --exit-code 1
+
+  fuzz:
+    name: Fuzz
+    needs: pre_ci
+    if: needs.pre_ci.outputs.continue
+    runs-on: ubuntu-latest
+    timeout-minutes: 45
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@nightly
+      - uses: dtolnay/install@cargo-fuzz
+      - run: cargo fuzz check
diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..98e5fcf
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+target
+**/*.rs.bk
+Cargo.lock
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644 (file)
index 0000000..a2a681f
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,66 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2018"
+rust-version = "1.45"
+name = "trybuild"
+version = "1.0.80"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+exclude = ["screenshots/*"]
+description = "Test harness for ui tests of compiler diagnostics"
+documentation = "https://docs.rs/trybuild"
+readme = "README.md"
+keywords = [
+    "macros",
+    "testing",
+    "dev-dependencies",
+]
+categories = ["development-tools::testing"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/trybuild"
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+doc-scrape-examples = false
+
+[dependencies.basic-toml]
+version = "0.1"
+
+[dependencies.dissimilar]
+version = "1.0"
+optional = true
+
+[dependencies.glob]
+version = "0.3"
+
+[dependencies.once_cell]
+version = "1.9"
+
+[dependencies.serde]
+version = "1.0.139"
+
+[dependencies.serde_derive]
+version = "1.0.139"
+
+[dependencies.serde_json]
+version = "1.0"
+
+[dependencies.termcolor]
+version = "1.0.4"
+
+[dev-dependencies.automod]
+version = "1.0"
+
+[features]
+diff = ["dissimilar"]
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644 (file)
index 0000000..d883f41
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,38 @@
+[package]
+name = "trybuild"
+version = "1.0.80"
+authors = ["David Tolnay <dtolnay@gmail.com>"]
+categories = ["development-tools::testing"]
+description = "Test harness for ui tests of compiler diagnostics"
+documentation = "https://docs.rs/trybuild"
+edition = "2018"
+exclude = ["screenshots/*"]
+keywords = ["macros", "testing", "dev-dependencies"]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/trybuild"
+rust-version = "1.45"
+
+[features]
+# Experimental: highlight the diff between the expected and actual compiler
+# output. Currently unix-only. If you test this out, please provide any feedback
+# in https://github.com/dtolnay/trybuild/issues/41.
+diff = ["dissimilar"]
+
+[dependencies]
+dissimilar = { version = "1.0", optional = true }
+glob = "0.3"
+once_cell = "1.9"
+serde = "1.0.139"
+serde_derive = "1.0.139"
+serde_json = "1.0"
+termcolor = "1.0.4"
+basic-toml = "0.1"
+
+[dev-dependencies]
+automod = "1.0"
+
+[lib]
+doc-scrape-examples = false
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
diff --git a/LICENSE-APACHE b/LICENSE-APACHE
new file mode 100644 (file)
index 0000000..1b5ec8b
--- /dev/null
+++ b/LICENSE-APACHE
@@ -0,0 +1,176 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/LICENSE-MIT b/LICENSE-MIT
new file mode 100644 (file)
index 0000000..31aa793
--- /dev/null
+++ b/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644 (file)
index 0000000..9041e35
--- /dev/null
+++ b/README.md
@@ -0,0 +1,244 @@
+Trybuild
+========
+
+[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/trybuild-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/trybuild)
+[<img alt="crates.io" src="https://img.shields.io/crates/v/trybuild.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/trybuild)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-trybuild-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/trybuild)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/trybuild/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/trybuild/actions?query=branch%3Amaster)
+
+Trybuild is a test harness for invoking rustc on a set of test cases and
+asserting that any resulting error messages are the ones intended.
+
+<p align="center">
+<a href="#compile-fail-tests">
+<img src="https://user-images.githubusercontent.com/1940490/57186574-76469e00-6e96-11e9-8cb5-b63b657170c9.png" width="600">
+</a>
+</p>
+
+Such tests are commonly useful for testing error reporting involving procedural
+macros. We would write test cases triggering either errors detected by the macro
+or errors detected by the Rust compiler in the resulting expanded code, and
+compare against the expected errors to ensure that they remain user-friendly.
+
+This style of testing is sometimes called *ui tests* because they test aspects
+of the user's interaction with a library outside of what would be covered by
+ordinary API tests.
+
+Nothing here is specific to macros; trybuild would work equally well for testing
+misuse of non-macro APIs.
+
+```toml
+[dev-dependencies]
+trybuild = "1.0"
+```
+
+*Compiler support: requires rustc 1.45+*
+
+<br>
+
+## Compile-fail tests
+
+A minimal trybuild setup looks like this:
+
+```rust
+#[test]
+fn ui() {
+    let t = trybuild::TestCases::new();
+    t.compile_fail("tests/ui/*.rs");
+}
+```
+
+The test can be run with `cargo test`. It will individually compile each of the
+source files matching the glob pattern, expect them to fail to compile, and
+assert that the compiler's error message matches an adjacently named _*.stderr_
+file containing the expected output (same file name as the test except with a
+different extension). If it matches, the test case is considered to succeed.
+
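+As a concrete sketch (the file names here are purely illustrative), each
+compile-fail case pairs a source file with an expected-output file of the
+same name:
+
+```
+tests/ui/forgot-repr.rs        # source file expected to fail to compile
+tests/ui/forgot-repr.stderr    # expected compiler output for that file
+```
+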
+Dependencies listed under `[dev-dependencies]` in the project's Cargo.toml are
+accessible from within the test cases.
+
+Failing tests display the expected vs actual compiler output inline.
+
+<p align="center">
+<a href="#compile-fail-tests">
+<img src="https://user-images.githubusercontent.com/1940490/57186575-79418e80-6e96-11e9-9478-c9b3dc10327f.png" width="600">
+</a>
+</p>
+
+A compile\_fail test that fails to fail to compile is also a failure.
+
+<p align="center">
+<a href="#compile-fail-tests">
+<img src="https://user-images.githubusercontent.com/1940490/57186576-7b0b5200-6e96-11e9-8bfd-2de705125108.png" width="600">
+</a>
+</p>
+
+To test just one source file, use:
+```
+cargo test -- ui trybuild=example.rs
+```
+where `ui` is the name of the `#[test]` function that invokes `trybuild`, and
+`example.rs` is the name of the file to test.
+
+<br>
+
+## Pass tests
+
+The same test harness is able to run tests that are expected to pass, too.
+Ordinarily you would just have Cargo run such tests directly, but being able to
+combine modes like this could be useful for workshops in which participants work
+through test cases enabling one at a time. Trybuild was originally developed for
+my [procedural macros workshop at Rust Latam][workshop].
+
+[workshop]: https://github.com/dtolnay/proc-macro-workshop
+
+```rust
+#[test]
+fn ui() {
+    let t = trybuild::TestCases::new();
+    t.pass("tests/01-parse-header.rs");
+    t.pass("tests/02-parse-body.rs");
+    t.compile_fail("tests/03-expand-four-errors.rs");
+    t.pass("tests/04-paste-ident.rs");
+    t.pass("tests/05-repeat-section.rs");
+    //t.pass("tests/06-make-work-in-function.rs");
+    //t.pass("tests/07-init-array.rs");
+    //t.compile_fail("tests/08-ident-span.rs");
+}
+```
+
+Pass tests are considered to succeed if they compile successfully and have a
+`main` function that does not panic when the compiled binary is executed.
+
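+A minimal pass test source file could be as small as the following; the
+contents are illustrative, and any program whose `main` returns without
+panicking counts as passing:
+
+```rust
+// tests/01-parse-header.rs (hypothetical test case)
+fn main() {
+    // Exercise the macro or API under test here; returning normally,
+    // without panicking, makes the pass test succeed.
+}
+```
+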
+<p align="center">
+<a href="#pass-tests">
+<img src="https://user-images.githubusercontent.com/1940490/57186580-7f376f80-6e96-11e9-9cae-8257609269ef.png" width="600">
+</a>
+</p>
+
+<br>
+
+## Details
+
+That's the entire API.
+
+<br>
+
+## Workflow
+
+There are two ways to update the _*.stderr_ files as you iterate on your test
+cases or your library; handwriting them is not recommended.
+
+First, if a test case is being run as compile\_fail but a corresponding
+_*.stderr_ file does not exist, the test runner will save the actual compiler
+output with the right filename into a directory called *wip* within the
+directory containing Cargo.toml. So you can update these files by deleting them,
+running `cargo test`, and moving all the files from *wip* into your testcase
+directory.
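+
+Assuming the test cases live in *tests/ui/*, updating them amounts to roughly:
+
+```
+rm tests/ui/*.stderr
+cargo test
+mv wip/*.stderr tests/ui/
+```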
+
+<p align="center">
+<a href="#workflow">
+<img src="https://user-images.githubusercontent.com/1940490/57186579-7cd51580-6e96-11e9-9f19-54dcecc9fbba.png" width="600">
+</a>
+</p>
+
+Alternatively, run `cargo test` with the environment variable
+`TRYBUILD=overwrite` to skip the *wip* directory and write all compiler output
+directly in place. You'll want to check `git diff` afterward to be sure the
+compiler's output is what you had in mind.
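+
+With a Unix-style shell, for example (the exact way to set an environment
+variable depends on your shell):
+
+```
+TRYBUILD=overwrite cargo test
+```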
+
+<br>
+
+## What to test
+
+When it comes to compile-fail tests, write tests for anything for which you care
+to find out when there are changes in the user-facing compiler output. As a
+negative example, please don't write compile-fail tests simply calling all of
+your public APIs with arguments of the wrong type; there would be no benefit.
+
+A common use would be for testing specific targeted error messages emitted by a
+procedural macro. For example the derive macro from the [`ref-cast`] crate is
+required to be placed on a type that has either `#[repr(C)]` or
+`#[repr(transparent)]` in order for the expansion to be free of undefined
+behavior, which it enforces at compile time:
+
+[`ref-cast`]: https://github.com/dtolnay/ref-cast
+
+```console
+error: RefCast trait requires #[repr(C)] or #[repr(transparent)]
+ --> $DIR/missing-repr.rs:3:10
+  |
+3 | #[derive(RefCast)]
+  |          ^^^^^^^
+```
+
+Macros that consume helper attributes will want to check that unrecognized
+content within those attributes is properly indicated to the caller. Is the
+error message correctly placed under the erroneous tokens, not on a useless
+call\_site span?
+
+```console
+error: unknown serde field attribute `qqq`
+ --> $DIR/unknown-attribute.rs:5:13
+  |
+5 |     #[serde(qqq = "...")]
+  |             ^^^
+```
+
+Declarative macros can benefit from compile-fail tests too. The [`json!`] macro
+from serde\_json is just a great big macro\_rules macro but makes an effort to
+have error messages from broken JSON in the input always appear on the most
+appropriate token:
+
+[`json!`]: https://docs.rs/serde_json/1.0/serde_json/macro.json.html
+
+```console
+error: no rules expected the token `,`
+ --> $DIR/double-comma.rs:4:38
+  |
+4 |     println!("{}", json!({ "k": null,, }));
+  |                                      ^ no rules expected this token in macro call
+```
+
+Sometimes we may have a macro that expands successfully but we count on it to
+trigger particular compiler errors at some point beyond macro expansion. For
+example the [`readonly`] crate introduces struct fields that are public but
+readable only, even if the caller has a &mut reference to the surrounding
+struct. If someone writes to a readonly field, we need to be sure that it
+wouldn't compile:
+
+[`readonly`]: https://github.com/dtolnay/readonly
+
+```console
+error[E0594]: cannot assign to data in a `&` reference
+  --> $DIR/write-a-readonly.rs:17:26
+   |
+17 |     println!("{}", s.n); s.n += 1;
+   |                          ^^^^^^^^ cannot assign
+```
+
+In all of these cases, the compiler's output can change because our crate or one
+of our dependencies broke something, or as a consequence of changes in the Rust
+compiler. Both are good reasons to have well conceived compile-fail tests. If we
+refactor and mistakenly cause an error that used to be correct to now no longer
+be emitted or be emitted in the wrong place, that is important for a test suite
+to catch. If the compiler changes something that makes error messages that we
+care about substantially worse, it is also important to catch and report as a
+compiler issue.
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
diff --git a/build.rs b/build.rs
new file mode 100644 (file)
index 0000000..05a92cc
--- /dev/null
+++ b/build.rs
@@ -0,0 +1,17 @@
+use std::env;
+use std::fs;
+use std::io;
+use std::path::Path;
+
+fn main() -> io::Result<()> {
+    println!("cargo:rerun-if-changed=build.rs");
+
+    let out_dir = env::var_os("OUT_DIR").unwrap();
+    let target = env::var("TARGET").ok();
+    let path = Path::new(&out_dir).join("target");
+    let value = match target {
+        Some(target) => format!(r#"Some("{}")"#, target.escape_debug()),
+        None => "None".to_owned(),
+    };
+    fs::write(path, value)
+}
diff --git a/src/cargo.rs b/src/cargo.rs
new file mode 100644 (file)
index 0000000..1f50b73
--- /dev/null
+++ b/src/cargo.rs
@@ -0,0 +1,209 @@
+use crate::directory::Directory;
+use crate::error::{Error, Result};
+use crate::manifest::Name;
+use crate::run::Project;
+use crate::rustflags;
+use serde_derive::Deserialize;
+use std::path::PathBuf;
+use std::process::{Command, Output, Stdio};
+use std::{env, fs, iter};
+
+#[derive(Deserialize)]
+pub struct Metadata {
+    pub target_directory: Directory,
+    pub workspace_root: Directory,
+    pub packages: Vec<PackageMetadata>,
+}
+
+#[derive(Deserialize)]
+pub struct PackageMetadata {
+    pub name: String,
+    pub targets: Vec<BuildTarget>,
+    pub manifest_path: PathBuf,
+}
+
+#[derive(Deserialize)]
+pub struct BuildTarget {
+    pub crate_types: Vec<String>,
+}
+
+fn raw_cargo() -> Command {
+    match env::var_os("CARGO") {
+        Some(cargo) => Command::new(cargo),
+        None => Command::new("cargo"),
+    }
+}
+
+fn cargo(project: &Project) -> Command {
+    let mut cmd = raw_cargo();
+    cmd.current_dir(&project.dir);
+    cmd.envs(cargo_target_dir(project));
+    cmd.envs(rustflags::envs());
+    cmd.env("CARGO_INCREMENTAL", "0");
+    cmd.arg("--offline");
+    cmd
+}
+
+fn cargo_target_dir(project: &Project) -> impl Iterator<Item = (&'static str, PathBuf)> {
+    iter::once((
+        "CARGO_TARGET_DIR",
+        path!(project.target_dir / "tests" / "trybuild"),
+    ))
+}
+
+pub fn manifest_dir() -> Result<Directory> {
+    if let Some(manifest_dir) = env::var_os("CARGO_MANIFEST_DIR") {
+        return Ok(Directory::from(manifest_dir));
+    }
+    let mut dir = Directory::current()?;
+    loop {
+        if dir.join("Cargo.toml").exists() {
+            return Ok(dir);
+        }
+        dir = dir.parent().ok_or(Error::ProjectDir)?;
+    }
+}
+
+pub fn build_dependencies(project: &mut Project) -> Result<()> {
+    let workspace_cargo_lock = path!(project.workspace / "Cargo.lock");
+    if workspace_cargo_lock.exists() {
+        let _ = fs::copy(workspace_cargo_lock, path!(project.dir / "Cargo.lock"));
+    } else {
+        let _ = cargo(project).arg("generate-lockfile").status();
+    }
+
+    let mut command = cargo(project);
+    command
+        .arg(if project.has_pass { "build" } else { "check" })
+        .args(target())
+        .arg("--bin")
+        .arg(&project.name)
+        .args(features(project));
+
+    let status = command.status().map_err(Error::Cargo)?;
+    if !status.success() {
+        return Err(Error::CargoFail);
+    }
+
+    // Check if this Cargo contains https://github.com/rust-lang/cargo/pull/10383
+    project.keep_going = command
+        .arg("-Zunstable-options")
+        .arg("--keep-going")
+        .stdout(Stdio::null())
+        .stderr(Stdio::null())
+        .status()
+        .map(|status| status.success())
+        .unwrap_or(false);
+
+    Ok(())
+}
+
+pub fn build_test(project: &Project, name: &Name) -> Result<Output> {
+    let _ = cargo(project)
+        .arg("clean")
+        .arg("--package")
+        .arg(&project.name)
+        .arg("--color=never")
+        .stdout(Stdio::null())
+        .stderr(Stdio::null())
+        .status();
+
+    cargo(project)
+        .arg(if project.has_pass { "build" } else { "check" })
+        .args(target())
+        .arg("--bin")
+        .arg(name)
+        .args(features(project))
+        .arg("--quiet")
+        .arg("--color=never")
+        .arg("--message-format=json")
+        .output()
+        .map_err(Error::Cargo)
+}
+
+pub fn build_all_tests(project: &Project) -> Result<Output> {
+    let _ = cargo(project)
+        .arg("clean")
+        .arg("--package")
+        .arg(&project.name)
+        .arg("--color=never")
+        .stdout(Stdio::null())
+        .stderr(Stdio::null())
+        .status();
+
+    cargo(project)
+        .arg(if project.has_pass { "build" } else { "check" })
+        .args(target())
+        .arg("--bins")
+        .args(features(project))
+        .arg("--quiet")
+        .arg("--color=never")
+        .arg("--message-format=json")
+        .arg("-Zunstable-options")
+        .arg("--keep-going")
+        .output()
+        .map_err(Error::Cargo)
+}
+
+pub fn run_test(project: &Project, name: &Name) -> Result<Output> {
+    cargo(project)
+        .arg("run")
+        .args(target())
+        .arg("--bin")
+        .arg(name)
+        .args(features(project))
+        .arg("--quiet")
+        .arg("--color=never")
+        .output()
+        .map_err(Error::Cargo)
+}
+
+pub fn metadata() -> Result<Metadata> {
+    let output = raw_cargo()
+        .arg("metadata")
+        .arg("--no-deps")
+        .arg("--format-version=1")
+        .output()
+        .map_err(Error::Cargo)?;
+
+    serde_json::from_slice(&output.stdout).map_err(|err| {
+        print!("{}", String::from_utf8_lossy(&output.stderr));
+        Error::Metadata(err)
+    })
+}
+
+fn features(project: &Project) -> Vec<String> {
+    match &project.features {
+        Some(features) => vec![
+            "--no-default-features".to_owned(),
+            "--features".to_owned(),
+            features.join(","),
+        ],
+        None => vec![],
+    }
+}
+
+fn target() -> Vec<&'static str> {
+    const TARGET: Option<&str> = include!(concat!(env!("OUT_DIR"), "/target"));
+
+    // When --target flag is passed, cargo does not pass RUSTFLAGS to rustc when
+    // building proc-macro and build script even if the host and target triples
+    // are the same. Therefore, if we always pass --target to cargo, tools such
+    // as coverage that require RUSTFLAGS do not work for tests run by trybuild.
+    //
+    // To avoid that problem, do not pass --target to cargo if we know that it
+    // has not been passed.
+    //
+    // Currently, cargo does not have a way to tell the build script whether
+    // --target has been passed or not, and there is no heuristic that can
+    // handle this well.
+    //
+    // Therefore, expose a cfg to always treat the target as host.
+    if cfg!(trybuild_no_target) {
+        vec![]
+    } else if let Some(target) = TARGET {
+        vec!["--target", target]
+    } else {
+        vec![]
+    }
+}
diff --git a/src/dependencies.rs b/src/dependencies.rs
new file mode 100644 (file)
index 0000000..6b25485
--- /dev/null
+++ b/src/dependencies.rs
@@ -0,0 +1,297 @@
+use crate::directory::Directory;
+use crate::error::Error;
+use crate::inherit::InheritEdition;
+use crate::manifest::Edition;
+use serde::de::value::MapAccessDeserializer;
+use serde::de::value::StrDeserializer;
+use serde::de::{self, Deserialize, Deserializer, Visitor};
+use serde::ser::{Serialize, Serializer};
+use serde_derive::{Deserialize, Serialize};
+use serde_json::Value;
+use std::collections::BTreeMap as Map;
+use std::fmt;
+use std::fs;
+use std::path::PathBuf;
+
+pub fn get_manifest(manifest_dir: &Directory) -> Result<Manifest, Error> {
+    let cargo_toml_path = manifest_dir.join("Cargo.toml");
+    let mut manifest = (|| {
+        let manifest_str = fs::read_to_string(&cargo_toml_path)?;
+        let manifest: Manifest = basic_toml::from_str(&manifest_str)?;
+        Ok(manifest)
+    })()
+    .map_err(|err| Error::GetManifest(cargo_toml_path, Box::new(err)))?;
+
+    fix_dependencies(&mut manifest.dependencies, manifest_dir);
+    fix_dependencies(&mut manifest.dev_dependencies, manifest_dir);
+    for target in manifest.target.values_mut() {
+        fix_dependencies(&mut target.dependencies, manifest_dir);
+        fix_dependencies(&mut target.dev_dependencies, manifest_dir);
+    }
+
+    Ok(manifest)
+}
+
+pub fn get_workspace_manifest(manifest_dir: &Directory) -> WorkspaceManifest {
+    try_get_workspace_manifest(manifest_dir).unwrap_or_default()
+}
+
+pub fn try_get_workspace_manifest(manifest_dir: &Directory) -> Result<WorkspaceManifest, Error> {
+    let cargo_toml_path = manifest_dir.join("Cargo.toml");
+    let manifest_str = fs::read_to_string(cargo_toml_path)?;
+    let mut manifest: WorkspaceManifest = basic_toml::from_str(&manifest_str)?;
+
+    fix_dependencies(&mut manifest.workspace.dependencies, manifest_dir);
+    fix_patches(&mut manifest.patch, manifest_dir);
+    fix_replacements(&mut manifest.replace, manifest_dir);
+
+    Ok(manifest)
+}
+
+fn fix_dependencies(dependencies: &mut Map<String, Dependency>, dir: &Directory) {
+    dependencies.remove("trybuild");
+    for dep in dependencies.values_mut() {
+        dep.path = dep.path.as_ref().map(|path| Directory::new(dir.join(path)));
+    }
+}
+
+fn fix_patches(patches: &mut Map<String, RegistryPatch>, dir: &Directory) {
+    for registry in patches.values_mut() {
+        registry.crates.remove("trybuild");
+        for patch in registry.crates.values_mut() {
+            patch.path = patch.path.as_ref().map(|path| dir.join(path));
+        }
+    }
+}
+
+fn fix_replacements(replacements: &mut Map<String, Patch>, dir: &Directory) {
+    replacements.remove("trybuild");
+    for replacement in replacements.values_mut() {
+        replacement.path = replacement.path.as_ref().map(|path| dir.join(path));
+    }
+}
+
+#[derive(Deserialize, Default, Debug)]
+pub struct WorkspaceManifest {
+    #[serde(default)]
+    pub workspace: WorkspaceWorkspace,
+    #[serde(default)]
+    pub patch: Map<String, RegistryPatch>,
+    #[serde(default)]
+    pub replace: Map<String, Patch>,
+}
+
+#[derive(Deserialize, Default, Debug)]
+pub struct WorkspaceWorkspace {
+    #[serde(default)]
+    pub package: WorkspacePackage,
+    #[serde(default)]
+    pub dependencies: Map<String, Dependency>,
+}
+
+#[derive(Deserialize, Default, Debug)]
+pub struct WorkspacePackage {
+    pub edition: Option<Edition>,
+}
+
+#[derive(Deserialize, Default, Debug)]
+pub struct Manifest {
+    #[serde(default)]
+    pub package: Package,
+    #[serde(default)]
+    pub features: Map<String, Vec<String>>,
+    #[serde(default)]
+    pub dependencies: Map<String, Dependency>,
+    #[serde(default, alias = "dev-dependencies")]
+    pub dev_dependencies: Map<String, Dependency>,
+    #[serde(default)]
+    pub target: Map<String, TargetDependencies>,
+}
+
+#[derive(Deserialize, Default, Debug)]
+pub struct Package {
+    pub name: String,
+    #[serde(default)]
+    pub edition: EditionOrInherit,
+    pub resolver: Option<String>,
+}
+
+#[derive(Debug)]
+pub enum EditionOrInherit {
+    Edition(Edition),
+    Inherit,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(remote = "Self")]
+pub struct Dependency {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub version: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub path: Option<Directory>,
+    #[serde(default, skip_serializing_if = "is_false")]
+    pub optional: bool,
+    #[serde(
+        rename = "default-features",
+        default = "get_true",
+        skip_serializing_if = "is_true"
+    )]
+    pub default_features: bool,
+    #[serde(default, skip_serializing_if = "Vec::is_empty")]
+    pub features: Vec<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub git: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub branch: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub tag: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub rev: Option<String>,
+    #[serde(default, skip_serializing_if = "is_false")]
+    pub workspace: bool,
+    #[serde(flatten)]
+    pub rest: Map<String, Value>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct TargetDependencies {
+    #[serde(default, skip_serializing_if = "Map::is_empty")]
+    pub dependencies: Map<String, Dependency>,
+    #[serde(
+        default,
+        alias = "dev-dependencies",
+        skip_serializing_if = "Map::is_empty"
+    )]
+    pub dev_dependencies: Map<String, Dependency>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+#[serde(transparent)]
+pub struct RegistryPatch {
+    pub crates: Map<String, Patch>,
+}
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub struct Patch {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub path: Option<PathBuf>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub git: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub branch: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub tag: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub rev: Option<String>,
+    #[serde(flatten)]
+    pub rest: Map<String, Value>,
+}
+
+fn get_true() -> bool {
+    true
+}
+
+fn is_true(boolean: &bool) -> bool {
+    *boolean
+}
+
+fn is_false(boolean: &bool) -> bool {
+    !*boolean
+}
+
+impl Default for EditionOrInherit {
+    fn default() -> Self {
+        EditionOrInherit::Edition(Edition::default())
+    }
+}
+
+impl<'de> Deserialize<'de> for EditionOrInherit {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        struct EditionOrInheritVisitor;
+
+        impl<'de> Visitor<'de> for EditionOrInheritVisitor {
+            type Value = EditionOrInherit;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str("edition")
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Edition::deserialize(StrDeserializer::new(s)).map(EditionOrInherit::Edition)
+            }
+
+            fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>
+            where
+                M: de::MapAccess<'de>,
+            {
+                InheritEdition::deserialize(MapAccessDeserializer::new(map))?;
+                Ok(EditionOrInherit::Inherit)
+            }
+        }
+
+        deserializer.deserialize_any(EditionOrInheritVisitor)
+    }
+}
+
+impl Serialize for Dependency {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        Dependency::serialize(self, serializer)
+    }
+}
+
+impl<'de> Deserialize<'de> for Dependency {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        struct DependencyVisitor;
+
+        impl<'de> Visitor<'de> for DependencyVisitor {
+            type Value = Dependency;
+
+            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                formatter.write_str(
+                    "a version string like \"0.9.8\" or a \
+                     dependency like { version = \"0.9.8\" }",
+                )
+            }
+
+            fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+            where
+                E: de::Error,
+            {
+                Ok(Dependency {
+                    version: Some(s.to_owned()),
+                    path: None,
+                    optional: false,
+                    default_features: true,
+                    features: Vec::new(),
+                    git: None,
+                    branch: None,
+                    tag: None,
+                    rev: None,
+                    workspace: false,
+                    rest: Map::new(),
+                })
+            }
+
+            fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>
+            where
+                M: de::MapAccess<'de>,
+            {
+                Dependency::deserialize(MapAccessDeserializer::new(map))
+            }
+        }
+
+        deserializer.deserialize_any(DependencyVisitor)
+    }
+}
diff --git a/src/diff.rs b/src/diff.rs
new file mode 100644 (file)
index 0000000..190a96e
--- /dev/null
+++ b/src/diff.rs
@@ -0,0 +1,82 @@
+pub use self::r#impl::Diff;
+
+pub enum Render<'a> {
+    Common(&'a str),
+    Unique(&'a str),
+}
+
+#[cfg(all(feature = "diff", not(windows)))]
+mod r#impl {
+    use super::Render;
+    use dissimilar::Chunk;
+    use std::cmp;
+    use std::panic;
+
+    pub struct Diff<'a> {
+        expected: &'a str,
+        actual: &'a str,
+        diff: Vec<Chunk<'a>>,
+    }
+
+    impl<'a> Diff<'a> {
+        pub fn compute(expected: &'a str, actual: &'a str) -> Option<Self> {
+            if expected.len() + actual.len() > 2048 {
+                // We don't yet trust the dissimilar crate to work well on large
+                // inputs.
+                return None;
+            }
+
+            // Nor on non-ascii inputs.
+            let diff = panic::catch_unwind(|| dissimilar::diff(expected, actual)).ok()?;
+
+            let mut common_len = 0;
+            for chunk in &diff {
+                if let Chunk::Equal(common) = chunk {
+                    common_len += common.len();
+                }
+            }
+
+            let bigger_len = cmp::max(expected.len(), actual.len());
+            let worth_printing = 5 * common_len >= 4 * bigger_len;
+            if !worth_printing {
+                return None;
+            }
+
+            Some(Diff {
+                expected,
+                actual,
+                diff,
+            })
+        }
+
+        pub fn iter<'i>(&'i self, input: &str) -> impl Iterator<Item = Render<'a>> + 'i {
+            let expected = input == self.expected;
+            let actual = input == self.actual;
+            self.diff.iter().filter_map(move |chunk| match chunk {
+                Chunk::Equal(common) => Some(Render::Common(common)),
+                Chunk::Delete(unique) if expected => Some(Render::Unique(unique)),
+                Chunk::Insert(unique) if actual => Some(Render::Unique(unique)),
+                _ => None,
+            })
+        }
+    }
+}
+
+#[cfg(any(not(feature = "diff"), windows))]
+mod r#impl {
+    use super::Render;
+
+    pub enum Diff {}
+
+    impl Diff {
+        pub fn compute(_expected: &str, _actual: &str) -> Option<Self> {
+            None
+        }
+
+        pub fn iter(&self, _input: &str) -> Box<dyn Iterator<Item = Render>> {
+            let _ = Render::Common;
+            let _ = Render::Unique;
+            match *self {}
+        }
+    }
+}
diff --git a/src/directory.rs b/src/directory.rs
new file mode 100644 (file)
index 0000000..d2193be
--- /dev/null
+++ b/src/directory.rs
@@ -0,0 +1,62 @@
+use serde::de::{Deserialize, Deserializer};
+use serde_derive::Serialize;
+use std::borrow::Cow;
+use std::env;
+use std::ffi::OsString;
+use std::io;
+use std::path::{Path, PathBuf};
+
+#[derive(Clone, Debug, Serialize)]
+#[serde(transparent)]
+pub struct Directory {
+    path: PathBuf,
+}
+
+impl Directory {
+    pub fn new<P: Into<PathBuf>>(path: P) -> Self {
+        let mut path = path.into();
+        path.push("");
+        Directory { path }
+    }
+
+    pub fn current() -> io::Result<Self> {
+        env::current_dir().map(Directory::new)
+    }
+
+    pub fn to_string_lossy(&self) -> Cow<str> {
+        self.path.to_string_lossy()
+    }
+
+    pub fn join<P: AsRef<Path>>(&self, tail: P) -> PathBuf {
+        self.path.join(tail)
+    }
+
+    pub fn parent(&self) -> Option<Self> {
+        self.path.parent().map(Directory::new)
+    }
+
+    pub fn canonicalize(&self) -> io::Result<Self> {
+        self.path.canonicalize().map(Directory::new)
+    }
+}
+
+impl From<OsString> for Directory {
+    fn from(os_string: OsString) -> Self {
+        Directory::new(PathBuf::from(os_string))
+    }
+}
+
+impl AsRef<Path> for Directory {
+    fn as_ref(&self) -> &Path {
+        &self.path
+    }
+}
+
+impl<'de> Deserialize<'de> for Directory {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        PathBuf::deserialize(deserializer).map(Directory::new)
+    }
+}
diff --git a/src/env.rs b/src/env.rs
new file mode 100644 (file)
index 0000000..cfd4f43
--- /dev/null
+++ b/src/env.rs
@@ -0,0 +1,29 @@
+use crate::error::{Error, Result};
+use std::env;
+
+#[derive(PartialEq, Debug)]
+pub enum Update {
+    Wip,
+    Overwrite,
+}
+
+impl Default for Update {
+    fn default() -> Self {
+        Update::Wip
+    }
+}
+
+impl Update {
+    pub fn env() -> Result<Self> {
+        let var = match env::var_os("TRYBUILD") {
+            Some(var) => var,
+            None => return Ok(Update::default()),
+        };
+
+        match var.as_os_str().to_str() {
+            Some("wip") => Ok(Update::Wip),
+            Some("overwrite") => Ok(Update::Overwrite),
+            _ => Err(Error::UpdateVar(var)),
+        }
+    }
+}
diff --git a/src/error.rs b/src/error.rs
new file mode 100644 (file)
index 0000000..d6da1d1
--- /dev/null
@@ -0,0 +1,95 @@
+use glob::{GlobError, PatternError};
+use std::ffi::OsString;
+use std::fmt::{self, Display};
+use std::io;
+use std::path::PathBuf;
+
+#[derive(Debug)]
+pub enum Error {
+    Cargo(io::Error),
+    CargoFail,
+    GetManifest(PathBuf, Box<Error>),
+    Glob(GlobError),
+    Io(io::Error),
+    Metadata(serde_json::Error),
+    Mismatch,
+    NoWorkspaceManifest,
+    Open(PathBuf, io::Error),
+    Pattern(PatternError),
+    ProjectDir,
+    ReadStderr(io::Error),
+    RunFailed,
+    ShouldNotHaveCompiled,
+    Toml(basic_toml::Error),
+    UpdateVar(OsString),
+    WriteStderr(io::Error),
+}
+
+pub type Result<T> = std::result::Result<T, Error>;
+
+impl Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use self::Error::*;
+
+        match self {
+            Cargo(e) => write!(f, "failed to execute cargo: {}", e),
+            CargoFail => write!(f, "cargo reported an error"),
+            GetManifest(path, e) => write!(f, "failed to read manifest {}: {}", path.display(), e),
+            Glob(e) => write!(f, "{}", e),
+            Io(e) => write!(f, "{}", e),
+            Metadata(e) => write!(f, "failed to read cargo metadata: {}", e),
+            Mismatch => write!(f, "compiler error does not match expected error"),
+            NoWorkspaceManifest => write!(f, "Cargo.toml uses edition.workspace=true, but no edition found in workspace's manifest"),
+            Open(path, e) => write!(f, "{}: {}", path.display(), e),
+            Pattern(e) => write!(f, "{}", e),
+            ProjectDir => write!(f, "failed to determine name of project dir"),
+            ReadStderr(e) => write!(f, "failed to read stderr file: {}", e),
+            RunFailed => write!(f, "execution of the test case was unsuccessful"),
+            ShouldNotHaveCompiled => {
+                write!(f, "expected test case to fail to compile, but it succeeded")
+            }
+            Toml(e) => write!(f, "{}", e),
+            UpdateVar(var) => write!(
+                f,
+                "unrecognized value of TRYBUILD: {:?}",
+                var.to_string_lossy(),
+            ),
+            WriteStderr(e) => write!(f, "failed to write stderr file: {}", e),
+        }
+    }
+}
+
+impl Error {
+    pub fn already_printed(&self) -> bool {
+        use self::Error::*;
+
+        matches!(
+            self,
+            CargoFail | Mismatch | RunFailed | ShouldNotHaveCompiled
+        )
+    }
+}
+
+impl From<GlobError> for Error {
+    fn from(err: GlobError) -> Self {
+        Error::Glob(err)
+    }
+}
+
+impl From<PatternError> for Error {
+    fn from(err: PatternError) -> Self {
+        Error::Pattern(err)
+    }
+}
+
+impl From<io::Error> for Error {
+    fn from(err: io::Error) -> Self {
+        Error::Io(err)
+    }
+}
+
+impl From<basic_toml::Error> for Error {
+    fn from(err: basic_toml::Error) -> Self {
+        Error::Toml(err)
+    }
+}
diff --git a/src/expand.rs b/src/expand.rs
new file mode 100644 (file)
index 0000000..502e755
--- /dev/null
+++ b/src/expand.rs
@@ -0,0 +1,76 @@
+use crate::error::{Error, Result};
+use crate::manifest::Name;
+use crate::Test;
+use std::collections::BTreeMap as Map;
+use std::path::PathBuf;
+
+#[derive(Debug)]
+pub(crate) struct ExpandedTest {
+    pub name: Name,
+    pub test: Test,
+    pub error: Option<Error>,
+    is_from_glob: bool,
+}
+
+pub(crate) fn expand_globs(tests: &[Test]) -> Vec<ExpandedTest> {
+    let mut set = ExpandedTestSet::new();
+
+    for test in tests {
+        match test.path.to_str() {
+            Some(utf8) if utf8.contains('*') => match glob(utf8) {
+                Ok(paths) => {
+                    let expected = test.expected;
+                    for path in paths {
+                        set.insert(Test { path, expected }, None, true);
+                    }
+                }
+                Err(error) => set.insert(test.clone(), Some(error), false),
+            },
+            _ => set.insert(test.clone(), None, false),
+        }
+    }
+
+    set.vec
+}
+
+struct ExpandedTestSet {
+    vec: Vec<ExpandedTest>,
+    path_to_index: Map<PathBuf, usize>,
+}
+
+impl ExpandedTestSet {
+    fn new() -> Self {
+        ExpandedTestSet {
+            vec: Vec::new(),
+            path_to_index: Map::new(),
+        }
+    }
+
+    fn insert(&mut self, test: Test, error: Option<Error>, is_from_glob: bool) {
+        if let Some(&i) = self.path_to_index.get(&test.path) {
+            let mut prev = &mut self.vec[i];
+            if prev.is_from_glob {
+                prev.test.expected = test.expected;
+                return;
+            }
+        }
+
+        let index = self.vec.len();
+        let name = Name(format!("trybuild{:03}", index));
+        self.path_to_index.insert(test.path.clone(), index);
+        self.vec.push(ExpandedTest {
+            name,
+            test,
+            error,
+            is_from_glob,
+        });
+    }
+}
+
+fn glob(pattern: &str) -> Result<Vec<PathBuf>> {
+    let mut paths = glob::glob(pattern)?
+        .map(|entry| entry.map_err(Error::from))
+        .collect::<Result<Vec<PathBuf>>>()?;
+    paths.sort();
+    Ok(paths)
+}
diff --git a/src/features.rs b/src/features.rs
new file mode 100644 (file)
index 0000000..a885b6a
--- /dev/null
+++ b/src/features.rs
@@ -0,0 +1,104 @@
+use serde::de::{self, Deserialize, DeserializeOwned, Deserializer};
+use serde_derive::Deserialize;
+use std::env;
+use std::error::Error;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::PathBuf;
+
+pub fn find() -> Option<Vec<String>> {
+    try_find().ok()
+}
+
+struct Ignored;
+
+impl<E: Error> From<E> for Ignored {
+    fn from(_error: E) -> Self {
+        Ignored
+    }
+}
+
+#[derive(Deserialize)]
+struct Build {
+    #[serde(deserialize_with = "from_json")]
+    features: Vec<String>,
+}
+
+fn try_find() -> Result<Vec<String>, Ignored> {
+    // This will look something like:
+    //   /path/to/crate_name/target/debug/deps/test_name-HASH
+    let test_binary = env::args_os().next().ok_or(Ignored)?;
+
+    // The hash at the end is ascii so not lossy, rest of conversion doesn't
+    // matter.
+    let test_binary_lossy = test_binary.to_string_lossy();
+    let hash_range = if cfg!(windows) {
+        // Trim ".exe" from the binary name for windows.
+        test_binary_lossy.len() - 21..test_binary_lossy.len() - 4
+    } else {
+        test_binary_lossy.len() - 17..test_binary_lossy.len()
+    };
+    let hash = test_binary_lossy.get(hash_range).ok_or(Ignored)?;
+    if !hash.starts_with('-') || !hash[1..].bytes().all(is_lower_hex_digit) {
+        return Err(Ignored);
+    }
+
+    let binary_path = PathBuf::from(&test_binary);
+
+    // Feature selection is saved in:
+    //   /path/to/crate_name/target/debug/.fingerprint/*-HASH/*-HASH.json
+    let up = binary_path
+        .parent()
+        .ok_or(Ignored)?
+        .parent()
+        .ok_or(Ignored)?;
+    let fingerprint_dir = up.join(".fingerprint");
+    if !fingerprint_dir.is_dir() {
+        return Err(Ignored);
+    }
+
+    let mut hash_matches = Vec::new();
+    for entry in fingerprint_dir.read_dir()? {
+        let entry = entry?;
+        let is_dir = entry.file_type()?.is_dir();
+        let matching_hash = entry.file_name().to_string_lossy().ends_with(hash);
+        if is_dir && matching_hash {
+            hash_matches.push(entry.path());
+        }
+    }
+
+    if hash_matches.len() != 1 {
+        return Err(Ignored);
+    }
+
+    let mut json_matches = Vec::new();
+    for entry in hash_matches[0].read_dir()? {
+        let entry = entry?;
+        let is_file = entry.file_type()?.is_file();
+        let is_json = entry.path().extension() == Some(OsStr::new("json"));
+        if is_file && is_json {
+            json_matches.push(entry.path());
+        }
+    }
+
+    if json_matches.len() != 1 {
+        return Err(Ignored);
+    }
+
+    let build_json = fs::read_to_string(&json_matches[0])?;
+    let build: Build = serde_json::from_str(&build_json)?;
+    Ok(build.features)
+}
+
+fn is_lower_hex_digit(byte: u8) -> bool {
+    byte >= b'0' && byte <= b'9' || byte >= b'a' && byte <= b'f'
+}
+
+fn from_json<'de, T, D>(deserializer: D) -> Result<T, D::Error>
+where
+    T: DeserializeOwned,
+    D: Deserializer<'de>,
+{
+    let json = String::deserialize(deserializer)?;
+    serde_json::from_str(&json).map_err(de::Error::custom)
+}
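
As the `from_json` helper above implies, Cargo's fingerprint files store `features` as a JSON string that itself contains a JSON array, so the value has to be decoded twice. A small standalone illustration of that double decoding (the sample input is hypothetical but follows that shape):

```
fn main() {
    // Outer JSON document, with `features` as a string-encoded JSON array.
    let fingerprint = r#"{ "features": "[\"derive\", \"std\"]" }"#;

    let outer: serde_json::Value = serde_json::from_str(fingerprint).unwrap();
    let features: Vec<String> =
        serde_json::from_str(outer["features"].as_str().unwrap()).unwrap();

    assert_eq!(features, ["derive", "std"]);
}
```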
diff --git a/src/flock.rs b/src/flock.rs
new file mode 100644 (file)
index 0000000..bd6a0f9
--- /dev/null
@@ -0,0 +1,141 @@
+use once_cell::sync::OnceCell;
+use std::fs::{self, File, OpenOptions};
+use std::io;
+use std::path::{Path, PathBuf};
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::{Arc, Mutex, MutexGuard, PoisonError};
+use std::thread;
+use std::time::{Duration, SystemTime};
+
+static LOCK: OnceCell<Mutex<()>> = OnceCell::new();
+
+pub struct Lock {
+    intraprocess_guard: Guard,
+    lockfile: FileLock,
+}
+
+// High-quality lock to coordinate different #[test] functions within the *same*
+// integration test crate.
+enum Guard {
+    NotLocked,
+    Locked(MutexGuard<'static, ()>),
+}
+
+// Best-effort filesystem lock to coordinate different #[test] functions across
+// *different* integration tests.
+enum FileLock {
+    NotLocked,
+    Locked {
+        path: PathBuf,
+        done: Arc<AtomicBool>,
+    },
+}
+
+impl Lock {
+    pub fn acquire(path: impl AsRef<Path>) -> Self {
+        Lock {
+            intraprocess_guard: Guard::acquire(),
+            lockfile: FileLock::acquire(path),
+        }
+    }
+}
+
+impl Guard {
+    fn acquire() -> Self {
+        Guard::Locked(
+            LOCK.get_or_init(|| Mutex::new(()))
+                .lock()
+                .unwrap_or_else(PoisonError::into_inner),
+        )
+    }
+}
+
+impl FileLock {
+    fn acquire(path: impl AsRef<Path>) -> Self {
+        let path = path.as_ref().to_owned();
+        let lockfile = match create(&path) {
+            None => return FileLock::NotLocked,
+            Some(lockfile) => lockfile,
+        };
+        let done = Arc::new(AtomicBool::new(false));
+        thread::spawn({
+            let done = Arc::clone(&done);
+            move || poll(lockfile, done)
+        });
+        FileLock::Locked { path, done }
+    }
+}
+
+impl Drop for Lock {
+    fn drop(&mut self) {
+        let Lock {
+            intraprocess_guard,
+            lockfile,
+        } = self;
+        // Unlock file lock first.
+        *lockfile = FileLock::NotLocked;
+        *intraprocess_guard = Guard::NotLocked;
+    }
+}
+
+impl Drop for FileLock {
+    fn drop(&mut self) {
+        match self {
+            FileLock::NotLocked => {}
+            FileLock::Locked { path, done } => {
+                done.store(true, Ordering::Release);
+                let _ = fs::remove_file(path);
+            }
+        }
+    }
+}
+
+fn create(path: &Path) -> Option<File> {
+    loop {
+        match OpenOptions::new().write(true).create_new(true).open(path) {
+            // Acquired lock by creating lockfile.
+            Ok(lockfile) => return Some(lockfile),
+            Err(io_error) => match io_error.kind() {
+                // Lock is already held by another test.
+                io::ErrorKind::AlreadyExists => {}
+                // File based locking isn't going to work for some reason.
+                _ => return None,
+            },
+        }
+
+        // Check whether it's okay to bust the lock.
+        let metadata = match fs::metadata(path) {
+            Ok(metadata) => metadata,
+            Err(io_error) => match io_error.kind() {
+                // Other holder of the lock finished. Retry.
+                io::ErrorKind::NotFound => continue,
+                _ => return None,
+            },
+        };
+
+        let modified = match metadata.modified() {
+            Ok(modified) => modified,
+            Err(_) => return None,
+        };
+
+        let now = SystemTime::now();
+        let considered_stale = now - Duration::from_millis(1500);
+        let considered_future = now + Duration::from_millis(1500);
+        if modified < considered_stale || considered_future < modified {
+            return File::create(path).ok();
+        }
+
+        // Try again shortly.
+        thread::sleep(Duration::from_millis(500));
+    }
+}
+
+// Bump mtime periodically while test directory is in use.
+fn poll(lockfile: File, done: Arc<AtomicBool>) {
+    loop {
+        thread::sleep(Duration::from_millis(500));
+        if done.load(Ordering::Acquire) || lockfile.set_len(0).is_err() {
+            return;
+        }
+    }
+}
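
Putting numbers on the file lock's staleness rule: `poll` refreshes the lockfile's mtime every 500ms while the holder is alive, and `create` only busts a lock whose mtime is more than 1.5 seconds old or lies in the future (guarding against clock skew). A standalone sketch of just that decision:

```
use std::time::{Duration, SystemTime};

// Mirror of the staleness check in `create` above, isolated for illustration.
fn may_bust_lock(modified: SystemTime, now: SystemTime) -> bool {
    let considered_stale = now - Duration::from_millis(1500);
    let considered_future = now + Duration::from_millis(1500);
    modified < considered_stale || considered_future < modified
}

fn main() {
    let now = SystemTime::now();
    assert!(!may_bust_lock(now, now));                          // recently refreshed
    assert!(may_bust_lock(now - Duration::from_secs(10), now)); // holder likely died
    assert!(may_bust_lock(now + Duration::from_secs(10), now)); // clock skew
}
```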
diff --git a/src/inherit.rs b/src/inherit.rs
new file mode 100644 (file)
index 0000000..056e28d
--- /dev/null
@@ -0,0 +1,41 @@
+use serde::de::{self, Deserialize, Deserializer, Visitor};
+use serde_derive::Deserialize;
+use std::fmt;
+
+#[derive(Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct InheritEdition {
+    pub workspace: True,
+}
+
+pub struct True;
+
+impl<'de> Deserialize<'de> for True {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        deserializer.deserialize_bool(True)
+    }
+}
+
+impl<'de> Visitor<'de> for True {
+    type Value = True;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str("bool")
+    }
+
+    fn visit_bool<E>(self, b: bool) -> Result<Self::Value, E>
+    where
+        E: de::Error,
+    {
+        if b {
+            Ok(True)
+        } else {
+            Err(de::Error::custom(
+                "workspace=false is unsupported for package.edition",
+            ))
+        }
+    }
+}
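
`InheritEdition` models the `package.edition.workspace = true` form of Cargo's workspace inheritance, and the `True` visitor turns `workspace = false` into a hard error rather than silently accepting it. A simplified standalone check (a plain bool field instead of the visitor, parsed with basic_toml, which the crate already depends on):

```
use serde_derive::Deserialize;

#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
struct InheritEdition {
    workspace: bool,
}

fn main() {
    let inherit: InheritEdition = basic_toml::from_str("workspace = true").unwrap();
    assert!(inherit.workspace);

    // With a plain bool this parses; the visitor in inherit.rs instead rejects
    // it with "workspace=false is unsupported for package.edition".
    let inherit: InheritEdition = basic_toml::from_str("workspace = false").unwrap();
    assert!(!inherit.workspace);
}
```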
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644 (file)
index 0000000..0b66749
--- /dev/null
@@ -0,0 +1,318 @@
+//! [![github]](https://github.com/dtolnay/trybuild)&ensp;[![crates-io]](https://crates.io/crates/trybuild)&ensp;[![docs-rs]](https://docs.rs/trybuild)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! #### &emsp;A compiler diagnostics testing library in just 3 functions.
+//!
+//! Trybuild is a test harness for invoking rustc on a set of test cases and
+//! asserting that any resulting error messages are the ones intended.
+//!
+//! Such tests are commonly useful for testing error reporting involving
+//! procedural macros. We would write test cases triggering either errors
+//! detected by the macro or errors detected by the Rust compiler in the
+//! resulting expanded code, and compare against the expected errors to ensure
+//! that they remain user-friendly.
+//!
+//! This style of testing is sometimes called *ui tests* because they test
+//! aspects of the user's interaction with a library outside of what would be
+//! covered by ordinary API tests.
+//!
+//! Nothing here is specific to macros; trybuild would work equally well for
+//! testing misuse of non-macro APIs.
+//!
+//! <br>
+//!
+//! # Compile-fail tests
+//!
+//! A minimal trybuild setup looks like this:
+//!
+//! ```
+//! #[test]
+//! fn ui() {
+//!     let t = trybuild::TestCases::new();
+//!     t.compile_fail("tests/ui/*.rs");
+//! }
+//! ```
+//!
+//! The test can be run with `cargo test`. It will individually compile each of
+//! the source files matching the glob pattern, expect them to fail to compile,
+//! and assert that the compiler's error message matches an adjacently named
+//! _*.stderr_ file containing the expected output (same file name as the test
+//! except with a different extension). If it matches, the test case is
+//! considered to succeed.
+//!
+//! Dependencies listed under `[dev-dependencies]` in the project's Cargo.toml
+//! are accessible from within the test cases.
+//!
+//! <p align="center">
+//! <img src="https://user-images.githubusercontent.com/1940490/57186574-76469e00-6e96-11e9-8cb5-b63b657170c9.png" width="700">
+//! </p>
+//!
+//! Failing tests display the expected vs actual compiler output inline.
+//!
+//! <p align="center">
+//! <img src="https://user-images.githubusercontent.com/1940490/57186575-79418e80-6e96-11e9-9478-c9b3dc10327f.png" width="700">
+//! </p>
+//!
+//! A compile_fail test that fails to fail to compile is also a failure.
+//!
+//! <p align="center">
+//! <img src="https://user-images.githubusercontent.com/1940490/57186576-7b0b5200-6e96-11e9-8bfd-2de705125108.png" width="700">
+//! </p>
+//!
+//! <br>
+//!
+//! # Pass tests
+//!
+//! The same test harness is able to run tests that are expected to pass, too.
+//! Ordinarily you would just have Cargo run such tests directly, but being able
+//! to combine modes like this could be useful for workshops in which
+//! participants work through test cases enabling one at a time. Trybuild was
+//! originally developed for my [procedural macros workshop at Rust
+//! Latam][workshop].
+//!
+//! [workshop]: https://github.com/dtolnay/proc-macro-workshop
+//!
+//! ```
+//! #[test]
+//! fn ui() {
+//!     let t = trybuild::TestCases::new();
+//!     t.pass("tests/01-parse-header.rs");
+//!     t.pass("tests/02-parse-body.rs");
+//!     t.compile_fail("tests/03-expand-four-errors.rs");
+//!     t.pass("tests/04-paste-ident.rs");
+//!     t.pass("tests/05-repeat-section.rs");
+//!     //t.pass("tests/06-make-work-in-function.rs");
+//!     //t.pass("tests/07-init-array.rs");
+//!     //t.compile_fail("tests/08-ident-span.rs");
+//! }
+//! ```
+//!
+//! Pass tests are considered to succeed if they compile successfully and have a
+//! `main` function that does not panic when the compiled binary is executed.
+//!
+//! <p align="center">
+//! <img src="https://user-images.githubusercontent.com/1940490/57186580-7f376f80-6e96-11e9-9cae-8257609269ef.png" width="700">
+//! </p>
+//!
+//! <br>
+//!
+//! # Details
+//!
+//! That's the entire API.
+//!
+//! <br>
+//!
+//! # Workflow
+//!
+//! There are two ways to update the _*.stderr_ files as you iterate on your
+//! test cases or your library; handwriting them is not recommended.
+//!
+//! First, if a test case is being run as compile_fail but a corresponding
+//! _*.stderr_ file does not exist, the test runner will save the actual
+//! compiler output with the right filename into a directory called *wip* within
+//! the directory containing Cargo.toml. So you can update these files by
+//! deleting them, running `cargo test`, and moving all the files from *wip*
+//! into your testcase directory.
+//!
+//! <p align="center">
+//! <img src="https://user-images.githubusercontent.com/1940490/57186579-7cd51580-6e96-11e9-9f19-54dcecc9fbba.png" width="700">
+//! </p>
+//!
+//! Alternatively, run `cargo test` with the environment variable
+//! `TRYBUILD=overwrite` to skip the *wip* directory and write all compiler
+//! output directly in place. You'll want to check `git diff` afterward to be
+//! sure the compiler's output is what you had in mind.
+//!
+//! <br>
+//!
+//! # What to test
+//!
+//! When it comes to compile-fail tests, write tests for anything for which you
+//! care to find out when there are changes in the user-facing compiler output.
+//! As a negative example, please don't write compile-fail tests simply calling
+//! all of your public APIs with arguments of the wrong type; there would be no
+//! benefit.
+//!
+//! A common use would be for testing specific targeted error messages emitted
+//! by a procedural macro. For example the derive macro from the [`ref-cast`]
+//! crate is required to be placed on a type that has either `#[repr(C)]` or
+//! `#[repr(transparent)]` in order for the expansion to be free of undefined
+//! behavior, which it enforces at compile time:
+//!
+//! [`ref-cast`]: https://github.com/dtolnay/ref-cast
+//!
+//! ```console
+//! error: RefCast trait requires #[repr(C)] or #[repr(transparent)]
+//!  --> $DIR/missing-repr.rs:3:10
+//!   |
+//! 3 | #[derive(RefCast)]
+//!   |          ^^^^^^^
+//! ```
+//!
+//! Macros that consume helper attributes will want to check that unrecognized
+//! content within those attributes is properly indicated to the caller. Is the
+//! error message correctly placed under the erroneous tokens, not on a useless
+//! call\_site span?
+//!
+//! ```console
+//! error: unknown serde field attribute `qqq`
+//!  --> $DIR/unknown-attribute.rs:5:13
+//!   |
+//! 5 |     #[serde(qqq = "...")]
+//!   |             ^^^
+//! ```
+//!
+//! Declarative macros can benefit from compile-fail tests too. The [`json!`]
+//! macro from serde\_json is just a great big macro\_rules macro but makes an
+//! effort to have error messages from broken JSON in the input always appear on
+//! the most appropriate token:
+//!
+//! [`json!`]: https://docs.rs/serde_json/1.0/serde_json/macro.json.html
+//!
+//! ```console
+//! error: no rules expected the token `,`
+//!  --> $DIR/double-comma.rs:4:38
+//!   |
+//! 4 |     println!("{}", json!({ "k": null,, }));
+//!   |                                      ^ no rules expected this token in macro call
+//! ```
+//!
+//! Sometimes we may have a macro that expands successfully but we count on it
+//! to trigger particular compiler errors at some point beyond macro expansion.
+//! For example the [`readonly`] crate introduces struct fields that are public
+//! but readable only, even if the caller has a &mut reference to the
+//! surrounding struct. If someone writes to a readonly field, we need to be
+//! sure that it wouldn't compile:
+//!
+//! [`readonly`]: https://github.com/dtolnay/readonly
+//!
+//! ```console
+//! error[E0594]: cannot assign to data in a `&` reference
+//!   --> $DIR/write-a-readonly.rs:17:26
+//!    |
+//! 17 |     println!("{}", s.n); s.n += 1;
+//!    |                          ^^^^^^^^ cannot assign
+//! ```
+//!
+//! In all of these cases, the compiler's output can change because our crate or
+//! one of our dependencies broke something, or as a consequence of changes in
+//! the Rust compiler. Both are good reasons to have well conceived compile-fail
+//! tests. If we refactor and mistakenly cause an error that used to be correct
+//! to now no longer be emitted or be emitted in the wrong place, that is
+//! important for a test suite to catch. If the compiler changes something that
+//! makes error messages that we care about substantially worse, it is also
+//! important to catch and report as a compiler issue.
+
+#![doc(html_root_url = "https://docs.rs/trybuild/1.0.80")]
+#![allow(
+    clippy::collapsible_if,
+    clippy::default_trait_access,
+    clippy::derive_partial_eq_without_eq,
+    clippy::doc_markdown,
+    clippy::enum_glob_use,
+    clippy::iter_not_returning_iterator, // https://github.com/rust-lang/rust-clippy/issues/8285
+    clippy::let_underscore_untyped, // https://github.com/rust-lang/rust-clippy/issues/10410
+    clippy::manual_assert,
+    clippy::manual_range_contains,
+    clippy::module_inception,
+    clippy::module_name_repetitions,
+    clippy::must_use_candidate,
+    clippy::needless_pass_by_value,
+    clippy::non_ascii_literal,
+    clippy::range_plus_one,
+    clippy::similar_names,
+    clippy::single_match_else,
+    clippy::too_many_lines,
+    clippy::trivially_copy_pass_by_ref,
+    clippy::unused_self,
+    clippy::while_let_on_iterator,
+)]
+#![deny(clippy::clone_on_ref_ptr)]
+
+#[macro_use]
+mod term;
+
+#[macro_use]
+mod path;
+
+mod cargo;
+mod dependencies;
+mod diff;
+mod directory;
+mod env;
+mod error;
+mod expand;
+mod features;
+mod flock;
+mod inherit;
+mod manifest;
+mod message;
+mod normalize;
+mod run;
+mod rustflags;
+
+use std::cell::RefCell;
+use std::panic::RefUnwindSafe;
+use std::path::{Path, PathBuf};
+use std::thread;
+
+#[derive(Debug)]
+pub struct TestCases {
+    runner: RefCell<Runner>,
+}
+
+#[derive(Debug)]
+struct Runner {
+    tests: Vec<Test>,
+}
+
+#[derive(Clone, Debug)]
+struct Test {
+    path: PathBuf,
+    expected: Expected,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum Expected {
+    Pass,
+    CompileFail,
+}
+
+impl TestCases {
+    #[allow(clippy::new_without_default)]
+    pub fn new() -> Self {
+        TestCases {
+            runner: RefCell::new(Runner { tests: Vec::new() }),
+        }
+    }
+
+    pub fn pass<P: AsRef<Path>>(&self, path: P) {
+        self.runner.borrow_mut().tests.push(Test {
+            path: path.as_ref().to_owned(),
+            expected: Expected::Pass,
+        });
+    }
+
+    pub fn compile_fail<P: AsRef<Path>>(&self, path: P) {
+        self.runner.borrow_mut().tests.push(Test {
+            path: path.as_ref().to_owned(),
+            expected: Expected::CompileFail,
+        });
+    }
+}
+
+impl RefUnwindSafe for TestCases {}
+
+#[doc(hidden)]
+impl Drop for TestCases {
+    fn drop(&mut self) {
+        if !thread::panicking() {
+            self.runner.borrow_mut().run();
+        }
+    }
+}
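
One behavioral detail worth calling out from the code above: `pass` and `compile_fail` only queue tests, and the whole batch is built and checked when the `TestCases` value is dropped at the end of the `#[test]` function (skipped if the thread is already panicking). So a test like the following, with a hypothetical file name, does no work until the closing brace:

```
#[test]
fn ui() {
    let t = trybuild::TestCases::new();
    t.compile_fail("tests/ui/bad-input.rs"); // only queued here
    // ... other setup may still run here ...
} // <- `t` is dropped here; all queued cases are compiled and checked
```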
diff --git a/src/manifest.rs b/src/manifest.rs
new file mode 100644 (file)
index 0000000..c4bbae5
--- /dev/null
@@ -0,0 +1,103 @@
+use crate::dependencies::{Dependency, Patch, RegistryPatch, TargetDependencies};
+use serde::ser::{SerializeMap, Serializer};
+use serde_derive::{Deserialize, Serialize};
+use std::collections::BTreeMap as Map;
+use std::ffi::OsStr;
+use std::path::PathBuf;
+
+#[derive(Serialize, Debug)]
+pub struct Manifest {
+    pub package: Package,
+    #[serde(skip_serializing_if = "Map::is_empty")]
+    pub features: Map<String, Vec<String>>,
+    pub dependencies: Map<String, Dependency>,
+    #[serde(skip_serializing_if = "Map::is_empty")]
+    pub target: Map<String, TargetDependencies>,
+    #[serde(rename = "bin")]
+    pub bins: Vec<Bin>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub workspace: Option<Workspace>,
+    #[serde(
+        serialize_with = "serialize_patch",
+        skip_serializing_if = "empty_patch"
+    )]
+    pub patch: Map<String, RegistryPatch>,
+    #[serde(skip_serializing_if = "Map::is_empty")]
+    pub replace: Map<String, Patch>,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Package {
+    pub name: String,
+    pub version: String,
+    pub edition: Edition,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub resolver: Option<String>,
+    pub publish: bool,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub enum Edition {
+    #[serde(rename = "2015")]
+    E2015,
+    #[serde(rename = "2018")]
+    E2018,
+    #[serde(rename = "2021")]
+    E2021,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Bin {
+    pub name: Name,
+    pub path: PathBuf,
+}
+
+#[derive(Serialize, Clone, Debug)]
+pub struct Name(pub String);
+
+#[derive(Serialize, Debug)]
+pub struct Config {
+    pub build: Build,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Build {
+    pub rustflags: Vec<&'static str>,
+}
+
+#[derive(Serialize, Debug)]
+pub struct Workspace {
+    #[serde(skip_serializing_if = "Map::is_empty")]
+    pub dependencies: Map<String, Dependency>,
+}
+
+impl Default for Edition {
+    fn default() -> Self {
+        Edition::E2018
+    }
+}
+
+impl AsRef<OsStr> for Name {
+    fn as_ref(&self) -> &OsStr {
+        self.0.as_ref()
+    }
+}
+
+fn serialize_patch<S>(patch: &Map<String, RegistryPatch>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let mut map = serializer.serialize_map(None)?;
+    for (registry, patch) in patch {
+        if !patch.crates.is_empty() {
+            map.serialize_entry(registry, patch)?;
+        }
+    }
+    map.end()
+}
+
+fn empty_patch(patch: &Map<String, RegistryPatch>) -> bool {
+    patch
+        .values()
+        .all(|registry_patch| registry_patch.crates.is_empty())
+}
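
These structs are serialized with basic_toml to produce the generated test project's Cargo.toml; for example, `#[serde(rename = "bin")]` on `bins` is what lets the generated manifest list one `[[bin]]` table per test binary (named `trybuild000`, `trybuild001`, ... by the expand step earlier). A reduced, standalone sketch of that serialization, with the struct shapes trimmed down for illustration:

```
use serde_derive::Serialize;

#[derive(Serialize)]
struct Manifest {
    package: Package,
    #[serde(rename = "bin")]
    bins: Vec<Bin>,
}

#[derive(Serialize)]
struct Package {
    name: String,
    edition: String,
}

#[derive(Serialize)]
struct Bin {
    name: String,
    path: String,
}

fn main() {
    let manifest = Manifest {
        package: Package {
            name: "example-tests".to_owned(),
            edition: "2021".to_owned(),
        },
        bins: vec![Bin {
            name: "trybuild000".to_owned(),
            path: "tests/ui/example.rs".to_owned(),
        }],
    };
    // Prints a [package] table followed by a [[bin]] table.
    print!("{}", basic_toml::to_string(&manifest).unwrap());
}
```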
diff --git a/src/message.rs b/src/message.rs
new file mode 100644 (file)
index 0000000..848e815
--- /dev/null
@@ -0,0 +1,241 @@
+use crate::diff::{Diff, Render};
+use crate::error::Error;
+use crate::{normalize, term, Expected, Test};
+use std::env;
+use std::path::Path;
+use std::process::Output;
+use termcolor::Color::{self, *};
+
+pub(crate) enum Level {
+    Fail,
+    Warn,
+}
+
+pub(crate) use self::Level::*;
+
+pub(crate) fn prepare_fail(err: Error) {
+    if err.already_printed() {
+        return;
+    }
+
+    term::bold_color(Red);
+    print!("ERROR");
+    term::reset();
+    println!(": {}", err);
+    println!();
+}
+
+pub(crate) fn test_fail(err: Error) {
+    if err.already_printed() {
+        return;
+    }
+
+    term::bold_color(Red);
+    println!("error");
+    term::color(Red);
+    println!("{}", err);
+    term::reset();
+    println!();
+}
+
+pub(crate) fn no_tests_enabled() {
+    term::color(Yellow);
+    println!("There are no trybuild tests enabled yet.");
+    term::reset();
+}
+
+pub(crate) fn ok() {
+    term::color(Green);
+    println!("ok");
+    term::reset();
+}
+
+pub(crate) fn begin_test(test: &Test, show_expected: bool) {
+    let display_name = test.path.as_os_str().to_string_lossy();
+
+    print!("test ");
+    term::bold();
+    print!("{}", display_name);
+    term::reset();
+
+    if show_expected {
+        match test.expected {
+            Expected::Pass => print!(" [should pass]"),
+            Expected::CompileFail => print!(" [should fail to compile]"),
+        }
+    }
+
+    print!(" ... ");
+}
+
+pub(crate) fn failed_to_build(stderr: &str) {
+    term::bold_color(Red);
+    println!("error");
+    snippet(Red, stderr);
+    println!();
+}
+
+pub(crate) fn should_not_have_compiled() {
+    term::bold_color(Red);
+    println!("error");
+    term::color(Red);
+    println!("Expected test case to fail to compile, but it succeeded.");
+    term::reset();
+    println!();
+}
+
+pub(crate) fn write_stderr_wip(wip_path: &Path, stderr_path: &Path, stderr: &str) {
+    let wip_path = wip_path.to_string_lossy();
+    let stderr_path = stderr_path.to_string_lossy();
+
+    term::bold_color(Yellow);
+    println!("wip");
+    println!();
+    print!("NOTE");
+    term::reset();
+    println!(": writing the following output to `{}`.", wip_path);
+    println!(
+        "Move this file to `{}` to accept it as correct.",
+        stderr_path,
+    );
+    snippet(Yellow, stderr);
+    println!();
+}
+
+pub(crate) fn overwrite_stderr(stderr_path: &Path, stderr: &str) {
+    let stderr_path = stderr_path.to_string_lossy();
+
+    term::bold_color(Yellow);
+    println!("wip");
+    println!();
+    print!("NOTE");
+    term::reset();
+    println!(": writing the following output to `{}`.", stderr_path);
+    snippet(Yellow, stderr);
+    println!();
+}
+
+pub(crate) fn mismatch(expected: &str, actual: &str) {
+    term::bold_color(Red);
+    println!("mismatch");
+    term::reset();
+    println!();
+    let diff = if env::var_os("TERM").map_or(true, |term| term == "dumb") {
+        // No diff in dumb terminal or when TERM is unset.
+        None
+    } else {
+        Diff::compute(expected, actual)
+    };
+    term::bold_color(Blue);
+    println!("EXPECTED:");
+    snippet_diff(Blue, expected, diff.as_ref());
+    println!();
+    term::bold_color(Red);
+    println!("ACTUAL OUTPUT:");
+    snippet_diff(Red, actual, diff.as_ref());
+    print!("note: If the ");
+    term::color(Red);
+    print!("actual output");
+    term::reset();
+    println!(" is the correct output you can bless it by rerunning");
+    println!("      your test with the environment variable TRYBUILD=overwrite");
+    println!();
+}
+
+pub(crate) fn output(warnings: &str, output: &Output) {
+    let success = output.status.success();
+    let stdout = normalize::trim(&output.stdout);
+    let stderr = normalize::trim(&output.stderr);
+    let has_output = !stdout.is_empty() || !stderr.is_empty();
+
+    if success {
+        ok();
+        if has_output || !warnings.is_empty() {
+            println!();
+        }
+    } else {
+        term::bold_color(Red);
+        println!("error");
+        term::color(Red);
+        if has_output {
+            println!("Test case failed at runtime.");
+        } else {
+            println!("Execution of the test case was unsuccessful but there was no output.");
+        }
+        term::reset();
+        println!();
+    }
+
+    self::warnings(warnings);
+
+    let color = if success { Yellow } else { Red };
+
+    for (name, content) in &[("STDOUT", stdout), ("STDERR", stderr)] {
+        if !content.is_empty() {
+            term::bold_color(color);
+            println!("{}:", name);
+            snippet(color, &normalize::trim(content));
+            println!();
+        }
+    }
+}
+
+pub(crate) fn fail_output(level: Level, stdout: &str) {
+    let color = match level {
+        Fail => Red,
+        Warn => Yellow,
+    };
+
+    if !stdout.is_empty() {
+        term::bold_color(color);
+        println!("STDOUT:");
+        snippet(color, &normalize::trim(stdout));
+        println!();
+    }
+}
+
+pub(crate) fn warnings(warnings: &str) {
+    if warnings.is_empty() {
+        return;
+    }
+
+    term::bold_color(Yellow);
+    println!("WARNINGS:");
+    snippet(Yellow, warnings);
+    println!();
+}
+
+fn snippet(color: Color, content: &str) {
+    snippet_diff(color, content, None);
+}
+
+fn snippet_diff(color: Color, content: &str, diff: Option<&Diff>) {
+    fn dotted_line() {
+        println!("{}", "┈".repeat(60));
+    }
+
+    term::color(color);
+    dotted_line();
+
+    match diff {
+        Some(diff) => {
+            for chunk in diff.iter(content) {
+                match chunk {
+                    Render::Common(s) => {
+                        term::color(color);
+                        print!("{}", s);
+                    }
+                    Render::Unique(s) => {
+                        term::bold_color(color);
+                        print!("\x1B[7m{}", s);
+                    }
+                }
+            }
+        }
+        None => print!("{}", content),
+    }
+
+    term::color(color);
+    dotted_line();
+    term::reset();
+}
diff --git a/src/normalize.rs b/src/normalize.rs
new file mode 100644 (file)
index 0000000..d738203
--- /dev/null
@@ -0,0 +1,592 @@
+#[cfg(test)]
+#[path = "tests.rs"]
+mod tests;
+
+use self::Normalization::*;
+use crate::directory::Directory;
+use crate::run::PathDependency;
+use std::cmp;
+use std::path::Path;
+
+#[derive(Copy, Clone)]
+pub struct Context<'a> {
+    pub krate: &'a str,
+    pub source_dir: &'a Directory,
+    pub workspace: &'a Directory,
+    pub input_file: &'a Path,
+    pub target_dir: &'a Directory,
+    pub path_dependencies: &'a [PathDependency],
+}
+
+macro_rules! normalizations {
+    ($($name:ident,)*) => {
+        #[derive(PartialOrd, PartialEq, Copy, Clone)]
+        enum Normalization {
+            $($name,)*
+        }
+
+        impl Normalization {
+            const ALL: &'static [Self] = &[$($name),*];
+        }
+
+        impl Default for Variations {
+            fn default() -> Self {
+                Variations {
+                    variations: [$(($name, String::new()).1),*],
+                }
+            }
+        }
+    };
+}
+
+normalizations! {
+    Basic,
+    StripCouldNotCompile,
+    StripCouldNotCompile2,
+    StripForMoreInformation,
+    StripForMoreInformation2,
+    TrimEnd,
+    RustLib,
+    TypeDirBackslash,
+    WorkspaceLines,
+    PathDependencies,
+    CargoRegistry,
+    ArrowOtherCrate,
+    RelativeToDir,
+    LinesOutsideInputFile,
+    Unindent,
+    AndOthers,
+    StripLongTypeNameFiles,
+    // New normalization steps are to be inserted here at the end so that any
+    // snapshots saved before your normalization change remain passing.
+}
+
+/// For a given compiler output, produces the set of saved outputs against which
+/// the compiler's output would be considered correct. If the test's saved
+/// stderr file is identical to any one of these variations, the test will pass.
+///
+/// This is a set rather than just one normalized output in order to avoid
+/// breaking existing tests when introducing new normalization steps. Someone
+/// may have saved stderr snapshots with an older version of trybuild, and those
+/// tests need to continue to pass with newer versions of trybuild.
+///
+/// There is one "preferred" variation which is what we print when the stderr
+/// file is absent or not a match.
+pub fn diagnostics(output: &str, context: Context) -> Variations {
+    let output = output.replace("\r\n", "\n");
+
+    let mut result = Variations::default();
+    for (i, normalization) in Normalization::ALL.iter().enumerate() {
+        result.variations[i] = apply(&output, *normalization, context);
+    }
+
+    result
+}
+
+pub struct Variations {
+    variations: [String; Normalization::ALL.len()],
+}
+
+impl Variations {
+    pub fn preferred(&self) -> &str {
+        self.variations.last().unwrap()
+    }
+
+    pub fn any<F: FnMut(&str) -> bool>(&self, mut f: F) -> bool {
+        self.variations.iter().any(|stderr| f(stderr))
+    }
+
+    pub fn concat(&mut self, other: &Self) {
+        for (this, other) in self.variations.iter_mut().zip(&other.variations) {
+            if !this.is_empty() && !other.is_empty() {
+                this.push('\n');
+            }
+            this.push_str(other);
+        }
+    }
+}
+
+pub fn trim<S: AsRef<[u8]>>(output: S) -> String {
+    let bytes = output.as_ref();
+    let mut normalized = String::from_utf8_lossy(bytes).into_owned();
+
+    let len = normalized.trim_end().len();
+    normalized.truncate(len);
+
+    if !normalized.is_empty() {
+        normalized.push('\n');
+    }
+
+    normalized
+}
+
+fn apply(original: &str, normalization: Normalization, context: Context) -> String {
+    let mut normalized = String::new();
+
+    let lines: Vec<&str> = original.lines().collect();
+    let mut filter = Filter {
+        all_lines: &lines,
+        normalization,
+        context,
+        hide_numbers: 0,
+    };
+    for i in 0..lines.len() {
+        if let Some(line) = filter.apply(i) {
+            normalized += &line;
+            if !normalized.ends_with("\n\n") {
+                normalized.push('\n');
+            }
+        }
+    }
+
+    if normalization >= Unindent {
+        normalized = unindent(normalized);
+    }
+
+    trim(normalized)
+}
+
+struct Filter<'a> {
+    all_lines: &'a [&'a str],
+    normalization: Normalization,
+    context: Context<'a>,
+    hide_numbers: usize,
+}
+
+impl<'a> Filter<'a> {
+    fn apply(&mut self, index: usize) -> Option<String> {
+        let mut line = self.all_lines[index].to_owned();
+
+        if self.hide_numbers > 0 {
+            hide_leading_numbers(&mut line);
+            self.hide_numbers -= 1;
+        }
+
+        let trim_start = line.trim_start();
+        let indent = line.len() - trim_start.len();
+        let prefix = if trim_start.starts_with("--> ") {
+            Some("--> ")
+        } else if trim_start.starts_with("::: ") {
+            Some("::: ")
+        } else {
+            None
+        };
+
+        if prefix == Some("--> ") && self.normalization < ArrowOtherCrate {
+            if let Some(cut_end) = line.rfind(&['/', '\\'][..]) {
+                let cut_start = indent + 4;
+                line.replace_range(cut_start..cut_end + 1, "$DIR/");
+                return Some(line);
+            }
+        }
+
+        if prefix.is_some() {
+            line = line.replace('\\', "/");
+            let line_lower = line.to_ascii_lowercase();
+            let target_dir_pat = self
+                .context
+                .target_dir
+                .to_string_lossy()
+                .to_ascii_lowercase()
+                .replace('\\', "/");
+            let source_dir_pat = self
+                .context
+                .source_dir
+                .to_string_lossy()
+                .to_ascii_lowercase()
+                .replace('\\', "/");
+            let mut other_crate = false;
+            if line_lower.find(&target_dir_pat) == Some(indent + 4) {
+                let mut offset = indent + 4 + target_dir_pat.len();
+                let mut out_dir_crate_name = None;
+                while let Some(slash) = line[offset..].find('/') {
+                    let component = &line[offset..offset + slash];
+                    if component == "out" {
+                        if let Some(out_dir_crate_name) = out_dir_crate_name {
+                            let replacement = format!("$OUT_DIR[{}]", out_dir_crate_name);
+                            line.replace_range(indent + 4..offset + 3, &replacement);
+                            other_crate = true;
+                            break;
+                        }
+                    } else if component.len() > 17
+                        && component.rfind('-') == Some(component.len() - 17)
+                        && is_ascii_lowercase_hex(&component[component.len() - 16..])
+                    {
+                        out_dir_crate_name = Some(&component[..component.len() - 17]);
+                    } else {
+                        out_dir_crate_name = None;
+                    }
+                    offset += slash + 1;
+                }
+            } else if let Some(i) = line_lower.find(&source_dir_pat) {
+                if self.normalization >= RelativeToDir && i == indent + 4 {
+                    line.replace_range(i..i + source_dir_pat.len(), "");
+                    if self.normalization < LinesOutsideInputFile {
+                        return Some(line);
+                    }
+                    let input_file_pat = self
+                        .context
+                        .input_file
+                        .to_string_lossy()
+                        .to_ascii_lowercase()
+                        .replace('\\', "/");
+                    if line_lower[i + source_dir_pat.len()..].starts_with(&input_file_pat) {
+                        // Keep line numbers only within the input file (the
+                        // path passed to our `fn compile_fail`). All other
+                        // source files get line numbers erased below.
+                        return Some(line);
+                    }
+                } else {
+                    line.replace_range(i..i + source_dir_pat.len() - 1, "$DIR");
+                    if self.normalization < LinesOutsideInputFile {
+                        return Some(line);
+                    }
+                }
+                other_crate = true;
+            } else {
+                let workspace_pat = self
+                    .context
+                    .workspace
+                    .to_string_lossy()
+                    .to_ascii_lowercase()
+                    .replace('\\', "/");
+                if let Some(i) = line_lower.find(&workspace_pat) {
+                    line.replace_range(i..i + workspace_pat.len() - 1, "$WORKSPACE");
+                    other_crate = true;
+                }
+            }
+            if self.normalization >= PathDependencies && !other_crate {
+                for path_dep in self.context.path_dependencies {
+                    let path_dep_pat = path_dep
+                        .normalized_path
+                        .to_string_lossy()
+                        .to_ascii_lowercase()
+                        .replace('\\', "/");
+                    if let Some(i) = line_lower.find(&path_dep_pat) {
+                        let var = format!("${}", path_dep.name.to_uppercase().replace('-', "_"));
+                        line.replace_range(i..i + path_dep_pat.len() - 1, &var);
+                        other_crate = true;
+                        break;
+                    }
+                }
+            }
+            if self.normalization >= RustLib && !other_crate {
+                if let Some(pos) = line.find("/rustlib/src/rust/src/") {
+                    // --> /home/.rustup/toolchains/nightly/lib/rustlib/src/rust/src/libstd/net/ip.rs:83:1
+                    // --> $RUST/src/libstd/net/ip.rs:83:1
+                    line.replace_range(indent + 4..pos + 17, "$RUST");
+                    other_crate = true;
+                } else if let Some(pos) = line.find("/rustlib/src/rust/library/") {
+                    // --> /home/.rustup/toolchains/nightly/lib/rustlib/src/rust/library/std/src/net/ip.rs:83:1
+                    // --> $RUST/std/src/net/ip.rs:83:1
+                    line.replace_range(indent + 4..pos + 25, "$RUST");
+                    other_crate = true;
+                } else if line[indent + 4..].starts_with("/rustc/")
+                    && line
+                        .get(indent + 11..indent + 51)
+                        .map_or(false, is_ascii_lowercase_hex)
+                    && line[indent + 51..].starts_with("/library/")
+                {
+                    // --> /rustc/c5c7d2b37780dac1092e75f12ab97dd56c30861e/library/std/src/net/ip.rs:83:1
+                    // --> $RUST/std/src/net/ip.rs:83:1
+                    line.replace_range(indent + 4..indent + 59, "$RUST");
+                    other_crate = true;
+                }
+            }
+            if self.normalization >= CargoRegistry && !other_crate {
+                if let Some(pos) = line
+                    .find("/registry/src/github.com-")
+                    .or_else(|| line.find("/registry/src/index.crates.io-"))
+                {
+                    let hash_start = pos + line[pos..].find('-').unwrap() + 1;
+                    let hash_end = hash_start + 16;
+                    if line
+                        .get(hash_start..hash_end)
+                        .map_or(false, is_ascii_lowercase_hex)
+                        && line[hash_end..].starts_with('/')
+                    {
+                        // --> /home/.cargo/registry/src/github.com-1ecc6299db9ec823/serde_json-1.0.64/src/de.rs:2584:8
+                        // --> $CARGO/serde_json-1.0.64/src/de.rs:2584:8
+                        line.replace_range(indent + 4..hash_end, "$CARGO");
+                        other_crate = true;
+                    }
+                }
+            }
+            if other_crate && self.normalization >= WorkspaceLines {
+                // Blank out line numbers for this particular error since rustc
+                // tends to reach into code from outside of the test case. The
+                // test stderr shouldn't need to be updated every time we touch
+                // those files.
+                hide_trailing_numbers(&mut line);
+                self.hide_numbers = 1;
+                while let Some(next_line) = self.all_lines.get(index + self.hide_numbers) {
+                    match next_line.trim_start().chars().next().unwrap_or_default() {
+                        '0'..='9' | '|' | '.' => self.hide_numbers += 1,
+                        _ => break,
+                    }
+                }
+            }
+            return Some(line);
+        }
+
+        if line.starts_with("error: aborting due to ") {
+            return None;
+        }
+
+        if line == "To learn more, run the command again with --verbose." {
+            return None;
+        }
+
+        if self.normalization >= StripCouldNotCompile {
+            if line.starts_with("error: Could not compile `") {
+                return None;
+            }
+        }
+
+        if self.normalization >= StripCouldNotCompile2 {
+            if line.starts_with("error: could not compile `") {
+                return None;
+            }
+        }
+
+        if self.normalization >= StripForMoreInformation {
+            if line.starts_with("For more information about this error, try `rustc --explain") {
+                return None;
+            }
+        }
+
+        if self.normalization >= StripForMoreInformation2 {
+            if line.starts_with("Some errors have detailed explanations:") {
+                return None;
+            }
+            if line.starts_with("For more information about an error, try `rustc --explain") {
+                return None;
+            }
+        }
+
+        if self.normalization >= TrimEnd {
+            line.truncate(line.trim_end().len());
+        }
+
+        if self.normalization >= TypeDirBackslash {
+            if line
+                .trim_start()
+                .starts_with("= note: required because it appears within the type")
+            {
+                line = line.replace('\\', "/");
+            }
+        }
+
+        if self.normalization >= AndOthers {
+            let trim_start = line.trim_start();
+            if trim_start.starts_with("and ") && line.ends_with(" others") {
+                let indent = line.len() - trim_start.len();
+                let num_start = indent + "and ".len();
+                let num_end = line.len() - " others".len();
+                if num_start < num_end
+                    && line[num_start..num_end].bytes().all(|b| b.is_ascii_digit())
+                {
+                    line.replace_range(num_start..num_end, "$N");
+                }
+            }
+        }
+
+        if self.normalization >= StripLongTypeNameFiles {
+            let trimmed_line = line.trim_start();
+            let trimmed_line = trimmed_line
+                .strip_prefix("= note: ")
+                .unwrap_or(trimmed_line);
+            if trimmed_line.starts_with("the full type name has been written to") {
+                return None;
+            }
+        }
+
+        line = line.replace(self.context.krate, "$CRATE");
+        line = replace_case_insensitive(&line, &self.context.source_dir.to_string_lossy(), "$DIR/");
+        line = replace_case_insensitive(
+            &line,
+            &self.context.workspace.to_string_lossy(),
+            "$WORKSPACE/",
+        );
+
+        Some(line)
+    }
+}
+
+fn is_ascii_lowercase_hex(s: &str) -> bool {
+    s.bytes().all(|b| matches!(b, b'0'..=b'9' | b'a'..=b'f'))
+}
+
+// "10 | T: Send,"  ->  "   | T: Send,"
+fn hide_leading_numbers(line: &mut String) {
+    let n = line.bytes().take_while(u8::is_ascii_digit).count();
+    for i in 0..n {
+        line.replace_range(i..i + 1, " ");
+    }
+}
+
+// "main.rs:22:29"  ->  "main.rs"
+fn hide_trailing_numbers(line: &mut String) {
+    for _ in 0..2 {
+        let digits = line.bytes().rev().take_while(u8::is_ascii_digit).count();
+        if digits == 0 || !line[..line.len() - digits].ends_with(':') {
+            return;
+        }
+        line.truncate(line.len() - digits - 1);
+    }
+}
+
+fn replace_case_insensitive(line: &str, pattern: &str, replacement: &str) -> String {
+    let line_lower = line.to_ascii_lowercase().replace('\\', "/");
+    let pattern_lower = pattern.to_ascii_lowercase().replace('\\', "/");
+    let mut replaced = String::with_capacity(line.len());
+
+    let line_lower = line_lower.as_str();
+    let mut split = line_lower.split(&pattern_lower);
+    let mut pos = 0;
+    let mut insert_replacement = false;
+    while let Some(keep) = split.next() {
+        if insert_replacement {
+            replaced.push_str(replacement);
+            pos += pattern.len();
+        }
+        let mut keep = &line[pos..pos + keep.len()];
+        if insert_replacement {
+            let end_of_maybe_path = keep.find(&[' ', ':'][..]).unwrap_or(keep.len());
+            replaced.push_str(&keep[..end_of_maybe_path].replace('\\', "/"));
+            pos += end_of_maybe_path;
+            keep = &keep[end_of_maybe_path..];
+        }
+        replaced.push_str(keep);
+        pos += keep.len();
+        insert_replacement = true;
+        if replaced.ends_with(|ch: char| ch.is_ascii_alphanumeric()) {
+            if let Some(ch) = line[pos..].chars().next() {
+                replaced.push(ch);
+                pos += ch.len_utf8();
+                split = line_lower[pos..].split(&pattern_lower);
+                insert_replacement = false;
+            }
+        }
+    }
+
+    replaced
+}
+
+#[derive(PartialEq)]
+enum IndentedLineKind {
+    // `error`
+    // `warning`
+    Heading,
+
+    // Contains max number of spaces that can be cut based on this line.
+    // `   --> foo` = 2
+    // `    | foo` = 3
+    // `   ::: foo` = 2
+    // `10  | foo` = 1
+    Code(usize),
+
+    // `note:`
+    // `...`
+    Note,
+
+    // Contains number of leading spaces.
+    Other(usize),
+}
+
+fn unindent(diag: String) -> String {
+    let mut normalized = String::new();
+    let mut lines = diag.lines();
+
+    while let Some(line) = lines.next() {
+        normalized.push_str(line);
+        normalized.push('\n');
+
+        if indented_line_kind(line) != IndentedLineKind::Heading {
+            continue;
+        }
+
+        let mut ahead = lines.clone();
+        let next_line = match ahead.next() {
+            Some(line) => line,
+            None => continue,
+        };
+
+        if let IndentedLineKind::Code(indent) = indented_line_kind(next_line) {
+            if next_line[indent + 1..].starts_with("--> ") {
+                let mut lines_in_block = 1;
+                let mut least_indent = indent;
+                while let Some(line) = ahead.next() {
+                    match indented_line_kind(line) {
+                        IndentedLineKind::Heading => break,
+                        IndentedLineKind::Code(indent) => {
+                            lines_in_block += 1;
+                            least_indent = cmp::min(least_indent, indent);
+                        }
+                        IndentedLineKind::Note => lines_in_block += 1,
+                        IndentedLineKind::Other(spaces) => {
+                            if spaces > 10 {
+                                lines_in_block += 1;
+                            } else {
+                                break;
+                            }
+                        }
+                    }
+                }
+                for _ in 0..lines_in_block {
+                    let line = lines.next().unwrap();
+                    if let IndentedLineKind::Code(_) | IndentedLineKind::Other(_) =
+                        indented_line_kind(line)
+                    {
+                        let space = line.find(' ').unwrap();
+                        normalized.push_str(&line[..space]);
+                        normalized.push_str(&line[space + least_indent..]);
+                    } else {
+                        normalized.push_str(line);
+                    }
+                    normalized.push('\n');
+                }
+            }
+        }
+    }
+
+    normalized
+}
+
+fn indented_line_kind(line: &str) -> IndentedLineKind {
+    if let Some(heading_len) = if line.starts_with("error") {
+        Some("error".len())
+    } else if line.starts_with("warning") {
+        Some("warning".len())
+    } else {
+        None
+    } {
+        if line[heading_len..].starts_with(&[':', '['][..]) {
+            return IndentedLineKind::Heading;
+        }
+    }
+
+    if line.starts_with("note:") || line == "..." {
+        return IndentedLineKind::Note;
+    }
+
+    let is_space = |b: &u8| *b == b' ';
+    if let Some(rest) = line.strip_prefix("... ") {
+        let spaces = rest.bytes().take_while(is_space).count();
+        return IndentedLineKind::Code(spaces);
+    }
+
+    let digits = line.bytes().take_while(u8::is_ascii_digit).count();
+    let spaces = line[digits..].bytes().take_while(|b| *b == b' ').count();
+    let rest = &line[digits + spaces..];
+    if spaces > 0
+        && (rest == "|"
+            || rest.starts_with("| ")
+            || digits == 0
+                && (rest.starts_with("--> ") || rest.starts_with("::: ") || rest.starts_with("= ")))
+    {
+        return IndentedLineKind::Code(spaces - 1);
+    }
+
+    IndentedLineKind::Other(if digits == 0 { spaces } else { 0 })
+}
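
To make the effect of the earliest normalization concrete: the `Basic` rule rewrites everything between `--> ` and the final path separator to `$DIR/`, which is why saved stderr files stay portable across machines. A simplified standalone sketch of just that step (not the crate's internal API; the sample path is hypothetical):

```
// Simplified version of the "--> " handling under the Basic normalization.
fn basic_normalize(line: &str) -> String {
    let mut line = line.to_owned();
    let trim_start = line.trim_start();
    let indent = line.len() - trim_start.len();
    if trim_start.starts_with("--> ") {
        if let Some(cut_end) = line.rfind(&['/', '\\'][..]) {
            line.replace_range(indent + 4..cut_end + 1, "$DIR/");
        }
    }
    line
}

fn main() {
    let raw = "  --> /home/user/repo/tests/ui/compile-fail-1.rs:5:12";
    assert_eq!(basic_normalize(raw), "  --> $DIR/compile-fail-1.rs:5:12");
}
```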
diff --git a/src/path.rs b/src/path.rs
new file mode 100644 (file)
index 0000000..56e94c9
--- /dev/null
@@ -0,0 +1,48 @@
+macro_rules! path {
+    ($($tt:tt)+) => {
+        tokenize_path!([] [] $($tt)+)
+    };
+}
+
+// Private implementation detail.
+macro_rules! tokenize_path {
+    ([$(($($component:tt)+))*] [$($cur:tt)+] /) => {
+        crate::directory::Directory::new(tokenize_path!([$(($($component)+))*] [$($cur)+]))
+    };
+
+    ([$(($($component:tt)+))*] [$($cur:tt)+] / $($rest:tt)+) => {
+        tokenize_path!([$(($($component)+))* ($($cur)+)] [] $($rest)+)
+    };
+
+    ([$(($($component:tt)+))*] [$($cur:tt)*] $first:tt $($rest:tt)*) => {
+        tokenize_path!([$(($($component)+))*] [$($cur)* $first] $($rest)*)
+    };
+
+    ([$(($($component:tt)+))*] [$($cur:tt)+]) => {
+        tokenize_path!([$(($($component)+))* ($($cur)+)])
+    };
+
+    ([$(($($component:tt)+))*]) => {{
+        let mut path = std::path::PathBuf::new();
+        $(
+            path.push(&($($component)+));
+        )*
+        path
+    }};
+}
+
+#[test]
+fn test_path_macro() {
+    use std::path::{Path, PathBuf};
+
+    struct Project {
+        dir: PathBuf,
+    }
+
+    let project = Project {
+        dir: PathBuf::from("../target/tests"),
+    };
+
+    let cargo_dir = path!(project.dir / ".cargo" / "config.toml");
+    assert_eq!(cargo_dir, Path::new("../target/tests/.cargo/config.toml"));
+}
diff --git a/src/run.rs b/src/run.rs
new file mode 100644 (file)
index 0000000..a94d7ad
--- /dev/null
@@ -0,0 +1,675 @@
+use crate::cargo::{self, Metadata, PackageMetadata};
+use crate::dependencies::{self, Dependency, EditionOrInherit};
+use crate::directory::Directory;
+use crate::env::Update;
+use crate::error::{Error, Result};
+use crate::expand::{expand_globs, ExpandedTest};
+use crate::flock::Lock;
+use crate::manifest::{Bin, Build, Config, Manifest, Name, Package, Workspace};
+use crate::message::{self, Fail, Warn};
+use crate::normalize::{self, Context, Variations};
+use crate::{features, rustflags, Expected, Runner, Test};
+use serde_derive::Deserialize;
+use std::collections::{BTreeMap as Map, BTreeSet as Set};
+use std::env;
+use std::ffi::{OsStr, OsString};
+use std::fs::{self, File};
+use std::mem;
+use std::path::{Path, PathBuf};
+use std::str;
+
+#[derive(Debug)]
+pub struct Project {
+    pub dir: Directory,
+    source_dir: Directory,
+    pub target_dir: Directory,
+    pub name: String,
+    update: Update,
+    pub has_pass: bool,
+    has_compile_fail: bool,
+    pub features: Option<Vec<String>>,
+    pub workspace: Directory,
+    pub path_dependencies: Vec<PathDependency>,
+    manifest: Manifest,
+    pub keep_going: bool,
+}
+
+#[derive(Debug)]
+pub struct PathDependency {
+    pub name: String,
+    pub normalized_path: Directory,
+}
+
+struct Report {
+    failures: usize,
+    created_wip: usize,
+}
+
+impl Runner {
+    pub fn run(&mut self) {
+        let mut tests = expand_globs(&self.tests);
+        filter(&mut tests);
+
+        let (project, _lock) = (|| {
+            let mut project = self.prepare(&tests)?;
+            let lock = Lock::acquire(path!(project.dir / ".lock"));
+            self.write(&mut project)?;
+            Ok((project, lock))
+        })()
+        .unwrap_or_else(|err| {
+            message::prepare_fail(err);
+            panic!("tests failed");
+        });
+
+        print!("\n\n");
+
+        let len = tests.len();
+        let mut report = Report {
+            failures: 0,
+            created_wip: 0,
+        };
+
+        if tests.is_empty() {
+            message::no_tests_enabled();
+        } else if project.keep_going && !project.has_pass {
+            report = match self.run_all(&project, tests) {
+                Ok(failures) => failures,
+                Err(err) => {
+                    message::test_fail(err);
+                    Report {
+                        failures: len,
+                        created_wip: 0,
+                    }
+                }
+            }
+        } else {
+            for test in tests {
+                match test.run(&project) {
+                    Ok(Outcome::Passed) => {}
+                    Ok(Outcome::CreatedWip) => report.created_wip += 1,
+                    Err(err) => {
+                        report.failures += 1;
+                        message::test_fail(err);
+                    }
+                }
+            }
+        }
+
+        print!("\n\n");
+
+        if report.failures > 0 && project.name != "trybuild-tests" {
+            panic!("{} of {} tests failed", report.failures, len);
+        }
+        if report.created_wip > 0 && project.name != "trybuild-tests" {
+            panic!(
+                "successfully created new stderr files for {} test cases",
+                report.created_wip,
+            );
+        }
+    }
+
+    fn prepare(&self, tests: &[ExpandedTest]) -> Result<Project> {
+        let Metadata {
+            target_directory: target_dir,
+            workspace_root: workspace,
+            packages,
+        } = cargo::metadata()?;
+
+        let mut has_pass = false;
+        let mut has_compile_fail = false;
+        for e in tests {
+            match e.test.expected {
+                Expected::Pass => has_pass = true,
+                Expected::CompileFail => has_compile_fail = true,
+            }
+        }
+
+        let source_dir = cargo::manifest_dir()?;
+        let source_manifest = dependencies::get_manifest(&source_dir)?;
+
+        let mut features = features::find();
+
+        let path_dependencies = source_manifest
+            .dependencies
+            .iter()
+            .filter_map(|(name, dep)| {
+                let path = dep.path.as_ref()?;
+                if packages.iter().any(|p| &p.name == name) {
+                    // Skip path dependencies coming from the workspace itself
+                    None
+                } else {
+                    Some(PathDependency {
+                        name: name.clone(),
+                        normalized_path: path.canonicalize().ok()?,
+                    })
+                }
+            })
+            .collect();
+
+        let crate_name = &source_manifest.package.name;
+        let project_dir = path!(target_dir / "tests" / "trybuild" / crate_name /);
+        fs::create_dir_all(&project_dir)?;
+
+        let project_name = format!("{}-tests", crate_name);
+        let manifest = self.make_manifest(
+            &workspace,
+            &project_name,
+            &source_dir,
+            &packages,
+            tests,
+            source_manifest,
+        )?;
+
+        if let Some(enabled_features) = &mut features {
+            enabled_features.retain(|feature| manifest.features.contains_key(feature));
+        }
+
+        Ok(Project {
+            dir: project_dir,
+            source_dir,
+            target_dir,
+            name: project_name,
+            update: Update::env()?,
+            has_pass,
+            has_compile_fail,
+            features,
+            workspace,
+            path_dependencies,
+            manifest,
+            keep_going: false,
+        })
+    }
+
+    fn write(&self, project: &mut Project) -> Result<()> {
+        let manifest_toml = basic_toml::to_string(&project.manifest)?;
+
+        let config = self.make_config();
+        let config_toml = basic_toml::to_string(&config)?;
+
+        fs::create_dir_all(path!(project.dir / ".cargo"))?;
+        fs::write(path!(project.dir / ".cargo" / "config.toml"), config_toml)?;
+        fs::write(path!(project.dir / "Cargo.toml"), manifest_toml)?;
+
+        let main_rs = b"\
+            #![allow(unknown_lints, unused_crate_dependencies, missing_docs)]\n\
+            fn main() {}\n\
+        ";
+        fs::write(path!(project.dir / "main.rs"), &main_rs[..])?;
+
+        cargo::build_dependencies(project)?;
+
+        Ok(())
+    }
+
+    fn make_manifest(
+        &self,
+        workspace: &Directory,
+        project_name: &str,
+        source_dir: &Directory,
+        packages: &[PackageMetadata],
+        tests: &[ExpandedTest],
+        source_manifest: dependencies::Manifest,
+    ) -> Result<Manifest> {
+        let crate_name = source_manifest.package.name;
+        let workspace_manifest = dependencies::get_workspace_manifest(workspace);
+
+        let edition = match source_manifest.package.edition {
+            EditionOrInherit::Edition(edition) => edition,
+            EditionOrInherit::Inherit => workspace_manifest
+                .workspace
+                .package
+                .edition
+                .ok_or(Error::NoWorkspaceManifest)?,
+        };
+
+        let mut dependencies = Map::new();
+        dependencies.extend(source_manifest.dependencies);
+        dependencies.extend(source_manifest.dev_dependencies);
+
+        let cargo_toml_path = source_dir.join("Cargo.toml");
+        let mut has_lib_target = true;
+        for package_metadata in packages {
+            if package_metadata.manifest_path == cargo_toml_path {
+                has_lib_target = package_metadata
+                    .targets
+                    .iter()
+                    .any(|target| target.crate_types != ["bin"]);
+            }
+        }
+        if has_lib_target {
+            dependencies.insert(
+                crate_name.clone(),
+                Dependency {
+                    version: None,
+                    path: Some(source_dir.clone()),
+                    optional: false,
+                    default_features: false,
+                    features: Vec::new(),
+                    git: None,
+                    branch: None,
+                    tag: None,
+                    rev: None,
+                    workspace: false,
+                    rest: Map::new(),
+                },
+            );
+        }
+
+        let mut targets = source_manifest.target;
+        for target in targets.values_mut() {
+            let dev_dependencies = mem::take(&mut target.dev_dependencies);
+            target.dependencies.extend(dev_dependencies);
+        }
+
+        let mut features = source_manifest.features;
+        for (feature, enables) in &mut features {
+            enables.retain(|en| {
+                let dep_name = match en.strip_prefix("dep:") {
+                    Some(dep_name) => dep_name,
+                    None => return false,
+                };
+                if let Some(Dependency { optional: true, .. }) = dependencies.get(dep_name) {
+                    return true;
+                }
+                for target in targets.values() {
+                    if let Some(Dependency { optional: true, .. }) =
+                        target.dependencies.get(dep_name)
+                    {
+                        return true;
+                    }
+                }
+                false
+            });
+            if has_lib_target {
+                enables.insert(0, format!("{}/{}", crate_name, feature));
+            }
+        }
+
+        let mut manifest = Manifest {
+            package: Package {
+                name: project_name.to_owned(),
+                version: "0.0.0".to_owned(),
+                edition,
+                resolver: source_manifest.package.resolver,
+                publish: false,
+            },
+            features,
+            dependencies,
+            target: targets,
+            bins: Vec::new(),
+            workspace: Some(Workspace {
+                dependencies: workspace_manifest.workspace.dependencies,
+            }),
+            // Within a workspace, only the [patch] and [replace] sections in
+            // the workspace root's Cargo.toml are applied by Cargo.
+            patch: workspace_manifest.patch,
+            replace: workspace_manifest.replace,
+        };
+
+        manifest.bins.push(Bin {
+            name: Name(project_name.to_owned()),
+            path: Path::new("main.rs").to_owned(),
+        });
+
+        for expanded in tests {
+            if expanded.error.is_none() {
+                manifest.bins.push(Bin {
+                    name: expanded.name.clone(),
+                    path: source_dir.join(&expanded.test.path),
+                });
+            }
+        }
+
+        Ok(manifest)
+    }
+
+    fn make_config(&self) -> Config {
+        Config {
+            build: Build {
+                rustflags: rustflags::make_vec(),
+            },
+        }
+    }
+
+    fn run_all(&self, project: &Project, tests: Vec<ExpandedTest>) -> Result<Report> {
+        let mut report = Report {
+            failures: 0,
+            created_wip: 0,
+        };
+
+        let mut path_map = Map::new();
+        for t in &tests {
+            let src_path = project.source_dir.join(&t.test.path);
+            path_map.insert(src_path, (&t.name, &t.test));
+        }
+
+        let output = cargo::build_all_tests(project)?;
+        let parsed = parse_cargo_json(project, &output.stdout, &path_map);
+        let fallback = Stderr::default();
+
+        for mut t in tests {
+            let show_expected = false;
+            message::begin_test(&t.test, show_expected);
+
+            if t.error.is_none() {
+                t.error = check_exists(&t.test.path).err();
+            }
+
+            if t.error.is_none() {
+                let src_path = project.source_dir.join(&t.test.path);
+                let this_test = parsed.stderrs.get(&src_path).unwrap_or(&fallback);
+                match t.test.check(project, &t.name, this_test, "") {
+                    Ok(Outcome::Passed) => {}
+                    Ok(Outcome::CreatedWip) => report.created_wip += 1,
+                    Err(error) => t.error = Some(error),
+                }
+            }
+
+            if let Some(err) = t.error {
+                report.failures += 1;
+                message::test_fail(err);
+            }
+        }
+
+        Ok(report)
+    }
+}
+
+enum Outcome {
+    Passed,
+    CreatedWip,
+}
+
+impl Test {
+    fn run(&self, project: &Project, name: &Name) -> Result<Outcome> {
+        let show_expected = project.has_pass && project.has_compile_fail;
+        message::begin_test(self, show_expected);
+        check_exists(&self.path)?;
+
+        let mut path_map = Map::new();
+        let src_path = project.source_dir.join(&self.path);
+        path_map.insert(src_path.clone(), (name, self));
+
+        let output = cargo::build_test(project, name)?;
+        let parsed = parse_cargo_json(project, &output.stdout, &path_map);
+        let fallback = Stderr::default();
+        let this_test = parsed.stderrs.get(&src_path).unwrap_or(&fallback);
+        self.check(project, name, this_test, &parsed.stdout)
+    }
+
+    fn check(
+        &self,
+        project: &Project,
+        name: &Name,
+        result: &Stderr,
+        build_stdout: &str,
+    ) -> Result<Outcome> {
+        let check = match self.expected {
+            Expected::Pass => Test::check_pass,
+            Expected::CompileFail => Test::check_compile_fail,
+        };
+
+        check(
+            self,
+            project,
+            name,
+            result.success,
+            build_stdout,
+            &result.stderr,
+        )
+    }
+
+    fn check_pass(
+        &self,
+        project: &Project,
+        name: &Name,
+        success: bool,
+        build_stdout: &str,
+        variations: &Variations,
+    ) -> Result<Outcome> {
+        let preferred = variations.preferred();
+        if !success {
+            message::failed_to_build(preferred);
+            return Err(Error::CargoFail);
+        }
+
+        let mut output = cargo::run_test(project, name)?;
+        output.stdout.splice(..0, build_stdout.bytes());
+        message::output(preferred, &output);
+        if output.status.success() {
+            Ok(Outcome::Passed)
+        } else {
+            Err(Error::RunFailed)
+        }
+    }
+
+    fn check_compile_fail(
+        &self,
+        project: &Project,
+        _name: &Name,
+        success: bool,
+        build_stdout: &str,
+        variations: &Variations,
+    ) -> Result<Outcome> {
+        let preferred = variations.preferred();
+
+        if success {
+            message::should_not_have_compiled();
+            message::fail_output(Fail, build_stdout);
+            message::warnings(preferred);
+            return Err(Error::ShouldNotHaveCompiled);
+        }
+
+        let stderr_path = self.path.with_extension("stderr");
+
+        if !stderr_path.exists() {
+            let outcome = match project.update {
+                Update::Wip => {
+                    let wip_dir = Path::new("wip");
+                    fs::create_dir_all(wip_dir)?;
+                    let gitignore_path = wip_dir.join(".gitignore");
+                    fs::write(gitignore_path, "*\n")?;
+                    let stderr_name = stderr_path
+                        .file_name()
+                        .unwrap_or_else(|| OsStr::new("test.stderr"));
+                    let wip_path = wip_dir.join(stderr_name);
+                    message::write_stderr_wip(&wip_path, &stderr_path, preferred);
+                    fs::write(wip_path, preferred).map_err(Error::WriteStderr)?;
+                    Outcome::CreatedWip
+                }
+                Update::Overwrite => {
+                    message::overwrite_stderr(&stderr_path, preferred);
+                    fs::write(stderr_path, preferred).map_err(Error::WriteStderr)?;
+                    Outcome::Passed
+                }
+            };
+            message::fail_output(Warn, build_stdout);
+            return Ok(outcome);
+        }
+
+        let expected = fs::read_to_string(&stderr_path)
+            .map_err(Error::ReadStderr)?
+            .replace("\r\n", "\n");
+
+        if variations.any(|stderr| expected == stderr) {
+            message::ok();
+            return Ok(Outcome::Passed);
+        }
+
+        match project.update {
+            Update::Wip => {
+                message::mismatch(&expected, preferred);
+                Err(Error::Mismatch)
+            }
+            Update::Overwrite => {
+                message::overwrite_stderr(&stderr_path, preferred);
+                fs::write(stderr_path, preferred).map_err(Error::WriteStderr)?;
+                Ok(Outcome::Passed)
+            }
+        }
+    }
+}
+
+fn check_exists(path: &Path) -> Result<()> {
+    if path.exists() {
+        return Ok(());
+    }
+    match File::open(path) {
+        Ok(_) => Ok(()),
+        Err(err) => Err(Error::Open(path.to_owned(), err)),
+    }
+}
+
+impl ExpandedTest {
+    fn run(self, project: &Project) -> Result<Outcome> {
+        match self.error {
+            None => self.test.run(project, &self.name),
+            Some(error) => {
+                let show_expected = false;
+                message::begin_test(&self.test, show_expected);
+                Err(error)
+            }
+        }
+    }
+}
+
+// Filter which test cases are run by trybuild.
+//
+//     $ cargo test -- ui trybuild=tuple_structs.rs
+//
+// The first argument after `--` must be the trybuild test name, i.e. the name
+// of the function that has the #[test] attribute and calls trybuild; this is
+// what gets Cargo to run the test at all. The next argument, starting with
+// `trybuild=`, provides a filename filter: only test cases whose filename
+// contains the filter string will be run.
+#[allow(clippy::needless_collect)] // false positive https://github.com/rust-lang/rust-clippy/issues/5991
+fn filter(tests: &mut Vec<ExpandedTest>) {
+    let filters = env::args_os()
+        .flat_map(OsString::into_string)
+        .filter_map(|mut arg| {
+            const PREFIX: &str = "trybuild=";
+            if arg.starts_with(PREFIX) && arg != PREFIX {
+                Some(arg.split_off(PREFIX.len()))
+            } else {
+                None
+            }
+        })
+        .collect::<Vec<String>>();
+
+    if filters.is_empty() {
+        return;
+    }
+
+    tests.retain(|t| {
+        filters
+            .iter()
+            .any(|f| t.test.path.to_string_lossy().contains(f))
+    });
+}
+
+#[derive(Deserialize)]
+struct CargoMessage {
+    #[allow(dead_code)]
+    reason: Reason,
+    target: RustcTarget,
+    message: RustcMessage,
+}
+
+#[derive(Deserialize)]
+enum Reason {
+    #[serde(rename = "compiler-message")]
+    CompilerMessage,
+}
+
+#[derive(Deserialize)]
+struct RustcTarget {
+    src_path: PathBuf,
+}
+
+#[derive(Deserialize)]
+struct RustcMessage {
+    rendered: String,
+    level: String,
+}
+
+struct ParsedOutputs {
+    stdout: String,
+    stderrs: Map<PathBuf, Stderr>,
+}
+
+struct Stderr {
+    success: bool,
+    stderr: Variations,
+}
+
+impl Default for Stderr {
+    fn default() -> Self {
+        Stderr {
+            success: true,
+            stderr: Variations::default(),
+        }
+    }
+}
+
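+// Split the JSON compiler messages that cargo interleaves into stdout away
+// from the ordinary stdout, and collect the rendered rustc diagnostics per
+// test source file, normalized for comparison against the expected .stderr
+// snapshots.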
+fn parse_cargo_json(
+    project: &Project,
+    stdout: &[u8],
+    path_map: &Map<PathBuf, (&Name, &Test)>,
+) -> ParsedOutputs {
+    let mut map = Map::new();
+    let mut nonmessage_stdout = String::new();
+    let mut remaining = &*String::from_utf8_lossy(stdout);
+    let mut seen = Set::new();
+    while !remaining.is_empty() {
+        let begin = match remaining.find("{\"reason\":") {
+            Some(begin) => begin,
+            None => break,
+        };
+        let (nonmessage, rest) = remaining.split_at(begin);
+        nonmessage_stdout.push_str(nonmessage);
+        let len = match rest.find('\n') {
+            Some(end) => end + 1,
+            None => rest.len(),
+        };
+        let (message, rest) = rest.split_at(len);
+        remaining = rest;
+        if !seen.insert(message) {
+            // Discard duplicate messages. This might no longer be necessary
+            // after https://github.com/rust-lang/rust/issues/106571 is fixed.
+            // Normally rustc would filter duplicates itself and I think this is
+            // a short-lived bug.
+            continue;
+        }
+        if let Ok(de) = serde_json::from_str::<CargoMessage>(message) {
+            if de.message.level != "failure-note" {
+                let (name, test) = match path_map.get(&de.target.src_path) {
+                    Some(test) => test,
+                    None => continue,
+                };
+                let mut entry = map
+                    .entry(de.target.src_path)
+                    .or_insert_with(Stderr::default);
+                if de.message.level == "error" {
+                    entry.success = false;
+                }
+                let normalized = normalize::diagnostics(
+                    &de.message.rendered,
+                    Context {
+                        krate: &name.0,
+                        source_dir: &project.source_dir,
+                        workspace: &project.workspace,
+                        input_file: &test.path,
+                        target_dir: &project.target_dir,
+                        path_dependencies: &project.path_dependencies,
+                    },
+                );
+                entry.stderr.concat(&normalized);
+            }
+        }
+    }
+    nonmessage_stdout.push_str(remaining);
+    ParsedOutputs {
+        stdout: nonmessage_stdout,
+        stderrs: map,
+    }
+}
diff --git a/src/rustflags.rs b/src/rustflags.rs
new file mode 100644 (file)
index 0000000..5279859
--- /dev/null
@@ -0,0 +1,27 @@
+use std::env;
+use std::ffi::OsString;
+
+const RUSTFLAGS: &str = "RUSTFLAGS";
+const IGNORED_LINTS: &[&str] = &["dead_code"];
+
+pub fn make_vec() -> Vec<&'static str> {
+    let mut rustflags = vec!["--cfg", "trybuild"];
+
+    for &lint in IGNORED_LINTS {
+        rustflags.push("-A");
+        rustflags.push(lint);
+    }
+
+    rustflags
+}
+
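+// Append trybuild's flags to an existing RUSTFLAGS environment variable.
+// When RUSTFLAGS is unset this yields nothing, and the same flags take
+// effect through the generated .cargo/config.toml instead.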
+pub fn envs() -> impl IntoIterator<Item = (&'static str, OsString)> {
+    let mut rustflags = env::var_os(RUSTFLAGS)?;
+
+    for flag in make_vec() {
+        rustflags.push(" ");
+        rustflags.push(flag);
+    }
+
+    Some((RUSTFLAGS, rustflags))
+}
diff --git a/src/term.rs b/src/term.rs
new file mode 100644 (file)
index 0000000..1d4346f
--- /dev/null
@@ -0,0 +1,106 @@
+use once_cell::sync::OnceCell;
+use std::io::{Result, Write};
+use std::sync::{Mutex, MutexGuard, PoisonError};
+use termcolor::{Color, ColorChoice, ColorSpec, StandardStream as Stream, WriteColor};
+
+static TERM: OnceCell<Mutex<Term>> = OnceCell::new();
+
+pub fn lock() -> MutexGuard<'static, Term> {
+    TERM.get_or_init(|| Mutex::new(Term::new()))
+        .lock()
+        .unwrap_or_else(PoisonError::into_inner)
+}
+
+pub fn bold() {
+    lock().set_color(ColorSpec::new().set_bold(true));
+}
+
+pub fn color(color: Color) {
+    lock().set_color(ColorSpec::new().set_fg(Some(color)));
+}
+
+pub fn bold_color(color: Color) {
+    lock().set_color(ColorSpec::new().set_bold(true).set_fg(Some(color)));
+}
+
+pub fn reset() {
+    lock().reset();
+}
+
+#[deny(unused_macros)]
+macro_rules! print {
+    ($($args:tt)*) => {{
+        use std::io::Write;
+        let _ = std::write!($crate::term::lock(), $($args)*);
+    }};
+}
+
+#[deny(unused_macros)]
+macro_rules! println {
+    ($($args:tt)*) => {{
+        use std::io::Write;
+        let _ = std::writeln!($crate::term::lock(), $($args)*);
+    }};
+}
+
+pub struct Term {
+    spec: ColorSpec,
+    stream: Stream,
+    start_of_line: bool,
+}
+
+impl Term {
+    fn new() -> Self {
+        Term {
+            spec: ColorSpec::new(),
+            stream: Stream::stderr(ColorChoice::Auto),
+            start_of_line: true,
+        }
+    }
+
+    fn set_color(&mut self, spec: &ColorSpec) {
+        if self.spec != *spec {
+            self.spec = spec.clone();
+            self.start_of_line = true;
+        }
+    }
+
+    fn reset(&mut self) {
+        self.spec = ColorSpec::new();
+        let _ = self.stream.reset();
+    }
+}
+
+impl Write for Term {
+    // Color one line at a time because Travis does not preserve color setting
+    // across output lines.
+    fn write(&mut self, mut buf: &[u8]) -> Result<usize> {
+        if self.spec.is_none() {
+            return self.stream.write(buf);
+        }
+
+        let len = buf.len();
+        while !buf.is_empty() {
+            if self.start_of_line {
+                let _ = self.stream.set_color(&self.spec);
+            }
+            match buf.iter().position(|byte| *byte == b'\n') {
+                Some(line_len) => {
+                    self.stream.write_all(&buf[..line_len + 1])?;
+                    self.start_of_line = true;
+                    buf = &buf[line_len + 1..];
+                }
+                None => {
+                    self.stream.write_all(buf)?;
+                    self.start_of_line = false;
+                    break;
+                }
+            }
+        }
+        Ok(len)
+    }
+
+    fn flush(&mut self) -> Result<()> {
+        self.stream.flush()
+    }
+}
diff --git a/src/tests.rs b/src/tests.rs
new file mode 100644 (file)
index 0000000..812c8a6
--- /dev/null
@@ -0,0 +1,36 @@
+macro_rules! test_normalize {
+    (
+        $(DIR=$dir:literal)?
+        $(WORKSPACE=$workspace:literal)?
+        $(INPUT=$input:literal)?
+        $(TARGET=$target:literal)?
+        $original:literal
+        $expected:literal
+    ) => {
+        #[test]
+        fn test() {
+            let context = crate::normalize::Context {
+                krate: "trybuild000",
+                input_file: std::path::Path::new({ "tests/ui/error.rs" $(; $input)? }),
+                source_dir: &crate::directory::Directory::new({ "/git/trybuild/test_suite" $(; $dir)? }),
+                workspace: &crate::directory::Directory::new({ "/git/trybuild" $(; $workspace)? }),
+                target_dir: &crate::directory::Directory::new({ "/git/trybuild/target" $(; $target)? }),
+                path_dependencies: &[crate::run::PathDependency {
+                    name: String::from("diesel"),
+                    normalized_path: crate::directory::Directory::new("/home/user/documents/rust/diesel/diesel"),
+                }],
+            };
+            let original = $original;
+            let variations = crate::normalize::diagnostics(original, context);
+            let preferred = variations.preferred();
+            let expected = $expected;
+            if preferred != expected {
+                panic!("\nACTUAL: \"{}\"\nEXPECTED: \"{}\"", preferred, expected);
+            }
+        }
+    };
+}
+
+mod tests {
+    automod::dir!("src/tests");
+}
diff --git a/src/tests/and-n-others.rs b/src/tests/and-n-others.rs
new file mode 100644 (file)
index 0000000..f1a7cf8
--- /dev/null
@@ -0,0 +1,37 @@
+test_normalize! {"
+error[E0277]: no implementation for `u8 >> &str`
+ --> src/main.rs:2:20
+  |
+2 |     let _x = 42_u8 >> \"bar\";
+  |                    ^^ no implementation for `u8 >> &str`
+  |
+  = help: the trait `Shr<&str>` is not implemented for `u8`
+  = help: the following other types implement trait `Shr<Rhs>`:
+            <&'a i128 as Shr<i128>>
+            <&'a i128 as Shr<i16>>
+            <&'a i128 as Shr<i32>>
+            <&'a i128 as Shr<i64>>
+            <&'a i128 as Shr<i8>>
+            <&'a i128 as Shr<isize>>
+            <&'a i128 as Shr<u128>>
+            <&'a i128 as Shr<u16>>
+          and 568 others
+" "
+error[E0277]: no implementation for `u8 >> &str`
+ --> src/main.rs:2:20
+  |
+2 |     let _x = 42_u8 >> \"bar\";
+  |                    ^^ no implementation for `u8 >> &str`
+  |
+  = help: the trait `Shr<&str>` is not implemented for `u8`
+  = help: the following other types implement trait `Shr<Rhs>`:
+            <&'a i128 as Shr<i128>>
+            <&'a i128 as Shr<i16>>
+            <&'a i128 as Shr<i32>>
+            <&'a i128 as Shr<i64>>
+            <&'a i128 as Shr<i8>>
+            <&'a i128 as Shr<isize>>
+            <&'a i128 as Shr<u128>>
+            <&'a i128 as Shr<u16>>
+          and $N others
+"}
diff --git a/src/tests/basic.rs b/src/tests/basic.rs
new file mode 100644 (file)
index 0000000..12f3e93
--- /dev/null
@@ -0,0 +1,20 @@
+test_normalize! {"
+error: `self` parameter is only allowed in associated functions
+  --> /git/trybuild/test_suite/tests/ui/error.rs:11:23
+   |
+11 | async fn bad_endpoint(self) -> Result<HttpResponseOkObject<()>, HttpError> {
+   |                       ^^^^ not semantically valid as function parameter
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0401`.
+error: could not compile `trybuild-tests`.
+
+To learn more, run the command again with --verbose.
+" "
+error: `self` parameter is only allowed in associated functions
+  --> tests/ui/error.rs:11:23
+   |
+11 | async fn bad_endpoint(self) -> Result<HttpResponseOkObject<()>, HttpError> {
+   |                       ^^^^ not semantically valid as function parameter
+"}
diff --git a/src/tests/cargo-registry-sparse.rs b/src/tests/cargo-registry-sparse.rs
new file mode 100644 (file)
index 0000000..037d633
--- /dev/null
@@ -0,0 +1,35 @@
+test_normalize! {"
+error[E0308]: mismatched types
+  --> tests/compile-fail/surface_source_interval_badarg.rs:7:25
+   |
+5  |       let mut df = hydroflow_syntax! {
+   |  __________________-
+6  | |         // Should be a `Duration`.
+7  | |         source_interval(5) -> for_each(std::mem::drop);
+   | |                         ^ expected `Duration`, found integer
+8  | |     };
+   | |_____- arguments to this function are incorrect
+   |
+note: function defined here
+  --> /home/runner/.cargo/registry/src/index.crates.io-6f17d22bba15001f/tokio-1.26.0/src/time/interval.rs:74:8
+   |
+74 | pub fn interval(period: Duration) -> Interval {
+   |        ^^^^^^^^
+" "
+error[E0308]: mismatched types
+ --> tests/compile-fail/surface_source_interval_badarg.rs:7:25
+  |
+5 |       let mut df = hydroflow_syntax! {
+  |  __________________-
+6 | |         // Should be a `Duration`.
+7 | |         source_interval(5) -> for_each(std::mem::drop);
+  | |                         ^ expected `Duration`, found integer
+8 | |     };
+  | |_____- arguments to this function are incorrect
+  |
+note: function defined here
+ --> $CARGO/tokio-1.26.0/src/time/interval.rs
+  |
+  | pub fn interval(period: Duration) -> Interval {
+  |        ^^^^^^^^
+"}
diff --git a/src/tests/cargo-registry.rs b/src/tests/cargo-registry.rs
new file mode 100644 (file)
index 0000000..f975182
--- /dev/null
@@ -0,0 +1,26 @@
+test_normalize! {"
+error[E0277]: the trait bound `Thread: serde::de::Deserialize<'_>` is not satisfied
+    --> src/main.rs:2:36
+     |
+2    |     let _ = serde_json::from_str::<std::thread::Thread>(\"???\");
+     |                                    ^^^^^^^^^^^^^^^^^^^ the trait `serde::de::Deserialize<'_>` is not implemented for `Thread`
+     |
+    ::: /home/ferris/.cargo/registry/src/github.com-1ecc6299db9ec823/serde_json-1.0.64/src/de.rs:2584:8
+     |
+2584 |     T: de::Deserialize<'a>,
+     |        ------------------- required by this bound in `serde_json::from_str`
+
+For more information about this error, try `rustc --explain E0277`.
+error: could not compile `testing` due to previous error
+" "
+error[E0277]: the trait bound `Thread: serde::de::Deserialize<'_>` is not satisfied
+ --> src/main.rs:2:36
+  |
+2 |     let _ = serde_json::from_str::<std::thread::Thread>(\"???\");
+  |                                    ^^^^^^^^^^^^^^^^^^^ the trait `serde::de::Deserialize<'_>` is not implemented for `Thread`
+  |
+ ::: $CARGO/serde_json-1.0.64/src/de.rs
+  |
+  |     T: de::Deserialize<'a>,
+  |        ------------------- required by this bound in `serde_json::from_str`
+"}
diff --git a/src/tests/dir-backslash.rs b/src/tests/dir-backslash.rs
new file mode 100644 (file)
index 0000000..b9ae62d
--- /dev/null
@@ -0,0 +1,7 @@
+test_normalize! {"
+error[E0277]: the trait bound `QueryParams: serde::de::Deserialize<'de>` is not satisfied
+   --> \\git\\trybuild\\test_suite\\tests\\ui\\error.rs:22:61
+" "
+error[E0277]: the trait bound `QueryParams: serde::de::Deserialize<'de>` is not satisfied
+ --> tests/ui/error.rs:22:61
+"}
diff --git a/src/tests/dropshot-required-by.rs b/src/tests/dropshot-required-by.rs
new file mode 100644 (file)
index 0000000..0ff0cac
--- /dev/null
@@ -0,0 +1,29 @@
+test_normalize! {
+    DIR="/git/dropshot/dropshot"
+    WORKSPACE="/git/dropshot"
+    INPUT="tests/fail/bad_endpoint4.rs"
+"
+error[E0277]: the trait bound `QueryParams: schemars::JsonSchema` is not satisfied
+   --> /git/dropshot/dropshot/tests/fail/bad_endpoint4.rs:24:14
+    |
+24  |     _params: Query<QueryParams>,
+    |              ^^^^^^^^^^^^^^^^^^ the trait `schemars::JsonSchema` is not implemented for `QueryParams`
+    |
+note: required by a bound in `dropshot::Query`
+   --> /git/dropshot/dropshot/src/handler.rs:547:48
+    |
+547 | pub struct Query<QueryType: DeserializeOwned + JsonSchema + Send + Sync> {
+    |                                                ^^^^^^^^^^ required by this bound in `dropshot::Query`
+" "
+error[E0277]: the trait bound `QueryParams: schemars::JsonSchema` is not satisfied
+  --> tests/fail/bad_endpoint4.rs:24:14
+   |
+24 |     _params: Query<QueryParams>,
+   |              ^^^^^^^^^^^^^^^^^^ the trait `schemars::JsonSchema` is not implemented for `QueryParams`
+   |
+note: required by a bound in `dropshot::Query`
+  --> src/handler.rs
+   |
+   | pub struct Query<QueryType: DeserializeOwned + JsonSchema + Send + Sync> {
+   |                                                ^^^^^^^^^^ required by this bound in `dropshot::Query`
+"}
diff --git a/src/tests/long-file-names.rs b/src/tests/long-file-names.rs
new file mode 100644 (file)
index 0000000..175e8f2
--- /dev/null
@@ -0,0 +1,27 @@
+test_normalize! {"
+error: reached the recursion limit while instantiating `test::<Cons<Cons<Cons<Cons<Cons<...>>>>>>`
+  --> src/main.rs:18:11
+   |
+18 |     _ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})}
+   |           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+note: `test` defined here
+  --> src/main.rs:16:1
+   |
+16 | fn test<T:Dot> (n:isize, i:isize, first:T, second:T) ->isize {
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   = note: the full type name has been written to `/playground/target/debug/deps/playground-c53df771d95c66fb.c7a39e8d0dd9c781.long-type-16688711729771999621.txt`
+           the full type name has been written to `/playground/target/debug/deps/playground-c53df771d95c66fb.c7a39e8d0dd9c781.long-type-16688711729771999621.txt`
+" "
+error: reached the recursion limit while instantiating `test::<Cons<Cons<Cons<Cons<Cons<...>>>>>>`
+  --> src/main.rs:18:11
+   |
+18 |     _ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})}
+   |           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+note: `test` defined here
+  --> src/main.rs:16:1
+   |
+16 | fn test<T:Dot> (n:isize, i:isize, first:T, second:T) ->isize {
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+"}
diff --git a/src/tests/proc-macro-panic.rs b/src/tests/proc-macro-panic.rs
new file mode 100644 (file)
index 0000000..a4340f1
--- /dev/null
@@ -0,0 +1,16 @@
+test_normalize! {
+    DIR="D:\\repro"
+    INPUT="tests\\ui\\nonzero_fail.rs"
+"
+error[E0080]: evaluation of constant value failed
+ --> D:\\repro\\tests\\ui\\nonzero_fail.rs:7:10
+  |
+7 | #[derive(NonZeroRepr)]
+  |          ^^^^^^^^^^^ the evaluated program panicked at 'expected non-zero discriminant expression', D:\\repro\\tests\\ui\\nonzero_fail.rs:7:10
+" "
+error[E0080]: evaluation of constant value failed
+ --> tests/ui/nonzero_fail.rs:7:10
+  |
+7 | #[derive(NonZeroRepr)]
+  |          ^^^^^^^^^^^ the evaluated program panicked at 'expected non-zero discriminant expression', $DIR/tests/ui/nonzero_fail.rs:7:10
+"}
diff --git a/src/tests/py03-url.rs b/src/tests/py03-url.rs
new file mode 100644 (file)
index 0000000..866c575
--- /dev/null
@@ -0,0 +1,20 @@
+test_normalize! {
+    DIR="/pyo3"
+    WORKSPACE="/pyo3"
+"
+error: `async fn` is not yet supported for Python functions.
+
+Additional crates such as `pyo3-asyncio` can be used to integrate async Rust and Python. For more information, see https://github.com/PyO3/pyo3/issues/1632
+  --> tests/ui/invalid_pyfunctions.rs:10:1
+   |
+10 | async fn async_function() {}
+   | ^^^^^
+" "
+error: `async fn` is not yet supported for Python functions.
+
+Additional crates such as `pyo3-asyncio` can be used to integrate async Rust and Python. For more information, see https://github.com/PyO3/pyo3/issues/1632
+  --> tests/ui/invalid_pyfunctions.rs:10:1
+   |
+10 | async fn async_function() {}
+   | ^^^^^
+"}
diff --git a/src/tests/rust-lib-with-githash.rs b/src/tests/rust-lib-with-githash.rs
new file mode 100644 (file)
index 0000000..221ebaa
--- /dev/null
@@ -0,0 +1,39 @@
+test_normalize! {"
+error[E0599]: the method `to_cxx_exception` exists for reference `&NonError`, but its trait bounds were not satisfied
+ --> tests/ui/result_no_display.rs:4:19
+  |
+4 |         fn f() -> Result<()>;
+  |                   ^^^^^^^^^^ method cannot be called on `&NonError` due to unsatisfied trait bounds
+...
+8 | pub struct NonError;
+  | ------------------- doesn't satisfy `NonError: std::fmt::Display`
+  |
+  = note: the following trait bounds were not satisfied:
+          `NonError: std::fmt::Display`
+          which is required by `&NonError: ToCxxExceptionDefault`
+note: the trait `std::fmt::Display` must be implemented
+ --> /rustc/c5c7d2b37780dac1092e75f12ab97dd56c30861d/library/core/src/fmt/mod.rs:786:1
+  |
+  | pub trait Display {
+  | ^^^^^^^^^^^^^^^^^
+  = note: this error originates in the macro `::cxx::map_rust_error_to_cxx_exception` (in Nightly builds, run with -Z macro-backtrace for more info)
+" "
+error[E0599]: the method `to_cxx_exception` exists for reference `&NonError`, but its trait bounds were not satisfied
+ --> tests/ui/result_no_display.rs:4:19
+  |
+4 |         fn f() -> Result<()>;
+  |                   ^^^^^^^^^^ method cannot be called on `&NonError` due to unsatisfied trait bounds
+...
+8 | pub struct NonError;
+  | ------------------- doesn't satisfy `NonError: std::fmt::Display`
+  |
+  = note: the following trait bounds were not satisfied:
+          `NonError: std::fmt::Display`
+          which is required by `&NonError: ToCxxExceptionDefault`
+note: the trait `std::fmt::Display` must be implemented
+ --> $RUST/core/src/fmt/mod.rs
+  |
+  | pub trait Display {
+  | ^^^^^^^^^^^^^^^^^
+  = note: this error originates in the macro `::cxx::map_rust_error_to_cxx_exception` (in Nightly builds, run with -Z macro-backtrace for more info)
+"}
diff --git a/src/tests/rust-lib.rs b/src/tests/rust-lib.rs
new file mode 100644 (file)
index 0000000..b71a05d
--- /dev/null
@@ -0,0 +1,31 @@
+test_normalize! {
+    INPUT="tests/ui/not-repeatable.rs"
+"
+error[E0599]: no method named `quote_into_iter` found for struct `std::net::Ipv4Addr` in the current scope
+  --> /git/trybuild/test_suite/tests/ui/not-repeatable.rs:6:13
+   |
+6  |     let _ = quote! { #(#ip)* };
+   |             ^^^^^^^^^^^^^^^^^^ method not found in `std::net::Ipv4Addr`
+   |
+  ::: /rustlib/src/rust/src/libstd/net/ip.rs:83:1
+  ::: /rustlib/src/rust/library/std/src/net/ip.rs:83:1
+   |
+83 | pub struct Ipv4Addr {
+   | -------------------
+   | |
+   | doesn't satisfy `std::net::Ipv4Addr: quote::to_tokens::ToTokens`
+" "
+error[E0599]: no method named `quote_into_iter` found for struct `std::net::Ipv4Addr` in the current scope
+ --> tests/ui/not-repeatable.rs:6:13
+  |
+6 |     let _ = quote! { #(#ip)* };
+  |             ^^^^^^^^^^^^^^^^^^ method not found in `std::net::Ipv4Addr`
+  |
+ ::: $RUST/src/libstd/net/ip.rs
+ ::: $RUST/std/src/net/ip.rs
+  |
+  | pub struct Ipv4Addr {
+  | -------------------
+  | |
+  | doesn't satisfy `std::net::Ipv4Addr: quote::to_tokens::ToTokens`
+"}
diff --git a/src/tests/strip-path-dependencies.rs b/src/tests/strip-path-dependencies.rs
new file mode 100644 (file)
index 0000000..1c55c6b
--- /dev/null
@@ -0,0 +1,29 @@
+test_normalize! {"
+error[E0277]: the trait bound `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::WhereClause<diesel::expression::grouped::Grouped<diesel::expression::operators::Eq<posts::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>>>>: diesel::query_builder::IntoUpdateTarget` is not satisfied
+  --> $DIR/update_requires_valid_where_clause.rs:21:12
+   |
+21 |     update(users::table.filter(posts::id.eq(1)));
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `diesel::query_builder::IntoUpdateTarget` is not implemented for `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::WhereClause<diesel::expression::grouped::Grouped<diesel::expression::operators::Eq<posts::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>>>>`
+   |
+  ::: /home/user/documents/rust/diesel/diesel/src/query_builder/functions.rs:78:18
+   |
+78 | pub fn update<T: IntoUpdateTarget>(source: T) -> UpdateStatement<T::Table, T::WhereClause> {
+   |                  ---------------- required by this bound in `diesel::update`
+   |
+   = help: the following implementations were found:
+             <diesel::query_builder::SelectStatement<F, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, W> as diesel::query_builder::IntoUpdateTarget>
+" "
+error[E0277]: the trait bound `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::WhereClause<diesel::expression::grouped::Grouped<diesel::expression::operators::Eq<posts::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>>>>: diesel::query_builder::IntoUpdateTarget` is not satisfied
+  --> $DIR/update_requires_valid_where_clause.rs:21:12
+   |
+21 |     update(users::table.filter(posts::id.eq(1)));
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `diesel::query_builder::IntoUpdateTarget` is not implemented for `diesel::query_builder::SelectStatement<users::table, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, diesel::query_builder::where_clause::WhereClause<diesel::expression::grouped::Grouped<diesel::expression::operators::Eq<posts::columns::id, diesel::expression::bound::Bound<diesel::sql_types::Integer, i32>>>>>`
+   |
+  ::: $DIESEL/src/query_builder/functions.rs
+   |
+   | pub fn update<T: IntoUpdateTarget>(source: T) -> UpdateStatement<T::Table, T::WhereClause> {
+   |                  ---------------- required by this bound in `diesel::update`
+   |
+   = help: the following implementations were found:
+             <diesel::query_builder::SelectStatement<F, diesel::query_builder::select_clause::DefaultSelectClause, diesel::query_builder::distinct_clause::NoDistinctClause, W> as diesel::query_builder::IntoUpdateTarget>
+"}
diff --git a/src/tests/traits-must-be-implemented.rs b/src/tests/traits-must-be-implemented.rs
new file mode 100644 (file)
index 0000000..29d7d8d
--- /dev/null
@@ -0,0 +1,85 @@
+test_normalize! {"
+error[E0599]: the method `anyhow_kind` exists for reference `&Error`, but its trait bounds were not satisfied
+   --> src/main.rs:7:13
+    |
+4   | struct Error;
+    | -------------
+    | |
+    | doesn't satisfy `Error: Into<anyhow::Error>`
+    | doesn't satisfy `Error: anyhow::private::kind::TraitKind`
+    | doesn't satisfy `Error: std::fmt::Display`
+...
+7   |     let _ = anyhow!(Error);
+    |             ^^^^^^^^^^^^^^ method cannot be called on `&Error` due to unsatisfied trait bounds
+    |
+    = note: the following trait bounds were not satisfied:
+            `Error: Into<anyhow::Error>`
+            which is required by `Error: anyhow::private::kind::TraitKind`
+            `Error: std::fmt::Display`
+            which is required by `&Error: anyhow::private::kind::AdhocKind`
+            `&Error: Into<anyhow::Error>`
+            which is required by `&Error: anyhow::private::kind::TraitKind`
+note: the following traits must be implemented
+   --> /rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/convert/mod.rs:274:1
+    |
+274 | / pub trait Into<T>: Sized {
+275 | |     /// Performs the conversion.
+276 | |     #[stable(feature = \"rust1\", since = \"1.0.0\")]
+277 | |     fn into(self) -> T;
+278 | | }
+    | |_^
+    |
+   ::: /rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/fmt/mod.rs:715:1
+    |
+715 | / pub trait Display {
+716 | |     /// Formats the value using the given formatter.
+717 | |     ///
+718 | |     /// # Examples
+...   |
+738 | |     fn fmt(&self, f: &mut Formatter<'_>) -> Result;
+739 | | }
+    | |_^
+    = note: this error originates in the macro `anyhow` (in Nightly builds, run with -Z macro-backtrace for more info)
+" "
+error[E0599]: the method `anyhow_kind` exists for reference `&Error`, but its trait bounds were not satisfied
+ --> src/main.rs:7:13
+  |
+4 | struct Error;
+  | -------------
+  | |
+  | doesn't satisfy `Error: Into<anyhow::Error>`
+  | doesn't satisfy `Error: anyhow::private::kind::TraitKind`
+  | doesn't satisfy `Error: std::fmt::Display`
+...
+7 |     let _ = anyhow!(Error);
+  |             ^^^^^^^^^^^^^^ method cannot be called on `&Error` due to unsatisfied trait bounds
+  |
+  = note: the following trait bounds were not satisfied:
+          `Error: Into<anyhow::Error>`
+          which is required by `Error: anyhow::private::kind::TraitKind`
+          `Error: std::fmt::Display`
+          which is required by `&Error: anyhow::private::kind::AdhocKind`
+          `&Error: Into<anyhow::Error>`
+          which is required by `&Error: anyhow::private::kind::TraitKind`
+note: the following traits must be implemented
+ --> $RUST/core/src/convert/mod.rs
+  |
+  | / pub trait Into<T>: Sized {
+  | |     /// Performs the conversion.
+  | |     #[stable(feature = \"rust1\", since = \"1.0.0\")]
+  | |     fn into(self) -> T;
+  | | }
+  | |_^
+  |
+ ::: $RUST/core/src/fmt/mod.rs
+  |
+  | / pub trait Display {
+  | |     /// Formats the value using the given formatter.
+  | |     ///
+  | |     /// # Examples
+... |
+  | |     fn fmt(&self, f: &mut Formatter<'_>) -> Result;
+  | | }
+  | |_^
+  = note: this error originates in the macro `anyhow` (in Nightly builds, run with -Z macro-backtrace for more info)
+"}
diff --git a/src/tests/type-dir-backslash.rs b/src/tests/type-dir-backslash.rs
new file mode 100644 (file)
index 0000000..fe07582
--- /dev/null
@@ -0,0 +1,23 @@
+test_normalize! {
+    INPUT="tests/ui/compile-fail-3.rs"
+"
+error[E0277]: `*mut _` cannot be shared between threads safely
+   --> /git/trybuild/test_suite/tests/ui/compile-fail-3.rs:7:5
+    |
+7   |     thread::spawn(|| {
+    |     ^^^^^^^^^^^^^ `*mut _` cannot be shared between threads safely
+    |
+    = help: the trait `std::marker::Sync` is not implemented for `*mut _`
+    = note: required because of the requirements on the impl of `std::marker::Send` for `&*mut _`
+    = note: required because it appears within the type `[closure@/git/trybuild/test_suite/ui/compile-fail-3.rs:7:19: 9:6 x:&*mut _]`
+" "
+error[E0277]: `*mut _` cannot be shared between threads safely
+ --> tests/ui/compile-fail-3.rs:7:5
+  |
+7 |     thread::spawn(|| {
+  |     ^^^^^^^^^^^^^ `*mut _` cannot be shared between threads safely
+  |
+  = help: the trait `std::marker::Sync` is not implemented for `*mut _`
+  = note: required because of the requirements on the impl of `std::marker::Send` for `&*mut _`
+  = note: required because it appears within the type `[closure@$DIR/ui/compile-fail-3.rs:7:19: 9:6 x:&*mut _]`
+"}
diff --git a/src/tests/uniffi-out-dir.rs b/src/tests/uniffi-out-dir.rs
new file mode 100644 (file)
index 0000000..7143195
--- /dev/null
@@ -0,0 +1,25 @@
+test_normalize! {
+    DIR="/git/uniffi-rs/fixtures/uitests"
+    WORKSPACE="/git/uniffi-rs"
+    TARGET="/git/uniffi-rs/target"
+"
+error[E0277]: the trait bound `Arc<Counter>: FfiConverter` is not satisfied
+   --> /git/uniffi-rs/target/debug/build/uniffi_uitests-1a51d46aecb559a7/out/counter.uniffi.rs:160:19
+    |
+160 |             match <std::sync::Arc<Counter> as uniffi::FfiConverter>::try_lift(ptr) {
+    |                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FfiConverter` is not implemented for `Arc<Counter>`
+    |
+    = help: the following implementations were found:
+              <Arc<T> as FfiConverter>
+    = note: required by `try_lift`
+" "
+error[E0277]: the trait bound `Arc<Counter>: FfiConverter` is not satisfied
+ --> $OUT_DIR[uniffi_uitests]/counter.uniffi.rs
+  |
+  |             match <std::sync::Arc<Counter> as uniffi::FfiConverter>::try_lift(ptr) {
+  |                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `FfiConverter` is not implemented for `Arc<Counter>`
+  |
+  = help: the following implementations were found:
+            <Arc<T> as FfiConverter>
+  = note: required by `try_lift`
+"}
diff --git a/tests/test.rs b/tests/test.rs
new file mode 100644 (file)
index 0000000..a3bdf91
--- /dev/null
@@ -0,0 +1,22 @@
+#[test]
+fn test() {
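+    // Each call registers one UI test case: `pass` expects the file to
+    // compile and run successfully, while `compile_fail` expects compilation
+    // to fail and compares the diagnostics against the neighboring .stderr
+    // file.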
+    let t = trybuild::TestCases::new();
+    t.pass("tests/ui/run-pass-0.rs");
+    t.pass("tests/ui/print-stdout.rs");
+    t.pass("tests/ui/run-pass-1.rs");
+    t.pass("tests/ui/print-stderr.rs");
+    t.pass("tests/ui/run-pass-2.rs");
+    t.pass("tests/ui/print-both.rs");
+    t.pass("tests/ui/run-pass-4.rs");
+    t.compile_fail("tests/ui/run-pass-3.rs");
+    t.pass("tests/ui/run-pass-5.rs");
+    t.pass("tests/ui/compile-fail-0.rs");
+    t.pass("tests/ui/run-pass-6.rs");
+    t.pass("tests/ui/run-pass-7.rs");
+    t.pass("tests/ui/run-pass-8.rs");
+    t.compile_fail("tests/ui/compile-fail-1.rs");
+    t.pass("tests/ui/run-fail.rs");
+    t.pass("tests/ui/run-pass-9.rs");
+    t.compile_fail("tests/ui/compile-fail-2.rs");
+    t.compile_fail("tests/ui/compile-fail-3.rs");
+}
diff --git a/tests/ui/compile-fail-0.rs b/tests/ui/compile-fail-0.rs
new file mode 100644 (file)
index 0000000..0eea6cc
--- /dev/null
@@ -0,0 +1,3 @@
+compile_error!("ERROR");
+
+fn main() {}
diff --git a/tests/ui/compile-fail-1.rs b/tests/ui/compile-fail-1.rs
new file mode 100644 (file)
index 0000000..0eea6cc
--- /dev/null
@@ -0,0 +1,3 @@
+compile_error!("ERROR");
+
+fn main() {}
diff --git a/tests/ui/compile-fail-2.rs b/tests/ui/compile-fail-2.rs
new file mode 100644 (file)
index 0000000..0eea6cc
--- /dev/null
@@ -0,0 +1,3 @@
+compile_error!("ERROR");
+
+fn main() {}
diff --git a/tests/ui/compile-fail-2.stderr b/tests/ui/compile-fail-2.stderr
new file mode 100644 (file)
index 0000000..a8c7fec
--- /dev/null
@@ -0,0 +1,5 @@
+error: ERROR
+ --> tests/ui/compile-fail-2.rs:1:1
+  |
+1 | compile_error!("ERROR");
+  | ^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/tests/ui/compile-fail-3.rs b/tests/ui/compile-fail-3.rs
new file mode 100644 (file)
index 0000000..b315213
--- /dev/null
@@ -0,0 +1,10 @@
+use std::ptr;
+use std::thread;
+
+fn main() {
+    let x = ptr::null_mut();
+
+    thread::spawn(|| {
+        println!("{:?}", x)
+    });
+}
diff --git a/tests/ui/compile-fail-3.stderr b/tests/ui/compile-fail-3.stderr
new file mode 100644 (file)
index 0000000..7fc2291
--- /dev/null
@@ -0,0 +1,9 @@
+error[E0277]: `*mut _` cannot be shared between threads safely
+   --> tests/ui/compile-fail-3.rs:7:5
+    |
+7   |     thread::spawn(|| {
+    |     ^^^^^^^^^^^^^ `*mut _` cannot be shared between threads safely
+    |
+    = help: the trait `Sync` is not implemented for `*mut _`
+    = note: required because of the requirements on the impl of `Send` for `&*mut _`
+    = note: required because it appears within the type `[closure@$DIR/tests/ui/compile-fail-3.rs:7:19: 9:6]`
diff --git a/tests/ui/print-both.rs b/tests/ui/print-both.rs
new file mode 100644 (file)
index 0000000..53adc42
--- /dev/null
@@ -0,0 +1,4 @@
+fn main() {
+    println!("{:?}", "STDOUT".chars());
+    eprintln!("{:?}", "STDERR".chars());
+}
diff --git a/tests/ui/print-stderr.rs b/tests/ui/print-stderr.rs
new file mode 100644 (file)
index 0000000..3c2a498
--- /dev/null
@@ -0,0 +1,3 @@
+fn main() {
+    eprintln!("{:?}", "STDERR".chars());
+}
diff --git a/tests/ui/print-stdout.rs b/tests/ui/print-stdout.rs
new file mode 100644 (file)
index 0000000..24ba506
--- /dev/null
@@ -0,0 +1,3 @@
+fn main() {
+    println!("{:?}", "STDOUT".chars());
+}
diff --git a/tests/ui/run-fail.rs b/tests/ui/run-fail.rs
new file mode 100644 (file)
index 0000000..8a709d5
--- /dev/null
@@ -0,0 +1,3 @@
+fn main() {
+    assert!(false);
+}
diff --git a/tests/ui/run-pass-0.rs b/tests/ui/run-pass-0.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-1.rs b/tests/ui/run-pass-1.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-2.rs b/tests/ui/run-pass-2.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-3.rs b/tests/ui/run-pass-3.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-4.rs b/tests/ui/run-pass-4.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-5.rs b/tests/ui/run-pass-5.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-6.rs b/tests/ui/run-pass-6.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-7.rs b/tests/ui/run-pass-7.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-8.rs b/tests/ui/run-pass-8.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}
diff --git a/tests/ui/run-pass-9.rs b/tests/ui/run-pass-9.rs
new file mode 100644 (file)
index 0000000..f328e4d
--- /dev/null
@@ -0,0 +1 @@
+fn main() {}